diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..0175a7c --- /dev/null +++ b/.gitignore @@ -0,0 +1,48 @@ +# for Visual Studio +.vs +*.opensdf +*.sdf +*.user +*.ncb +*.vc.db +*.vc.opendb +*.aps +**/ipch + +# for Python +__pycache__ +*.pyc + + +# for PyCharm +**/.idea/workspace.xml +**/.idea/misc.xml +**/.idea/modules.xml +**/.idea/dictionaries +**/.idea/watcherTasks.xml +**/.idea/codeStyleSettings.xml +**/.idea/inspectionProfiles + +# for temporary folders and files +/out +/external/_download_ +/external/jsoncpp +/external/mongoose +/external/openssl +/external/python + +# for dist folder +/dist/*.zip +/dist/*.tar.gz +/dist/installer/linux/server/_tmp_ +**/_tmp_ + +/server/share/data/ts_db.db +/server/share/data/replay +/server/www/teleport/.idea/vcs.xml +/server/www/teleport/static/js/var.js +/server/www/packages/packages-windows/x64 + +# for unfinished code +/common/libex/test +/server/tp_core diff --git a/client/tp_assist/stdafx.h b/client/tp_assist/stdafx.h index f9f0f96..5d3df69 100644 --- a/client/tp_assist/stdafx.h +++ b/client/tp_assist/stdafx.h @@ -5,7 +5,7 @@ #include #include "ts_network.h" -#include "ts_log.h" -#include "ts_ini.h" +//#include "ts_log.h" +//#include "ts_ini.h" #include "ts_env.h" #include "ts_cfg.h" diff --git a/client/tp_assist/tp_assist.cpp b/client/tp_assist/tp_assist.cpp index 741a610..94f2b47 100644 --- a/client/tp_assist/tp_assist.cpp +++ b/client/tp_assist/tp_assist.cpp @@ -55,11 +55,16 @@ int APIENTRY wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpCmd WSACleanup(); return 0; } + g_env.init(); -// ex_astr temp; -// ex_wstr2astr(g_env.m_log_path, temp); -// TSLOG_INIT(TS_LOG_LEVEL_DEBUG, "tp_assist.log", temp.c_str()); - TSLOG_INIT(TS_LOG_LEVEL_DEBUG, L"tp_assist.log", g_env.m_log_path.c_str()); + +#ifdef EX_DEBUG + EXLOG_LEVEL(EX_LOG_LEVEL_DEBUG); +#else + EXLOG_LEVEL(EX_LOG_LEVEL_INFO); +#endif + + EXLOG_FILE(L"tp_assist.log", g_env.m_log_path.c_str(), 1024, 2); g_cfgSSH.init(); g_cfgScp.init(); diff --git a/client/tp_assist/tp_assist.vs2015.vcxproj b/client/tp_assist/tp_assist.vs2015.vcxproj index 8b1e2fb..328cb74 100644 --- a/client/tp_assist/tp_assist.vs2015.vcxproj +++ b/client/tp_assist/tp_assist.vs2015.vcxproj @@ -90,9 +90,12 @@ + + + @@ -106,13 +109,18 @@ - - - + + NotUsing + NotUsing + + + NotUsing + NotUsing + NotUsing NotUsing @@ -121,6 +129,10 @@ NotUsing NotUsing + + NotUsing + NotUsing + NotUsing NotUsing @@ -151,12 +163,7 @@ - - - NotUsing - - diff --git a/client/tp_assist/tp_assist.vs2015.vcxproj.filters b/client/tp_assist/tp_assist.vs2015.vcxproj.filters index 21a98ec..44d3832 100644 --- a/client/tp_assist/tp_assist.vs2015.vcxproj.filters +++ b/client/tp_assist/tp_assist.vs2015.vcxproj.filters @@ -13,21 +13,12 @@ main app - - main app - - - main app - main app main app - - main app - main app @@ -55,6 +46,15 @@ libex\src + + libex\src + + + libex\src + + + libex\src + @@ -75,18 +75,9 @@ main app - - main app - - - main app - main app - - main app - main app @@ -126,6 +117,15 @@ main app + + libex\header + + + libex\header + + + libex\header + diff --git a/client/tp_assist/ts_cfg.cpp b/client/tp_assist/ts_cfg.cpp index 16908f0..5673a9f 100644 --- a/client/tp_assist/ts_cfg.cpp +++ b/client/tp_assist/ts_cfg.cpp @@ -1,5 +1,5 @@ #include "stdafx.h" -#include "ts_ini.h" +//#include "ts_ini.h" #include "ts_cfg.h" #include "ts_env.h" @@ -50,11 +50,11 @@ bool TsClientCfgBase::_init(void) { client_set temp; - TsIniSection* cfg = NULL; + ExIniSection* cfg = NULL; cfg = m_ini.GetSection(_T("common")); if (NULL ==
cfg) { - TSLOGE("[ERROR] Invalid configuration, [common] section not found.\n"); + EXLOGE("[ERROR] Invalid configuration, [common] section not found.\n"); return false; } @@ -79,7 +79,7 @@ bool TsClientCfgBase::_init(void) cfg = m_ini.GetSection(sec_name); if (NULL == cfg) { - TSLOGE("[ERROR] Invalid configuration, [common] section not found.\n"); + EXLOGE("[ERROR] Invalid configuration, required section not found.\n"); return false; } @@ -108,7 +108,7 @@ bool TsClientCfgBase::_init(void) } temp.desc = _wstr; - temp.default = 0; + temp.is_default = false; m_clientsetmap[temp.name] = temp; m_client_list.push_back(temp.name); @@ -126,11 +126,11 @@ void TsClientCfgBase::set(ex_wstr sec_name, ex_wstr key, ex_wstr value) return; } - TsIniSection* cfg = NULL; + ExIniSection* cfg = NULL; cfg = m_ini.GetSection(sec_name); if (NULL == cfg) { - TSLOGE("[ERROR] Invalid configuration, [common] section not found.\n"); + EXLOGE("[ERROR] Invalid configuration, required section not found.\n"); return; } cfg->SetValue(key, value); @@ -162,14 +162,14 @@ bool TsCfgSSH::init(void) temp.path += _T("\\putty\\putty.exe"); temp.commandline = _T("-ssh -pw **** -P {host_port} -l {user_name} {host_ip}"); temp.desc = _T("PuTTY is open-source software, maintained mainly by Simon Tatham and released under the MIT licence."); - temp.default = 1; + temp.is_default = true; m_clientsetmap[temp.name] = temp; m_client_list.push_back(temp.name); if (!m_ini.LoadFromFile(g_env.m_ssh_client_conf_file)) { - TSLOGE("can not load ssh config file.\n"); + EXLOGE("can not load ssh config file.\n"); return false; } @@ -196,13 +196,13 @@ bool TsCfgScp::init(void) temp.path += _T("\\winscp\\winscp.exe"); temp.commandline = _T("/sessionname=\"TP#{real_ip}\" {user_name}:****@{host_ip}:{host_port}"); temp.desc = _T("WinSCP is an open-source graphical SFTP client for Windows that uses SSH and also supports the SCP protocol; its main function is to copy files securely between local and remote computers."); - temp.default = 1; + temp.is_default = true; m_clientsetmap[temp.name] = temp; m_client_list.push_back(temp.name); if (!m_ini.LoadFromFile(g_env.m_scp_client_conf_file)) { - TSLOGE("can not load scp config file.\n"); + EXLOGE("can not load scp config file.\n"); return false; } @@ -230,13 +230,13 @@ bool TsCfgTelnet::init(void) temp.commandline = _T("telnet://{user_name}@{host_ip}:{host_port}"); temp.desc = _T("PuTTY is open-source software, maintained mainly by Simon Tatham and released under the MIT licence."); - temp.default = 1; + temp.is_default = true; m_clientsetmap[temp.name] = temp; m_client_list.push_back(temp.name); if (!m_ini.LoadFromFile(g_env.m_telnet_client_conf_file)) { - TSLOGE("can not load telnet config file.\n"); + EXLOGE("can not load telnet config file.\n"); return false; } diff --git a/client/tp_assist/ts_cfg.h b/client/tp_assist/ts_cfg.h index 896d123..70719b0 100644 --- a/client/tp_assist/ts_cfg.h +++ b/client/tp_assist/ts_cfg.h @@ -13,7 +13,7 @@ struct client_set ex_wstr path; ex_wstr commandline; ex_wstr desc; - int default; + bool is_default; }; typedef std::map<ex_wstr, client_set> clientsetmap; @@ -35,7 +35,7 @@ protected: bool _init(void); protected: - TsIniFile m_ini; + ExIniFile m_ini; }; class TsCfgSSH : public TsClientCfgBase diff --git a/client/tp_assist/ts_http_rpc.cpp b/client/tp_assist/ts_http_rpc.cpp index 7113a9b..afc025b 100644 --- a/client/tp_assist/ts_http_rpc.cpp +++ b/client/tp_assist/ts_http_rpc.cpp @@ -96,16 +96,16 @@ void http_rpc_main_loop(void) { if (!g_http_interface.init(TS_HTTP_RPC_HOST, TS_HTTP_RPC_PORT)) { - TSLOGE("[ERROR] can not start HTTP-RPC listener, maybe port %d is already in use.\n", TS_HTTP_RPC_PORT); + EXLOGE("[ERROR] can not start HTTP-RPC listener, maybe port %d is already in use.\n", TS_HTTP_RPC_PORT);
return; } - TSLOGV("======================================================\n"); - TSLOGV("[rpc] TeleportAssist-HTTP-RPC ready on %s:%d\n", TS_HTTP_RPC_HOST, TS_HTTP_RPC_PORT); + EXLOGV("======================================================\n"); + EXLOGV("[rpc] TeleportAssist-HTTP-RPC ready on %s:%d\n", TS_HTTP_RPC_HOST, TS_HTTP_RPC_PORT); g_http_interface.run(); - TSLOGV("[prc] main loop end.\n"); + EXLOGV("[rpc] main loop end.\n"); } #define HEXTOI(x) (isdigit(x) ? x - '0' : x - 'W') @@ -185,7 +185,7 @@ bool TsHttpRpc::init(const char* ip, int port) nc = mg_bind(&m_mg_mgr, addr, _mg_event_handler); if (nc == NULL) { - TSLOGE("[rpc] TsHttpRpc::init %s:%d\n", ip, port); + EXLOGE("[rpc] TsHttpRpc::init %s:%d\n", ip, port); return false; } nc->user_data = this; @@ -225,7 +225,7 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat TsHttpRpc* _this = (TsHttpRpc*)nc->user_data; if (NULL == _this) { - TSLOGE("[ERROR] invalid http request.\n"); + EXLOGE("[ERROR] invalid http request.\n"); return; } @@ -249,7 +249,7 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat else dbg_method = "UNSUPPORTED-HTTP-METHOD"; - TSLOGV("[rpc] got %s request: %s\n", dbg_method, uri.c_str()); + EXLOGV("[rpc] got %s request: %s\n", dbg_method, uri.c_str()); #endif ex_astr ret_buf; bool b_is_index = false; @@ -328,7 +328,7 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat unsigned int rv = _this->_parse_request(hm, method, json_param); if (0 != rv) { - TSLOGE("[ERROR] http-rpc got invalid request.\n"); + EXLOGE("[ERROR] http-rpc got invalid request.\n"); _this->_create_json_ret(ret_buf, rv); } else @@ -432,7 +432,7 @@ unsigned int TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_c func_args = &sztmp[0]; } - TSLOGV("[rpc] method=%s, json_param=%s\n", func_cmd.c_str(), func_args.c_str()); + EXLOGV("[rpc] method=%s, json_param=%s\n", func_cmd.c_str(), func_args.c_str()); return TSR_OK; } @@ -469,7 +469,7 @@ void TsHttpRpc::_process_js_request(const ex_astr& func_cmd, const ex_astr& func } else { - TSLOGE("[rpc] got unknown command: %s\n", func_cmd.c_str()); + EXLOGE("[rpc] got unknown command: %s\n", func_cmd.c_str()); _create_json_ret(buf, TSR_NO_SUCH_METHOD); } } @@ -851,7 +851,7 @@ void TsHttpRpc::_rpc_func_create_ts_client(const ex_astr& func_args, ex_astr& bu if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)) { - TSLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str()); + EXLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str()); root_ret["code"] = TSR_CREATE_PROCESS_ERROR; _create_json_ret(buf, root_ret); return; @@ -1109,7 +1109,7 @@ void TsHttpRpc::_rpc_func_ts_rdp_play(const ex_astr& func_args, ex_astr& buf) ZeroMemory(&pi, sizeof(pi)); if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)) { - TSLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str()); + EXLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str()); root_ret["code"] = TSR_CREATE_PROCESS_ERROR; _create_json_ret(buf, root_ret); return; @@ -1165,7 +1165,7 @@ void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf) ex_wstr2astr(it->second.desc, temp, EX_CODEPAGE_UTF8); config["desc"] = temp; - config["build_in"] = it->second.default; + config["build_in"] = it->second.is_default ?
1 : 0; if (it->first == g_cfgSSH.m_current_client) { config["current"] = 1; @@ -1211,7 +1211,7 @@ void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf) ex_wstr2astr(it->second.alias_name, temp, EX_CODEPAGE_UTF8); config["alias_name"] = temp; - config["build_in"] = it->second.default; + config["build_in"] = it->second.is_default ? 1 : 0; if (it->first == g_cfgScp.m_current_client) config["current"] = 1; @@ -1252,7 +1252,7 @@ void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf) ex_wstr2astr(it->second.alias_name, temp, EX_CODEPAGE_UTF8); config["alias_name"] = temp; - config["build_in"] = it->second.default; + config["build_in"] = it->second.is_default ? 1 : 0; if (it->first == g_cfgTelnet.m_current_client) config["current"] = 1; @@ -1318,7 +1318,7 @@ void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf) _create_json_ret(buf, TSR_INVALID_JSON_PARAM); return; } - if (it->second.default == 1) + if (it->second.is_default) { g_cfgSSH.set(_T("common"), _T("current_client"), w_name); g_cfgSSH.save(); @@ -1343,7 +1343,7 @@ void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf) _create_json_ret(buf, TSR_INVALID_JSON_PARAM); return; } - if (it->second.default == 1) + if (it->second.is_default) { g_cfgScp.set(_T("common"), _T("current_client"), w_name); g_cfgScp.save(); @@ -1367,7 +1367,7 @@ void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf) _create_json_ret(buf, TSR_INVALID_JSON_PARAM); return; } - if (it->second.default == 1) + if (it->second.is_default) { g_cfgTelnet.set(_T("common"), _T("current_client"), w_name); g_cfgTelnet.save(); diff --git a/client/tp_assist/ts_log.cpp b/client/tp_assist/ts_log.cpp deleted file mode 100644 index 14f980e..0000000 --- a/client/tp_assist/ts_log.cpp +++ /dev/null @@ -1,626 +0,0 @@ -#include "stdafx.h" -#include "ts_log.h" -#include "ts_thread.h" - -#include -#include -#include - -#ifdef EX_OS_WIN32 -#include -#include -#include -#else -#include -#include -#endif - -#define LOG_PATH_MAX_LEN 1024 -#define LOG_CONTENT_MAX_LEN 2048 - -#define LOG_FILE_MAX_SIZE 1024*1024*10 -#define LOG_FILE_MAX_COUNT 10 -typedef enum TS_COLORS -{ - TS_COLOR_BLACK = 0, - TS_COLOR_BLUE = 1, - TS_COLOR_GREEN = 2, - TS_COLOR_CYAN = 3, - TS_COLOR_RED = 4, - TS_COLOR_MAGENTA = 5, - TS_COLOR_YELLOW = 6, - TS_COLOR_LIGHT_GRAY = 7, - TS_COLOR_GRAY = 8, - TS_COLOR_LIGHT_BLUE = 9, - TS_COLOR_LIGHT_GREEN = 10, - TS_COLOR_LIGHT_CYAN = 11, - TS_COLOR_LIGHT_RED = 12, - TS_COLOR_LIGHT_MAGENTA = 13, - TS_COLOR_LIGHT_YELLOW = 14, - TS_COLOR_WHITE = 15, - - TS_COLOR_NORMAL = 0xFF, -}TS_COLORS; - -#ifdef EX_OS_WIN32 -static HANDLE g_hConsole = NULL; -#endif - -int g_log_min_level = TS_LOG_LEVEL_INFO; -ex_wstr g_log_path; -ex_wstr g_log_name; -TsThreadLock g_log_lock; - -class TSLogFile -{ -public: - TSLogFile() { - m_hFile = NULL; - m_nMaxFileLength = LOG_FILE_MAX_SIZE; - m_nMaxFileCount = LOG_FILE_MAX_COUNT; - } - ~TSLogFile() { - } - bool WriteData(int level, char* buf, int len); - bool Init(const ex_astr& log_path, const ex_astr& log_name) - { - m_Log_Path = log_path; -#ifdef EX_OS_WIN32 - m_Log_Path += "\\"; -#else - m_Log_Path += "//"; -#endif - m_Log_Path += log_name; - - m_log_name = log_name; - - m_log_file_dir = log_path; - - load_file_list(); - return true; - } -protected: - bool open_file(); - bool backup_file(); - bool load_file_list(); - -protected: - typedef std::deque log_file_deque; - FILE* m_hFile; - - unsigned int m_nMaxFileLength; - unsigned int m_nMaxFileCount; - 
std::string m_Log_Path; - std::string m_log_name; - std::string m_log_file_dir; - log_file_deque m_log_file_list; -private: - -}; -TSLogFile g_log_file; - -void TSLOG_INIT(int min_level, const wchar_t*log_file_name, const wchar_t* log_path) -{ - g_log_min_level = min_level; - -#ifdef EX_OS_WIN32 - if (NULL == g_hConsole) - g_hConsole = GetStdHandle(STD_OUTPUT_HANDLE); -#endif - - if (log_file_name) - { - g_log_name = log_file_name; - } - else - { - g_log_name = L"main.log"; - } - - if (log_path) - { - g_log_path = log_path; - } - else - { - ex_exec_file(g_log_path); - ex_dirname(g_log_path); - ex_path_join(g_log_path, false, L"log"); - } - - ex_mkdirs(g_log_path); - - ex_astr _path, _file; - ex_wstr2astr(g_log_path, _path); - ex_wstr2astr(g_log_name, _file); - - g_log_file.Init(_path, _file); -} - -static void _ts_printf_a(int level,TS_COLORS clrBackGround, const char* fmt, va_list valist) -{ - if (NULL == fmt || 0 == strlen(fmt)) - return; - if (g_log_min_level > level) - return; - TS_COLORS clrForeGround = TS_COLOR_NORMAL; - switch (level) - { - case TS_LOG_LEVEL_DEBUG: - { - clrForeGround = TS_COLOR_GRAY; - } - break; - case TS_LOG_LEVEL_VERBOSE: - { - clrForeGround = TS_COLOR_LIGHT_GRAY; - } - break; - case TS_LOG_LEVEL_INFO: - { - clrForeGround = TS_COLOR_LIGHT_MAGENTA; - } - break; - case TS_LOG_LEVEL_WARN: - { - clrForeGround = TS_COLOR_LIGHT_RED; - } - break; - case TS_LOG_LEVEL_ERROR: - { - clrForeGround = TS_COLOR_LIGHT_RED; - } - break; - default: - break; - } - if (TS_COLOR_NORMAL == clrForeGround) - clrForeGround = TS_COLOR_LIGHT_GRAY; - if (TS_COLOR_NORMAL == clrBackGround) - clrBackGround = TS_COLOR_BLACK; - - char szTmp[4096] = { 0 }; - -#ifdef EX_OS_WIN32 - vsnprintf_s(szTmp, 4096, 4095, fmt, valist); - if (NULL != g_hConsole) - { - SetConsoleTextAttribute(g_hConsole, (WORD)((clrBackGround << 4) | clrForeGround)); - printf_s("%s", szTmp); - fflush(stdout); - SetConsoleTextAttribute(g_hConsole, TS_COLOR_GRAY); - } - else { - OutputDebugStringA(szTmp); - } -#else - vsnprintf(szTmp, 4095, fmt, valist); - printf("%s", szTmp); - fflush(stdout); -#endif - g_log_file.WriteData(level, szTmp, strlen(szTmp)); -} - -static void _ts_printf_w(int level, TS_COLORS clrBackGround, const wchar_t* fmt, va_list valist) -{ - if (NULL == fmt || 0 == wcslen(fmt)) - return; - if (g_log_min_level > level) - return; - - TS_COLORS clrForeGround = TS_COLOR_NORMAL; - switch (level) - { - case TS_LOG_LEVEL_DEBUG: - { - clrForeGround = TS_COLOR_GRAY; - } - break; - case TS_LOG_LEVEL_VERBOSE: - { - clrForeGround = TS_COLOR_LIGHT_GRAY; - } - break; - case TS_LOG_LEVEL_INFO: - { - clrForeGround = TS_COLOR_LIGHT_MAGENTA; - } - break; - case TS_LOG_LEVEL_WARN: - { - clrForeGround = TS_COLOR_LIGHT_RED; - } - break; - case TS_LOG_LEVEL_ERROR: - { - clrForeGround = TS_COLOR_LIGHT_RED; - } - break; - default: - break; - } - if (TS_COLOR_NORMAL == clrForeGround) - clrForeGround = TS_COLOR_LIGHT_GRAY; - if (TS_COLOR_NORMAL == clrBackGround) - clrBackGround = TS_COLOR_BLACK; - - wchar_t szTmp[4096] = { 0 }; - -#ifdef EX_OS_WIN32 - _vsnwprintf_s(szTmp, 4096, 4095, fmt, valist); - if (NULL != g_hConsole) - { - SetConsoleTextAttribute(g_hConsole, (WORD)((clrBackGround << 4) | clrForeGround)); - wprintf_s(_T("%s"), szTmp); - fflush(stdout); - SetConsoleTextAttribute(g_hConsole, TS_COLOR_GRAY); - } - else { - OutputDebugStringW(szTmp); - } -#else - vswprintf(szTmp, 4095, fmt, valist); - wprintf(L"%s", szTmp); - fflush(stdout); -#endif - -} - -#define TS_PRINTF_X(fn, level) \ -void fn(const char* fmt, ...) 
\ -{ \ - TsThreadSmartLock locker(g_log_lock); \ - va_list valist; \ - va_start(valist, fmt); \ - _ts_printf_a(level, TS_COLOR_BLACK, fmt, valist); \ - va_end(valist); \ -} \ -void fn(const wchar_t* fmt, ...) \ -{ \ - TsThreadSmartLock locker(g_log_lock); \ - va_list valist; \ - va_start(valist, fmt); \ - _ts_printf_w(level, TS_COLOR_BLACK, fmt, valist); \ - va_end(valist); \ -} - -TS_PRINTF_X(ts_printf_d, TS_LOG_LEVEL_DEBUG) -TS_PRINTF_X(ts_printf_v, TS_LOG_LEVEL_VERBOSE) -TS_PRINTF_X(ts_printf_i, TS_LOG_LEVEL_INFO) -TS_PRINTF_X(ts_printf_w, TS_LOG_LEVEL_WARN) -TS_PRINTF_X(ts_printf_e, TS_LOG_LEVEL_ERROR) - -#ifdef EX_OS_WIN32 -void ts_printf_e_lasterror(const char* fmt, ...) -{ - TsThreadSmartLock locker(g_log_lock); - - va_list valist; - va_start(valist, fmt); - _ts_printf_a(TS_COLOR_LIGHT_RED, TS_COLOR_BLACK, fmt, valist); - va_end(valist); - - //========================================= - - LPVOID lpMsgBuf; - DWORD dw = GetLastError(); - - FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, - NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), - (LPSTR)&lpMsgBuf, 0, NULL); - - ts_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf); - LocalFree(lpMsgBuf); -} -#endif - -void ts_printf_bin(ex_u8* bin_data, size_t bin_size, const char* fmt, ...) -{ - TsThreadSmartLock locker(g_log_lock); - - va_list valist; - va_start(valist, fmt); - _ts_printf_a(TS_COLOR_GRAY, TS_COLOR_BLACK, fmt, valist); - va_end(valist); - - ts_printf_d(" (%d/0x%02x Bytes)\n", bin_size, bin_size); - - const ex_u8* line = bin_data; - size_t thisline = 0; - size_t offset = 0; - unsigned int i = 0; - - char szTmp[128] = { 0 }; - int _offset = 0; - - while (offset < bin_size) - { - memset(szTmp, 0, 128); - _offset = 0; - - snprintf(szTmp + _offset, 128 - _offset, "%06x ", (int)offset); - _offset += 8; - - thisline = bin_size - offset; - if (thisline > 16) - thisline = 16; - - for (i = 0; i < thisline; i++) - { - snprintf(szTmp + _offset, 128 - _offset, "%02x ", line[i]); - _offset += 3; - } - - snprintf(szTmp + _offset, 128 - _offset, " "); - _offset += 2; - - for (; i < 16; i++) - { - snprintf(szTmp + _offset, 128 - _offset, " "); - _offset += 3; - } - - for (i = 0; i < thisline; i++) - { - snprintf(szTmp + _offset, 128 - _offset, "%c", (line[i] >= 0x20 && line[i] < 0x7f) ? 
line[i] : '.'); - _offset += 1; - } - - snprintf(szTmp + _offset, 128 - _offset, "\n"); - _offset += 1; - - ts_printf_d("%s", szTmp); - - offset += thisline; - line += thisline; - } - - fflush(stdout); -} - -bool TSLogFile::open_file() -{ - if (m_hFile) - { - fclose(m_hFile); - m_hFile = 0; - } - - // 注意:这里必须使用 _fsopen 来指定共享读方式打开日志文件,否则进程推出前无法查看日志文件内容。 - m_hFile = _fsopen(m_Log_Path.c_str(), "a", _SH_DENYWR); - if (NULL == m_hFile) - return false; - - fseek(m_hFile, 0, SEEK_END); - unsigned long file_size = ftell(m_hFile); - if (file_size > (unsigned long)m_nMaxFileLength) - { - //备份文件 - if (backup_file()) - { - //打开文件 - return open_file(); - } - } - return true; -} - -bool TSLogFile::backup_file() -{ - char szNewFileLogName[LOG_PATH_MAX_LEN] = {0}; - char szBaseNewFileLogName[LOG_PATH_MAX_LEN] = { 0 }; -#ifdef EX_OS_WIN32 - SYSTEMTIME st; - GetLocalTime(&st); - sprintf_s(szNewFileLogName, LOG_PATH_MAX_LEN, "%s\\%04d%02d%02d%02d%02d%02d.log", - m_log_file_dir.c_str(),st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); - - sprintf_s(szBaseNewFileLogName, LOG_PATH_MAX_LEN, "%04d%02d%02d%02d%02d%02d", - st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond); -#else - time_t timep; - struct tm *p; - time(&timep); - p = localtime(&timep); //get server's time - if (p == NULL) - { - return NULL; - } - sprintf(szNewFileLogName, "%s//%04d%02d%02d%02d%02d%02d.log", - m_log_file_dir.c_str(),p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec); - sprintf(szBaseNewFileLogName, "%04d%02d%02d%02d%02d%02d", - p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec); -#endif - if (m_hFile) - { - fclose(m_hFile); - m_hFile = 0; - } -#ifdef EX_OS_WIN32 - if (!MoveFileA(m_Log_Path.c_str(), szNewFileLogName)) - { - DWORD dwError = GetLastError(); - - DeleteFileA(szNewFileLogName); - - MoveFileA(m_Log_Path.c_str(), szNewFileLogName); - } -#else - if (rename(m_Log_Path.c_str(), szNewFileLogName) != 0) - { - remove(szNewFileLogName); - - rename(m_Log_Path.c_str(), szNewFileLogName); - } -#endif - unsigned long long value = atoll(szBaseNewFileLogName); - if (value !=0 ) - { - m_log_file_list.push_back(value); - } - int try_count = 0; - while ((m_log_file_list.size() > m_nMaxFileCount)) - { - unsigned long long value = m_log_file_list.front(); - char szDeleteFile[256] = { 0 }; -#ifdef EX_OS_WIN32 - sprintf_s(szDeleteFile, 256, "%s\\%llu.log", m_log_file_dir.c_str(), value); - if (DeleteFileA(szDeleteFile)) - { - m_log_file_list.pop_front(); - } -#else - sprintf(szDeleteFile, "%s//%llu.log", m_log_file_dir.c_str(), value); - if (remove(szDeleteFile) == 0) - { - m_log_file_list.pop_front(); - } -#endif - else - { - if (try_count > 5) - { - break; - } - try_count++; - } - - } - - return true; -} - -bool TSLogFile::WriteData(int level, char* buf, int len) -{ - if (len > LOG_CONTENT_MAX_LEN) - { - return false; - } - - // TODO: 这里每次写日志时都会导致判断文件大小来决定是否新开一个日志文件,效率低下。应该改为缓存文件大小,每次写入完毕后更新大小值,超过阀值则新开日志文件。 - if (!open_file()) - { - return false; - } - - -#ifdef EX_OS_WIN32 - unsigned long _tid = GetCurrentThreadId(); -#else - unsigned long _tid = pthread_self(); -#endif -#ifdef EX_OS_WIN32 - unsigned long now = GetTickCount(); -#else -// unsigned long now = 0; - struct timeval tv; - if (gettimeofday(&tv, NULL /* tz */) != 0) return false; - unsigned long now = (double)tv.tv_sec + (((double)tv.tv_usec) / 1000.0); -#endif - - char szLog[LOG_CONTENT_MAX_LEN + 100] = {0}; -#ifdef EX_OS_WIN32 - SYSTEMTIME st; - GetLocalTime(&st); - sprintf_s(szLog, 
LOG_CONTENT_MAX_LEN + 100, "[%04d-%02d-%02d %02d:%02d:%02d] %s", - st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond, buf); -#else - time_t timep; - struct tm *p; - time(&timep); - p = localtime(&timep); //get server's time - if (p == NULL) - { - return NULL; - } - sprintf(szLog, "[%04d-%02d-%02d %02d:%02d:%02d] %s", - p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec, buf); -#endif - // TODO: 在这里统计文件大小 - fwrite(szLog, strlen(szLog), 1, m_hFile); - fflush(m_hFile); - return true; -} - -bool TSLogFile::load_file_list() -{ -#ifdef EX_OS_WIN32 - struct _finddata_t data; - std::string log_match = m_log_file_dir; - log_match += "\\"; - log_match += "*.log"; - long hnd = _findfirst(log_match.c_str(), &data); - if (hnd < 0) - { - return false; - } - int nRet = (hnd <0) ? -1 : 1; - while (nRet > 0) - { - if (data.attrib == _A_SUBDIR) - printf(" [%s]*\n", data.name); - else { - - if (m_log_name.compare(data.name) == 0) - { - } - else { - char* match = strrchr(data.name, '.'); - if (match != NULL) - { - *match = '\0'; - } - unsigned long long value = atoll(data.name); - if (value == 0) - { - continue; - } - m_log_file_list.push_back(value); - } - - - } - - nRet = _findnext(hnd, &data); - } - _findclose(hnd); -#else - DIR *dir; - - struct dirent *ptr; - - dir = opendir(m_log_file_dir.c_str()); - - while ((ptr = readdir(dir)) != NULL) - { - if(ptr->d_type == 8) - { - char temp_file_name[PATH_MAX] = {0}; - strcpy(temp_file_name,ptr->d_name); - if (m_log_name.compare(temp_file_name) == 0) - { - - }else{ - char* match = strrchr(temp_file_name, '.'); - if (match != NULL) - { - *match = '\0'; - } - unsigned long long value = atoll(temp_file_name); - if (value == 0) - { - continue; - } - m_log_file_list.push_back(value); - } - } -// printf("d_name: %s d_type: %d\n", ptr->d_name, ptr->d_type); - } - - - closedir(dir); -#endif // EX_OS_WIN32 - - std::sort(m_log_file_list.begin(), m_log_file_list.end(), std::less()); - return true; -} diff --git a/client/tp_assist/ts_log.h b/client/tp_assist/ts_log.h deleted file mode 100644 index b282362..0000000 --- a/client/tp_assist/ts_log.h +++ /dev/null @@ -1,50 +0,0 @@ -#ifndef __TS_LOG_H__ -#define __TS_LOG_H__ - -#include - -#define TS_LOG_LEVEL_DEBUG 0 -#define TS_LOG_LEVEL_VERBOSE 1 -#define TS_LOG_LEVEL_INFO 2 -#define TS_LOG_LEVEL_WARN 3 -#define TS_LOG_LEVEL_ERROR 4 - -void TSLOG_INIT(int min_level, const wchar_t* log_file_name, const wchar_t* log_path = NULL); - -#define TSLOGV ts_printf_v -#define TSLOGI ts_printf_i -#define TSLOGW ts_printf_w -#define TSLOGE ts_printf_e - -#ifdef TS_DEBUG -# define TSLOGD ts_printf_d -# define TSLOG_BIN ts_printf_bin -#else -# define TSLOGD -# define TSLOG_BIN -#endif - -#ifdef EX_OS_WIN32 -#define TSLOGE_WIN ts_printf_e_lasterror -void ts_printf_e_lasterror(const char* fmt, ...); -void ts_printf_e_lasterror(const wchar_t* fmt, ...); -#endif - - -void ts_printf_d(const char* fmt, ...); -void ts_printf_v(const char* fmt, ...); -void ts_printf_i(const char* fmt, ...); -void ts_printf_w(const char* fmt, ...); -void ts_printf_e(const char* fmt, ...); - -void ts_printf_d(const wchar_t* fmt, ...); -void ts_printf_v(const wchar_t* fmt, ...); -void ts_printf_i(const wchar_t* fmt, ...); -void ts_printf_w(const wchar_t* fmt, ...); -void ts_printf_e(const wchar_t* fmt, ...); - -void ts_printf_bin(ex_u8* bin_data, size_t bin_size, const char* fmt, ...); -void ts_printf_bin(ex_u8* bin_data, size_t bin_size, const wchar_t* fmt, ...); - - -#endif // __TS_LOG_H__ diff --git a/common/libex/include/ex.h 
b/common/libex/include/ex.h index 4aca862..68166fb 100644 --- a/common/libex/include/ex.h +++ b/common/libex/include/ex.h @@ -11,6 +11,12 @@ #include "ex/ex_util.h" #include "ex/ex_str.h" #include "ex/ex_path.h" +#include "ex/ex_thread.h" +#include "ex/ex_log.h" +#include "ex/ex_ini.h" +#ifdef EX_OS_WIN32 +# include "ex/ex_winsrv.h" +#endif #endif // __LIB_EX_H__ diff --git a/common/libex/include/ex/ex_const.h b/common/libex/include/ex/ex_const.h index 7a39020..30604a7 100644 --- a/common/libex/include/ex/ex_const.h +++ b/common/libex/include/ex/ex_const.h @@ -37,4 +37,21 @@ #define EX_CURRENT_DIR_STR L"." #define EX_NULL_END L'\0' + + +//==================================================== +// error code. +//==================================================== +#define EXRV_OK 0 +#define EXRV_SYS_ERR 1 // system error; use GetLastError() or errno to get the specific error value +#define EXRV_FAILED 2 // operation failed + +//#define EXRV_CANNOT_FOUND 9 +#define EXRV_CANNOT_CREATE 10 +#define EXRV_CANNOT_OPEN 11 +#define EXRV_CANNOT_SET 12 +#define EXRV_CANNOT_REMOVE 13 +#define EXRV_NOT_START 14 +#define EXRV_NOT_EXISTS 15 + #endif // __LIB_EX_CONST_H__ diff --git a/client/tp_assist/ts_ini.h b/common/libex/include/ex/ex_ini.h similarity index 58% rename from client/tp_assist/ts_ini.h rename to common/libex/include/ex/ex_ini.h index aac1081..9d9f3c5 100644 --- a/client/tp_assist/ts_ini.h +++ b/common/libex/include/ex/ex_ini.h @@ -1,16 +1,27 @@ -#ifndef __TS_INI_H__ -#define __TS_INI_H__ +#ifndef __EX_INI_H__ +#define __EX_INI_H__ -#include <map> +/* +IMPORTANT NOTES: -typedef std::map<ex_wstr, ex_wstr> ts_ini_kvs; +1. A comment line starts with a semicolon ';' or a hash '#' as its first character. +2. Inline comments are not supported. +3. A key-value pair is split at the first equals sign; spaces around the equals sign are ignored, while any later spaces are kept, including trailing ones. +4. Key-value pairs that do not belong to any section can be retrieved via GetDumySection(). + The dumy section mainly exists so that simple Python files can also be used as configuration files. +*/ -class TsIniSection +#include "ex_str.h" +#include <map> + +typedef std::map<ex_wstr, ex_wstr> ex_ini_kvs; + +class ExIniSection { public: - TsIniSection(); - TsIniSection(const ex_wstr& strSectionName); - ~TsIniSection(); + ExIniSection(); + ExIniSection(const ex_wstr& strSectionName); + ~ExIniSection(); void ClearUp(void); @@ -27,14 +38,14 @@ public: bool SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool bAddIfNotExists = false); - ts_ini_kvs& GetKeyValues(void) { return m_kvs; } + ex_ini_kvs& GetKeyValues(void) { return m_kvs; } int Count(void) const { return m_kvs.size(); } void Save(FILE* file, int codepage); -#ifdef _DEBUG +#ifdef EX_DEBUG void Dump(void); #endif @@ -43,14 +54,14 @@ protected: private: ex_wstr m_strName; - ts_ini_kvs m_kvs; + ex_ini_kvs m_kvs; }; -typedef std::map<ex_wstr, TsIniSection*> ts_ini_sections; +typedef std::map<ex_wstr, ExIniSection*> ex_ini_sections; // Ini file -class TsIniFile +class ExIniFile { public: enum PARSE_RV { @@ -63,8 +74,8 @@ public: - TsIniFile(); - ~TsIniFile(); + ExIniFile(); + ~ExIniFile(); void ClearUp(void); @@ -72,24 +83,26 @@ public: bool LoadFromFile(const ex_wstr& strFileName, bool bClearOld = true); bool LoadFromMemory(const ex_wstr& strData, bool bClearOld = true); - TsIniSection* GetSection(const ex_wstr& strName, bool bCreateIfNotExists = false); + ExIniSection* GetSection(const ex_wstr& strName, bool bCreateIfNotExists = false); + ExIniSection* GetDumySection(void) { return &m_dumy_sec; } int Count(void) const { return m_secs.size(); } void Save(int codepage = EX_CODEPAGE_UTF8); -#ifdef _DEBUG +#ifdef EX_DEBUG void Dump(void); #endif protected: static PARSE_RV _ParseLine(const ex_wstr& strLine, ex_wstr& strKey, ex_wstr& strValue); - bool _ProcessLine(const ex_wstr strLine, TsIniSection** pCurSection); + bool _ProcessLine(const ex_wstr strLine, ExIniSection** pCurSection); private: - ts_ini_sections m_secs; + ex_ini_sections m_secs; + ExIniSection m_dumy_sec; ex_wstr m_file_path; }; -#endif // __TS_INI_H__ +#endif // __EX_INI_H__
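For orientation, a minimal usage sketch of the ExIniFile API declared above (the file name and keys here are made-up examples, not values from this patch):

ExIniFile ini;
if (ini.LoadFromFile(L"example.ini"))
{
    ExIniSection* sec = ini.GetSection(L"common", true); // create the section if missing
    ex_wstr client;
    sec->GetStr(L"current_client", client, L"putty");    // read with a default value
    sec->SetValue(L"current_client", L"winscp", true);   // add the key if it does not exist
    ini.Save();                                          // codepage defaults to EX_CODEPAGE_UTF8
}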
diff --git a/common/libex/include/ex/ex_log.h b/common/libex/include/ex/ex_log.h new file mode 100644 index 0000000..b322b5e --- /dev/null +++ b/common/libex/include/ex/ex_log.h @@ -0,0 +1,53 @@ +#ifndef __EX_LOG_H__ +#define __EX_LOG_H__ + +#include "ex_types.h" + +#define EX_LOG_LEVEL_DEBUG 0 +#define EX_LOG_LEVEL_VERBOSE 1 +#define EX_LOG_LEVEL_INFO 2 +#define EX_LOG_LEVEL_WARN 3 +#define EX_LOG_LEVEL_ERROR 4 + +#define EX_LOG_FILE_MAX_SIZE (1024*1024*10) +#define EX_LOG_FILE_MAX_COUNT 10 + + +void EXLOG_LEVEL(int min_level); + +// Set the log file name and path; if no path is given, the "log" directory under the executable's directory is used. +void EXLOG_FILE(const wchar_t* log_file, const wchar_t* log_path = NULL, ex_u32 max_filesize = EX_LOG_FILE_MAX_SIZE, ex_u8 max_filecount = EX_LOG_FILE_MAX_COUNT); + +void EXLOG_CONSOLE(bool output_to_console); + +#define EXLOGV ex_printf_v +#define EXLOGI ex_printf_i +#define EXLOGW ex_printf_w +#define EXLOGE ex_printf_e +#define EXLOGD ex_printf_d +#define EXLOG_BIN ex_printf_bin + +#ifdef EX_OS_WIN32 +#define EXLOGE_WIN ex_printf_e_lasterror +void ex_printf_e_lasterror(const char* fmt, ...); +void ex_printf_e_lasterror(const wchar_t* fmt, ...); +#endif + + +void ex_printf_d(const char* fmt, ...); +void ex_printf_v(const char* fmt, ...); +void ex_printf_i(const char* fmt, ...); +void ex_printf_w(const char* fmt, ...); +void ex_printf_e(const char* fmt, ...); + +void ex_printf_d(const wchar_t* fmt, ...); +void ex_printf_v(const wchar_t* fmt, ...); +void ex_printf_i(const wchar_t* fmt, ...); +void ex_printf_w(const wchar_t* fmt, ...); +void ex_printf_e(const wchar_t* fmt, ...); + +void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const char* fmt, ...); +void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const wchar_t* fmt, ...); + + +#endif // __EX_LOG_H__
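The logging bootstrap in tp_assist.cpp above reduces to this pattern; a minimal sketch assuming only the declarations in ex_log.h (file name and message values are examples):

#ifdef EX_DEBUG
EXLOG_LEVEL(EX_LOG_LEVEL_DEBUG);
#else
EXLOG_LEVEL(EX_LOG_LEVEL_INFO);
#endif
EXLOG_FILE(L"example.log");   // log_path defaults to NULL => "log" dir next to the executable
EXLOGV("[rpc] listener ready on %s:%d\n", "127.0.0.1", 50022); // narrow overload
EXLOGE(L"something failed: %s\n", L"reason");                  // wide overload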
diff --git a/common/libex/include/ex/ex_path.h b/common/libex/include/ex/ex_path.h index b3884cf..81331aa 100644 --- a/common/libex/include/ex/ex_path.h +++ b/common/libex/include/ex/ex_path.h @@ -36,6 +36,10 @@ bool ex_dirname(ex_wstr& inout_filename); bool ex_path_join(ex_wstr& inout_path, bool auto_abspath, ...); bool ex_abspath_to(const ex_wstr& base_abs_path, const ex_wstr& relate_path, ex_wstr& out_path); bool ex_mkdirs(const ex_wstr& in_path); + +// Get the extension part of a file name (without the dot; e.g. for abc.py, returns py) +bool ex_path_ext_name(const ex_wstr& in_filename, ex_wstr& out_ext); + #endif #endif // __LIB_EX_PATH_H__ diff --git a/common/libex/include/ex/ex_platform.h b/common/libex/include/ex/ex_platform.h index b22e9b5..0426a43 100644 --- a/common/libex/include/ex/ex_platform.h +++ b/common/libex/include/ex/ex_platform.h @@ -3,20 +3,12 @@ #if defined(_WIN32) || defined(WIN32) # define EX_OS_WIN32 -// # define EX_OS_NAME L"windows" -// # ifdef _CONSOLE -// # define EX_CONSOLE -// # endif #elif defined(__linux__) # define EX_OS_LINUX # define EX_OS_UNIX -// # define EX_OS_NAME L"linux" -// # define EX_CONSOLE #elif defined(__APPLE__) # define EX_OS_MACOS # define EX_OS_UNIX -// # define EX_OS_NAME L"macos" -// # define PYS_CONSOLE #else # error unsupported platform. #endif @@ -110,8 +102,6 @@ # endif #endif - - #ifdef EX_OS_WIN32 # pragma comment(lib, "shlwapi.lib") #endif diff --git a/common/libex/include/ex/ex_str.h b/common/libex/include/ex/ex_str.h index 4e8b2b6..0c5ec7a 100644 --- a/common/libex/include/ex/ex_str.h +++ b/common/libex/include/ex/ex_str.h @@ -4,7 +4,12 @@ #include "ex_types.h" #define EX_CODEPAGE_ACP 0 -#define EX_CODEPAGE_UTF8 1 +#define EX_CODEPAGE_UTF8 1 +#ifdef EX_OS_WIN32 +# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_ACP +#else +# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_UTF8 +#endif #define EX_RSC_BEGIN 0x01 #define EX_RSC_END 0x02 @@ -53,10 +58,10 @@ typedef std::wstring ex_wstr; typedef std::vector<ex_astr> ex_astrs; typedef std::vector<ex_wstr> ex_wstrs; -bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_ACP); -bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_ACP); -bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_ACP); -bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_ACP); +bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT); +bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT); +bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT); +bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT); bool ex_only_white_space(const ex_astr& str_check); bool ex_only_white_space(const ex_wstr& str_check); diff --git a/client/tp_assist/ts_thread.h b/common/libex/include/ex/ex_thread.h similarity index 51% rename from client/tp_assist/ts_thread.h rename to common/libex/include/ex/ex_thread.h index 0fd0544..d442e0b 100644 --- a/client/tp_assist/ts_thread.h +++ b/common/libex/include/ex/ex_thread.h @@ -1,24 +1,26 @@ -#ifndef __TS_THREAD_H__ -#define __TS_THREAD_H__ +#ifndef __EX_THREAD_H__ +#define __EX_THREAD_H__ + +//#include "ts_common.h" +#include -#include #include <list> #ifdef EX_OS_WIN32 # include <windows.h> -typedef HANDLE TS_THREAD_HANDLE; +typedef HANDLE EX_THREAD_HANDLE; #else # include <pthread.h> -typedef pthread_t TS_THREAD_HANDLE; +typedef pthread_t EX_THREAD_HANDLE; #endif -class TsThreadManager; +class ExThreadManager; -class TsThreadBase +class ExThreadBase { public: - TsThreadBase(TsThreadManager* tm, const char* thread_name); - virtual ~TsThreadBase(); + ExThreadBase(ExThreadManager* tm, const char* thread_name); + virtual ~ExThreadBase(); bool is_running(void) { return m_is_running; } @@ -41,21 +43,24 @@ protected: static void* _thread_func(void * pParam); #endif + // suspend the thread for the given number of milliseconds + // void _sleep_ms(int ms); + protected: - TsThreadManager* m_thread_manager; + ExThreadManager* m_thread_manager; ex_astr m_thread_name; - TS_THREAD_HANDLE m_handle; + EX_THREAD_HANDLE m_handle; bool m_is_running; bool m_stop_by_request; }; // thread lock (for use within a single process) -class TsThreadLock +class ExThreadLock { public: - TsThreadLock(); - virtual ~TsThreadLock(); + ExThreadLock(); + virtual ~ExThreadLock(); void lock(void); void unlock(void); @@ -69,47 +74,47 @@ private: }; // scoped helper for the thread lock -class TsThreadSmartLock +class ExThreadSmartLock { public: - TsThreadSmartLock(TsThreadLock& lock) : m_lock(lock) + ExThreadSmartLock(ExThreadLock& lock) : m_lock(lock) { m_lock.lock(); } - ~TsThreadSmartLock() + ~ExThreadSmartLock() { m_lock.unlock(); } private: - TsThreadLock& m_lock; + ExThreadLock& m_lock; }; -typedef std::list<TsThreadBase*> ts_threads; +typedef std::list<ExThreadBase*> ex_threads; -class TsThreadManager +class ExThreadManager { - friend class TsThreadBase; + friend class ExThreadBase; public: - TsThreadManager(); - virtual ~TsThreadManager(); + ExThreadManager(); + virtual ~ExThreadManager(); void stop_all(void); private: - void _add_thread(TsThreadBase* tb); - void _remove_thread(TsThreadBase* tb); + void _add_thread(ExThreadBase* tb); + void _remove_thread(ExThreadBase* tb); private: - TsThreadLock m_lock; - ts_threads m_threads; + ExThreadLock m_lock; + ex_threads m_threads; }; // atomic operations -int ts_atomic_add(volatile int* pt, int t); -int ts_atomic_inc(volatile int* pt); -int ts_atomic_dec(volatile int* pt); +int ex_atomic_add(volatile int* pt, int t); +int ex_atomic_inc(volatile int* pt); +int ex_atomic_dec(volatile int* pt); -#endif // __TS_THREAD_H__ +#endif // __EX_THREAD_H__
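The renamed lock types keep the same RAII pattern used throughout ex_log.cpp below; a minimal sketch (the counter is a made-up example):

static ExThreadLock g_counter_lock;
static int g_counter = 0;

void bump_counter()
{
    ExThreadSmartLock locker(g_counter_lock); // locks here, unlocks when the scope exits
    ++g_counter;
}
// For a plain int, ex_atomic_inc(&g_counter) would be the lock-free alternative.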
diff --git a/common/libex/include/ex/ex_types.h b/common/libex/include/ex/ex_types.h index 9656800..cd17676 100644 --- a/common/libex/include/ex/ex_types.h +++ b/common/libex/include/ex/ex_types.h @@ -31,4 +31,6 @@ typedef int EX_BOOL; typedef std::vector<ex_u8> ex_bin; typedef std::vector<char> ex_chars; +typedef ex_u32 ex_rv; + #endif // __LIB_EX_TYPE_H__ diff --git a/common/libex/include/ex/ex_winsrv.h b/common/libex/include/ex/ex_winsrv.h new file mode 100644 index 0000000..91938ec --- /dev/null +++ b/common/libex/include/ex/ex_winsrv.h @@ -0,0 +1,21 @@ +#ifndef __EX_WINSRV_H__ +#define __EX_WINSRV_H__ + +#include "ex_str.h" + +#ifdef EX_OS_WIN32 + +ex_rv ex_winsrv_install(const ex_wstr& srv_name, const ex_wstr& disp_name, const ex_wstr& exec_path); +ex_rv ex_winsrv_uninstall(const ex_wstr& srv_name); +bool ex_winsrv_is_exists(const ex_wstr& srv_name); +ex_rv ex_winsrv_start(const ex_wstr& srv_name); +ex_rv ex_winsrv_stop(const ex_wstr& srv_name); +ex_rv ex_winsrv_status(const ex_wstr& srv_name, ex_ulong& status); +ex_rv ex_winsrv_pause(const ex_wstr& srv_name); +ex_rv ex_winsrv_resume(const ex_wstr& srv_name); +ex_rv ex_winsrv_config(const ex_wstr& srv_name, QUERY_SERVICE_CONFIG& cfg); +ex_rv ex_winsrv_pid(const ex_wstr& srv_name, ex_ulong& pid); + +#endif + +#endif // __EX_WINSRV_H__
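A hedged sketch of driving the new Windows-service helpers (the service name and paths are made up; the EXRV_* codes are the ones defined in ex_const.h above):

#ifdef EX_OS_WIN32
ex_wstr srv_name(L"tp_example");
if (!ex_winsrv_is_exists(srv_name))
{
    if (EXRV_OK != ex_winsrv_install(srv_name, L"Example Service", L"C:\\tp\\example.exe"))
        EXLOGE(L"install service failed.\n");
}
if (EXRV_OK != ex_winsrv_start(srv_name))
    EXLOGE(L"start service failed.\n");
#endif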
diff --git a/client/tp_assist/ts_ini.cpp b/common/libex/src/ex_ini.cpp similarity index 74% rename from client/tp_assist/ts_ini.cpp rename to common/libex/src/ex_ini.cpp index 3868c94..26031ea 100644 --- a/client/tp_assist/ts_ini.cpp +++ b/common/libex/src/ex_ini.cpp @@ -1,40 +1,40 @@ -#include "stdafx.h" -#include "ts_ini.h" +#include +#include -TsIniSection::TsIniSection(const ex_wstr& strSectionName) +ExIniSection::ExIniSection(const ex_wstr& strSectionName) { m_kvs.clear(); m_strName = strSectionName; } -TsIniSection::TsIniSection() +ExIniSection::ExIniSection() { m_kvs.clear(); m_strName = _T("N/A"); } -TsIniSection::~TsIniSection() +ExIniSection::~ExIniSection() { m_kvs.clear(); } -bool TsIniSection::_IsKeyExists(const ex_wstr& strKey) +bool ExIniSection::_IsKeyExists(const ex_wstr& strKey) { return (m_kvs.end() != m_kvs.find(strKey)); } -void TsIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue, const ex_wstr& strDefault) +void ExIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue, const ex_wstr& strDefault) { - ts_ini_kvs::iterator it = m_kvs.find(strKey); + ex_ini_kvs::iterator it = m_kvs.find(strKey); if (m_kvs.end() == it) strValue = strDefault; else strValue = (*it).second; } -bool TsIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue) +bool ExIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue) { - ts_ini_kvs::iterator it = m_kvs.find(strKey); + ex_ini_kvs::iterator it = m_kvs.find(strKey); if (m_kvs.end() == it) return false; @@ -42,9 +42,9 @@ bool TsIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue) return true; } -void TsIniSection::GetInt(const ex_wstr& strKey, int& iValue, int iDefault) +void ExIniSection::GetInt(const ex_wstr& strKey, int& iValue, int iDefault) { - ts_ini_kvs::iterator it = m_kvs.find(strKey); + ex_ini_kvs::iterator it = m_kvs.find(strKey); if (m_kvs.end() == it) { iValue = iDefault; @@ -58,9 +58,9 @@ void TsIniSection::GetInt(const ex_wstr& strKey, int& iValue, int iDefault) #endif } -bool TsIniSection::GetInt(const ex_wstr& strKey, int& iValue) +bool ExIniSection::GetInt(const ex_wstr& strKey, int& iValue) { - ts_ini_kvs::iterator it = m_kvs.find(strKey); + ex_ini_kvs::iterator it = m_kvs.find(strKey); if (m_kvs.end() == it) return false; @@ -73,9 +73,9 @@ bool TsIniSection::GetInt(const ex_wstr& strKey, int& iValue) return true; } -void TsIniSection::GetBool(const ex_wstr& strKey, bool& bValue, bool bDefault) +void ExIniSection::GetBool(const ex_wstr& strKey, bool& bValue, bool bDefault) { - ts_ini_kvs::iterator it = m_kvs.find(strKey); + ex_ini_kvs::iterator it = m_kvs.find(strKey); if (m_kvs.end() == it) { bValue = bDefault; @@ -95,9 +95,9 @@ void TsIniSection::GetBool(const ex_wstr& strKey, bool& bValue, bool bDefault) bValue = false; } -bool TsIniSection::GetBool(const ex_wstr& strKey, bool& bValue) +bool ExIniSection::GetBool(const ex_wstr& strKey, bool& bValue) { - ts_ini_kvs::iterator it = m_kvs.find(strKey); + ex_ini_kvs::iterator it = m_kvs.find(strKey); if (m_kvs.end() == it) return false; @@ -117,9 +117,9 @@ } -bool TsIniSection::SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool bAddIfNotExists) +bool ExIniSection::SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool bAddIfNotExists) { - ts_ini_kvs::iterator it = m_kvs.find(strKey); + ex_ini_kvs::iterator it = m_kvs.find(strKey); if (it != m_kvs.end()) { it->second = strValue; @@ -135,14 +135,14 @@ bool TsIniSection::SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool return false; } -void TsIniSection::ClearUp(void) +void ExIniSection::ClearUp(void) { m_kvs.clear(); } -void TsIniSection::Save(FILE* file, int codepage) +void ExIniSection::Save(FILE* file, int codepage) { - ts_ini_kvs::iterator it = m_kvs.begin(); + ex_ini_kvs::iterator it = m_kvs.begin(); for (; it != m_kvs.end(); ++it) { ex_wstr temp; @@ -157,27 +157,27 @@ return; } -#ifdef _DEBUG -void TsIniSection::Dump(void) +#ifdef EX_DEBUG +void ExIniSection::Dump(void) { - ts_ini_kvs::iterator it = m_kvs.begin(); + ex_ini_kvs::iterator it = m_kvs.begin(); for (; it != m_kvs.end(); ++it) { - TSLOGD(_T(" [%s]=[%s]\n"), it->first.c_str(), it->second.c_str()); + EXLOGD(_T(" [%s]=[%s]\n"), it->first.c_str(), it->second.c_str()); } } #endif -TsIniFile::TsIniFile() +ExIniFile::ExIniFile() { } -TsIniFile::~TsIniFile() +ExIniFile::~ExIniFile() { ClearUp(); } -bool TsIniFile::LoadFromFile(const ex_wstr& strFileName, bool bClearOld) +bool ExIniFile::LoadFromFile(const ex_wstr& strFileName, bool bClearOld) { #ifdef EX_OS_WIN32 HANDLE hFile = ::CreateFileW(strFileName.c_str(), GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL); @@ -233,26 +233,21 @@ bool TsIniFile::LoadFromFile(const ex_wstr& strFileName, bool bClearOld) pOffset += 3; } // configuration files always use UTF-8 encoding -// #ifdef EX_OS_WIN32 ex_wstr fileData; if (!ex_astr2wstr(pOffset, fileData, EX_CODEPAGE_UTF8)) return false; -// #else -// ex_wstr fileData
= pOffset; -// #endif return LoadFromMemory(fileData, bClearOld); - } +} -bool TsIniFile::LoadFromMemory(const ex_wstr& strData, bool bClearOld) +bool ExIniFile::LoadFromMemory(const ex_wstr& strData, bool bClearOld) { if (strData.empty()) return false; ex_wstr strAll(strData); bool bRet = true; - TsIniSection* pCurSection = NULL; + ExIniSection* pCurSection = NULL; do { // Clear old data. @@ -305,7 +300,7 @@ return bRet; } -void TsIniFile::Save(int codepage/* = EX_CODEPAGE_UTF8*/) +void ExIniFile::Save(int codepage/* = EX_CODEPAGE_UTF8*/) { ex_astr temp; ex_wstr2astr(m_file_path, temp); @@ -318,10 +313,15 @@ { return; } - ts_ini_sections::iterator it = m_secs.begin(); + + // if there are key-value pairs that do not belong to any section, save them first + if (m_dumy_sec.Count() > 0) + m_dumy_sec.Save(file, codepage); + + ex_ini_sections::iterator it = m_secs.begin(); for (; it != m_secs.end(); ++it) { - TSLOGD(_T("{%s}\n"), it->first.c_str()); + EXLOGD(_T("{%s}\n"), it->first.c_str()); ex_wstr temp; temp += _T("["); temp += it->first.c_str(); @@ -335,21 +335,21 @@ fclose(file); } -#ifdef _DEBUG -void TsIniFile::Dump(void) +#ifdef EX_DEBUG +void ExIniFile::Dump(void) { - ts_ini_sections::iterator it = m_secs.begin(); + ex_ini_sections::iterator it = m_secs.begin(); for (; it != m_secs.end(); ++it) { - TSLOGD(_T("{%s}\n"), it->first.c_str()); + EXLOGD(_T("{%s}\n"), it->first.c_str()); it->second->Dump(); } } #endif -void TsIniFile::ClearUp(void) +void ExIniFile::ClearUp(void) { - ts_ini_sections::iterator it = m_secs.begin(); + ex_ini_sections::iterator it = m_secs.begin(); for (; it != m_secs.end(); ++it) { delete it->second; @@ -357,9 +357,9 @@ m_secs.clear(); } -TsIniSection* TsIniFile::GetSection(const ex_wstr& strName, bool bCreateIfNotExists) +ExIniSection* ExIniFile::GetSection(const ex_wstr& strName, bool bCreateIfNotExists) { - ts_ini_sections::iterator it = m_secs.find(strName); + ex_ini_sections::iterator it = m_secs.find(strName); if (it != m_secs.end()) return it->second; @@ -368,7 +368,7 @@ - TsIniSection* pSec = new TsIniSection(strName); + ExIniSection* pSec = new ExIniSection(strName); m_secs.insert(std::make_pair(strName, pSec)); return pSec; } @@ -377,7 +377,7 @@ // parse one line; the result is [section name / key-value pair / comment / nothing / error] // section name => strKey = [section_name] // key-value pair => strKey = strValue -TsIniFile::PARSE_RV TsIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& strKey, ex_wstr& strValue) +ExIniFile::PARSE_RV ExIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& strKey, ex_wstr& strValue) { // first strip leading spaces and TAB characters ex_wstr strLine(strOrigLine); @@ -427,7 +427,7 @@ return PARSE_OTHER; } -bool TsIniFile::_ProcessLine(const ex_wstr strLine, TsIniSection** pCurSection) +bool ExIniFile::_ProcessLine(const ex_wstr strLine, ExIniSection** pCurSection) { if (strLine.empty()) return true; @@ -449,7 +449,7 @@ case PARSE_SECTION: { // create a section - TsIniSection* pSection = GetSection(strKey, true); + ExIniSection* pSection = GetSection(strKey, true); if (NULL == pSection) { bError = true; @@ -460,17 +460,20 @@ bool TsIniFile::_ProcessLine(const ex_wstr strLine, TsIniSection** pCurSection) } break; case PARSE_KEYVALUE: - // create a key-value pair if (NULL == pCurSection || NULL == *pCurSection) { - bError = true; - break; + //bError = true; + //break; + *pCurSection = &m_dumy_sec; } + + // create a key-value pair if (!(*pCurSection)->SetValue(strKey, strValue, true)) { bError = true; break; } + break; case PARSE_COMMENT:
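With the dumy-section fallback above, a file that has no [section] header at all now parses instead of failing; a sketch (the sample keys are made up, per the compatibility note in ex_ini.h):

ExIniFile ini;
ini.LoadFromMemory(L"version = 3.1.0\nport = 7190\n");
ExIniSection* dumy = ini.GetDumySection();
ex_wstr ver;
dumy->GetStr(L"version", ver);   // ver == L"3.1.0"
int port = 0;
dumy->GetInt(L"port", port);     // port == 7190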
diff --git a/common/libex/src/ex_log.cpp b/common/libex/src/ex_log.cpp new file mode 100644 index 0000000..3e36156 --- /dev/null +++ b/common/libex/src/ex_log.cpp @@ -0,0 +1,701 @@ +#include +#include +#include +#include +#include +#include + +#ifdef EX_OS_WIN32 +#include +#include +#include +#else +#include +#include +#endif + +#define EX_LOG_CONTENT_MAX_LEN 2048 + +typedef enum EX_COLORS +{ + EX_COLOR_BLACK = 0, + EX_COLOR_BLUE = 1, + EX_COLOR_GREEN = 2, + EX_COLOR_CYAN = 3, + EX_COLOR_RED = 4, + EX_COLOR_MAGENTA = 5, + EX_COLOR_YELLOW = 6, + EX_COLOR_LIGHT_GRAY = 7, + EX_COLOR_GRAY = 8, + EX_COLOR_LIGHT_BLUE = 9, + EX_COLOR_LIGHT_GREEN = 10, + EX_COLOR_LIGHT_CYAN = 11, + EX_COLOR_LIGHT_RED = 12, + EX_COLOR_LIGHT_MAGENTA = 13, + EX_COLOR_LIGHT_YELLOW = 14, + EX_COLOR_WHITE = 15, + + EX_COLOR_NORMAL = 0xFF, +}EX_COLORS; + +ExThreadLock g_log_lock; + +typedef std::deque<unsigned long long> log_file_deque; + +class ExLogFile +{ +public: + ExLogFile() { + m_hFile = NULL; + m_filesize = 0; + } + ~ExLogFile() {} + + bool init(const ex_wstr& log_path, const ex_wstr& log_name, ex_u32 max_filesize, ex_u8 max_count); + + //bool write(int level, char* buf, int len); + bool write(int level, const char* buf); + bool write(int level, const wchar_t* buf); + +protected: + bool _open_file(); + //bool _backup_file(); + bool _rotate_file(void); // rename the current log file as a backup, then start a new one + //bool _load_file_list(); + +protected: + FILE* m_hFile; + ex_u32 m_filesize; + + ex_u32 m_max_filesize; + ex_u8 m_max_count; + ex_wstr m_path; + ex_wstr m_filename; + ex_wstr m_fullname; + log_file_deque m_log_file_list; +private: + +}; + + +typedef struct EX_LOG_CFG +{ + EX_LOG_CFG() + { + min_level = EX_LOG_LEVEL_INFO; + debug_mode = false; + to_console = true; + +#ifdef EX_OS_WIN32 + console_handle = GetStdHandle(STD_OUTPUT_HANDLE); +#endif + } + + int min_level; + bool debug_mode; + bool to_console; + +#ifdef EX_OS_WIN32 + HANDLE console_handle; +#endif + + ExLogFile logfile; +}EX_LOG_CFG; + +static EX_LOG_CFG g_log_cfg; + +void EXLOG_LEVEL(int min_level) +{ + g_log_cfg.min_level = min_level; +} + +void EXLOG_CONSOLE(bool output_to_console) +{ + g_log_cfg.to_console = output_to_console; +} + +void EXLOG_FILE(const wchar_t* log_file, const wchar_t* log_path /*= NULL*/, ex_u32 max_filesize /*= EX_LOG_FILE_MAX_SIZE*/, ex_u8 max_filecount /*= EX_LOG_FILE_MAX_COUNT*/) +{ + ex_wstr _path; + if (NULL == log_path) + { + ex_exec_file(_path); + ex_dirname(_path); + ex_path_join(_path, false, L"log", NULL); + } + else + { + _path = log_path; + } + + g_log_cfg.logfile.init(_path, log_file, max_filesize, max_filecount); +} + +static void _ts_printf_a(int level, EX_COLORS clrBackGround, const char* fmt, va_list valist) +{ + if (NULL == fmt) + return; + + if (g_log_cfg.min_level > level) + return; + + EX_COLORS clrForeGround = EX_COLOR_NORMAL; + switch (level) + { + case EX_LOG_LEVEL_DEBUG: + if (!g_log_cfg.debug_mode) + return; + clrForeGround = EX_COLOR_GRAY; + break; + case EX_LOG_LEVEL_VERBOSE: + clrForeGround = EX_COLOR_LIGHT_GRAY; + break; + case EX_LOG_LEVEL_INFO: + clrForeGround = EX_COLOR_LIGHT_MAGENTA; + break; + case EX_LOG_LEVEL_WARN: + clrForeGround = EX_COLOR_LIGHT_RED; + break; + case EX_LOG_LEVEL_ERROR:
clrForeGround = EX_COLOR_LIGHT_RED; + break; + } + + if (EX_COLOR_NORMAL == clrForeGround) + clrForeGround = EX_COLOR_LIGHT_GRAY; + if (EX_COLOR_NORMAL == clrBackGround) + clrBackGround = EX_COLOR_BLACK; + + if (0 == strlen(fmt)) + return; + + char szTmp[4096] = { 0 }; + +#ifdef EX_OS_WIN32 + vsnprintf_s(szTmp, 4096, 4095, fmt, valist); + if (NULL != g_log_cfg.console_handle) + { + SetConsoleTextAttribute(g_log_cfg.console_handle, (WORD)((clrBackGround << 4) | clrForeGround)); + printf_s("%s", szTmp); + fflush(stdout); + SetConsoleTextAttribute(g_log_cfg.console_handle, EX_COLOR_GRAY); + } + else + { + OutputDebugStringA(szTmp); + } +#else + vsnprintf(szTmp, 4095, fmt, valist); + printf("%s", szTmp); + fflush(stdout); +#endif + + // #ifdef LOG_TO_FILE + // g_log_file.WriteData(level, szTmp, strlen(szTmp)); + // #endif + g_log_cfg.logfile.write(level, szTmp); +} + +static void _ts_printf_w(int level, EX_COLORS clrBackGround, const wchar_t* fmt, va_list valist) +{ + if (NULL == fmt || 0 == wcslen(fmt)) + return; + if (g_log_cfg.min_level > level) + return; + + EX_COLORS clrForeGround = EX_COLOR_NORMAL; + switch (level) + { + case EX_LOG_LEVEL_DEBUG: + if (!g_log_cfg.debug_mode) + return; + clrForeGround = EX_COLOR_GRAY; + break; + case EX_LOG_LEVEL_VERBOSE: + clrForeGround = EX_COLOR_LIGHT_GRAY; + break; + case EX_LOG_LEVEL_INFO: + clrForeGround = EX_COLOR_LIGHT_MAGENTA; + break; + case EX_LOG_LEVEL_WARN: + clrForeGround = EX_COLOR_LIGHT_RED; + break; + case EX_LOG_LEVEL_ERROR: + clrForeGround = EX_COLOR_LIGHT_RED; + break; + } + + if (EX_COLOR_NORMAL == clrForeGround) + clrForeGround = EX_COLOR_LIGHT_GRAY; + if (EX_COLOR_NORMAL == clrBackGround) + clrBackGround = EX_COLOR_BLACK; + + wchar_t szTmp[4096] = { 0 }; + +#ifdef EX_OS_WIN32 + _vsnwprintf_s(szTmp, 4096, 4095, fmt, valist); + if (NULL != g_log_cfg.console_handle) + { + SetConsoleTextAttribute(g_log_cfg.console_handle, (WORD)((clrBackGround << 4) | clrForeGround)); + wprintf_s(_T("%s"), szTmp); + fflush(stdout); + SetConsoleTextAttribute(g_log_cfg.console_handle, EX_COLOR_GRAY); + } + else + { + OutputDebugStringW(szTmp); + } +#else + vswprintf(szTmp, 4095, fmt, valist); + wprintf(L"%s", szTmp); + fflush(stdout); +#endif + + g_log_cfg.logfile.write(level, szTmp); +} + +#define EX_PRINTF_X(fn, level) \ +void fn(const char* fmt, ...) \ +{ \ + ExThreadSmartLock locker(g_log_lock); \ + va_list valist; \ + va_start(valist, fmt); \ + _ts_printf_a(level, EX_COLOR_BLACK, fmt, valist); \ + va_end(valist); \ +} \ +void fn(const wchar_t* fmt, ...) \ +{ \ + ExThreadSmartLock locker(g_log_lock); \ + va_list valist; \ + va_start(valist, fmt); \ + _ts_printf_w(level, EX_COLOR_BLACK, fmt, valist); \ + va_end(valist); \ +} + +EX_PRINTF_X(ex_printf_d, EX_LOG_LEVEL_DEBUG) +EX_PRINTF_X(ex_printf_v, EX_LOG_LEVEL_VERBOSE) +EX_PRINTF_X(ex_printf_i, EX_LOG_LEVEL_INFO) +EX_PRINTF_X(ex_printf_w, EX_LOG_LEVEL_WARN) +EX_PRINTF_X(ex_printf_e, EX_LOG_LEVEL_ERROR) + + +#ifdef EX_OS_WIN32 +void ex_printf_e_lasterror(const char* fmt, ...) 
+{ + ExThreadSmartLock locker(g_log_lock); + + va_list valist; + va_start(valist, fmt); + _ts_printf_a(EX_COLOR_LIGHT_RED, EX_COLOR_BLACK, fmt, valist); + va_end(valist); + + //========================================= + + LPVOID lpMsgBuf; + DWORD dw = GetLastError(); + + FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + (LPSTR)&lpMsgBuf, 0, NULL); + + ex_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf); + LocalFree(lpMsgBuf); +} +#endif + +void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const char* fmt, ...) +{ + if (!g_log_cfg.debug_mode) + return; + + ExThreadSmartLock locker(g_log_lock); + + va_list valist; + va_start(valist, fmt); + _ts_printf_a(EX_COLOR_GRAY, EX_COLOR_BLACK, fmt, valist); + va_end(valist); + + ex_printf_d(" (%d/0x%02x Bytes)\n", bin_size, bin_size); + + const ex_u8* line = bin_data; + size_t thisline = 0; + size_t offset = 0; + unsigned int i = 0; + + char szTmp[128] = { 0 }; + int _offset = 0; + + while (offset < bin_size) + { + memset(szTmp, 0, 128); + _offset = 0; + + snprintf(szTmp + _offset, 128 - _offset, "%06x  ", (int)offset); + _offset += 8; + + thisline = bin_size - offset; + if (thisline > 16) + thisline = 16; + + for (i = 0; i < thisline; i++) + { + snprintf(szTmp + _offset, 128 - _offset, "%02x ", line[i]); + _offset += 3; + } + + snprintf(szTmp + _offset, 128 - _offset, "  "); + _offset += 2; + + for (; i < 16; i++) + { + snprintf(szTmp + _offset, 128 - _offset, "   "); + _offset += 3; + } + + for (i = 0; i < thisline; i++) + { + snprintf(szTmp + _offset, 128 - _offset, "%c", (line[i] >= 0x20 && line[i] < 0x7f) ? line[i] : '.'); + _offset += 1; + } + + snprintf(szTmp + _offset, 128 - _offset, "\n"); + _offset += 1; + + ex_printf_d("%s", szTmp); + + offset += thisline; + line += thisline; + } + + fflush(stdout); +} + +bool ExLogFile::init(const ex_wstr& log_path, const ex_wstr& log_name, ex_u32 max_filesize, ex_u8 max_count) +{ + m_max_filesize = max_filesize; + m_max_count = max_count; + + m_filename = log_name; + + m_path = log_path; + ex_abspath(m_path); + + m_fullname = m_path; + ex_path_join(m_fullname, false, log_name.c_str(), NULL); + + return _open_file(); +} + + +bool ExLogFile::_open_file() +{ + if (m_hFile) + { + fclose(m_hFile); + m_hFile = NULL; + } + + ex_astr _fullname; + ex_wstr2astr(m_fullname, _fullname); +#ifdef EX_OS_WIN32 + // NOTE: _fsopen must be used here to open the log file with shared read access; otherwise other processes cannot view the log file content before this process exits. + m_hFile = _fsopen(_fullname.c_str(), "a", _SH_DENYWR); +#else + m_hFile = fopen(_fullname.c_str(), "a"); +#endif + + if (NULL == m_hFile) + { + return false; + } + fseek(m_hFile, 0, SEEK_END); + m_filesize = ftell(m_hFile); + + return _rotate_file(); +} + +bool ExLogFile::_rotate_file(void) +{ + if (m_filesize < m_max_filesize) + return true; + + if (m_hFile) + { + fclose(m_hFile); + m_hFile = NULL; + } + + //if (!_backup_file()) + // return false; + + // make a name for backup file.
+	wchar_t _tmpname[64] = { 0 };
+#ifdef EX_OS_WIN32
+	SYSTEMTIME st;
+	GetLocalTime(&st);
+	//StringCbPrintf(_tmpname, 64, L"%s.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
+	swprintf_s(_tmpname, 64, L"%s.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
+//	sprintf_s(szBaseNewFileLogName, EX_LOG_PATH_MAX_LEN, "%04d%02d%02d%02d%02d%02d",
+//		st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
+#else
+	time_t timep;
+	time(&timep);
+	struct tm *p = localtime(&timep);
+	if (p == NULL)
+		return false;
+
+	swprintf(_tmpname, 64, L"%ls.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
+//	sprintf(szBaseNewFileLogName, "%04d%02d%02d%02d%02d%02d",
+//		p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
+#endif
+
+	ex_wstr _new_fullname(m_path);
+	ex_path_join(_new_fullname, false, _tmpname, NULL);
+
+#ifdef EX_OS_WIN32
+	if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str()))
+	{
+		EXLOGE_WIN("can not rename log file, remove old one and try again.");
+		DeleteFileW(_new_fullname.c_str());
+		if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str()))
+			return false;
+	}
+#else
+	ex_astr _a_fullname;
+	ex_astr _a_new_fullname;
+	ex_wstr2astr(m_fullname, _a_fullname);
+	ex_wstr2astr(_new_fullname, _a_new_fullname);
+
+	if (rename(_a_fullname.c_str(), _a_new_fullname.c_str()) != 0)
+	{
+		remove(_a_new_fullname.c_str());
+		if (0 != (rename(_a_fullname.c_str(), _a_new_fullname.c_str())))
+			return false;
+	}
+#endif
+
+	return _open_file();
+}
+
+#if 0
+bool ExLogFile::_backup_file()
+{
+	char szNewFileLogName[EX_LOG_PATH_MAX_LEN] = { 0 };
+	char szBaseNewFileLogName[EX_LOG_PATH_MAX_LEN] = { 0 };
+#ifdef EX_OS_WIN32
+	SYSTEMTIME st;
+	GetLocalTime(&st);
+	sprintf_s(szNewFileLogName, EX_LOG_PATH_MAX_LEN, "%s\\%04d%02d%02d%02d%02d%02d.log",
+		m_log_file_dir.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
+
+	sprintf_s(szBaseNewFileLogName, EX_LOG_PATH_MAX_LEN, "%04d%02d%02d%02d%02d%02d",
+		st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
+#else
+	time_t timep;
+	struct tm *p;
+	time(&timep);
+	p = localtime(&timep);	// get server's time
+	if (p == NULL)
+	{
+		return false;
+	}
+	sprintf(szNewFileLogName, "%s/%04d%02d%02d%02d%02d%02d.log",
+		m_log_file_dir.c_str(), p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
+	sprintf(szBaseNewFileLogName, "%04d%02d%02d%02d%02d%02d",
+		p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
+#endif
+	if (m_hFile)
+	{
+		fclose(m_hFile);
+		m_hFile = 0;
+	}
+#ifdef EX_OS_WIN32
+	if (!MoveFileA(m_path.c_str(), szNewFileLogName))
+	{
+		DWORD dwError = GetLastError();
+
+		DeleteFileA(szNewFileLogName);
+
+		MoveFileA(m_path.c_str(), szNewFileLogName);
+	}
+#else
+	if (rename(m_path.c_str(), szNewFileLogName) != 0)
+	{
+		remove(szNewFileLogName);
+
+		rename(m_path.c_str(), szNewFileLogName);
+	}
+#endif
+	unsigned long long value = atoll(szBaseNewFileLogName);
+	if (value != 0)
+	{
+		m_log_file_list.push_back(value);
+	}
+	int try_count = 0;
+	while ((m_log_file_list.size() > m_max_count))
+	{
+		unsigned long long value = m_log_file_list.front();
+		char szDeleteFile[256] = { 0 };
+#ifdef EX_OS_WIN32
+		sprintf_s(szDeleteFile, 256, "%s\\%llu.log", m_log_file_dir.c_str(), value);
+		if (DeleteFileA(szDeleteFile))
+		{
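+			// m_log_file_list is kept sorted ascending, so front() is the oldest backup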
+			m_log_file_list.pop_front();
+		}
+#else
+		sprintf(szDeleteFile, "%s/%llu.log", m_log_file_dir.c_str(), value);
+		if (remove(szDeleteFile) == 0)
+		{
+			m_log_file_list.pop_front();
+		}
+#endif
+		else
+		{
+			if (try_count > 5)
+			{
+				break;
+			}
+			try_count++;
+		}
+
+	}
+
+	return true;
+}
+#endif // if 0
+
+bool ExLogFile::write(int level, const char* buf)
+{
+	if (NULL == m_hFile)
+		return false;
+
+	size_t len = strlen(buf);
+
+	if (len > EX_LOG_CONTENT_MAX_LEN)
+		return false;
+
+	char szTime[100] = { 0 };
+#ifdef EX_OS_WIN32
+	SYSTEMTIME st;
+	GetLocalTime(&st);
+	sprintf_s(szTime, 100, "[%04d-%02d-%02d %02d:%02d:%02d] ", st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
+#else
+	time_t timep;
+	struct tm *p;
+	time(&timep);
+	p = localtime(&timep);
+	if (p == NULL)
+		return false;
+	sprintf(szTime, "[%04d-%02d-%02d %02d:%02d:%02d] ", p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
+#endif
+
+	size_t lenTime = strlen(szTime);
+	fwrite(szTime, lenTime, 1, m_hFile);
+	m_filesize += lenTime;
+	fwrite(buf, len, 1, m_hFile);
+	m_filesize += len;
+
+	fflush(m_hFile);
+
+	return _rotate_file();
+}
+
+bool ExLogFile::write(int level, const wchar_t* buf)
+{
+	ex_astr _buf;
+	ex_wstr2astr(buf, _buf, EX_CODEPAGE_UTF8);
+	return write(level, _buf.c_str());
+}
+
+
+#if 0
+bool ExLogFile::_load_file_list()
+{
+#ifdef EX_OS_WIN32
+	struct _finddata_t data;
+	std::string log_match = m_log_file_dir;
+	log_match += "\\*.log";
+	//log_match += "*.log";
+	long hnd = _findfirst(log_match.c_str(), &data);	// find the first file match `*.log`
+	if (hnd < 0)
+	{
+		return false;
+	}
+	int nRet = (hnd < 0) ? -1 : 1;
+	int count = 0;
+	while (nRet > 0)
+	{
+		if (data.attrib == _A_SUBDIR)
+		{
+			// do nothing to a folder.
+		}
+		else
+		{
+			if (m_filename.compare(data.name) == 0)
+			{
+			}
+			else
+			{
+				char* match = strrchr(data.name, '.');
+				if (match != NULL)
+				{
+					*match = '\0';
+				}
+				unsigned long long value = atoll(data.name);
+				if (value == 0)
+				{
+					continue;
+				}
+				m_log_file_list.push_back(value);
+			}
+		}
+
+		nRet = _findnext(hnd, &data);
+		count++;
+		if (count > 100)
+		{
+			break;
+		}
+	}
+	_findclose(hnd);
+#else
+	DIR *dir;
+
+	struct dirent *ptr;
+
+	dir = opendir(m_log_file_dir.c_str());
+
+	while ((ptr = readdir(dir)) != NULL)
+	{
+		if (ptr->d_type == 8)
+		{
+			char temp_file_name[PATH_MAX] = { 0 };
+			strcpy(temp_file_name, ptr->d_name);
+			if (m_filename.compare(temp_file_name) == 0)
+			{
+
+			}
+			else
+			{
+				char* match = strrchr(temp_file_name, '.');
+				if (match != NULL)
+				{
+					*match = '\0';
+				}
+				unsigned long long value = atoll(temp_file_name);
+				if (value == 0)
+				{
+					continue;
+				}
+				m_log_file_list.push_back(value);
+			}
+		}
+	}
+
+	closedir(dir);
+#endif // EX_OS_WIN32
+
+	std::sort(m_log_file_list.begin(), m_log_file_list.end(), std::less<unsigned long long>());
+	return true;
+}
+#endif // if 0
diff --git a/common/libex/src/ex_path.cpp b/common/libex/src/ex_path.cpp
index d0cd0e0..86c2b36 100644
--- a/common/libex/src/ex_path.cpp
+++ b/common/libex/src/ex_path.cpp
@@ -106,6 +106,7 @@ bool ex_dirname(ex_wstr& inout_filename)
 	{
 		*match = EX_NULL_END;
 		inout_filename = ret;
+		ex_free(ret);
 		return true;
 	}
 	else
@@ -113,7 +114,6 @@ bool ex_dirname(ex_wstr& inout_filename)
 		ex_free(ret);
 		inout_filename = EX_CURRENT_DIR_STR;
 		return true;
-		//return ex_wcsdup(EX_CURRENT_DIR_STR);
 	}
 
 	ex_free(ret);
@@ -341,7 +341,6 @@ bool ex_path_join(ex_wstr& inout_path, bool auto_abspath, ...)
if (!ex_abspath(_path)) return false; - //return ex_wcsdup(_path.c_str()); inout_path = _path; return true; } @@ -403,11 +402,11 @@ bool ex_mkdirs(const ex_wstr& in_path) ex_astr _path; #ifdef EX_OS_WIN32 - ex_wstr2astr(in_path, _path, EX_CODEPAGE_ACP); + ex_wstr2astr(in_path, _path); if (0 == _mkdir(_path.c_str())) return true; #else - ex_wstr2astr(in_path, _path, EX_CODEPAGE_UTF8); + ex_wstr2astr(in_path, _path); int status = mkdir(_path.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH); if (0 != status) return false; @@ -416,3 +415,14 @@ bool ex_mkdirs(const ex_wstr& in_path) return true; } +bool ex_path_ext_name(const ex_wstr& in_filename, ex_wstr& out_ext) +{ + ex_wstr::size_type pos_dot = in_filename.rfind(L'.'); + ex_wstr::size_type pos_sep = in_filename.rfind(EX_SEP); + + if (pos_dot == ex_wstr::npos || pos_dot <= pos_sep) + return false; + + out_ext.assign(in_filename, pos_dot + 1, in_filename.length() - pos_dot - 1); + return true; +} diff --git a/common/libex/src/ex_str.cpp b/common/libex/src/ex_str.cpp index 23fc446..364a92f 100644 --- a/common/libex/src/ex_str.cpp +++ b/common/libex/src/ex_str.cpp @@ -184,7 +184,7 @@ wchar_t** ex_make_wargv(int argc, char** argv) for (i = 0; i < argc; ++i) { - ret[i] = ex_str2wcs_alloc(argv[i], EX_CODEPAGE_ACP); + ret[i] = ex_str2wcs_alloc(argv[i], EX_CODEPAGE_DEFAULT); if (NULL == ret[i]) goto err; } @@ -223,12 +223,12 @@ EX_BOOL ex_wcs_only_white_space(const char* src) #ifdef __cplusplus -bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_ACP*/) +bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) { return ex_wstr2astr(in_str.c_str(), out_str, code_page); } -bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_ACP*/) +bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) { char* astr = ex_wcs2str_alloc(in_str, code_page); if (NULL == astr) @@ -239,12 +239,12 @@ bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_ return true; } -bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_ACP*/) +bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) { return ex_astr2wstr(in_str.c_str(), out_str, code_page); } -bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_ACP*/) +bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/) { wchar_t* wstr = ex_str2wcs_alloc(in_str, code_page); if (NULL == wstr) diff --git a/client/tp_assist/ts_thread.cpp b/common/libex/src/ex_thread.cpp similarity index 63% rename from client/tp_assist/ts_thread.cpp rename to common/libex/src/ex_thread.cpp index 712f2de..e2d57d1 100644 --- a/client/tp_assist/ts_thread.cpp +++ b/common/libex/src/ex_thread.cpp @@ -1,5 +1,5 @@ -#include "stdafx.h" -#include "ts_thread.h" +#include +#include //========================================================= // @@ -7,9 +7,9 @@ #ifdef EX_OS_WIN32 -unsigned int WINAPI TsThreadBase::_thread_func(LPVOID lpParam) +unsigned int WINAPI ExThreadBase::_thread_func(LPVOID lpParam) { - TsThreadBase* p = (TsThreadBase*)lpParam; + ExThreadBase* p = (ExThreadBase*)lpParam; p->m_is_running = true; p->_thread_loop(); p->m_is_running = false; @@ -19,9 +19,9 @@ unsigned int WINAPI TsThreadBase::_thread_func(LPVOID lpParam) return 0; } #else -void* TsThreadBase::_thread_func(void* pParam) +void* 
ExThreadBase::_thread_func(void* pParam) { - TsThreadBase* p = (TsThreadBase*)pParam; + ExThreadBase* p = (ExThreadBase*)pParam; p->m_is_running = true; p->_thread_loop(); p->m_is_running = false; @@ -31,7 +31,7 @@ void* TsThreadBase::_thread_func(void* pParam) } #endif -TsThreadBase::TsThreadBase(TsThreadManager* tm, const char* thread_name) : +ExThreadBase::ExThreadBase(ExThreadManager* tm, const char* thread_name) : m_thread_manager(tm), m_handle(0), m_is_running(false), @@ -41,13 +41,13 @@ TsThreadBase::TsThreadBase(TsThreadManager* tm, const char* thread_name) : m_thread_manager->_add_thread(this); } -TsThreadBase::~TsThreadBase() +ExThreadBase::~ExThreadBase() { } -bool TsThreadBase::start(void) +bool ExThreadBase::start(void) { - TSLOGV(" -- thread [%s] starting.\n", m_thread_name.c_str()); + EXLOGV(" -- thread [%s] starting.\n", m_thread_name.c_str()); #ifdef WIN32 HANDLE h = (HANDLE)_beginthreadex(NULL, 0, _thread_func, (void*)this, 0, NULL); @@ -70,13 +70,13 @@ bool TsThreadBase::start(void) return true; } -bool TsThreadBase::stop(void) +bool ExThreadBase::stop(void) { - TSLOGV(" . try to stop thread [%s].\n", m_thread_name.c_str()); + EXLOGV(" . try to stop thread [%s].\n", m_thread_name.c_str()); m_stop_by_request = true; _set_stop_flag(); - TSLOGV(" . wait thread [%s] end.\n", m_thread_name.c_str()); + EXLOGV(" . wait thread [%s] end.\n", m_thread_name.c_str()); #ifdef EX_OS_WIN32 if (WaitForSingleObject(m_handle, INFINITE) != WAIT_OBJECT_0) @@ -89,12 +89,12 @@ bool TsThreadBase::stop(void) return false; } #endif - TSLOGV(" ## thread [%s] end.\n", m_thread_name.c_str()); + EXLOGV(" ## thread [%s] end.\n", m_thread_name.c_str()); return true; } -bool TsThreadBase::terminate(void) +bool ExThreadBase::terminate(void) { #ifdef EX_OS_WIN32 return TerminateThread(m_handle, 1) ? 
true : false; @@ -103,28 +103,42 @@ bool TsThreadBase::terminate(void) #endif } +// void ExThreadBase::_thread_loop(void) +// { +// EXLOGE("--------thread-loop-not-impl-------\n"); +// } + +// void ExThreadBase::_sleep_ms(int ms) +// { +// #ifdef EX_OS_WIN32 +// Sleep(ms); +// #else +// usleep(ms * 1000); +// #endif +// } + //========================================================= // //========================================================= -TsThreadManager::TsThreadManager() +ExThreadManager::ExThreadManager() {} -TsThreadManager::~TsThreadManager() +ExThreadManager::~ExThreadManager() { if (m_threads.size() > 0) { - TSLOGE("[ERROR] when destroy thread manager, there are %d thread not exit.\n", m_threads.size()); + EXLOGE("[ERROR] when destroy thread manager, there are %d thread not exit.\n", m_threads.size()); stop_all(); } } -void TsThreadManager::stop_all(void) +void ExThreadManager::stop_all(void) { - TsThreadSmartLock locker(m_lock); + ExThreadSmartLock locker(m_lock); - ts_threads::iterator it = m_threads.begin(); + ex_threads::iterator it = m_threads.begin(); for (; it != m_threads.end(); ++it) { (*it)->stop(); @@ -133,16 +147,16 @@ void TsThreadManager::stop_all(void) m_threads.clear(); } -void TsThreadManager::_add_thread(TsThreadBase* tb) +void ExThreadManager::_add_thread(ExThreadBase* tb) { - TsThreadSmartLock locker(m_lock); + ExThreadSmartLock locker(m_lock); - ts_threads::iterator it = m_threads.begin(); + ex_threads::iterator it = m_threads.begin(); for (; it != m_threads.end(); ++it) { if ((*it) == tb) { - TSLOGE("[ERROR] when add thread to manager, it already exist.\n"); + EXLOGE("[ERROR] when add thread to manager, it already exist.\n"); return; } } @@ -150,11 +164,11 @@ void TsThreadManager::_add_thread(TsThreadBase* tb) m_threads.push_back(tb); } -void TsThreadManager::_remove_thread(TsThreadBase* tb) +void ExThreadManager::_remove_thread(ExThreadBase* tb) { - TsThreadSmartLock locker(m_lock); + ExThreadSmartLock locker(m_lock); - ts_threads::iterator it = m_threads.begin(); + ex_threads::iterator it = m_threads.begin(); for (; it != m_threads.end(); ++it) { if ((*it) == tb) @@ -164,14 +178,14 @@ void TsThreadManager::_remove_thread(TsThreadBase* tb) return; } } - TSLOGE("[ERROR] when remove thread from manager, it not exist.\n"); + EXLOGE("[ERROR] when remove thread from manager, it not exist.\n"); } //========================================================= // //========================================================= -TsThreadLock::TsThreadLock() +ExThreadLock::ExThreadLock() { #ifdef EX_OS_WIN32 InitializeCriticalSection(&m_locker); @@ -184,7 +198,7 @@ TsThreadLock::TsThreadLock() #endif } -TsThreadLock::~TsThreadLock() +ExThreadLock::~ExThreadLock() { #ifdef EX_OS_WIN32 DeleteCriticalSection(&m_locker); @@ -193,7 +207,7 @@ TsThreadLock::~TsThreadLock() #endif } -void TsThreadLock::lock(void) +void ExThreadLock::lock(void) { #ifdef EX_OS_WIN32 EnterCriticalSection(&m_locker); @@ -202,7 +216,7 @@ void TsThreadLock::lock(void) #endif } -void TsThreadLock::unlock(void) +void ExThreadLock::unlock(void) { #ifdef EX_OS_WIN32 LeaveCriticalSection(&m_locker); @@ -215,7 +229,7 @@ void TsThreadLock::unlock(void) // //========================================================= -int ts_atomic_add(volatile int* pt, int t) +int ex_atomic_add(volatile int* pt, int t) { #ifdef EX_OS_WIN32 return (int)InterlockedExchangeAdd((long*)pt, (long)t); @@ -224,7 +238,7 @@ int ts_atomic_add(volatile int* pt, int t) #endif } -int ts_atomic_inc(volatile int* pt) +int 
ex_atomic_inc(volatile int* pt)
 {
 #ifdef EX_OS_WIN32
 	return (int)InterlockedIncrement((long*)pt);
@@ -233,7 +247,7 @@ int ts_atomic_inc(volatile int* pt)
 #endif
 }
 
-int ts_atomic_dec(volatile int* pt)
+int ex_atomic_dec(volatile int* pt)
 {
 #ifdef EX_OS_WIN32
 	return (int)InterlockedDecrement((long*)pt);
diff --git a/common/libex/src/ex_util.cpp b/common/libex/src/ex_util.cpp
index f272bf9..1acb413 100644
--- a/common/libex/src/ex_util.cpp
+++ b/common/libex/src/ex_util.cpp
@@ -2,6 +2,8 @@
 #include
 #include
 
+// #include
+
 EX_BOOL ex_initialize(const char* lc_ctype)
 {
 #ifdef EX_OS_UNIX
diff --git a/common/libex/src/ex_winsrv.cpp b/common/libex/src/ex_winsrv.cpp
new file mode 100644
index 0000000..4c909f4
--- /dev/null
+++ b/common/libex/src/ex_winsrv.cpp
@@ -0,0 +1,391 @@
+#include
+
+#ifdef EX_OS_WIN32
+
+#include
+
+// RAII helper: holds references to the handle variables so that handles
+// opened after construction are still closed when the scope exits.
+class winsrv_helper
+{
+public:
+	winsrv_helper(SC_HANDLE& scm, SC_HANDLE& sc) : m_sc(sc), m_scm(scm)
+	{
+	}
+	~winsrv_helper()
+	{
+		if (NULL != m_sc)
+			CloseServiceHandle(m_sc);
+		if (NULL != m_scm)
+			CloseServiceHandle(m_scm);
+	}
+
+protected:
+	SC_HANDLE& m_sc;
+	SC_HANDLE& m_scm;
+};
+
+
+ex_rv ex_winsrv_install(const ex_wstr& srv_name, const ex_wstr& disp_name, const ex_wstr& exec_path)
+{
+	SC_HANDLE sc = NULL;
+	SC_HANDLE scm = NULL;
+	winsrv_helper srv(scm, sc);
+
+	scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
+	if (scm == NULL)
+		return EXRV_CANNOT_OPEN;
+
+	if (NULL == (sc = CreateServiceW(scm, srv_name.c_str(), disp_name.c_str(),
+		SERVICE_ALL_ACCESS,
+		SERVICE_WIN32_OWN_PROCESS,
+		SERVICE_AUTO_START, SERVICE_ERROR_NORMAL, exec_path.c_str(), NULL, NULL, NULL, NULL, NULL))
+		)
+	{
+		return EXRV_CANNOT_CREATE;
+	}
+
+	SERVICE_FAILURE_ACTIONS failure_action;
+	failure_action.dwResetPeriod = 0;	// time (in seconds) after which the failure count is reset to zero
+	failure_action.lpRebootMsg = NULL;	// Message to broadcast to server users before rebooting
+	failure_action.lpCommand = NULL;	// Command line of the process for the CreateProcess function to execute in response
+	failure_action.cActions = 3;		// number of entries in the actions array
+
+	SC_ACTION actionarray[3];
+	actionarray[0].Type = SC_ACTION_RESTART;	// restart the service
+	actionarray[0].Delay = 60000;				// delay in milliseconds
+	actionarray[1].Type = SC_ACTION_RESTART;
+	actionarray[1].Delay = 60000;
+	actionarray[2].Type = SC_ACTION_RESTART;
+	actionarray[2].Delay = 60000;
+	failure_action.lpsaActions = actionarray;
+
+	ChangeServiceConfig2(sc, SERVICE_CONFIG_FAILURE_ACTIONS, &failure_action);
+
+	return EXRV_OK;
+}
+
+bool ex_winsrv_is_exists(const ex_wstr& srv_name)
+{
+	SC_HANDLE sc = NULL;
+	SC_HANDLE scm = NULL;
+	winsrv_helper srv(scm, sc);
+
+	scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
+	if (scm == NULL)
+		return false;
+
+	sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_ALL_ACCESS);
+	if (NULL == sc)
+		return false;
+
+	return true;
+}
+
+ex_rv ex_winsrv_uninstall(const ex_wstr& srv_name)
+{
+	SC_HANDLE sc = NULL;
+	SC_HANDLE scm = NULL;
+	winsrv_helper srv(scm, sc);
+
+	scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
+	if (scm == NULL)
+		return EXRV_CANNOT_OPEN;
+
+	sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_ALL_ACCESS);
+	if (NULL == sc)
+		return EXRV_NOT_EXISTS;
+
+	if (!DeleteService(sc))
+		return EXRV_CANNOT_REMOVE;
+	else
+		return EXRV_OK;
+}
+
+ex_rv ex_winsrv_start(const ex_wstr& srv_name)
+{
+	SC_HANDLE sc = NULL;
+	SC_HANDLE scm = NULL;
+	winsrv_helper srv(scm, sc);
+
+	scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
+	if (scm == NULL)
+		return EXRV_CANNOT_OPEN;
+
+	sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_START |
SERVICE_QUERY_STATUS); + if (NULL == sc) + return EXRV_NOT_EXISTS; + + SERVICE_STATUS ss; + if (!QueryServiceStatus(sc, &ss)) + return EXRV_FAILED; + + if (ss.dwCurrentState == SERVICE_RUNNING) + return EXRV_OK; + + int i = 0; + if (ss.dwCurrentState == SERVICE_START_PENDING) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState != SERVICE_START_PENDING) + break; + } + } + + if (ss.dwCurrentState == SERVICE_STOPPED) + { + if (StartService(sc, 0, NULL)) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState == SERVICE_RUNNING) + return EXRV_OK; + } + } + } + + if (ss.dwCurrentState == SERVICE_RUNNING) + return EXRV_OK; + else + return EXRV_FAILED; +} + +ex_rv ex_winsrv_config(const ex_wstr& srv_name, QUERY_SERVICE_CONFIG& cfg) +{ + SC_HANDLE sc = NULL; + SC_HANDLE scm = NULL; + winsrv_helper srv(scm, sc); + + + scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS); + if (scm == NULL) + return EXRV_CANNOT_OPEN; + + sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_QUERY_CONFIG); + if (sc == NULL) + return EXRV_NOT_EXISTS; + + DWORD dwBytesNeeded; + if (!QueryServiceConfig(sc, &cfg, 4096, &dwBytesNeeded)) + return EXRV_FAILED; + else + return EXRV_OK; +} + +ex_rv ex_winsrv_status(const ex_wstr& srv_name, ex_ulong& status) +{ + SC_HANDLE sc = NULL; + SC_HANDLE scm = NULL; + winsrv_helper srv(scm, sc); + + scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS); + if (scm == NULL) + return EXRV_CANNOT_OPEN; + + sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_QUERY_STATUS); + if (NULL == sc) + return EXRV_NOT_EXISTS; + + SERVICE_STATUS ss; + if (!QueryServiceStatus(sc, &ss)) + return EXRV_FAILED; + + status = ss.dwCurrentState; + return EXRV_OK; +} + +ex_rv ex_winsrv_stop(const ex_wstr& srv_name) +{ + SC_HANDLE sc = NULL; + SC_HANDLE scm = NULL; + winsrv_helper srv(scm, sc); + + scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS); + if (scm == NULL) + return EXRV_CANNOT_OPEN; + + sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_STOP | SERVICE_QUERY_STATUS); + if (NULL == sc) + return EXRV_NOT_EXISTS; + + SERVICE_STATUS ss; + if (!QueryServiceStatus(sc, &ss)) + return EXRV_FAILED; + + if (ss.dwCurrentState == SERVICE_STOPPED) + return EXRV_OK; + + int i = 0; + + DWORD dwStatus = ss.dwCurrentState; + if (ss.dwCurrentState == SERVICE_START_PENDING || ss.dwCurrentState == SERVICE_PAUSE_PENDING || ss.dwCurrentState == SERVICE_CONTINUE_PENDING || ss.dwCurrentState == SERVICE_STOP_PENDING) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState != dwStatus) + break; + } + } + + if (ss.dwCurrentState == SERVICE_RUNNING || ss.dwCurrentState == SERVICE_PAUSED) + { + if (ControlService(sc, SERVICE_CONTROL_STOP, &ss)) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState == SERVICE_STOPPED) + return EXRV_OK; + } + } + } + + if (ss.dwCurrentState == SERVICE_STOPPED) + return EXRV_OK; + else + return EXRV_FAILED; +} + +ex_rv ex_winsrv_pause(const ex_wstr& srv_name) +{ + SC_HANDLE sc = NULL; + SC_HANDLE scm = NULL; + winsrv_helper srv(scm, sc); + + scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS); + if (scm == NULL) + return EXRV_CANNOT_OPEN; + + sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_PAUSE_CONTINUE | SERVICE_QUERY_STATUS); + if (NULL == sc) + return EXRV_NOT_EXISTS; + + SERVICE_STATUS ss; + if(!QueryServiceStatus(sc, &ss)) + return EXRV_FAILED; + + if (ss.dwCurrentState == SERVICE_PAUSED) + 
return EXRV_OK; + + int i = 0; + + DWORD dwStatus = ss.dwCurrentState; + if (ss.dwCurrentState == SERVICE_START_PENDING || ss.dwCurrentState == SERVICE_PAUSE_PENDING || ss.dwCurrentState == SERVICE_CONTINUE_PENDING) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState != dwStatus) + break; + } + } + + if (ss.dwCurrentState == SERVICE_RUNNING) + { + if (ControlService(sc, SERVICE_CONTROL_PAUSE, &ss)) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState == SERVICE_PAUSED) + return EXRV_OK; + } + } + } + + if (ss.dwCurrentState == SERVICE_PAUSED) + return EXRV_OK; + else + return EXRV_FAILED; +} + +ex_rv ex_winsrv_resume(const ex_wstr& srv_name) +{ + SC_HANDLE sc = NULL; + SC_HANDLE scm = NULL; + winsrv_helper srv(scm, sc); + + scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS); + if (scm == NULL) + return EXRV_CANNOT_OPEN; + + sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_PAUSE_CONTINUE | SERVICE_QUERY_STATUS); + if (NULL == sc) + return EXRV_NOT_EXISTS; + + SERVICE_STATUS ss; + if (!QueryServiceStatus(sc, &ss)) + return EXRV_FAILED; + + if (ss.dwCurrentState == SERVICE_RUNNING) + return EXRV_OK; + + int i = 0; + + DWORD dwStatus = ss.dwCurrentState; + if (ss.dwCurrentState == SERVICE_START_PENDING || ss.dwCurrentState == SERVICE_PAUSE_PENDING || ss.dwCurrentState == SERVICE_CONTINUE_PENDING) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState != dwStatus) + break; + } + } + + if (ss.dwCurrentState == SERVICE_PAUSED) + { + if (ControlService(sc, SERVICE_CONTROL_CONTINUE, &ss)) + { + for (i = 0; i < 100; ++i) + { + Sleep(100); + QueryServiceStatus(sc, &ss); + if (ss.dwCurrentState == SERVICE_RUNNING) + return EXRV_OK; + } + } + } + + if (ss.dwCurrentState == SERVICE_RUNNING) + return EXRV_OK; + else + return EXRV_FAILED; +} + +ex_rv ex_winsrv_pid(const ex_wstr& srv_name, ex_ulong& pid) +{ + SC_HANDLE sc = NULL; + SC_HANDLE scm = NULL; + winsrv_helper srv(scm, sc); + + scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS); + if (scm == NULL) + return EXRV_CANNOT_OPEN; + + sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_QUERY_STATUS); + if (NULL == sc) + return EXRV_NOT_EXISTS; + + DWORD byteneeded = 0; + ex_u8 buf[1024] = { 0 }; + QueryServiceStatusEx(sc, SC_STATUS_PROCESS_INFO, buf, 1024, &byteneeded); + + LPSERVICE_STATUS_PROCESS lp = (LPSERVICE_STATUS_PROCESS)buf; + if (lp->dwCurrentState != SERVICE_RUNNING) + return EXRV_NOT_START; + + pid = lp->dwProcessId; + + return EXRV_OK; +} +#endif diff --git a/common/pyshell/include/pys.h b/common/pyshell/include/pys.h new file mode 100644 index 0000000..9aa076b --- /dev/null +++ b/common/pyshell/include/pys.h @@ -0,0 +1,287 @@ +#ifndef __PYS_H__ +#define __PYS_H__ + +#include + +//========================================================================= +// Type define +//========================================================================= +#if defined(EX_OS_WIN32) +# define DYLIB_HANDLE HINSTANCE +#else +# define DYLIB_HANDLE void* +#endif + +//========================================================================= +// Python API +//========================================================================= +#define MS_NO_COREDLL 1 +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(EX_OS_WIN32) +# define PYS_USE_PYLIB_SHARED +# include +#elif defined(EX_OS_LINUX) +# define PYS_USE_PYLIB_STATIC +# include +#else +# error This platform not supported yet. 
+#endif + +#ifdef __cplusplus +} +#endif + +#ifdef PYS_USE_PYLIB_SHARED +//======================================================== +// WIN32 +//======================================================== +#define EXTDECLPROC(result, name, args) \ + typedef result (__cdecl* __PROC__ ## name) args; \ + extern __PROC__ ## name pylib_ ## name; + +#define EXTDECLVAR(vartyp, name) \ + typedef vartyp __VAR__ ## name; \ + extern __VAR__ ## name* pylib_ ## name; + + +EXTDECLVAR(int, Py_FrozenFlag); +EXTDECLVAR(int, Py_NoSiteFlag); +EXTDECLVAR(int, Py_OptimizeFlag); +EXTDECLVAR(const char*, Py_FileSystemDefaultEncoding); +EXTDECLVAR(int, Py_VerboseFlag); +EXTDECLVAR(int, Py_IgnoreEnvironmentFlag); +EXTDECLVAR(int, Py_DontWriteBytecodeFlag); +EXTDECLVAR(int, Py_NoUserSiteDirectory); + +EXTDECLPROC(void, Py_Initialize, (void)); +EXTDECLPROC(void, Py_Finalize, (void)); +EXTDECLPROC(void, Py_IncRef, (PyObject *)); +EXTDECLPROC(void, Py_DecRef, (PyObject *)); +EXTDECLPROC(void, Py_SetProgramName, (wchar_t *)); +EXTDECLPROC(void, Py_SetPythonHome, (wchar_t *)); +EXTDECLPROC(void, Py_SetPath, (wchar_t *)); /* new in Python 3 */ +EXTDECLPROC(int, PySys_SetArgvEx, (int, wchar_t **, int)); +EXTDECLPROC(PyObject *, PyImport_ImportModule, (const char *)); +EXTDECLPROC(PyObject *, PyObject_GetAttrString, (PyObject *, const char *)); + +// in python3.0~3.4, it is _Py_char2wchar, but renamed to Py_DecodeLocale in python3.5. WTF. +//EXTDECLPROC(wchar_t *, _Py_char2wchar, (char *, size_t *)); + +//EXTDECLPROC(PyObject*, PyUnicode_FromWideChar, (const wchar_t*, size_t size )); + +EXTDECLPROC(PyObject *, Py_BuildValue, (char *, ...)); + +EXTDECLPROC(void, PyErr_Clear, (void)); +EXTDECLPROC(PyObject *, PyErr_Occurred, (void)); +EXTDECLPROC(void, PyErr_Print, (void)); + +EXTDECLPROC(PyObject *, PyObject_Call, (PyObject *callable_object, PyObject *args, PyObject *kw)); +EXTDECLPROC(int, PyArg_Parse, (PyObject *, const char *, ...)); + +EXTDECLPROC(PyObject *, PyObject_CallFunction, (PyObject *, char *, ...)); +EXTDECLPROC(PyObject *, PyModule_GetDict, (PyObject *)); +EXTDECLPROC(PyObject *, PyDict_GetItemString, (PyObject *, char *)); +EXTDECLPROC(int, PyDict_SetItemString, (PyObject *dp, const char *key, PyObject *item)); +EXTDECLPROC(long, PyLong_AsLong, (PyObject *)); +EXTDECLPROC(PyObject *, PyLong_FromLong, (long)); +EXTDECLPROC(PyObject *, PyLong_FromUnsignedLong, (unsigned long)); +EXTDECLPROC(PyObject *, PyLong_FromUnsignedLongLong, (unsigned PY_LONG_LONG)); +EXTDECLPROC(PyObject *, PyBytes_FromString, (const char *)); +EXTDECLPROC(PyObject *, PyBytes_FromStringAndSize, (const char *, Py_ssize_t)); +EXTDECLPROC(PyObject *, PyUnicode_FromString, (const char *)); +EXTDECLPROC(PyObject *, PyBool_FromLong, (long)); + + +EXTDECLPROC(int, PyImport_ExtendInittab, (struct _inittab *newtab)); +EXTDECLPROC(PyObject *, PyModule_Create2, (struct PyModuleDef*, int apiver)); +EXTDECLPROC(int, PyArg_ParseTuple, (PyObject *, const char *, ...)); +EXTDECLPROC(PyObject *, PyTuple_Pack, (Py_ssize_t, ...)); + + +#else // for linux, link to static python lib. 
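+// On a static build the pylib_* wrappers below simply alias the real CPython
+// symbols, so call sites written against the pylib_ prefix work unchanged.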
+ +#define pylib_Py_FrozenFlag Py_FrozenFlag +#define pylib_Py_NoSiteFlag Py_NoSiteFlag +#define pylib_Py_OptimizeFlag Py_OptimizeFlag +#define pylib_Py_FileSystemDefaultEncoding Py_FileSystemDefaultEncoding +#define pylib_Py_VerboseFlag Py_VerboseFlag +#define pylib_Py_IgnoreEnvironmentFlag Py_IgnoreEnvironmentFlag +#define pylib_Py_DontWriteBytecodeFlag Py_DontWriteBytecodeFlag +#define pylib_Py_NoUserSiteDirectory Py_NoUserSiteDirectory +#define pylib_Py_Initialize Py_Initialize +#define pylib_Py_Finalize Py_Finalize +#define pylib_Py_IncRef Py_IncRef +#define pylib_Py_DecRef Py_DecRef +#define pylib_Py_SetProgramName Py_SetProgramName +#define pylib_Py_SetPythonHome Py_SetPythonHome +#define pylib_Py_SetPath Py_SetPath +#define pylib_PySys_SetArgvEx PySys_SetArgvEx +#define pylib_PyImport_ImportModule PyImport_ImportModule +#define pylib_PyObject_GetAttrString PyObject_GetAttrString +#define pylib_Py_BuildValue Py_BuildValue +#define pylib_PyErr_Clear PyErr_Clear +#define pylib_PyErr_Occurred PyErr_Occurred +#define pylib_PyErr_Print PyErr_Print +#define pylib_PyObject_Call PyObject_Call +#define pylib_PyArg_Parse PyArg_Parse +#define pylib_PyObject_CallFunction PyObject_CallFunction +#define pylib_PyModule_GetDict PyModule_GetDict +#define pylib_PyDict_GetItemString PyDict_GetItemString +#define pylib_PyDict_SetItemString PyDict_SetItemString +#define pylib_PyLong_AsLong PyLong_AsLong +#define pylib_PyLong_FromLong PyLong_FromLong +#define pylib_PyLong_FromUnsignedLong PyLong_FromUnsignedLong +#define pylib_PyLong_FromUnsignedLongLong PyLong_FromUnsignedLongLong +#define pylib_PyBytes_FromString PyBytes_FromString +#define pylib_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +#define pylib_PyUnicode_FromString PyUnicode_FromString +#define pylib_PyBool_FromLong PyBool_FromLong +#define pylib_PyImport_ExtendInittab PyImport_ExtendInittab +#define pylib_PyModule_Create2 PyModule_Create2 +#define pylib_PyArg_ParseTuple PyArg_ParseTuple +#define pylib_PyTuple_Pack PyTuple_Pack + +#define pylib_Py_IncRef Py_IncRef +#define pylib_Py_DecRef Py_DecRef +#define pylib_PyBool_FromLong PyBool_FromLong +#define pylib_PyBool_FromLong PyBool_FromLong + +#endif + +#define PYLIB_XINCREF(o) pylib_Py_IncRef(o) +#define PYLIB_XDECREF(o) pylib_Py_DecRef(o) +#define PYLIB_DECREF(o) PYLIB_XDECREF(o) +#define PYLIB_INCREF(o) PYLIB_XINCREF(o) + +#define PYLIB_RETURN_TRUE return pylib_PyBool_FromLong(1) +#define PYLIB_RETURN_FALSE return pylib_PyBool_FromLong(0) + + +typedef int PYS_BOOL; +#define PYS_TRUE 1 +#define PYS_FALSE 0 + + +//========================================================================= +// PyShell API +//========================================================================= +typedef unsigned long PYS_RET; +#define PYSR_OK 0x00000000 +#define PYSR_FAILED 0x00000005 + +#if 0 +#ifdef EX_OS_WIN32 +# ifdef EX_DEBUG +# if defined(_M_X64) +# pragma comment(lib, "pys_64d.lib") +# elif defined(_M_IX86) +# pragma comment(lib, "pys_32d.lib") +# else +# error unsupport platform. +# endif +# else +# if defined(_M_X64) +# pragma comment(lib, "pys_64.lib") +# elif defined(_M_IX86) +# pragma comment(lib, "pys_32.lib") +# else +# error unsupport platform. 
+# endif
+# endif
+#endif
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+	typedef void* PYS_HANDLE;
+
+	// Create a PyShell handle; all operations are performed against this
+	// handle (a process holds exactly one handle).
+	PYS_HANDLE pys_create(void);
+	// Destroy a PyShell handle.
+	void pys_destroy(PYS_HANDLE* pysh);
+
+	// Initialize with the given runtime path (the runtime path contains
+	// pythonXX.dll / python.zip / modules, and so on).
+	PYS_BOOL pys_init_runtime(PYS_HANDLE pysh, const wchar_t* exec_file, const wchar_t* runtime_path);
+
+	// Set the python package search path; may be called repeatedly to append (optional).
+	PYS_BOOL pys_add_search_path(PYS_HANDLE pysh, const wchar_t* path);
+
+	// Set the command-line arguments of the python runtime (optional).
+	void pys_set_argv(PYS_HANDLE pysh, int argc, wchar_t** argv);
+	// Append a command-line argument to the python runtime (optional).
+	void pys_add_arg(PYS_HANDLE pysh, const wchar_t* arg);
+	// Set the python interpreter name (optional; defaults to the absolute
+	// path of the current executable).
+	void pys_set_program(PYS_HANDLE pysh, const wchar_t* program_name);
+
+	// Set the startup script file name; either a .py file or a .zip file.
+	void pys_set_startup_file(PYS_HANDLE pysh, const wchar_t* filename);
+
+	// Set the bootstrap module and entry function name. When func_name is
+	// NULL, the `main` function of the given module is executed.
+	// This call is optional. By default:
+	//   if startup_file is a .py file, module_name is the .py file's base name;
+	//   if startup_file is a .zip file, module_name is `pysmain`.
+	void pys_set_bootstrap_module(PYS_HANDLE pysh, const char* module_name, const char* func_name);
+
+	// Prototype of a module initialization function.
+	typedef PyObject* (*pys_init_module_func)(void);
+
+	typedef struct PYS_BUILTIN_FUNC
+	{
+		const char* py_func_name;	// function name used when calling from Python
+		PyCFunction c_func_addr;	// the corresponding C function
+		PYS_BOOL have_args;			// whether this function takes arguments
+		const char* py_func_desc;	// doc string of this function; may be NULL
+	}PYS_BUILTIN_FUNC;
+
+	typedef enum PYS_CONST_TYPE
+	{
+		PYS_CONST_BOOL,		// a True/False value in Python
+		PYS_CONST_LONG,		// an integer in Python
+		PYS_CONST_STRING,	// a string in Python
+		PYS_CONST_BYTES		// a bytes object in Python
+	}PYS_CONST_TYPE;
+
+	typedef struct PYS_BUILTIN_CONST
+	{
+		char* py_const_name;	// variable name used from Python
+		PYS_CONST_TYPE type;	// type of the constant
+		size_t size;			// length of the constant data
+		void* buffer;			// content of the constant data
+	}PYS_BUILTIN_CONST;
+
+	// Add a builtin module; if the module has no functions or constants, the
+	// corresponding funcs/consts may be NULL.
+	// Call this repeatedly to create several builtin modules. If the same
+	// module name is used again, functions and constants are appended to that
+	// module. Within one module, function and constant names must be unique
+	// (names differing only in case are distinct).
+	PYS_BOOL pys_add_builtin_module(PYS_HANDLE pysh, const char* module_name, pys_init_module_func init_func);
+
+	PyObject* pys_create_module(const char* module_name, PYS_BUILTIN_FUNC* funcs);
+	void pys_builtin_const_bool(PyObject* mod, const char* name, PYS_BOOL val);
+	void pys_builtin_const_long(PyObject* mod, const char* name, long val);
+	void pys_builtin_const_utf8(PyObject* mod, const char* name, const char* val);	// val must be a UTF-8 encoded string
+	void pys_builtin_const_wcs(PyObject* mod, const char* name, const wchar_t* val);
+	void pys_builtin_const_bin(PyObject* mod, const char* name, const ex_u8* val, size_t size);
+
+	// Run the python interpreter.
+	int pys_run(PYS_HANDLE pysh);
+
+#ifdef __cplusplus
+}
+#endif
+
+#ifdef __cplusplus
+class PysHandleHolder
+{
+public:
+	PysHandleHolder(PYS_HANDLE h) :m_handle(h) { }
+	~PysHandleHolder() { pys_destroy(&m_handle); }
+private:
+	PYS_HANDLE m_handle;
+};
+#endif
+
+#endif // __PYS_H__
diff --git a/common/pyshell/src/pys_api.cpp b/common/pyshell/src/pys_api.cpp
new file mode 100644
index 0000000..dd20841
--- /dev/null
+++ b/common/pyshell/src/pys_api.cpp
@@ -0,0 +1,205 @@
+#include
+#include "pys_core.h"
+#include "pys_util.h"
+
+#include
+
+PYS_HANDLE pys_create(void)
+{
+	pys::Core* core = new pys::Core;
+	return core;
+}
+
+void pys_destroy(PYS_HANDLE* pysh)
+{
+	if (NULL == pysh)
+		return;
+	if (NULL == *pysh)
+		return;
+	pys::Core* core = (pys::Core*)*pysh;
+	delete core;
+	*pysh = NULL;
+}
+
+PYS_BOOL pys_init_runtime(PYS_HANDLE pysh, const
wchar_t* exec_file, const wchar_t* runtime_path) +{ + pys::Core* core = (pys::Core*)pysh; + if (!core->init(exec_file, runtime_path)) + return PYS_FALSE; + + return PYS_TRUE; +} + +int pys_run(PYS_HANDLE pysh) +{ + pys::Core* core = (pys::Core*)pysh; + return core->run(); +} + +PYS_BOOL pys_add_search_path(PYS_HANDLE pysh, const wchar_t* path) +{ + pys::Core* core = (pys::Core*)pysh; + core->add_search_path(path); + return PYS_TRUE; +} + + +void pys_set_program(PYS_HANDLE pysh, const wchar_t* program_name) +{ + pys::Core* core = (pys::Core*)pysh; + core->m_prog_name = program_name; +} + +void pys_set_startup_file(PYS_HANDLE pysh, const wchar_t* filename) +{ + pys::Core* core = (pys::Core*)pysh; + core->set_startup_file(filename); +} + +void pys_set_bootstrap_module(PYS_HANDLE pysh, const char* module_name, const char* func_name) +{ + pys::Core* core = (pys::Core*)pysh; + if(NULL != module_name) + core->m_bootstrap_module = module_name; + if (NULL != func_name) + core->m_bootstrap_func = func_name; +} + +void pys_set_argv(PYS_HANDLE pysh, int argc, wchar_t** argv) +{ + pys::Core* core = (pys::Core*)pysh; + core->m_py_args.clear(); + + int i = 0; + for (i = 0; i < argc; ++i) + { + core->m_py_args.push_back(argv[i]); + } +} + +void pys_add_arg(PYS_HANDLE pysh, const wchar_t* arg) +{ + if (NULL == arg) + return; + + pys::Core* core = (pys::Core*)pysh; + core->m_py_args.push_back(arg); +} + +PYS_BOOL pys_add_builtin_module(PYS_HANDLE pysh, const char* module_name, pys_init_module_func init_func) +{ + pys::Core* core = (pys::Core*)pysh; + if (!core->add_builtin_module(module_name, init_func)) + return PYS_FALSE; + return PYS_TRUE; +} + +PyObject* pys_create_module(const char* module_name, PYS_BUILTIN_FUNC* funcs) +{ + PyMethodDef* _method_def = NULL; + PyModuleDef* _module_def = NULL; + + int i = 0; + int func_count = 0; + + if (funcs != NULL) + { + for (i = 0; ; ++i) + { + if (funcs[i].py_func_name == NULL) + break; + func_count++; + } + } + + _method_def = new PyMethodDef[func_count + 1]; + memset(_method_def, 0, sizeof(PyMethodDef)*(func_count + 1)); + for (i = 0; i < func_count; ++i) + { + _method_def[i].ml_name = funcs[i].py_func_name; + _method_def[i].ml_meth = funcs[i].c_func_addr; + _method_def[i].ml_doc = funcs[i].py_func_desc; + if(funcs[i].have_args) + _method_def[i].ml_flags = METH_VARARGS; + else + _method_def[i].ml_flags = METH_NOARGS; + } + + _module_def = new PyModuleDef; + memset(_module_def, 0, sizeof(PyModuleDef)); + _module_def->m_name = module_name; + _module_def->m_size = -1; + _module_def->m_methods = _method_def; + + // 托管这两个动态分配的变量 + pys::g_builtin_module_info.add(_method_def, _module_def); + + PyObject* module = pylib_PyModule_Create2(_module_def, PYTHON_API_VERSION); + + if (NULL == module) + { + EXLOGE("[pys]: can not create builtin module `%s`.\n", module_name); + return NULL; + } + + return module; +} + +void pys_builtin_const_bool(PyObject* mod, const char* name, PYS_BOOL val) +{ + PyObject* dict = NULL; + PyObject* tmp_obj = NULL; + if (NULL == (dict = pylib_PyModule_GetDict(mod))) + return; + tmp_obj = pylib_PyBool_FromLong(val); + pylib_PyDict_SetItemString(dict, name, tmp_obj); + PYLIB_DECREF(tmp_obj); +} + +void pys_builtin_const_long(PyObject* mod, const char* name, long val) +{ + PyObject* dict = NULL; + PyObject* tmp_obj = NULL; + if (NULL == (dict = pylib_PyModule_GetDict(mod))) + return; + tmp_obj = pylib_PyLong_FromLong(val); + pylib_PyDict_SetItemString(dict, name, tmp_obj); + PYLIB_DECREF(tmp_obj); +} + +void pys_builtin_const_utf8(PyObject* mod, const 
char* name, const char* val) // val 必须是utf8编码的字符串 +{ + PyObject* dict = NULL; + PyObject* tmp_obj = NULL; + if (NULL == (dict = pylib_PyModule_GetDict(mod))) + return; + tmp_obj = pylib_PyUnicode_FromString(val); + pylib_PyDict_SetItemString(dict, name, tmp_obj); + PYLIB_DECREF(tmp_obj); +} + +void pys_builtin_const_wcs(PyObject* mod, const char* name, const wchar_t* val) +{ + ex_astr strval; + if (!ex_wstr2astr(val, strval, EX_CODEPAGE_UTF8)) + return; + + PyObject* dict = NULL; + PyObject* tmp_obj = NULL; + if (NULL == (dict = pylib_PyModule_GetDict(mod))) + return; + tmp_obj = pylib_PyUnicode_FromString(strval.c_str()); + pylib_PyDict_SetItemString(dict, name, tmp_obj); + PYLIB_DECREF(tmp_obj); +} + +void pys_builtin_const_bin(PyObject* mod, const char* name, const ex_u8* val, size_t size) +{ + PyObject* dict = NULL; + PyObject* tmp_obj = NULL; + if (NULL == (dict = pylib_PyModule_GetDict(mod))) + return; + tmp_obj = pylib_PyBytes_FromStringAndSize((char*)val, size); + pylib_PyDict_SetItemString(dict, name, tmp_obj); + PYLIB_DECREF(tmp_obj); +} diff --git a/common/pyshell/src/pys_core.cpp b/common/pyshell/src/pys_core.cpp new file mode 100644 index 0000000..78282dc --- /dev/null +++ b/common/pyshell/src/pys_core.cpp @@ -0,0 +1,604 @@ +#include +#include "pys_core.h" +#include "pys_util.h" + +#ifdef PYS_USE_PYLIB_SHARED +//======================================================== +// WIN32 +//======================================================== +#define DECLPROC(name) \ + __PROC__ ## name pylib_ ## name = NULL; + +#define GETPROCOPT(lib, name, sym) \ + pylib_ ## name = (__PROC__ ## name)GetProcAddress(lib, #sym) + +#define GETPROC(lib, name) \ + GETPROCOPT(lib, name, name); \ +if(!pylib_ ## name) { \ + EXLOGE("[pys] can not GetProcAddress for " #name "\n"); \ + return -1;\ +} + +#pragma warning(disable:4054) + +#define DECLVAR(name) \ + __VAR__ ## name* pylib_ ## name = NULL; +#define GETVAR(lib, name) \ + pylib_ ## name = (__VAR__ ## name*)GetProcAddress(lib, #name); \ + if (!pylib_ ## name) { \ + EXLOGE("[pys] can not GetProcAddress for " #name "\n"); \ + return -1; \ + } + + +static int _pys_map_python_lib(DYLIB_HANDLE handle); +static DYLIB_HANDLE _pys_dlopen(const wchar_t* dylib_path); + +static int pys_pylib_load(const wchar_t* lib_path) +{ + DYLIB_HANDLE lib = NULL; + + EXLOGD(L"[pys] py-lib: %ls\n", lib_path); + + lib = _pys_dlopen(lib_path); + if (NULL == lib) + return -1; + + if (0 != _pys_map_python_lib(lib)) + return -1; + + return 0; +} + + +DYLIB_HANDLE _pys_dlopen(const wchar_t* dylib_path) +{ + DYLIB_HANDLE handle = NULL; +#ifdef EX_OS_WIN32 + // PYSLOGW(L"[pys] py-lib: %ls\n", dylib_path); + handle = LoadLibraryExW(dylib_path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH); + if (NULL == handle) + { + EXLOGE(L"[pys] can not load python lib: %ls.\n", dylib_path); + return NULL; + } +#else + ex_astr path; + if (!ex_wstr2astr(dylib_path, path, EX_CODEPAGE_UTF8)) + { + EXLOGE("[pys] convert dylib_path failed.\n"); + return NULL; + } + + EXLOGD("[pys] py-lib-a: %s\n", path); + + handle = dlopen(path.c_str(), RTLD_NOW | RTLD_GLOBAL); + + if (NULL == handle) + { + EXLOGE("[pys] dlopen() failed: %s.\n", dlerror()); + return NULL; + } +#endif + + return handle; +} + + +int _pys_map_python_lib(DYLIB_HANDLE handle) +{ + GETVAR(handle, Py_DontWriteBytecodeFlag); + GETVAR(handle, Py_FileSystemDefaultEncoding); + GETVAR(handle, Py_FrozenFlag); + GETVAR(handle, Py_IgnoreEnvironmentFlag); + GETVAR(handle, Py_NoSiteFlag); + GETVAR(handle, Py_NoUserSiteDirectory); + GETVAR(handle, 
Py_OptimizeFlag); + GETVAR(handle, Py_VerboseFlag); + + + GETPROC(handle, Py_BuildValue); + GETPROC(handle, Py_DecRef); + GETPROC(handle, Py_Finalize); + GETPROC(handle, Py_IncRef); + GETPROC(handle, Py_Initialize); + GETPROC(handle, Py_SetPath); + GETPROC(handle, Py_SetProgramName); + GETPROC(handle, Py_SetPythonHome); + GETPROC(handle, PySys_SetArgvEx); + + GETPROC(handle, PyImport_ImportModule); + GETPROC(handle, PyObject_GetAttrString); + + //GETPROC(handle, _Py_char2wchar); + //GETPROC(handle, PyUnicode_FromWideChar); + + + GETPROC(handle, PyErr_Clear); + GETPROC(handle, PyErr_Occurred); + GETPROC(handle, PyErr_Print); + + //GETPROC(handle, PyMem_RawFree); + GETPROC(handle, PyObject_Call); + GETPROC(handle, PyArg_Parse); + + GETPROC(handle, PyObject_CallFunction); + GETPROC(handle, PyModule_GetDict); + GETPROC(handle, PyDict_GetItemString); + GETPROC(handle, PyDict_SetItemString); + GETPROC(handle, PyLong_AsLong); + GETPROC(handle, PyLong_FromLong); + GETPROC(handle, PyLong_FromUnsignedLong); + GETPROC(handle, PyLong_FromUnsignedLongLong); + GETPROC(handle, PyBytes_FromString); + GETPROC(handle, PyBytes_FromStringAndSize); + GETPROC(handle, PyUnicode_FromString); + GETPROC(handle, PyBool_FromLong); + + GETPROC(handle, PyImport_ExtendInittab); + GETPROC(handle, PyModule_Create2); + GETPROC(handle, PyArg_ParseTuple); + GETPROC(handle, PyTuple_Pack); + return 0; +} + + +DECLVAR(Py_DontWriteBytecodeFlag); +DECLVAR(Py_FileSystemDefaultEncoding); +DECLVAR(Py_FrozenFlag); +DECLVAR(Py_IgnoreEnvironmentFlag); +DECLVAR(Py_NoSiteFlag); +DECLVAR(Py_NoUserSiteDirectory); +DECLVAR(Py_OptimizeFlag); +DECLVAR(Py_VerboseFlag); + + +DECLPROC(Py_BuildValue); +DECLPROC(Py_DecRef); +DECLPROC(Py_Finalize); +DECLPROC(Py_IncRef); +DECLPROC(Py_Initialize); +DECLPROC(Py_SetPath); +DECLPROC(Py_SetProgramName); +DECLPROC(Py_SetPythonHome); +DECLPROC(PySys_SetArgvEx); + +DECLPROC(PyImport_ImportModule); +DECLPROC(PyObject_GetAttrString); + +//DECLPROC(_Py_char2wchar); +//DECLPROC(PyUnicode_FromWideChar); + +DECLPROC(PyErr_Clear); +DECLPROC(PyErr_Occurred); +DECLPROC(PyErr_Print); + +//DECLPROC(PyMem_RawFree); +DECLPROC(PyObject_Call); +DECLPROC(PyArg_Parse); + +DECLPROC(PyObject_CallFunction); +DECLPROC(PyModule_GetDict); +DECLPROC(PyDict_GetItemString); +DECLPROC(PyDict_SetItemString); +DECLPROC(PyLong_AsLong); +DECLPROC(PyLong_FromLong); +DECLPROC(PyLong_FromUnsignedLong); +DECLPROC(PyLong_FromUnsignedLongLong); +DECLPROC(PyBytes_FromString); +DECLPROC(PyBytes_FromStringAndSize); +DECLPROC(PyUnicode_FromString); +DECLPROC(PyBool_FromLong); + +DECLPROC(PyImport_ExtendInittab); +DECLPROC(PyModule_Create2); +DECLPROC(PyArg_ParseTuple); +DECLPROC(PyTuple_Pack); + + +#else +int pys_pylib_load(const wchar_t* lib_path) +{ + EXLOGD("[pys] link to python static lib.\n"); + return 0; +} + +#endif + + +//================================================================ +// +//================================================================ + +namespace pys +{ + BuiltinModuleInfo g_builtin_module_info; + + BuiltinModuleInfo::BuiltinModuleInfo() + {} + + BuiltinModuleInfo::~BuiltinModuleInfo() + { + builtin_module_infos::iterator it = m_infos.begin(); + for (; it != m_infos.end(); ++it) + { + delete[] (*it)->method_def; + delete (*it)->module_def; + delete (*it); + } + m_infos.clear(); + } + + void BuiltinModuleInfo::add(PyMethodDef* method_def, PyModuleDef* module_def) + { + BUILTIN_MODULE_INFO* info = new BUILTIN_MODULE_INFO; + info->method_def = method_def; + info->module_def = module_def; + m_infos.push_back(info); + } + 
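+#if 0
+	// Usage sketch (illustrative only, excluded from the build): how a host
+	// program can register a builtin module through the helpers above. The
+	// module name "demo" and the function hello() are hypothetical.
+	static PyObject* _demo_hello(PyObject* self, PyObject* args)
+	{
+		PYLIB_RETURN_TRUE;
+	}
+
+	static PyObject* _init_demo_module(void)
+	{
+		static PYS_BUILTIN_FUNC funcs[] = {
+			{ "hello", _demo_hello, PYS_FALSE, "say hello" },
+			{ NULL, NULL, PYS_FALSE, NULL }		// terminator entry
+		};
+		PyObject* mod = pys_create_module("demo", funcs);
+		if (NULL != mod)
+			pys_builtin_const_long(mod, "DEMO_VERSION", 1);
+		return mod;
+	}
+	// ...and, before calling pys_run():
+	//     pys_add_builtin_module(pysh, "demo", _init_demo_module);
+#endif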
+ //================================================================ + // + //================================================================ + + Core::Core() + { + m_init_tab = NULL; + } + + Core::~Core() + { + if (NULL != m_init_tab) + delete[] m_init_tab; + } + + bool Core::init(const wchar_t* exec_file, const wchar_t* runtime_path) + { +// if (!ex_exec_file(m_exec_file)) +// return false; + + m_exec_file = exec_file; + + m_exec_path = m_exec_file; + if (!ex_dirname(m_exec_path)) + return false; + + m_runtime_path = runtime_path; + return _load_dylib(); + } + + bool Core::set_startup_file(const wchar_t* filename) + { + if (NULL == filename) + return false; + ex_wstr fname = filename; + if (!ex_is_abspath(fname.c_str())) + ex_abspath(fname); + if (!ex_is_file_exists(fname.c_str())) + return false; + + ex_wstr ext; + if (!ex_path_ext_name(fname, ext)) + return false; + + m_start_file = fname; + + if (ext == L"zip") + { + m_is_zipped_app = true; + // 将.zip文件加入搜索路径 + m_search_path.push_back(m_start_file); + } + else + { + m_is_zipped_app = false; + + // 将.py文件所在路径加入搜索路径 + ex_wstr tmp_path(m_start_file); + ex_dirname(tmp_path); + m_search_path.push_back(tmp_path); + + // 如果尚未设置启动模块名称,则以.py文件的文件名作为启动模块名称 + if (m_bootstrap_module.empty()) + { + ex_wstr wmod(m_start_file); + wmod.assign(m_start_file, tmp_path.length() + 1, m_start_file.length() - tmp_path.length() - 1 - 3); + ex_wstr2astr(wmod, m_bootstrap_module); + } + } + + return true; + } + + bool Core::add_builtin_module(const char* module_name, pys_init_module_func init_func) + { + builtin_modules::iterator it = m_builtin_modules.find(module_name); + if (it != m_builtin_modules.end()) + return false; + + m_builtin_modules.insert(std::make_pair(module_name, init_func)); + return true; + } + + bool Core::get_builtin_module_by_init_func(pys_init_module_func init_func, ex_astr& module_name) + { + builtin_modules::iterator it = m_builtin_modules.begin(); + for (; it != m_builtin_modules.end(); ++it) + { + if (init_func == it->second) + { + module_name = it->first; + return true; + } + } + + return false; + } + + bool Core::_load_dylib(void) + { +#ifdef PYS_USE_PYLIB_SHARED + ex_wstr ver_file = m_runtime_path; + if (!ex_path_join(ver_file, true, L"python.ver", NULL)) + return false; + FILE* f = pys_open_file(ver_file.c_str(), L"rb"); + if (NULL == f) + { + EXLOGE(L"[pys] can not open file: %ls\n", ver_file.c_str()); + return false; + } + fseek(f, 0L, SEEK_SET); + char dll_name[64] = { 0 }; + size_t read_size = fread(dll_name, 1, 64, f); + fclose(f); + if (64 != read_size) + { + EXLOGE(L"[pys] read file failed, need 64B, read %dB\n", read_size); + return false; + } + + ex_wstr wstr_dll; + if (!ex_astr2wstr(dll_name, wstr_dll)) + return false; + + ex_wstr dll_file = m_runtime_path; + if (!ex_path_join(dll_file, true, wstr_dll.c_str(), NULL)) + return false; + + if (0 != pys_pylib_load(dll_file.c_str())) + { + return false; + } +#endif + return true; + } + + bool Core::add_search_path(const wchar_t* wpath) + { + ex_wstr wstr_path = wpath; + if (!ex_abspath(wstr_path)) + { + EXLOGE(L"can not get abspath of `%ls`.\n", wpath); + return false; + } + + pys_wstr_list::iterator it = m_search_path.begin(); + for (; it != m_search_path.end(); ++it) + { + // TODO: windows平台不区分大小写比较 + if (wstr_path == (*it)) + return false; + } + + m_search_path.push_back(wstr_path); + return true; + } + + bool Core::add_search_path(const char* apath, int code_page) + { + ex_wstr wstr_path; + if (!ex_astr2wstr(apath, wstr_path, code_page)) + return false; + return 
add_search_path(wstr_path.c_str()); + } + + + bool Core::_run_prepare(void) + { + if(m_bootstrap_module.empty()) + m_bootstrap_module = "pysmain"; + if(m_bootstrap_func.empty()) + m_bootstrap_func = "main"; + +#ifdef PYS_USE_PYLIB_SHARED + *pylib_Py_NoSiteFlag = 1; + *pylib_Py_OptimizeFlag = 2; // 进行操作码优化(编译成操作码,去掉assert及doc-string) + *pylib_Py_FrozenFlag = 1; + *pylib_Py_DontWriteBytecodeFlag = 1; // 对于加载的.py脚本,内存中编译为操作码,但不要保存.pyo缓存文件 + *pylib_Py_NoUserSiteDirectory = 1; + *pylib_Py_IgnoreEnvironmentFlag = 1; + *pylib_Py_VerboseFlag = 0; +#else + pylib_Py_NoSiteFlag = 1; + pylib_Py_OptimizeFlag = 2; + pylib_Py_FrozenFlag = 1; + pylib_Py_DontWriteBytecodeFlag = 1; + pylib_Py_NoUserSiteDirectory = 1; + pylib_Py_IgnoreEnvironmentFlag = 1; + pylib_Py_VerboseFlag = 0; +#endif + + ex_wstr tmp_path = m_runtime_path; + ex_path_join(tmp_path, true, L"modules", NULL); + add_search_path(tmp_path.c_str()); + + tmp_path = m_runtime_path; + ex_path_join(tmp_path, true, L"python.zip", NULL); + add_search_path(tmp_path.c_str()); + + if (m_search_path.size() > 0) + { + pys_wstr_list::iterator it = m_search_path.begin(); + for (; it != m_search_path.end(); ++it) + { + add_search_path(it->c_str()); + } + } + + return true; + } + + void Core::_run_set_program(void) + { + if(m_prog_name.empty()) + pylib_Py_SetProgramName((wchar_t*)m_exec_file.c_str()); + else + pylib_Py_SetProgramName((wchar_t*)m_prog_name.c_str()); + } + + void Core::_run_set_path(void) + { + pys_wstr_list::iterator it = m_search_path.begin(); + for (; it != m_search_path.end(); ++it) + { + if (!m_search_path_tmp.empty()) + m_search_path_tmp += EX_PATH_SEP_STR; + m_search_path_tmp += (*it); + } + + EXLOGD(L"[pys] search path: %ls\n", m_search_path_tmp.c_str()); + pylib_Py_SetPath((wchar_t*)m_search_path_tmp.c_str()); + } + + void Core::_run_set_argv(void) + { + int tmp_argc = m_py_args.size(); + wchar_t** tmp_wargv = (wchar_t**)calloc(tmp_argc + 1, sizeof(wchar_t*)); + if (!tmp_wargv) + return; + + int i = 0; + pys_wstr_list::iterator it = m_py_args.begin(); + for (; it != m_py_args.end(); ++it) + { + tmp_wargv[i] = ex_wcsdup(it->c_str()); + i++; + } + + pylib_PySys_SetArgvEx(tmp_argc, tmp_wargv, 0); + + ex_free_wargv(tmp_argc, tmp_wargv); + } + + bool Core::_run_init_builtin_modules(void) + { + m_init_tab = NULL; + int cnt = m_builtin_modules.size(); + if (0 == cnt) + return true; + + m_init_tab = new struct _inittab[cnt + 1]; + memset(m_init_tab, 0, sizeof(struct _inittab)*(cnt + 1)); + int i = 0; + builtin_modules::iterator it = m_builtin_modules.begin(); + for (; it != m_builtin_modules.end(); ++it, ++i) + { + m_init_tab[i].name = it->first.c_str(); + m_init_tab[i].initfunc = it->second; + } + + if (-1 == pylib_PyImport_ExtendInittab(m_init_tab)) + { + EXLOGE("[pys] can not init builtin module.\n"); + return false; + } + + return true; + } + + + int Core::run(void) + { + int ret = 0; + + PyObject* pModule = NULL; + PyObject* pDict = NULL; + PyObject* pFunc = NULL; + PyObject* pModuleName = NULL; + PyObject* pRunArgs = NULL; + PyObject* pyRet = NULL; + PYS_BOOL has_error = PYS_TRUE; + + if (!_run_init_builtin_modules()) + return PYSR_FAILED; + + if (!_run_prepare()) + return PYSR_FAILED; + _run_set_program(); + _run_set_path(); + + // Py_Initialize()必须在初始化内建模块之后进行 + pylib_Py_Initialize(); + + _run_set_argv(); + + for (;;) + { + pModule = pylib_PyImport_ImportModule(m_bootstrap_module.c_str()); + if (pModule == NULL) + { + EXLOGE("[pys] can not import module: %s\n", m_bootstrap_module.c_str()); + + ret = -1; + break; + } + + pDict = 
pylib_PyModule_GetDict(pModule); /* NO ref added */
+			if (pDict == NULL)
+			{
+				EXLOGE("[pys] can not get module dict: %s\n", m_bootstrap_module.c_str());
+				ret = -1;
+				break;
+			}
+
+			pFunc = pylib_PyDict_GetItemString(pDict, (char*)m_bootstrap_func.c_str());
+			if (pFunc == NULL)
+			{
+				EXLOGE("[pys] module [%s] has no function named `%s`.\n", m_bootstrap_module.c_str(), m_bootstrap_func.c_str());
+				ret = -1;
+				break;
+			}
+
+			pyRet = pylib_PyObject_CallFunction(pFunc, "");
+			if (pyRet == NULL)
+			{
+				EXLOGE("[pys] %s.%s() returned nothing.\n", m_bootstrap_module.c_str(), m_bootstrap_func.c_str());
+				ret = -1;
+				break;
+			}
+
+			pylib_PyErr_Clear();
+			ret = pylib_PyLong_AsLong(pyRet);
+
+			has_error = PYS_FALSE;
+
+			break;
+		}
+
+		if (pylib_PyErr_Occurred())
+			pylib_PyErr_Print();
+		pylib_PyErr_Clear();
+
+		// pDict and pFunc are borrowed references (PyModule_GetDict and
+		// PyDict_GetItemString do not add a ref), so they must not be DECREF'ed.
+		if (pModule) { PYLIB_DECREF(pModule); }
+		if (pModuleName) { PYLIB_DECREF(pModuleName); }
+		if (pRunArgs) { PYLIB_DECREF(pRunArgs); }
+		if (pyRet) { PYLIB_DECREF(pyRet); }
+
+		pylib_Py_Finalize();
+		EXLOGD("[pys] python finalized. ExitCode=%d\n", ret);
+
+		return ret;
+	}
+
+}
diff --git a/common/pyshell/src/pys_core.h b/common/pyshell/src/pys_core.h
new file mode 100644
index 0000000..4345f2e
--- /dev/null
+++ b/common/pyshell/src/pys_core.h
@@ -0,0 +1,90 @@
+#ifndef __PYS_CORE_H__
+#define __PYS_CORE_H__
+
+//#include "pys_str.h"
+
+#include "pys_util.h"
+#include
+
+namespace pys
+{
+	typedef std::map<ex_astr, pys_init_module_func> builtin_modules;
+	typedef std::list<ex_astr> pys_astr_list;
+	typedef std::list<ex_wstr> pys_wstr_list;
+
+	class Core
+	{
+	public:
+		Core();
+		~Core();
+
+		// Call the setter members below to provide the required information, then run.
+		int run(void);
+
+		// Initialize with default settings.
+		bool init(const wchar_t* exec_file, const wchar_t* runtime_path);
+
+		bool add_search_path(const wchar_t* wpath);
+		bool add_search_path(const char* apath, int code_page = EX_CODEPAGE_DEFAULT);
+
+		bool set_startup_file(const wchar_t* filename);
+		bool add_builtin_module(const char* module_name, pys_init_module_func init_func);
+		bool get_builtin_module_by_init_func(pys_init_module_func init_func, ex_astr& module_name);
+
+	private:
+		bool _load_dylib(void);
+
+		bool _run_init_builtin_modules(void);
+		bool _run_prepare(void);
+		void _run_set_program(void);
+		void _run_set_path(void);
+		void _run_set_argv(void);
+
+
+	public:
+		ex_wstr m_prog_name;		// passed to the Python interpreter; if unset, m_exec_file is used.
+		ex_astr m_bootstrap_module;
+		ex_astr m_bootstrap_func;
+		pys_wstr_list m_py_args;	// arguments passed to the Python script
+
+	private:
+		bool m_is_zipped_app;
+
+		ex_wstr m_exec_file;		// file name of the current executable (absolute path)
+		ex_wstr m_exec_path;		// directory of the current executable (absolute path)
+		ex_wstr m_runtime_path;		// python runtime path; defaults to the `pysrt` directory under the executable's path.
+		ex_wstr m_start_file;
+		ex_wstr m_search_path_tmp;
+
+		pys_wstr_list m_search_path;
+
+		builtin_modules m_builtin_modules;
+		struct _inittab* m_init_tab;
+	};
+
+
+	typedef struct BUILTIN_MODULE_INFO
+	{
+		PyMethodDef* method_def;
+		PyModuleDef* module_def;
+	}BUILTIN_MODULE_INFO;
+
+	typedef std::list<BUILTIN_MODULE_INFO*> builtin_module_infos;
+
+	class BuiltinModuleInfo
+	{
+	public:
+		BuiltinModuleInfo();
+		~BuiltinModuleInfo();
+
+		void add(PyMethodDef* method_def, PyModuleDef* module_def);
+
+	private:
+		builtin_module_infos m_infos;
+	};
+
+	extern BuiltinModuleInfo g_builtin_module_info;
+
+} // namespace pys
+
+#endif // __PYS_CORE_H__
diff --git a/common/pyshell/src/pys_util.cpp b/common/pyshell/src/pys_util.cpp
new file mode 100644
index 0000000..6486a26
--- /dev/null
+++ b/common/pyshell/src/pys_util.cpp
@@ -0,0 +1,20 @@
+#include "pys_util.h"
+
+FILE* pys_open_file(const ex_wstr&
diff --git a/common/pyshell/src/pys_util.cpp b/common/pyshell/src/pys_util.cpp
new file mode 100644
index 0000000..6486a26
--- /dev/null
+++ b/common/pyshell/src/pys_util.cpp
@@ -0,0 +1,20 @@
+#include "pys_util.h"
+
+FILE* pys_open_file(const ex_wstr& file_name, const wchar_t* mode)
+{
+    FILE* f = NULL;
+#ifdef EX_OS_WIN32
+    errno_t err = 0;
+    err = _wfopen_s(&f, file_name.c_str(), mode);
+    if (0 == err)
+        return f;
+    else
+        return NULL;
+#else
+    ex_astr _file_name, _mode;
+    ex_wstr2astr(file_name, _file_name, EX_CODEPAGE_UTF8);
+    ex_wstr2astr(mode, _mode, EX_CODEPAGE_UTF8);
+    f = fopen(_file_name.c_str(), _mode.c_str());
+    return f;
+#endif
+}
diff --git a/common/pyshell/src/pys_util.h b/common/pyshell/src/pys_util.h
new file mode 100644
index 0000000..7f6e31b
--- /dev/null
+++ b/common/pyshell/src/pys_util.h
@@ -0,0 +1,9 @@
+#ifndef __PYS_UTIL_H__
+#define __PYS_UTIL_H__
+
+#include
+#include
+
+FILE* pys_open_file(const ex_wstr& file_name, const wchar_t* mode);
+
+#endif // __PYS_UTIL_H__
diff --git a/server/share/data/main.sql b/server/share/data/main.sql
new file mode 100644
index 0000000..7868c29
--- /dev/null
+++ b/server/share/data/main.sql
@@ -0,0 +1,88 @@
+
+CREATE TABLE `ts_account` (
+  `account_id` integer PRIMARY KEY AUTOINCREMENT,
+  `account_type` int(11) DEFAULT 0,
+  `account_name` varchar(32) DEFAULT NULL,
+  `account_pwd` varchar(32) DEFAULT NULL,
+  `account_status` int(11) DEFAULT 0,
+  `account_lock` int(11) DEFAULT 0,
+  `account_desc` varchar(255)
+);
+
+INSERT INTO "main"."ts_account" VALUES (1, 100, 'admin', '8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918', 0, 0, 'Super Administrator');
+
+CREATE TABLE "ts_auth"(
+"auth_id" INTEGER PRIMARY KEY AUTOINCREMENT,
+"account_name" varchar(256),
+"host_id" INTEGER,
+"host_auth_id" int(11) NOT NULL
+);
+
+CREATE TABLE "ts_cert" (
+"cert_id" integer PRIMARY KEY AUTOINCREMENT,
+"cert_name" varchar(256),
+"cert_pub" varchar(2048) DEFAULT '',
+"cert_pri" varchar(4096) DEFAULT '',
+"cert_desc" varchar(256)
+);
+
+
+CREATE TABLE "ts_config" (
+"name" varchar(256) NOT NULL,
+"value" varchar(256),
+PRIMARY KEY ("name" ASC)
+);
+
+
+INSERT INTO "main"."ts_config" VALUES ('ts_server_ip', '127.0.0.1');
+INSERT INTO "main"."ts_config" VALUES ('ts_server_rpc_port', 52080);
+INSERT INTO "main"."ts_config" VALUES ('ts_server_rdp_port', 52089);
+INSERT INTO "main"."ts_config" VALUES ('ts_server_ssh_port', 52189);
+INSERT INTO "main"."ts_config" VALUES ('ts_server_telnet_port', 52389);
+INSERT INTO "main"."ts_config" VALUES ('ts_server_rpc_ip', '127.0.0.1');
+
+CREATE TABLE `ts_group` (
+  `group_id` integer PRIMARY KEY AUTOINCREMENT,
+  `group_name` varchar(255) DEFAULT ''
+);
+
+
+CREATE TABLE "ts_host_info"(
+"host_id" integer PRIMARY KEY AUTOINCREMENT,
+"group_id" int(11) DEFAULT 0,
+"host_sys_type" int(11) DEFAULT 1,
+"host_ip" varchar(32) DEFAULT '',
+"host_port" int(11) DEFAULT 0,
+"protocol" int(11) DEFAULT 0,
+"host_lock" int(11) DEFAULT 0,
+"host_desc" DEFAULT ''
+);
+
+CREATE TABLE "ts_auth_info"(
+"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+"host_id" INTEGER,
+"auth_mode" INTEGER,
+"user_name" varchar(256),
+"user_pswd" varchar(256),
+"user_param" varchar(256),
+"cert_id" INTEGER,
+"encrypt" INTEGER,
+"log_time" varchar(60)
+);
+
+
+CREATE TABLE "ts_log" (
+"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+"session_id" varchar(32),
+"account_name" varchar(64),
+"host_ip" varchar(32),
+"host_port" INTEGER,
+"sys_type" INTEGER DEFAULT 0,
+"auth_type" INTEGER,
+"protocol" INTEGER,
+"user_name" varchar(64),
+"ret_code" INTEGER,
+"begin_time" INTEGER,
+"end_time" INTEGER,
+"log_time" varchar(64)
+);
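
The schema above can be exercised directly with Python's sqlite3 module. A minimal sketch, assuming illustrative file names; the seeded admin password is stored as a SHA-256 hex digest.

    import sqlite3

    conn = sqlite3.connect('ts_db.db')  # illustrative database file name
    with open('main.sql', encoding='utf-8') as f:
        conn.executescript(f.read())

    row = conn.execute(
        'SELECT account_name, account_pwd FROM ts_account WHERE account_id = 1'
    ).fetchone()
    print(row)  # ('admin', '8c6976e5...')
    conn.close()
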
diff --git a/server/share/etc/web.conf b/server/share/etc/web.conf
new file mode 100644
index 0000000..6be97fa
--- /dev/null
+++ b/server/share/etc/web.conf
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+server_port = 7190
+
+log_file = 'E:\work\eomsoft\teleport-github\server\share\log\web.log'
+
+# log_level can be 0 ~ 4
+#  LOG_LEVEL_DEBUG    0  log everything.
+#  LOG_LEVEL_VERBOSE  1  log everything except debug messages.
+#  LOG_LEVEL_INFO     2  log information/warning/error messages.
+#  LOG_LEVEL_WARN     3  log warning and error messages.
+#  LOG_LEVEL_ERROR    4  log error messages only.
+log_level = 0
\ No newline at end of file
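
web.conf uses Python assignment syntax, so one way a loader could read it is to execute the text against a dict of defaults and pick the known keys back out. This is only a sketch under that assumption; how the web application actually parses the file may differ.

    cfg = {'server_port': 7190, 'log_file': None, 'log_level': 2}  # assumed defaults

    with open('web.conf', encoding='utf-8') as f:
        exec(f.read(), {}, cfg)  # assignments in the file land in cfg

    print(cfg['server_port'], cfg['log_level'])
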
diff --git a/server/tp_web/src/main.cpp b/server/tp_web/src/main.cpp
new file mode 100644
index 0000000..e8b0156
--- /dev/null
+++ b/server/tp_web/src/main.cpp
@@ -0,0 +1,499 @@
+#include "ts_env.h"
+#include "ts_ver.h"
+
+#include
+#include
+
+// Command-line usage (run with no arguments to start in service mode):
+//   tp_web [-i|-u|--version] [ [-d] start] [...]
+//     -d          run with debug output (not as a daemon/service)
+//     -i          install the service, then exit (Windows only)
+//     -u          uninstall the service, then exit (Windows only)
+//     --version   print the version number, then exit
+//     start       run as a service
+//     ...         all remaining arguments are passed to the python script
+//
+//
+// Execute a specific Python script:
+//   tp_web --py [-f FuncName] script_file.py ...
+//     --py            must be the first argument; this run executes the given script
+//     -f FuncName     entry function to call; defaults to main.
+//     script_file.py  the script file to execute
+//     ...             all remaining arguments are passed to the Python script
+
+
+bool g_is_debug = false;
+static ex_wstrs g_py_args;
+
+// used when executing a specific script
+static ex_wstr g_py_script_file;
+static ex_wstr g_py_main_func;
+
+#define RUN_UNKNOWN     0
+#define RUN_WEB         1
+#define RUN_PY_SCRIPT   2
+#define RUN_INSTALL_SRV 3
+#define RUN_UNINST_SRV  4
+static ex_u8 g_run_type = RUN_UNKNOWN;
+
+#define EOM_WEB_SERVICE_NAME L"EOM Teleport Web Service"
+
+static bool _run_daemon(void);
+
+#ifdef EX_OS_WIN32
+static int service_install()
+{
+    ex_wstr exec_file(g_env.m_exec_file);
+    exec_file += L" start";
+
+    if (EXRV_OK == ex_winsrv_install(EOM_WEB_SERVICE_NAME, EOM_WEB_SERVICE_NAME, exec_file))
+        return 0;
+    else
+        return 1;
+}
+
+static int service_uninstall()
+{
+    if (EXRV_OK != ex_winsrv_stop(EOM_WEB_SERVICE_NAME))
+        return 1;
+
+    if (EXRV_OK != ex_winsrv_uninstall(EOM_WEB_SERVICE_NAME))
+        return 2;
+
+    return 0;
+}
+#endif
+
+static bool _process_cmd_line(int argc, wchar_t** argv)
+{
+    if (argc <= 1)
+    {
+        EXLOGE("[tpweb] nothing to do.\n\n");
+        return false;
+    }
+
+    g_run_type = RUN_UNKNOWN;
+    bool is_py_arg = false;
+
+    if (0 == wcscmp(argv[1], L"--version"))
+    {
+        EXLOGV("\nTeleport Web Server, version %ls.\n\n", TP_SERVER_VER);
+        return false;
+    }
+    else if (0 == wcscmp(argv[1], L"--py"))
+    {
+        g_run_type = RUN_PY_SCRIPT;
+
+        for (int i = 2; i < argc; ++i)
+        {
+            if (is_py_arg)
+            {
+                g_py_args.push_back(argv[i]);
+                continue;
+            }
+
+            if (0 == wcscmp(argv[i], L"-f"))
+            {
+                // the entry function name is the argument FOLLOWING -f
+                if (i + 1 < argc)
+                    g_py_main_func = argv[++i];
+                continue;
+            }
+
+            if (g_py_script_file.length() == 0)
+            {
+                g_py_script_file = argv[i];
+                is_py_arg = true;
+                continue;
+            }
+        }
+    }
+    else if (0 == wcscmp(argv[1], L"-i"))
+    {
+        g_run_type = RUN_INSTALL_SRV;
+    }
+    else if (0 == wcscmp(argv[1], L"-u"))
+    {
+        g_run_type = RUN_UNINST_SRV;
+    }
+    else
+    {
+        for (int i = 1; i < argc; ++i)
+        {
+            if (is_py_arg)
+            {
+                g_py_args.push_back(argv[i]);
+                continue;
+            }
+            if (0 == wcscmp(argv[i], L"start"))
+            {
+                g_run_type = RUN_WEB;
+                is_py_arg = true;
+                continue;
+            }
+
+            if (0 == wcscmp(argv[i], L"-d"))
+            {
+                g_is_debug = true;
+                continue;
+            }
+
+            EXLOGE(L"[tpweb] Unknown option: %ls\n", argv[i]);
+            return false;
+        }
+    }
+
+    if (g_run_type == RUN_UNKNOWN)
+    {
+        EXLOGE("[tpweb] nothing to do.\n\n");
+        return false;
+    }
+
+    return true;
+}
+
+
+static int _main_loop(void)
+{
+    PYS_HANDLE pysh = pys_create();
+    if (NULL == pysh)
+    {
+        EXLOGE("pys_create() failed.\n");
+        return 1;
+    }
+    PysHandleHolder hh(pysh);
+
+    ex_wstr pysrt_path(g_env.m_exec_path);
+    if (!ex_path_join(pysrt_path, false, L"pysrt", NULL))
+    {
+        EXLOGE("pysrt does not exist.\n");
+        return 1;
+    }
+
+    if (!pys_init_runtime(pysh, g_env.m_exec_file.c_str(), pysrt_path.c_str()))
+    {
+        EXLOGE("pys_init_runtime() failed.\n");
+        return 1;
+    }
+
+    // locate the web application
+    ex_wstr sf_path;
+    if (g_run_type == RUN_WEB)
+    {
+        sf_path = g_env.m_www_path;
+
+        if (!ex_path_join(sf_path, false, L"teleport", L"app", L"eom_main.py", NULL))
+        {
+            EXLOGE(L"[tpweb] invalid path [%ls].\n", sf_path.c_str());
+            return 1;
+        }
+
+        if (ex_is_file_exists(sf_path.c_str()))
+        {
+            pys_set_startup_file(pysh, sf_path.c_str());
+        }
+        else
+        {
+            EXLOGE(L"[tpweb] teleport web app not found at [%ls].\n", sf_path.c_str());
+            return 1;
+        }
+    }
+    else if (g_run_type == RUN_PY_SCRIPT)
+    {
+        sf_path = g_env.m_exec_path;
+
+        if (!ex_is_file_exists(g_py_script_file.c_str()))
+        {
+            EXLOGE(L"[tpweb] file not found: [%ls].\n", g_py_script_file.c_str());
+            return 1;
+        }
+
+        if (g_py_main_func.length() == 0)
+        {
+            pys_set_startup_file(pysh, g_py_script_file.c_str());
+        }
+        else
+        {
+            ex_astr file_name;
+            ex_astr func_name;
+            ex_wstr2astr(g_py_script_file, file_name);
+            ex_wstr2astr(g_py_main_func, func_name);
+
+            pys_set_bootstrap_module(pysh, file_name.c_str(), func_name.c_str());
+        }
+    }
+
+    ex_wstrs::const_iterator it = g_py_args.begin();
+    for (; it != g_py_args.end(); ++it)
+    {
+        pys_add_arg(pysh, it->c_str());
+    }
+
+    return pys_run(pysh);
+}
+
+int _app_main(int argc, wchar_t** argv)
+{
+    if (!_process_cmd_line(argc, argv))
+        return 1;
+
+    if (!g_env.init())
+    {
+        EXLOGE("[tpweb] env init failed.\n");
+        return 1;
+    }
+
+#ifdef EX_DEBUG
+    EXLOG_LEVEL(EX_LOG_LEVEL_DEBUG);
+#endif
+
+    if (g_run_type == RUN_PY_SCRIPT)
+    {
+        return _main_loop();
+    }
+#ifdef EX_OS_WIN32
+    else if (g_run_type == RUN_INSTALL_SRV)
+    {
+        return service_install();
+    }
+    else if (g_run_type == RUN_UNINST_SRV)
+    {
+        return service_uninstall();
+    }
+#endif
+
+    if (!g_is_debug)
+    {
+        if (!_run_daemon())
+        {
+            EXLOGE("[tpweb] can not run in daemon mode.\n");
+            return 1;
+        }
+
+#ifdef EX_OS_WIN32
+        return 0;
+#endif
+    }
+
+    return _main_loop();
+}
+
+
+
+#ifdef EX_OS_WIN32
+
+// #ifdef EX_DEBUG
+// #include
+// #endif
+
+static SERVICE_STATUS g_ServiceStatus = { 0 };
+static SERVICE_STATUS_HANDLE g_hServiceStatusHandle = NULL;
+HANDLE g_hWorkerThread = NULL;
+
+VOID WINAPI service_main(DWORD argc, wchar_t** argv);
+void WINAPI service_handler(DWORD fdwControl);
+
+static DWORD WINAPI service_thread_func(LPVOID lpParam);
+
+int main()
+{
+    int ret = 0;
+    LPWSTR szCmdLine = (LPWSTR)::GetCommandLineW();   // get the raw command line
+
+    int _argc = 0;
+    wchar_t** _argv = ::CommandLineToArgvW(szCmdLine, &_argc);   // split the command line into an argv array
+
+    ret = _app_main(_argc, _argv);
+
+    LocalFree(_argv);
+    _argv = NULL;
+
+    return ret;
+}
+
+static bool _run_daemon(void)
+{
+    SERVICE_TABLE_ENTRY DispatchTable[2];
+    DispatchTable[0].lpServiceName = EOM_WEB_SERVICE_NAME;
+    DispatchTable[0].lpServiceProc = service_main;
+    DispatchTable[1].lpServiceName = NULL;
+    DispatchTable[1].lpServiceProc = NULL;
+
+    if (!StartServiceCtrlDispatcher(DispatchTable))
+    {
+        EXLOGE_WIN("StartServiceCtrlDispatcher()");
+        return false;
+    }
+
+    return true;
+}
+
+
+static DWORD WINAPI service_thread_func(LPVOID lpParam)
+{
+    int ret = _main_loop();
+
+    // update the service status (if the service is still marked as running, set it to stopped)
+    g_ServiceStatus.dwWin32ExitCode = 0;
+    g_ServiceStatus.dwCurrentState = SERVICE_STOPPED;
+    g_ServiceStatus.dwCheckPoint = 0;
+    g_ServiceStatus.dwWaitHint = 0;
+    if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
+        EXLOGE_WIN("SetServiceStatus()");
+
+    return ret;
+}
+
+static void WINAPI service_handler(DWORD fdwControl)
+{
+    switch (fdwControl)
+    {
+    case SERVICE_CONTROL_STOP:
+    case SERVICE_CONTROL_SHUTDOWN:
+    {
+        if (g_hWorkerThread)
+        {
+            TerminateThread(g_hWorkerThread, 1);
+            g_hWorkerThread = NULL;
+        }
+
+        g_ServiceStatus.dwWin32ExitCode = 0;
+        g_ServiceStatus.dwCurrentState = SERVICE_STOPPED;
+        g_ServiceStatus.dwCheckPoint = 0;
+        g_ServiceStatus.dwWaitHint = 0;
+
+    }break;
+
+    default:
+        return;
+    };
+
+    if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
+    {
+        EXLOGE_WIN("SetServiceStatus(STOP)");
+        return;
+    }
+}
+
+VOID WINAPI service_main(DWORD argc, wchar_t** argv)
+{
+    g_ServiceStatus.dwServiceType = SERVICE_WIN32;
+    g_ServiceStatus.dwCurrentState = SERVICE_START_PENDING;
+    g_ServiceStatus.dwControlsAccepted = SERVICE_ACCEPT_STOP | SERVICE_ACCEPT_SHUTDOWN;
+    g_ServiceStatus.dwWin32ExitCode = 0;
+    g_ServiceStatus.dwServiceSpecificExitCode = 0;
+    g_ServiceStatus.dwCheckPoint = 0;
+    g_ServiceStatus.dwWaitHint = 0;
+    g_hServiceStatusHandle = RegisterServiceCtrlHandler(EOM_WEB_SERVICE_NAME, service_handler);
+    if (g_hServiceStatusHandle == 0)
+    {
+        EXLOGE_WIN("RegisterServiceCtrlHandler()");
+        return;
+    }
+
+    DWORD tid = 0;
+    g_hWorkerThread = CreateThread(NULL, 0, service_thread_func, NULL, 0, &tid);
+    if (NULL == g_hWorkerThread)
+    {
+        EXLOGE_WIN("CreateThread(python)");
+
+        g_ServiceStatus.dwWin32ExitCode = 0;
+        g_ServiceStatus.dwCurrentState = SERVICE_STOPPED;
+        g_ServiceStatus.dwCheckPoint = 0;
+        g_ServiceStatus.dwWaitHint = 0;
+        if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
+            EXLOGE_WIN("SetServiceStatus()");
+
+        return;
+    }
+
+    g_ServiceStatus.dwCurrentState = SERVICE_RUNNING;
+    g_ServiceStatus.dwCheckPoint = 0;
+    g_ServiceStatus.dwWaitHint = 9000;
+    if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
+    {
+        EXLOGE_WIN("SetServiceStatus()");
+        return;
+    }
+}
+
+#else
+// not EX_OS_WIN32
+#include "ts_util.h"
+#include
+#include
+
+static void _sig_handler(int signum, siginfo_t* info, void* ptr);
+//static int _daemon(int nochdir, int noclose);
+
+int main(int argc, char** argv)
+{
+    struct sigaction act;
+    memset(&act, 0, sizeof(act));
+    act.sa_sigaction = _sig_handler;
+    act.sa_flags = SA_SIGINFO;
+    sigaction(SIGINT, &act, NULL);
+
+    wchar_t** wargv = ex_make_wargv(argc, argv);
+    int ret = _app_main(argc, wargv);
+
+    ex_free_wargv(argc, wargv);
+
+    return ret;
+}
+
+void _sig_handler(int signum, siginfo_t* info, void* ptr)
+{
+    if (signum == SIGINT || signum == SIGTERM)
+    {
+        printf("[ts] received SIGINT or SIGTERM, exit now.\n");
+        exit(1);
+    }
+}
+
+static bool _run_daemon(void)
+{
+    pid_t pid = fork();
+    if (pid < 0)
+    {
+        printf("[ERROR] can not fork daemon process.\n");
+        exit(EXIT_FAILURE);
+    }
+    else if (pid > 0)
+    {
+        exit(EXIT_SUCCESS); // the parent exits.
+    }
+
+    // now running as the first child.
+    if (setsid() == -1)
+    {
+        printf("setsid() failed.\n");
+        assert(0);
+        exit(EXIT_FAILURE);
+    }
+
+    umask(0);
+
+    pid = fork();
+    if (pid < 0)
+    {
+        printf("[ERROR] can not fork daemon process.\n");
+        exit(EXIT_FAILURE);
+    }
+    else if (pid > 0)
+    {
+        exit(0); // the first child exits.
+    }
+
+    // now running as the second child.
+    int ret = chdir("/");
+    close(STDIN_FILENO);
+
+    int stdfd = open("/dev/null", O_RDWR);
+    close(STDOUT_FILENO);
+    close(STDERR_FILENO);
+    dup2(stdfd, STDOUT_FILENO);
+    dup2(stdfd, STDERR_FILENO);
+
+    return true;
+}
+
+#endif
diff --git a/server/tp_web/src/res/tp_web.ico b/server/tp_web/src/res/tp_web.ico
new file mode 100644
index 0000000..d538609
Binary files /dev/null and b/server/tp_web/src/res/tp_web.ico differ
diff --git a/server/tp_web/src/resource.h b/server/tp_web/src/resource.h
new file mode 100644
index 0000000..4627916
Binary files /dev/null and b/server/tp_web/src/resource.h differ
diff --git a/server/tp_web/src/tp_web.rc b/server/tp_web/src/tp_web.rc
new file mode 100644
index 0000000..c0e10b2
Binary files /dev/null and b/server/tp_web/src/tp_web.rc differ
diff --git a/server/tp_web/src/tp_web.vs2015.sln b/server/tp_web/src/tp_web.vs2015.sln
new file mode 100644
index 0000000..a8e2dc4
--- /dev/null
+++ b/server/tp_web/src/tp_web.vs2015.sln
@@ -0,0 +1,22 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 14
+VisualStudioVersion = 14.0.23107.0
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tp_web", "tp_web.vs2015.vcxproj", "{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|x86 = Debug|x86
+		Release|x86 = Release|x86
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.ActiveCfg = Debug|Win32
+		{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.Build.0 = Debug|Win32
+		{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.ActiveCfg = Release|Win32
+		{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.Build.0 = Release|Win32
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal
diff --git a/server/tp_web/src/tp_web.vs2015.vcxproj b/server/tp_web/src/tp_web.vs2015.vcxproj
new file mode 100644
index 0000000..9aaad34
--- /dev/null
+++ b/server/tp_web/src/tp_web.vs2015.vcxproj
@@ -0,0 +1,210 @@
+ + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + + {6548CB1D-A7BA-4A68-9B3F-A5129F77868B} + Win32Proj + tp_web + 8.1 + tp_web + + + + Application + true + v140_xp + Unicode + + + Application + false + v140_xp + true + Unicode + + + Application + true + v140 + Unicode + + + Application + false + v140 + true + Unicode + + + + + + + + + + + + + + + + + + + + + true + ..\..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + D:\apps\vld\include;$(IncludePath) + D:\apps\vld\lib\Win32;$(LibraryPath) + + + true + ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + + + false + ..\..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + ..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + + + false + ..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + ..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\ + + + + + + Level3 + Disabled + WIN32;_DEBUG;_CONSOLE;LIBSSH_STATIC;_CRT_SECURE_NO_WARNINGS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) + true + ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include + 
MultiThreadedDebug + + + Console + ../../../common/pyshell/pys/lib + libcmt.lib + + + + + + Level3 + Disabled + _DEBUG;_WINDOWS;%(PreprocessorDefinitions) + true + ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include + + + Windows + true + ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib + + + + + Level3 + + + MaxSpeed + true + true + WIN32;NDEBUG;_CONSOLE;LIBSSH_STATIC;_CRT_SECURE_NO_WARNINGS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions) + true + ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include + MultiThreaded + + + Console + true + true + ../../../common/pyshell/pys/lib + + + + + Level3 + + + MaxSpeed + true + true + NDEBUG;_WINDOWS;%(PreprocessorDefinitions) + true + ../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include + + + Windows + true + true + true + ../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
\ No newline at end of file
diff --git a/server/tp_web/src/tp_web.vs2015.vcxproj.filters b/server/tp_web/src/tp_web.vs2015.vcxproj.filters
new file mode 100644
index 0000000..4aff80b
--- /dev/null
+++ b/server/tp_web/src/tp_web.vs2015.vcxproj.filters
@@ -0,0 +1,129 @@
+ + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + {0155895f-d6be-4e0f-970d-9b6b5c759502} + + + {0da131e6-c187-4632-a82b-c9b84238b97a} + + + {ffe9fc8a-0268-4a71-8681-ab835e44fd83} + + + {f9606240-3c34-4d3d-8623-7913fe36b8b4} + + + {465c4847-7106-4020-ae5f-bcc649ae7ca9} + + + {4a9f6402-c1c7-4c13-a390-794b6ac77697} + + + {5696c8d5-f56a-429d-b058-cbe79a1a17ca} + + + + + main app + + + main app + + + libex\src + + + libex\src + + + libex\src + + + libex\src + + + libex\src + + + libex\src + + + libex\src + + + pyshell\src + + + pyshell\src + + + pyshell\src + + + + + main app + + + + libex\header + + + libex\header + + + libex\header + + + libex\header + + + libex\header + + + libex\header + + + libex\header + + + main app + + + libex\header + + + libex\header + + + libex\header + + + libex\header + + + pyshell\src + + + pyshell\src + + + pyshell\header + + + + + Resource Files + + + + + Resource Files + + + 
\ No newline at end of file
diff --git a/server/tp_web/src/ts_env.cpp b/server/tp_web/src/ts_env.cpp
new file mode 100644
index 0000000..76b625b
--- /dev/null
+++ b/server/tp_web/src/ts_env.cpp
@@ -0,0 +1,94 @@
+#include "ts_env.h"
+
+TsEnv g_env;
+
+TsEnv::TsEnv()
+{}
+
+TsEnv::~TsEnv()
+{}
+
+bool TsEnv::init(void)
+{
+    EXLOG_LEVEL(EX_LOG_LEVEL_INFO);
+
+    ex_exec_file(m_exec_file);
+
+    m_exec_path = m_exec_file;
+    ex_dirname(m_exec_path);
+
+    // By default these directories are located at ../ relative to this executable.
+    // If they do not exist we are probably in development/debug mode, so try to
+    // find them in the share directory under the source repository root.
+    ex_wstr base_path = m_exec_path;
+    ex_path_join(base_path, true, L"..", NULL);
+
+    ex_wstr conf_file = base_path;
+    ex_path_join(conf_file, false, L"etc", L"web.conf", NULL);
+
+    if (ex_is_file_exists(conf_file.c_str()))
+    {
+        m_www_path = base_path;
+        ex_path_join(m_www_path, false, L"www", NULL);
+    }
+    else
+    {
+        EXLOGW("===== DEVELOPMENT MODE =====\n");
+        base_path = m_exec_path;
+        ex_path_join(base_path, true, L"..", L"..", L"..", L"..", L"server", L"share", NULL);
+
conf_file = base_path; + ex_path_join(conf_file, false, L"etc", L"web.conf", NULL); + + m_www_path = m_exec_path; + ex_path_join(m_www_path, true, L"..", L"..", L"..", L"..", L"server", L"www", NULL); + } + + if (!ex_is_file_exists(conf_file.c_str())) + { + EXLOGE("[tpweb] web.conf not found.\n"); + return false; + } + + ExIniFile cfg; + if (!cfg.LoadFromFile(conf_file)) + { + EXLOGE("[tpweb] can not load web.conf.\n"); + return false; + } + + ex_wstr log_file; + ExIniSection* ps = cfg.GetDumySection(); + if (!ps->GetStr(L"log_file", log_file)) + { + ex_wstr log_path = base_path; + ex_path_join(log_path, false, _T("log"), NULL); + EXLOG_FILE(L"tpweb.log", log_path.c_str()); + } + else + { + ex_remove_white_space(log_file); + if (log_file[0] == L'"' || log_file[0] == L'\'') + log_file.erase(0, 1); + if (log_file[ log_file.length() - 1 ] == L'"' || log_file[log_file.length() - 1] == L'\'') + log_file.erase(log_file.length() - 1, 1); + + ex_wstr log_path = log_file; + ex_dirname(log_path); + ex_wstr file_name; + file_name.assign(log_file, log_path.length() + 1, log_file.length()); + + EXLOG_FILE(file_name.c_str(), log_path.c_str()); + } + + int log_level = EX_LOG_LEVEL_INFO; + if (ps->GetInt(L"log_level", log_level)) + { + EXLOGV("[tpweb] log-level: %d\n", log_level); + EXLOG_LEVEL(log_level); + } + + EXLOGI("==============================\n"); + EXLOGI("[tpweb] start...\n"); + + return true; +} diff --git a/server/tp_web/src/ts_env.h b/server/tp_web/src/ts_env.h new file mode 100644 index 0000000..7b88284 --- /dev/null +++ b/server/tp_web/src/ts_env.h @@ -0,0 +1,23 @@ +#ifndef __TS_ENV_H__ +#define __TS_ENV_H__ + +//#include "ts_common.h" +#include + +class TsEnv +{ +public: + TsEnv(); + ~TsEnv(); + + bool init(void); + +public: + ex_wstr m_exec_file; + ex_wstr m_exec_path; + ex_wstr m_www_path; +}; + +extern TsEnv g_env; + +#endif // __TS_ENV_H__ diff --git a/server/tp_web/src/ts_ver.h b/server/tp_web/src/ts_ver.h new file mode 100644 index 0000000..d7f7ba3 --- /dev/null +++ b/server/tp_web/src/ts_ver.h @@ -0,0 +1,6 @@ +#ifndef __TS_SERVER_VER_H__ +#define __TS_SERVER_VER_H__ + +#define TP_SERVER_VER L"1.6.225.1" + +#endif // __TS_SERVER_VER_H__ diff --git a/server/www/packages/packages-common/backports_abc.py b/server/www/packages/packages-common/backports_abc.py new file mode 100644 index 0000000..c48b7b0 --- /dev/null +++ b/server/www/packages/packages-common/backports_abc.py @@ -0,0 +1,202 @@ +""" +Patch recently added ABCs into the standard lib module +``collections.abc`` (Py3) or ``collections`` (Py2). 
+ +Usage:: + + import backports_abc + backports_abc.patch() + +or:: + + try: + from collections.abc import Generator + except ImportError: + from backports_abc import Generator +""" + +try: + import collections.abc as _collections_abc +except ImportError: + import collections as _collections_abc + + +def mk_gen(): + from abc import abstractmethod + + required_methods = ( + '__iter__', '__next__' if hasattr(iter(()), '__next__') else 'next', + 'send', 'throw', 'close') + + class Generator(_collections_abc.Iterator): + __slots__ = () + + if '__next__' in required_methods: + def __next__(self): + return self.send(None) + else: + def next(self): + return self.send(None) + + @abstractmethod + def send(self, value): + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError('generator ignored GeneratorExit') + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + mro = C.__mro__ + for method in required_methods: + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + generator = type((lambda: (yield))()) + Generator.register(generator) + return Generator + + +def mk_awaitable(): + from abc import abstractmethod, ABCMeta + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + for B in C.__mro__: + if '__await__' in B.__dict__: + if B.__dict__['__await__']: + return True + break + return NotImplemented + + # calling metaclass directly as syntax differs in Py2/Py3 + Awaitable = ABCMeta('Awaitable', (), { + '__slots__': (), + '__await__': __await__, + '__subclasshook__': __subclasshook__, + }) + + return Awaitable + + +def mk_coroutine(): + from abc import abstractmethod + + class Coroutine(Awaitable): + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine. 
+            """
+            try:
+                self.throw(GeneratorExit)
+            except (GeneratorExit, StopIteration):
+                pass
+            else:
+                raise RuntimeError('coroutine ignored GeneratorExit')
+
+        @classmethod
+        def __subclasshook__(cls, C):
+            if cls is Coroutine:
+                mro = C.__mro__
+                for method in ('__await__', 'send', 'throw', 'close'):
+                    for base in mro:
+                        if method in base.__dict__:
+                            break
+                    else:
+                        return NotImplemented
+                return True
+            return NotImplemented
+
+    return Coroutine
+
+
+###
+# make all ABCs available in this module
+
+try:
+    Generator = _collections_abc.Generator
+except AttributeError:
+    Generator = mk_gen()
+
+try:
+    Awaitable = _collections_abc.Awaitable
+except AttributeError:
+    Awaitable = mk_awaitable()
+
+try:
+    Coroutine = _collections_abc.Coroutine
+except AttributeError:
+    Coroutine = mk_coroutine()
+
+try:
+    from inspect import isawaitable
+except ImportError:
+    def isawaitable(obj):
+        return isinstance(obj, Awaitable)
+
+
+###
+# allow patching the stdlib
+
+PATCHED = {}
+
+
+def patch(patch_inspect=True):
+    """
+    Main entry point for patching the ``collections.abc`` and ``inspect``
+    standard library modules.
+    """
+    PATCHED['collections.abc.Generator'] = _collections_abc.Generator = Generator
+    PATCHED['collections.abc.Coroutine'] = _collections_abc.Coroutine = Coroutine
+    PATCHED['collections.abc.Awaitable'] = _collections_abc.Awaitable = Awaitable
+
+    if patch_inspect:
+        import inspect
+        PATCHED['inspect.isawaitable'] = inspect.isawaitable = isawaitable
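
A quick check of what this shim guarantees (a sketch: on interpreters whose stdlib already provides the ABCs these names resolve to the stdlib classes, otherwise to the fallbacks defined above):

    try:
        from collections.abc import Generator
    except ImportError:
        from backports_abc import Generator

    def gen():
        yield 1

    assert isinstance(gen(), Generator)  # generator objects satisfy the ABC

    import backports_abc
    backports_abc.patch()  # retrofits the same names onto the stdlib module
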
diff --git a/server/www/packages/packages-common/mako/__init__.py b/server/www/packages/packages-common/mako/__init__.py
new file mode 100644
index 0000000..74526ec
--- /dev/null
+++ b/server/www/packages/packages-common/mako/__init__.py
@@ -0,0 +1,8 @@
+# mako/__init__.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+__version__ = '1.0.3'
diff --git a/server/www/packages/packages-common/mako/_ast_util.py b/server/www/packages/packages-common/mako/_ast_util.py
new file mode 100644
index 0000000..cc298d5
--- /dev/null
+++ b/server/www/packages/packages-common/mako/_ast_util.py
@@ -0,0 +1,851 @@
+# mako/_ast_util.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+    ast
+    ~~~
+
+    The `ast` module helps Python applications to process trees of the Python
+    abstract syntax grammar.  The abstract syntax itself might change with
+    each Python release; this module helps to find out programmatically what
+    the current grammar looks like and allows modifications of it.
+
+    An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
+    a flag to the `compile()` builtin function or by using the `parse()`
+    function from this module.  The result will be a tree of objects whose
+    classes all inherit from `ast.AST`.
+
+    A modified abstract syntax tree can be compiled into a Python code object
+    using the built-in `compile()` function.
+
+    Additionally various helper functions are provided that make working with
+    the trees simpler.  The main intention of the helper functions and this
+    module in general is to provide an easy to use interface for libraries
+    that work tightly with the python syntax (template engines for example).
+
+
+    :copyright: Copyright 2008 by Armin Ronacher.
+    :license: Python License.
+"""
+from _ast import *  # noqa
+from mako.compat import arg_stringname
+
+BOOLOP_SYMBOLS = {
+    And: 'and',
+    Or: 'or'
+}
+
+BINOP_SYMBOLS = {
+    Add: '+',
+    Sub: '-',
+    Mult: '*',
+    Div: '/',
+    FloorDiv: '//',
+    Mod: '%',
+    LShift: '<<',
+    RShift: '>>',
+    BitOr: '|',
+    BitAnd: '&',
+    BitXor: '^'
+}
+
+CMPOP_SYMBOLS = {
+    Eq: '==',
+    Gt: '>',
+    GtE: '>=',
+    In: 'in',
+    Is: 'is',
+    IsNot: 'is not',
+    Lt: '<',
+    LtE: '<=',
+    NotEq: '!=',
+    NotIn: 'not in'
+}
+
+UNARYOP_SYMBOLS = {
+    Invert: '~',
+    Not: 'not',
+    UAdd: '+',
+    USub: '-'
+}
+
+ALL_SYMBOLS = {}
+ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
+ALL_SYMBOLS.update(BINOP_SYMBOLS)
+ALL_SYMBOLS.update(CMPOP_SYMBOLS)
+ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
+
+
+def parse(expr, filename='<unknown>', mode='exec'):
+    """Parse an expression into an AST node."""
+    return compile(expr, filename, mode, PyCF_ONLY_AST)
+
+
+def to_source(node, indent_with=' ' * 4):
+    """
+    This function can convert a node tree back into python sourcecode.  This
+    is useful for debugging purposes, especially if you're dealing with custom
+    asts not generated by python itself.
+
+    It could be that the sourcecode is evaluable when the AST itself is not
+    compilable / evaluable.  The reason for this is that the AST contains some
+    more data than regular sourcecode does, which is dropped during
+    conversion.
+
+    Each level of indentation is replaced with `indent_with`.  Per default this
+    parameter is equal to four spaces as suggested by PEP 8, but it might be
+    adjusted to match the application's styleguide.
+    """
+    generator = SourceGenerator(indent_with)
+    generator.visit(node)
+    return ''.join(generator.result)
+
+
+def dump(node):
+    """
+    A very verbose representation of the node passed.  This is useful for
+    debugging purposes.
+    """
+    def _format(node):
+        if isinstance(node, AST):
+            return '%s(%s)' % (node.__class__.__name__,
+                               ', '.join('%s=%s' % (a, _format(b))
+                                         for a, b in iter_fields(node)))
+        elif isinstance(node, list):
+            return '[%s]' % ', '.join(_format(x) for x in node)
+        return repr(node)
+    if not isinstance(node, AST):
+        raise TypeError('expected AST, got %r' % node.__class__.__name__)
+    return _format(node)
+
+
+def copy_location(new_node, old_node):
+    """
+    Copy the source location hint (`lineno` and `col_offset`) from the
+    old to the new node if possible and return the new one.
+    """
+    for attr in 'lineno', 'col_offset':
+        if attr in old_node._attributes and attr in new_node._attributes \
+           and hasattr(old_node, attr):
+            setattr(new_node, attr, getattr(old_node, attr))
+    return new_node
+
+
+def fix_missing_locations(node):
+    """
+    Some nodes require a line number and the column offset.  Without that
+    information the compiler will abort the compilation.  Because it can be
+    a dull task to add appropriate line numbers and column offsets when
+    adding new nodes this function can help.  It copies the line number and
+    column offset of the parent node to the child nodes without this
+    information.
+
+    Unlike `copy_location` this works recursively and won't touch nodes that
+    already have location information.
+    """
+    def _fix(node, lineno, col_offset):
+        if 'lineno' in node._attributes:
+            if not hasattr(node, 'lineno'):
+                node.lineno = lineno
+            else:
+                lineno = node.lineno
+        if 'col_offset' in node._attributes:
+            if not hasattr(node, 'col_offset'):
+                node.col_offset = col_offset
+            else:
+                col_offset = node.col_offset
+        for child in iter_child_nodes(node):
+            _fix(child, lineno, col_offset)
+    _fix(node, 1, 0)
+    return node
+
+
+def increment_lineno(node, n=1):
+    """
+    Increment the line numbers of all nodes by `n` if they have line number
+    attributes.  This is useful to "move code" to a different location in a
+    file.
+    """
+    for node in walk(node):
+        if 'lineno' in node._attributes:
+            node.lineno = getattr(node, 'lineno', 0) + n
+
+
+def iter_fields(node):
+    """Iterate over all fields of a node, only yielding existing fields."""
+    # CPython 2.5 compat
+    if not hasattr(node, '_fields') or not node._fields:
+        return
+    for field in node._fields:
+        try:
+            yield field, getattr(node, field)
+        except AttributeError:
+            pass
+
+
+def get_fields(node):
+    """Like `iter_fields` but returns a dict."""
+    return dict(iter_fields(node))
+
+
+def iter_child_nodes(node):
+    """Iterate over all child nodes of a node."""
+    for name, field in iter_fields(node):
+        if isinstance(field, AST):
+            yield field
+        elif isinstance(field, list):
+            for item in field:
+                if isinstance(item, AST):
+                    yield item
+
+
+def get_child_nodes(node):
+    """Like `iter_child_nodes` but returns a list."""
+    return list(iter_child_nodes(node))
+
+
+def get_compile_mode(node):
+    """
+    Get the mode for `compile` of a given node.  If the node is not a `mod`
+    node (`Expression`, `Module` etc.) a `TypeError` is thrown.
+    """
+    if not isinstance(node, mod):
+        raise TypeError('expected mod node, got %r' % node.__class__.__name__)
+    return {
+        Expression: 'eval',
+        Interactive: 'single'
+    }.get(node.__class__, 'exec')
+
+
+def get_docstring(node):
+    """
+    Return the docstring for the given node or `None` if no docstring can be
+    found.  If the node provided does not accept docstrings a `TypeError`
+    will be raised.
+    """
+    if not isinstance(node, (FunctionDef, ClassDef, Module)):
+        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
+    if node.body and isinstance(node.body[0], Str):
+        return node.body[0].s
+
+
+def walk(node):
+    """
+    Iterate over all nodes.  This is useful if you only want to modify nodes in
+    place and don't care about the context or the order in which the nodes are
+    returned.
+    """
+    from collections import deque
+    todo = deque([node])
+    while todo:
+        node = todo.popleft()
+        todo.extend(iter_child_nodes(node))
+        yield node
+
+
+class NodeVisitor(object):
+
+    """
+    Walks the abstract syntax tree and calls visitor functions for every node
+    found.  The visitor functions may return values which will be forwarded
+    by the `visit` method.
+
+    Per default the visitor functions for the nodes are ``'visit_'`` +
+    class name of the node.  So a `TryFinally` node visit function would
+    be `visit_TryFinally`.  This behavior can be changed by overriding
+    the `get_visitor` function.  If no visitor function exists for a node
+    (return value `None`) the `generic_visit` visitor is used instead.
+
+    Don't use the `NodeVisitor` if you want to apply changes to nodes during
+    traversing.  For this a special visitor exists (`NodeTransformer`) that
+    allows modifications.
+    """
+
+    def get_visitor(self, node):
+        """
+        Return the visitor function for this node or `None` if no visitor
+        exists for this node.
+        In that case the generic visit function is
+        used instead.
+        """
+        method = 'visit_' + node.__class__.__name__
+        return getattr(self, method, None)
+
+    def visit(self, node):
+        """Visit a node."""
+        f = self.get_visitor(node)
+        if f is not None:
+            return f(node)
+        return self.generic_visit(node)
+
+    def generic_visit(self, node):
+        """Called if no explicit visitor function exists for a node."""
+        for field, value in iter_fields(node):
+            if isinstance(value, list):
+                for item in value:
+                    if isinstance(item, AST):
+                        self.visit(item)
+            elif isinstance(value, AST):
+                self.visit(value)
+
+
+class NodeTransformer(NodeVisitor):
+
+    """
+    Walks the abstract syntax tree and allows modifications of nodes.
+
+    The `NodeTransformer` will walk the AST and use the return value of the
+    visitor functions to replace or remove the old node.  If the return
+    value of the visitor function is `None` the node will be removed
+    from the previous location otherwise it's replaced with the return
+    value.  The return value may be the original node in which case no
+    replacement takes place.
+
+    Here is an example transformer that rewrites all `foo` to `data['foo']`::
+
+        class RewriteName(NodeTransformer):
+
+            def visit_Name(self, node):
+                return copy_location(Subscript(
+                    value=Name(id='data', ctx=Load()),
+                    slice=Index(value=Str(s=node.id)),
+                    ctx=node.ctx
+                ), node)
+
+    Keep in mind that if the node you're operating on has child nodes
+    you must either transform the child nodes yourself or call the generic
+    visit function for the node first.
+
+    Nodes that were part of a collection of statements (that applies to
+    all statement nodes) may also return a list of nodes rather than just
+    a single node.
+
+    Usually you use the transformer like this::
+
+        node = YourTransformer().visit(node)
+    """
+
+    def generic_visit(self, node):
+        for field, old_value in iter_fields(node):
+            old_value = getattr(node, field, None)
+            if isinstance(old_value, list):
+                new_values = []
+                for value in old_value:
+                    if isinstance(value, AST):
+                        value = self.visit(value)
+                        if value is None:
+                            continue
+                        elif not isinstance(value, AST):
+                            new_values.extend(value)
+                            continue
+                    new_values.append(value)
+                old_value[:] = new_values
+            elif isinstance(old_value, AST):
+                new_node = self.visit(old_value)
+                if new_node is None:
+                    delattr(node, field)
+                else:
+                    setattr(node, field, new_node)
+        return node
+
+
+class SourceGenerator(NodeVisitor):
+
+    """
+    This visitor is able to transform a well formed syntax tree into python
+    sourcecode.  For more details have a look at the docstring of the
+    `to_source` function.
+ """ + + def __init__(self, indent_with): + self.result = [] + self.indent_with = indent_with + self.indentation = 0 + self.new_lines = 0 + + def write(self, x): + if self.new_lines: + if self.result: + self.result.append('\n' * self.new_lines) + self.result.append(self.indent_with * self.indentation) + self.new_lines = 0 + self.result.append(x) + + def newline(self, n=1): + self.new_lines = max(self.new_lines, n) + + def body(self, statements): + self.new_line = True + self.indentation += 1 + for stmt in statements: + self.visit(stmt) + self.indentation -= 1 + + def body_or_else(self, node): + self.body(node.body) + if node.orelse: + self.newline() + self.write('else:') + self.body(node.orelse) + + def signature(self, node): + want_comma = [] + + def write_comma(): + if want_comma: + self.write(', ') + else: + want_comma.append(True) + + padding = [None] * (len(node.args) - len(node.defaults)) + for arg, default in zip(node.args, padding + node.defaults): + write_comma() + self.visit(arg) + if default is not None: + self.write('=') + self.visit(default) + if node.vararg is not None: + write_comma() + self.write('*' + arg_stringname(node.vararg)) + if node.kwarg is not None: + write_comma() + self.write('**' + arg_stringname(node.kwarg)) + + def decorators(self, node): + for decorator in node.decorator_list: + self.newline() + self.write('@') + self.visit(decorator) + + # Statements + + def visit_Assign(self, node): + self.newline() + for idx, target in enumerate(node.targets): + if idx: + self.write(', ') + self.visit(target) + self.write(' = ') + self.visit(node.value) + + def visit_AugAssign(self, node): + self.newline() + self.visit(node.target) + self.write(BINOP_SYMBOLS[type(node.op)] + '=') + self.visit(node.value) + + def visit_ImportFrom(self, node): + self.newline() + self.write('from %s%s import ' % ('.' * node.level, node.module)) + for idx, item in enumerate(node.names): + if idx: + self.write(', ') + self.write(item) + + def visit_Import(self, node): + self.newline() + for item in node.names: + self.write('import ') + self.visit(item) + + def visit_Expr(self, node): + self.newline() + self.generic_visit(node) + + def visit_FunctionDef(self, node): + self.newline(n=2) + self.decorators(node) + self.newline() + self.write('def %s(' % node.name) + self.signature(node.args) + self.write('):') + self.body(node.body) + + def visit_ClassDef(self, node): + have_args = [] + + def paren_or_comma(): + if have_args: + self.write(', ') + else: + have_args.append(True) + self.write('(') + + self.newline(n=3) + self.decorators(node) + self.newline() + self.write('class %s' % node.name) + for base in node.bases: + paren_or_comma() + self.visit(base) + # XXX: the if here is used to keep this module compatible + # with python 2.6. 
+        if hasattr(node, 'keywords'):
+            for keyword in node.keywords:
+                paren_or_comma()
+                self.write(keyword.arg + '=')
+                self.visit(keyword.value)
+        if getattr(node, "starargs", None):
+            paren_or_comma()
+            self.write('*')
+            self.visit(node.starargs)
+        if getattr(node, "kwargs", None):
+            paren_or_comma()
+            self.write('**')
+            self.visit(node.kwargs)
+        self.write(have_args and '):' or ':')
+        self.body(node.body)
+
+    def visit_If(self, node):
+        self.newline()
+        self.write('if ')
+        self.visit(node.test)
+        self.write(':')
+        self.body(node.body)
+        while True:
+            else_ = node.orelse
+            if len(else_) == 1 and isinstance(else_[0], If):
+                node = else_[0]
+                self.newline()
+                self.write('elif ')
+                self.visit(node.test)
+                self.write(':')
+                self.body(node.body)
+            else:
+                self.newline()
+                self.write('else:')
+                self.body(else_)
+                break
+
+    def visit_For(self, node):
+        self.newline()
+        self.write('for ')
+        self.visit(node.target)
+        self.write(' in ')
+        self.visit(node.iter)
+        self.write(':')
+        self.body_or_else(node)
+
+    def visit_While(self, node):
+        self.newline()
+        self.write('while ')
+        self.visit(node.test)
+        self.write(':')
+        self.body_or_else(node)
+
+    def visit_With(self, node):
+        self.newline()
+        self.write('with ')
+        self.visit(node.context_expr)
+        if node.optional_vars is not None:
+            self.write(' as ')
+            self.visit(node.optional_vars)
+        self.write(':')
+        self.body(node.body)
+
+    def visit_Pass(self, node):
+        self.newline()
+        self.write('pass')
+
+    def visit_Print(self, node):
+        # XXX: python 2.6 only
+        self.newline()
+        self.write('print ')
+        want_comma = False
+        if node.dest is not None:
+            self.write(' >> ')
+            self.visit(node.dest)
+            want_comma = True
+        for value in node.values:
+            if want_comma:
+                self.write(', ')
+            self.visit(value)
+            want_comma = True
+        if not node.nl:
+            self.write(',')
+
+    def visit_Delete(self, node):
+        self.newline()
+        self.write('del ')
+        for idx, target in enumerate(node.targets):
+            if idx:
+                self.write(', ')
+            self.visit(target)
+
+    def visit_TryExcept(self, node):
+        self.newline()
+        self.write('try:')
+        self.body(node.body)
+        for handler in node.handlers:
+            self.visit(handler)
+
+    def visit_TryFinally(self, node):
+        self.newline()
+        self.write('try:')
+        self.body(node.body)
+        self.newline()
+        self.write('finally:')
+        self.body(node.finalbody)
+
+    def visit_Global(self, node):
+        self.newline()
+        self.write('global ' + ', '.join(node.names))
+
+    def visit_Nonlocal(self, node):
+        self.newline()
+        self.write('nonlocal ' + ', '.join(node.names))
+
+    def visit_Return(self, node):
+        self.newline()
+        self.write('return ')
+        self.visit(node.value)
+
+    def visit_Break(self, node):
+        self.newline()
+        self.write('break')
+
+    def visit_Continue(self, node):
+        self.newline()
+        self.write('continue')
+
+    def visit_Raise(self, node):
+        # XXX: Python 2.6 / 3.0 compatibility
+        self.newline()
+        self.write('raise')
+        if hasattr(node, 'exc') and node.exc is not None:
+            self.write(' ')
+            self.visit(node.exc)
+            if node.cause is not None:
+                self.write(' from ')
+                self.visit(node.cause)
+        elif hasattr(node, 'type') and node.type is not None:
+            self.visit(node.type)
+            if node.inst is not None:
+                self.write(', ')
+                self.visit(node.inst)
+            if node.tback is not None:
+                self.write(', ')
+                self.visit(node.tback)
+
+    # Expressions
+
+    def visit_Attribute(self, node):
+        self.visit(node.value)
+        self.write('.' + node.attr)
+
+    def visit_Call(self, node):
+        want_comma = []
+
+        def write_comma():
+            if want_comma:
+                self.write(', ')
+            else:
+                want_comma.append(True)
+
+        self.visit(node.func)
+        self.write('(')
+        for arg in node.args:
+            write_comma()
+            self.visit(arg)
+        for keyword in node.keywords:
+            write_comma()
+            self.write(keyword.arg + '=')
+            self.visit(keyword.value)
+        if getattr(node, "starargs", None):
+            write_comma()
+            self.write('*')
+            self.visit(node.starargs)
+        if getattr(node, "kwargs", None):
+            write_comma()
+            self.write('**')
+            self.visit(node.kwargs)
+        self.write(')')
+
+    def visit_Name(self, node):
+        self.write(node.id)
+
+    def visit_NameConstant(self, node):
+        self.write(str(node.value))
+
+    def visit_arg(self, node):
+        self.write(node.arg)
+
+    def visit_Str(self, node):
+        self.write(repr(node.s))
+
+    def visit_Bytes(self, node):
+        self.write(repr(node.s))
+
+    def visit_Num(self, node):
+        self.write(repr(node.n))
+
+    def visit_Tuple(self, node):
+        self.write('(')
+        idx = -1
+        for idx, item in enumerate(node.elts):
+            if idx:
+                self.write(', ')
+            self.visit(item)
+        self.write(idx and ')' or ',)')
+
+    def sequence_visit(left, right):
+        def visit(self, node):
+            self.write(left)
+            for idx, item in enumerate(node.elts):
+                if idx:
+                    self.write(', ')
+                self.visit(item)
+            self.write(right)
+        return visit
+
+    visit_List = sequence_visit('[', ']')
+    visit_Set = sequence_visit('{', '}')
+    del sequence_visit
+
+    def visit_Dict(self, node):
+        self.write('{')
+        for idx, (key, value) in enumerate(zip(node.keys, node.values)):
+            if idx:
+                self.write(', ')
+            self.visit(key)
+            self.write(': ')
+            self.visit(value)
+        self.write('}')
+
+    def visit_BinOp(self, node):
+        self.write('(')
+        self.visit(node.left)
+        self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
+        self.visit(node.right)
+        self.write(')')
+
+    def visit_BoolOp(self, node):
+        self.write('(')
+        for idx, value in enumerate(node.values):
+            if idx:
+                self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
+            self.visit(value)
+        self.write(')')
+
+    def visit_Compare(self, node):
+        self.write('(')
+        self.visit(node.left)
+        for op, right in zip(node.ops, node.comparators):
+            self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
+            self.visit(right)
+        self.write(')')
+
+    def visit_UnaryOp(self, node):
+        self.write('(')
+        op = UNARYOP_SYMBOLS[type(node.op)]
+        self.write(op)
+        if op == 'not':
+            self.write(' ')
+        self.visit(node.operand)
+        self.write(')')
+
+    def visit_Subscript(self, node):
+        self.visit(node.value)
+        self.write('[')
+        self.visit(node.slice)
+        self.write(']')
+
+    def visit_Slice(self, node):
+        if node.lower is not None:
+            self.visit(node.lower)
+        self.write(':')
+        if node.upper is not None:
+            self.visit(node.upper)
+        if node.step is not None:
+            self.write(':')
+            if not (isinstance(node.step, Name) and node.step.id == 'None'):
+                self.visit(node.step)
+
+    def visit_ExtSlice(self, node):
+        for idx, item in enumerate(node.dims):
+            if idx:
+                self.write(', ')
+            self.visit(item)
+
+    def visit_Yield(self, node):
+        self.write('yield ')
+        self.visit(node.value)
+
+    def visit_Lambda(self, node):
+        self.write('lambda ')
+        self.signature(node.args)
+        self.write(': ')
+        self.visit(node.body)
+
+    def visit_Ellipsis(self, node):
+        self.write('Ellipsis')
+
+    def generator_visit(left, right):
+        def visit(self, node):
+            self.write(left)
+            self.visit(node.elt)
+            for comprehension in node.generators:
+                self.visit(comprehension)
+            self.write(right)
+        return visit
+
+    visit_ListComp = generator_visit('[', ']')
+    visit_GeneratorExp = generator_visit('(', ')')
+    visit_SetComp = generator_visit('{', '}')
+    del generator_visit
+
+    def visit_DictComp(self, node):
+        self.write('{')
+        self.visit(node.key)
+        self.write(': ')
+        self.visit(node.value)
+        for comprehension in node.generators:
+            self.visit(comprehension)
+        self.write('}')
+
+    def visit_IfExp(self, node):
+        self.visit(node.body)
+        self.write(' if ')
+        self.visit(node.test)
+        self.write(' else ')
+        self.visit(node.orelse)
+
+    def visit_Starred(self, node):
+        self.write('*')
+        self.visit(node.value)
+
+    def visit_Repr(self, node):
+        # XXX: python 2.6 only
+        self.write('`')
+        self.visit(node.value)
+        self.write('`')
+
+    # Helper Nodes
+
+    def visit_alias(self, node):
+        self.write(node.name)
+        if node.asname is not None:
+            self.write(' as ' + node.asname)
+
+    def visit_comprehension(self, node):
+        self.write(' for ')
+        self.visit(node.target)
+        self.write(' in ')
+        self.visit(node.iter)
+        if node.ifs:
+            for if_ in node.ifs:
+                self.write(' if ')
+                self.visit(if_)
+
+    def visit_excepthandler(self, node):
+        self.newline()
+        self.write('except')
+        if node.type is not None:
+            self.write(' ')
+            self.visit(node.type)
+        if node.name is not None:
+            self.write(' as ')
+            self.visit(node.name)
+        self.write(':')
+        self.body(node.body)
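
Taken together, parse(), NodeTransformer and to_source() give the parse, transform and regenerate round trip these helpers exist for. A small sketch (the identifier names below are illustrative):

    from mako import _ast_util as ast_util

    class RenameFoo(ast_util.NodeTransformer):
        def visit_Name(self, node):
            if node.id == 'foo':
                node.id = 'bar'  # rewrite every `foo` into `bar`
            return node

    tree = ast_util.parse('result = foo + 1')
    tree = RenameFoo().visit(tree)
    print(ast_util.to_source(tree))  # prints: result = (bar + 1)
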
diff --git a/server/www/packages/packages-common/mako/ast.py b/server/www/packages/packages-common/mako/ast.py
new file mode 100644
index 0000000..c55b29c
--- /dev/null
+++ b/server/www/packages/packages-common/mako/ast.py
@@ -0,0 +1,191 @@
+# mako/ast.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""utilities for analyzing expressions and blocks of Python
+code, as well as generating Python from AST nodes"""
+
+from mako import exceptions, pyparser, compat
+import re
+
+
+class PythonCode(object):
+
+    """represents information about a string containing Python code"""
+
+    def __init__(self, code, **exception_kwargs):
+        self.code = code
+
+        # represents all identifiers which are assigned to at some point in
+        # the code
+        self.declared_identifiers = set()
+
+        # represents all identifiers which are referenced before their
+        # assignment, if any
+        self.undeclared_identifiers = set()
+
+        # note that an identifier can be in both the undeclared and declared
+        # lists.
+
+        # using AST to parse instead of using code.co_varnames,
+        # code.co_names has several advantages:
+        # - we can locate an identifier as "undeclared" even if
+        #   it is declared later in the same block of code
+        # - AST is less likely to break with version changes
+        #   (for example, the behavior of co_names changed a little bit
+        #   in python version 2.5)
+        if isinstance(code, compat.string_types):
+            expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
+        else:
+            expr = code
+
+        f = pyparser.FindIdentifiers(self, **exception_kwargs)
+        f.visit(expr)
+
+
+class ArgumentList(object):
+
+    """parses a fragment of code as a comma-separated list of expressions"""
+
+    def __init__(self, code, **exception_kwargs):
+        self.codeargs = []
+        self.args = []
+        self.declared_identifiers = set()
+        self.undeclared_identifiers = set()
+        if isinstance(code, compat.string_types):
+            if re.match(r"\S", code) and not re.match(r",\s*$", code):
+                # if there's text and no trailing comma, ensure it's parsed
+                # as a tuple by adding a trailing comma
+                code += ","
+            expr = pyparser.parse(code, "exec", **exception_kwargs)
+        else:
+            expr = code
+
+        f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
+        f.visit(expr)
+
+
+class PythonFragment(PythonCode):
+
+    """extends PythonCode to provide identifier lookups in partial control
+    statements
+
+    e.g.
+        for x in 5:
+        elif y==9:
+        except (MyException, e):
+    etc.
+    """
+
+    def __init__(self, code, **exception_kwargs):
+        m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
+        if not m:
+            raise exceptions.CompileException(
+                "Fragment '%s' is not a partial control statement" %
+                code, **exception_kwargs)
+        if m.group(3):
+            code = code[:m.start(3)]
+        (keyword, expr) = m.group(1, 2)
+        if keyword in ['for', 'if', 'while']:
+            code = code + "pass"
+        elif keyword == 'try':
+            code = code + "pass\nexcept:pass"
+        elif keyword == 'elif' or keyword == 'else':
+            code = "if False:pass\n" + code + "pass"
+        elif keyword == 'except':
+            code = "try:pass\n" + code + "pass"
+        elif keyword == 'with':
+            code = code + "pass"
+        else:
+            raise exceptions.CompileException(
+                "Unsupported control keyword: '%s'" %
+                keyword, **exception_kwargs)
+        super(PythonFragment, self).__init__(code, **exception_kwargs)
+
+
+class FunctionDecl(object):
+
+    """function declaration"""
+
+    def __init__(self, code, allow_kwargs=True, **exception_kwargs):
+        self.code = code
+        expr = pyparser.parse(code, "exec", **exception_kwargs)
+
+        f = pyparser.ParseFunc(self, **exception_kwargs)
+        f.visit(expr)
+        if not hasattr(self, 'funcname'):
+            raise exceptions.CompileException(
+                "Code '%s' is not a function declaration" % code,
+                **exception_kwargs)
+        if not allow_kwargs and self.kwargs:
+            raise exceptions.CompileException(
+                "'**%s' keyword argument not allowed here" %
+                self.kwargnames[-1], **exception_kwargs)
+
+    def get_argument_expressions(self, as_call=False):
+        """Return the argument declarations of this FunctionDecl as a printable
+        list.
+
+        By default the return value is appropriate for writing in a ``def``;
+        set `as_call` to true to build arguments to be passed to the function
+        instead (assuming locals with the same names as the arguments exist).
+ """ + + namedecls = [] + + # Build in reverse order, since defaults and slurpy args come last + argnames = self.argnames[::-1] + kwargnames = self.kwargnames[::-1] + defaults = self.defaults[::-1] + kwdefaults = self.kwdefaults[::-1] + + # Named arguments + if self.kwargs: + namedecls.append("**" + kwargnames.pop(0)) + + for name in kwargnames: + # Keyword-only arguments must always be used by name, so even if + # this is a call, print out `foo=foo` + if as_call: + namedecls.append("%s=%s" % (name, name)) + elif kwdefaults: + default = kwdefaults.pop(0) + if default is None: + # The AST always gives kwargs a default, since you can do + # `def foo(*, a=1, b, c=3)` + namedecls.append(name) + else: + namedecls.append("%s=%s" % ( + name, pyparser.ExpressionGenerator(default).value())) + else: + namedecls.append(name) + + # Positional arguments + if self.varargs: + namedecls.append("*" + argnames.pop(0)) + + for name in argnames: + if as_call or not defaults: + namedecls.append(name) + else: + default = defaults.pop(0) + namedecls.append("%s=%s" % ( + name, pyparser.ExpressionGenerator(default).value())) + + namedecls.reverse() + return namedecls + + @property + def allargnames(self): + return tuple(self.argnames) + tuple(self.kwargnames) + + +class FunctionArgs(FunctionDecl): + + """the argument portion of a function declaration""" + + def __init__(self, code, **kwargs): + super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, + **kwargs) diff --git a/server/www/packages/packages-common/mako/cache.py b/server/www/packages/packages-common/mako/cache.py new file mode 100644 index 0000000..c7aabd2 --- /dev/null +++ b/server/www/packages/packages-common/mako/cache.py @@ -0,0 +1,240 @@ +# mako/cache.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from mako import compat, util + +_cache_plugins = util.PluginLoader("mako.cache") + +register_plugin = _cache_plugins.register +register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl") + + +class Cache(object): + + """Represents a data content cache made available to the module + space of a specific :class:`.Template` object. + + .. versionadded:: 0.6 + :class:`.Cache` by itself is mostly a + container for a :class:`.CacheImpl` object, which implements + a fixed API to provide caching services; specific subclasses exist to + implement different + caching strategies. Mako includes a backend that works with + the Beaker caching system. Beaker itself then supports + a number of backends (i.e. file, memory, memcached, etc.) + + The construction of a :class:`.Cache` is part of the mechanics + of a :class:`.Template`, and programmatic access to this + cache is typically via the :attr:`.Template.cache` attribute. + + """ + + impl = None + """Provide the :class:`.CacheImpl` in use by this :class:`.Cache`. + + This accessor allows a :class:`.CacheImpl` with additional + methods beyond that of :class:`.Cache` to be used programmatically. + + """ + + id = None + """Return the 'id' that identifies this cache. + + This is a value that should be globally unique to the + :class:`.Template` associated with this cache, and can + be used by a caching system to name a local container + for data specific to this template. + + """ + + starttime = None + """Epochal time value for when the owning :class:`.Template` was + first compiled. 
+ + A cache implementation may wish to invalidate data earlier than + this timestamp; this has the effect of the cache for a specific + :class:`.Template` starting clean any time the :class:`.Template` + is recompiled, such as when the original template file changed on + the filesystem. + + """ + + def __init__(self, template, *args): + # check for a stale template calling the + # constructor + if isinstance(template, compat.string_types) and args: + return + self.template = template + self.id = template.module.__name__ + self.starttime = template.module._modified_time + self._def_regions = {} + self.impl = self._load_impl(self.template.cache_impl) + + def _load_impl(self, name): + return _cache_plugins.load(name)(self) + + def get_or_create(self, key, creation_function, **kw): + """Retrieve a value from the cache, using the given creation function + to generate a new value.""" + + return self._ctx_get_or_create(key, creation_function, None, **kw) + + def _ctx_get_or_create(self, key, creation_function, context, **kw): + """Retrieve a value from the cache, using the given creation function + to generate a new value.""" + + if not self.template.cache_enabled: + return creation_function() + + return self.impl.get_or_create( + key, + creation_function, + **self._get_cache_kw(kw, context)) + + def set(self, key, value, **kw): + """Place a value in the cache. + + :param key: the value's key. + :param value: the value. + :param \**kw: cache configuration arguments. + + """ + + self.impl.set(key, value, **self._get_cache_kw(kw, None)) + + put = set + """A synonym for :meth:`.Cache.set`. + + This is here for backwards compatibility. + + """ + + def get(self, key, **kw): + """Retrieve a value from the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. The + backend is configured using these arguments upon first request. + Subsequent requests that use the same series of configuration + values will use that same backend. + + """ + return self.impl.get(key, **self._get_cache_kw(kw, None)) + + def invalidate(self, key, **kw): + """Invalidate a value in the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. The + backend is configured using these arguments upon first request. + Subsequent requests that use the same series of configuration + values will use that same backend. + + """ + self.impl.invalidate(key, **self._get_cache_kw(kw, None)) + + def invalidate_body(self): + """Invalidate the cached content of the "body" method for this + template. + + """ + self.invalidate('render_body', __M_defname='render_body') + + def invalidate_def(self, name): + """Invalidate the cached content of a particular ``<%def>`` within this + template. + + """ + + self.invalidate('render_%s' % name, __M_defname='render_%s' % name) + + def invalidate_closure(self, name): + """Invalidate a nested ``<%def>`` within this template. + + Caching of nested defs is a blunt tool as there is no + management of scope -- nested defs that use cache tags + need to have names unique of all other nested defs in the + template, else their content will be overwritten by + each other. 
+ + """ + + self.invalidate(name, __M_defname=name) + + def _get_cache_kw(self, kw, context): + defname = kw.pop('__M_defname', None) + if not defname: + tmpl_kw = self.template.cache_args.copy() + tmpl_kw.update(kw) + elif defname in self._def_regions: + tmpl_kw = self._def_regions[defname] + else: + tmpl_kw = self.template.cache_args.copy() + tmpl_kw.update(kw) + self._def_regions[defname] = tmpl_kw + if context and self.impl.pass_context: + tmpl_kw = tmpl_kw.copy() + tmpl_kw.setdefault('context', context) + return tmpl_kw + + +class CacheImpl(object): + + """Provide a cache implementation for use by :class:`.Cache`.""" + + def __init__(self, cache): + self.cache = cache + + pass_context = False + """If ``True``, the :class:`.Context` will be passed to + :meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``. + """ + + def get_or_create(self, key, creation_function, **kw): + """Retrieve a value from the cache, using the given creation function + to generate a new value. + + This function *must* return a value, either from + the cache, or via the given creation function. + If the creation function is called, the newly + created value should be populated into the cache + under the given key before being returned. + + :param key: the value's key. + :param creation_function: function that when called generates + a new value. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def set(self, key, value, **kw): + """Place a value in the cache. + + :param key: the value's key. + :param value: the value. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def get(self, key, **kw): + """Retrieve a value from the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def invalidate(self, key, **kw): + """Invalidate a value in the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() diff --git a/server/www/packages/packages-common/mako/cmd.py b/server/www/packages/packages-common/mako/cmd.py new file mode 100644 index 0000000..50d47fc --- /dev/null +++ b/server/www/packages/packages-common/mako/cmd.py @@ -0,0 +1,67 @@ +# mako/cmd.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from argparse import ArgumentParser +from os.path import isfile, dirname +import sys +from mako.template import Template +from mako.lookup import TemplateLookup +from mako import exceptions + + +def varsplit(var): + if "=" not in var: + return (var, "") + return var.split("=", 1) + + +def _exit(): + sys.stderr.write(exceptions.text_error_template().render()) + sys.exit(1) + + +def cmdline(argv=None): + + parser = ArgumentParser("usage: %prog [FILENAME]") + parser.add_argument( + "--var", default=[], action="append", + help="variable (can be used multiple times, use name=value)") + parser.add_argument( + "--template-dir", default=[], action="append", + help="Directory to use for template lookup (multiple " + "directories may be provided). 
If not given then if the " + "template is read from stdin, the value defaults to be " + "the current directory, otherwise it defaults to be the " + "parent directory of the file provided.") + parser.add_argument('input', nargs='?', default='-') + + options = parser.parse_args(argv) + if options.input == '-': + lookup_dirs = options.template_dir or ["."] + lookup = TemplateLookup(lookup_dirs) + try: + template = Template(sys.stdin.read(), lookup=lookup) + except: + _exit() + else: + filename = options.input + if not isfile(filename): + raise SystemExit("error: can't find %s" % filename) + lookup_dirs = options.template_dir or [dirname(filename)] + lookup = TemplateLookup(lookup_dirs) + try: + template = Template(filename=filename, lookup=lookup) + except: + _exit() + + kw = dict([varsplit(var) for var in options.var]) + try: + print(template.render(**kw)) + except: + _exit() + + +if __name__ == "__main__": + cmdline() diff --git a/server/www/packages/packages-common/mako/codegen.py b/server/www/packages/packages-common/mako/codegen.py new file mode 100644 index 0000000..bf86d79 --- /dev/null +++ b/server/www/packages/packages-common/mako/codegen.py @@ -0,0 +1,1255 @@ +# mako/codegen.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides functionality for rendering a parsetree constructing into module +source code.""" + +import time +import re +from mako.pygen import PythonPrinter +from mako import util, ast, parsetree, filters, exceptions +from mako import compat + + +MAGIC_NUMBER = 10 + +# names which are hardwired into the +# template and are not accessed via the +# context itself +TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"]) +RESERVED_NAMES = set(['context', 'loop']).union(TOPLEVEL_DECLARED) + + +def compile(node, + uri, + filename=None, + default_filters=None, + buffer_filters=None, + imports=None, + future_imports=None, + source_encoding=None, + generate_magic_comment=True, + disable_unicode=False, + strict_undefined=False, + enable_loop=True, + reserved_names=frozenset()): + """Generate module source code given a parsetree node, + uri, and optional source filename""" + + # if on Py2K, push the "source_encoding" string to be + # a bytestring itself, as we will be embedding it into + # the generated source and we don't want to coerce the + # result into a unicode object, in "disable_unicode" mode + if not compat.py3k and isinstance(source_encoding, compat.text_type): + source_encoding = source_encoding.encode(source_encoding) + + buf = util.FastEncodingBuffer() + + printer = PythonPrinter(buf) + _GenerateRenderMethod(printer, + _CompileContext(uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + disable_unicode, + strict_undefined, + enable_loop, + reserved_names), + node) + return buf.getvalue() + + +class _CompileContext(object): + + def __init__(self, + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + disable_unicode, + strict_undefined, + enable_loop, + reserved_names): + self.uri = uri + self.filename = filename + self.default_filters = default_filters + self.buffer_filters = buffer_filters + self.imports = imports + self.future_imports = future_imports + self.source_encoding = source_encoding + self.generate_magic_comment = generate_magic_comment + 
self.disable_unicode = disable_unicode + self.strict_undefined = strict_undefined + self.enable_loop = enable_loop + self.reserved_names = reserved_names + + +class _GenerateRenderMethod(object): + + """A template visitor object which generates the + full module source for a template. + + """ + + def __init__(self, printer, compiler, node): + self.printer = printer + self.compiler = compiler + self.node = node + self.identifier_stack = [None] + self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag)) + + if self.in_def: + name = "render_%s" % node.funcname + args = node.get_argument_expressions() + filtered = len(node.filter_args.args) > 0 + buffered = eval(node.attributes.get('buffered', 'False')) + cached = eval(node.attributes.get('cached', 'False')) + defs = None + pagetag = None + if node.is_block and not node.is_anonymous: + args += ['**pageargs'] + else: + defs = self.write_toplevel() + pagetag = self.compiler.pagetag + name = "render_body" + if pagetag is not None: + args = pagetag.body_decl.get_argument_expressions() + if not pagetag.body_decl.kwargs: + args += ['**pageargs'] + cached = eval(pagetag.attributes.get('cached', 'False')) + self.compiler.enable_loop = self.compiler.enable_loop or eval( + pagetag.attributes.get( + 'enable_loop', 'False') + ) + else: + args = ['**pageargs'] + cached = False + buffered = filtered = False + if args is None: + args = ['context'] + else: + args = [a for a in ['context'] + args] + + self.write_render_callable( + pagetag or node, + name, args, + buffered, filtered, cached) + + if defs is not None: + for node in defs: + _GenerateRenderMethod(printer, compiler, node) + + if not self.in_def: + self.write_metadata_struct() + + def write_metadata_struct(self): + self.printer.source_map[self.printer.lineno] = \ + max(self.printer.source_map) + struct = { + "filename": self.compiler.filename, + "uri": self.compiler.uri, + "source_encoding": self.compiler.source_encoding, + "line_map": self.printer.source_map, + } + self.printer.writelines( + '"""', + '__M_BEGIN_METADATA', + compat.json.dumps(struct), + '__M_END_METADATA\n' + '"""' + ) + + @property + def identifiers(self): + return self.identifier_stack[-1] + + def write_toplevel(self): + """Traverse a template structure for module-level directives and + generate the start of module-level code. 
+ + """ + inherit = [] + namespaces = {} + module_code = [] + + self.compiler.pagetag = None + + class FindTopLevel(object): + + def visitInheritTag(s, node): + inherit.append(node) + + def visitNamespaceTag(s, node): + namespaces[node.name] = node + + def visitPageTag(s, node): + self.compiler.pagetag = node + + def visitCode(s, node): + if node.ismodule: + module_code.append(node) + + f = FindTopLevel() + for n in self.node.nodes: + n.accept_visitor(f) + + self.compiler.namespaces = namespaces + + module_ident = set() + for n in module_code: + module_ident = module_ident.union(n.declared_identifiers()) + + module_identifiers = _Identifiers(self.compiler) + module_identifiers.declared = module_ident + + # module-level names, python code + if self.compiler.generate_magic_comment and \ + self.compiler.source_encoding: + self.printer.writeline("# -*- coding:%s -*-" % + self.compiler.source_encoding) + + if self.compiler.future_imports: + self.printer.writeline("from __future__ import %s" % + (", ".join(self.compiler.future_imports),)) + self.printer.writeline("from mako import runtime, filters, cache") + self.printer.writeline("UNDEFINED = runtime.UNDEFINED") + self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING") + self.printer.writeline("__M_dict_builtin = dict") + self.printer.writeline("__M_locals_builtin = locals") + self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER) + self.printer.writeline("_modified_time = %r" % time.time()) + self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop) + self.printer.writeline( + "_template_filename = %r" % self.compiler.filename) + self.printer.writeline("_template_uri = %r" % self.compiler.uri) + self.printer.writeline( + "_source_encoding = %r" % self.compiler.source_encoding) + if self.compiler.imports: + buf = '' + for imp in self.compiler.imports: + buf += imp + "\n" + self.printer.writeline(imp) + impcode = ast.PythonCode( + buf, + source='', lineno=0, + pos=0, + filename='template defined imports') + else: + impcode = None + + main_identifiers = module_identifiers.branch(self.node) + module_identifiers.topleveldefs = \ + module_identifiers.topleveldefs.\ + union(main_identifiers.topleveldefs) + module_identifiers.declared.update(TOPLEVEL_DECLARED) + if impcode: + module_identifiers.declared.update(impcode.declared_identifiers) + + self.compiler.identifiers = module_identifiers + self.printer.writeline("_exports = %r" % + [n.name for n in + main_identifiers.topleveldefs.values()] + ) + self.printer.write_blanks(2) + + if len(module_code): + self.write_module_code(module_code) + + if len(inherit): + self.write_namespaces(namespaces) + self.write_inherit(inherit[-1]) + elif len(namespaces): + self.write_namespaces(namespaces) + + return list(main_identifiers.topleveldefs.values()) + + def write_render_callable(self, node, name, args, buffered, filtered, + cached): + """write a top-level render callable. 
+ + this could be the main render() method or that of a top-level def.""" + + if self.in_def: + decorator = node.decorator + if decorator: + self.printer.writeline( + "@runtime._decorate_toplevel(%s)" % decorator) + + self.printer.start_source(node.lineno) + self.printer.writelines( + "def %s(%s):" % (name, ','.join(args)), + # push new frame, assign current frame to __M_caller + "__M_caller = context.caller_stack._push_frame()", + "try:" + ) + if buffered or filtered or cached: + self.printer.writeline("context._push_buffer()") + + self.identifier_stack.append( + self.compiler.identifiers.branch(self.node)) + if (not self.in_def or self.node.is_block) and '**pageargs' in args: + self.identifier_stack[-1].argument_declared.add('pageargs') + + if not self.in_def and ( + len(self.identifiers.locally_assigned) > 0 or + len(self.identifiers.argument_declared) > 0 + ): + self.printer.writeline("__M_locals = __M_dict_builtin(%s)" % + ','.join([ + "%s=%s" % (x, x) for x in + self.identifiers.argument_declared + ])) + + self.write_variable_declares(self.identifiers, toplevel=True) + + for n in self.node.nodes: + n.accept_visitor(self) + + self.write_def_finish(self.node, buffered, filtered, cached) + self.printer.writeline(None) + self.printer.write_blanks(2) + if cached: + self.write_cache_decorator( + node, name, + args, buffered, + self.identifiers, toplevel=True) + + def write_module_code(self, module_code): + """write module-level template code, i.e. that which + is enclosed in <%! %> tags in the template.""" + for n in module_code: + self.printer.start_source(n.lineno) + self.printer.write_indented_block(n.text) + + def write_inherit(self, node): + """write the module-level inheritance-determination callable.""" + + self.printer.writelines( + "def _mako_inherit(template, context):", + "_mako_generate_namespaces(context)", + "return runtime._inherit_from(context, %s, _template_uri)" % + (node.parsed_attributes['file']), + None + ) + + def write_namespaces(self, namespaces): + """write the module-level namespace-generating callable.""" + self.printer.writelines( + "def _mako_get_namespace(context, name):", + "try:", + "return context.namespaces[(__name__, name)]", + "except KeyError:", + "_mako_generate_namespaces(context)", + "return context.namespaces[(__name__, name)]", + None, None + ) + self.printer.writeline("def _mako_generate_namespaces(context):") + + for node in namespaces.values(): + if 'import' in node.attributes: + self.compiler.has_ns_imports = True + self.printer.start_source(node.lineno) + if len(node.nodes): + self.printer.writeline("def make_namespace():") + export = [] + identifiers = self.compiler.identifiers.branch(node) + self.in_def = True + + class NSDefVisitor(object): + + def visitDefTag(s, node): + s.visitDefOrBase(node) + + def visitBlockTag(s, node): + s.visitDefOrBase(node) + + def visitDefOrBase(s, node): + if node.is_anonymous: + raise exceptions.CompileException( + "Can't put anonymous blocks inside " + "<%namespace>", + **node.exception_kwargs + ) + self.write_inline_def(node, identifiers, nested=False) + export.append(node.funcname) + vis = NSDefVisitor() + for n in node.nodes: + n.accept_visitor(vis) + self.printer.writeline("return [%s]" % (','.join(export))) + self.printer.writeline(None) + self.in_def = False + callable_name = "make_namespace()" + else: + callable_name = "None" + + if 'file' in node.parsed_attributes: + self.printer.writeline( + "ns = runtime.TemplateNamespace(%r," + " context._clean_inheritance_tokens()," + " templateuri=%s, 
callables=%s, " + " calling_uri=_template_uri)" % + ( + node.name, + node.parsed_attributes.get('file', 'None'), + callable_name, + ) + ) + elif 'module' in node.parsed_attributes: + self.printer.writeline( + "ns = runtime.ModuleNamespace(%r," + " context._clean_inheritance_tokens()," + " callables=%s, calling_uri=_template_uri," + " module=%s)" % + ( + node.name, + callable_name, + node.parsed_attributes.get( + 'module', 'None') + ) + ) + else: + self.printer.writeline( + "ns = runtime.Namespace(%r," + " context._clean_inheritance_tokens()," + " callables=%s, calling_uri=_template_uri)" % + ( + node.name, + callable_name, + ) + ) + if eval(node.attributes.get('inheritable', "False")): + self.printer.writeline("context['self'].%s = ns" % (node.name)) + + self.printer.writeline( + "context.namespaces[(__name__, %s)] = ns" % repr(node.name)) + self.printer.write_blanks(1) + if not len(namespaces): + self.printer.writeline("pass") + self.printer.writeline(None) + + def write_variable_declares(self, identifiers, toplevel=False, limit=None): + """write variable declarations at the top of a function. + + the variable declarations are in the form of callable + definitions for defs and/or name lookup within the + function's context argument. the names declared are based + on the names that are referenced in the function body, + which don't otherwise have any explicit assignment + operation. names that are assigned within the body are + assumed to be locally-scoped variables and are not + separately declared. + + for def callable definitions, if the def is a top-level + callable then a 'stub' callable is generated which wraps + the current Context into a closure. if the def is not + top-level, it is fully rendered as a local closure. + + """ + + # collection of all defs available to us in this scope + comp_idents = dict([(c.funcname, c) for c in identifiers.defs]) + to_write = set() + + # write "context.get()" for all variables we are going to + # need that arent in the namespace yet + to_write = to_write.union(identifiers.undeclared) + + # write closure functions for closures that we define + # right here + to_write = to_write.union( + [c.funcname for c in identifiers.closuredefs.values()]) + + # remove identifiers that are declared in the argument + # signature of the callable + to_write = to_write.difference(identifiers.argument_declared) + + # remove identifiers that we are going to assign to. + # in this way we mimic Python's behavior, + # i.e. assignment to a variable within a block + # means that variable is now a "locally declared" var, + # which cannot be referenced beforehand. + to_write = to_write.difference(identifiers.locally_declared) + + if self.compiler.enable_loop: + has_loop = "loop" in to_write + to_write.discard("loop") + else: + has_loop = False + + # if a limiting set was sent, constraint to those items in that list + # (this is used for the caching decorator) + if limit is not None: + to_write = to_write.intersection(limit) + + if toplevel and getattr(self.compiler, 'has_ns_imports', False): + self.printer.writeline("_import_ns = {}") + self.compiler.has_imports = True + for ident, ns in self.compiler.namespaces.items(): + if 'import' in ns.attributes: + self.printer.writeline( + "_mako_get_namespace(context, %r)." 
+ "_populate(_import_ns, %r)" % + ( + ident, + re.split(r'\s*,\s*', ns.attributes['import']) + )) + + if has_loop: + self.printer.writeline( + 'loop = __M_loop = runtime.LoopStack()' + ) + + for ident in to_write: + if ident in comp_idents: + comp = comp_idents[ident] + if comp.is_block: + if not comp.is_anonymous: + self.write_def_decl(comp, identifiers) + else: + self.write_inline_def(comp, identifiers, nested=True) + else: + if comp.is_root(): + self.write_def_decl(comp, identifiers) + else: + self.write_inline_def(comp, identifiers, nested=True) + + elif ident in self.compiler.namespaces: + self.printer.writeline( + "%s = _mako_get_namespace(context, %r)" % + (ident, ident) + ) + else: + if getattr(self.compiler, 'has_ns_imports', False): + if self.compiler.strict_undefined: + self.printer.writelines( + "%s = _import_ns.get(%r, UNDEFINED)" % + (ident, ident), + "if %s is UNDEFINED:" % ident, + "try:", + "%s = context[%r]" % (ident, ident), + "except KeyError:", + "raise NameError(\"'%s' is not defined\")" % + ident, + None, None + ) + else: + self.printer.writeline( + "%s = _import_ns.get" + "(%r, context.get(%r, UNDEFINED))" % + (ident, ident, ident)) + else: + if self.compiler.strict_undefined: + self.printer.writelines( + "try:", + "%s = context[%r]" % (ident, ident), + "except KeyError:", + "raise NameError(\"'%s' is not defined\")" % + ident, + None + ) + else: + self.printer.writeline( + "%s = context.get(%r, UNDEFINED)" % (ident, ident) + ) + + self.printer.writeline("__M_writer = context.writer()") + + def write_def_decl(self, node, identifiers): + """write a locally-available callable referencing a top-level def""" + funcname = node.funcname + namedecls = node.get_argument_expressions() + nameargs = node.get_argument_expressions(as_call=True) + + if not self.in_def and ( + len(self.identifiers.locally_assigned) > 0 or + len(self.identifiers.argument_declared) > 0): + nameargs.insert(0, 'context._locals(__M_locals)') + else: + nameargs.insert(0, 'context') + self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls))) + self.printer.writeline( + "return render_%s(%s)" % (funcname, ",".join(nameargs))) + self.printer.writeline(None) + + def write_inline_def(self, node, identifiers, nested): + """write a locally-available def callable inside an enclosing def.""" + + namedecls = node.get_argument_expressions() + + decorator = node.decorator + if decorator: + self.printer.writeline( + "@runtime._decorate_inline(context, %s)" % decorator) + self.printer.writeline( + "def %s(%s):" % (node.funcname, ",".join(namedecls))) + filtered = len(node.filter_args.args) > 0 + buffered = eval(node.attributes.get('buffered', 'False')) + cached = eval(node.attributes.get('cached', 'False')) + self.printer.writelines( + # push new frame, assign current frame to __M_caller + "__M_caller = context.caller_stack._push_frame()", + "try:" + ) + if buffered or filtered or cached: + self.printer.writelines( + "context._push_buffer()", + ) + + identifiers = identifiers.branch(node, nested=nested) + + self.write_variable_declares(identifiers) + + self.identifier_stack.append(identifiers) + for n in node.nodes: + n.accept_visitor(self) + self.identifier_stack.pop() + + self.write_def_finish(node, buffered, filtered, cached) + self.printer.writeline(None) + if cached: + self.write_cache_decorator(node, node.funcname, + namedecls, False, identifiers, + inline=True, toplevel=False) + + def write_def_finish(self, node, buffered, filtered, cached, + callstack=True): + """write the end section of a 
rendering function, either outermost or
+        inline.
+
+        this takes into account if the rendering function was filtered,
+        buffered, etc. and closes the corresponding try: block if any, and
+        writes code to retrieve captured content, apply filters, send proper
+        return value."""
+
+        if not buffered and not cached and not filtered:
+            self.printer.writeline("return ''")
+            if callstack:
+                self.printer.writelines(
+                    "finally:",
+                    "context.caller_stack._pop_frame()",
+                    None
+                )
+
+        if buffered or filtered or cached:
+            if buffered or cached:
+                # in a caching scenario, don't try to get a writer
+                # from the context after popping; assume the caching
+                # implementation might be using a context with no
+                # extra buffers
+                self.printer.writelines(
+                    "finally:",
+                    "__M_buf = context._pop_buffer()"
+                )
+            else:
+                self.printer.writelines(
+                    "finally:",
+                    "__M_buf, __M_writer = context._pop_buffer_and_writer()"
+                )
+
+            if callstack:
+                self.printer.writeline("context.caller_stack._pop_frame()")
+
+            s = "__M_buf.getvalue()"
+            if filtered:
+                s = self.create_filter_callable(node.filter_args.args, s,
+                                                False)
+            self.printer.writeline(None)
+            if buffered and not cached:
+                s = self.create_filter_callable(self.compiler.buffer_filters,
+                                                s, False)
+            if buffered or cached:
+                self.printer.writeline("return %s" % s)
+            else:
+                self.printer.writelines(
+                    "__M_writer(%s)" % s,
+                    "return ''"
+                )
+
+    def write_cache_decorator(self, node_or_pagetag, name,
+                              args, buffered, identifiers,
+                              inline=False, toplevel=False):
+        """write a post-function decorator to replace a rendering
+        callable with a cached version of itself."""
+
+        self.printer.writeline("__M_%s = %s" % (name, name))
+        cachekey = node_or_pagetag.parsed_attributes.get('cache_key',
+                                                         repr(name))
+
+        cache_args = {}
+        if self.compiler.pagetag is not None:
+            cache_args.update(
+                (
+                    pa[6:],
+                    self.compiler.pagetag.parsed_attributes[pa]
+                )
+                for pa in self.compiler.pagetag.parsed_attributes
+                if pa.startswith('cache_') and pa != 'cache_key'
+            )
+        cache_args.update(
+            (
+                pa[6:],
+                node_or_pagetag.parsed_attributes[pa]
+            ) for pa in node_or_pagetag.parsed_attributes
+            if pa.startswith('cache_') and pa != 'cache_key'
+        )
+        if 'timeout' in cache_args:
+            cache_args['timeout'] = int(eval(cache_args['timeout']))
+
+        self.printer.writeline("def %s(%s):" % (name, ','.join(args)))
+
+        # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
+        pass_args = [
+            "%s=%s" % ((a.split('=')[0],) * 2) if '=' in a else a
+            for a in args
+        ]
+
+        self.write_variable_declares(
+            identifiers,
+            toplevel=toplevel,
+            limit=node_or_pagetag.undeclared_identifiers()
+        )
+        if buffered:
+            s = "context.get('local')."\
+                "cache._ctx_get_or_create("\
+                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" % (
+                    cachekey, name, ','.join(pass_args),
+                    ''.join(["%s=%s, " % (k, v)
+                             for k, v in cache_args.items()]),
+                    name
+                )
+            # apply buffer_filters
+            s = self.create_filter_callable(self.compiler.buffer_filters, s,
+                                            False)
+            self.printer.writelines("return " + s, None)
+        else:
+            self.printer.writelines(
+                "__M_writer(context.get('local')."
+ "cache._ctx_get_or_create(" + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" % + ( + cachekey, name, ','.join(pass_args), + ''.join(["%s=%s, " % (k, v) + for k, v in cache_args.items()]), + name, + ), + "return ''", + None + ) + + def create_filter_callable(self, args, target, is_expression): + """write a filter-applying expression based on the filters + present in the given filter names, adjusting for the global + 'default' filter aliases as needed.""" + + def locate_encode(name): + if re.match(r'decode\..+', name): + return "filters." + name + elif self.compiler.disable_unicode: + return filters.NON_UNICODE_ESCAPES.get(name, name) + else: + return filters.DEFAULT_ESCAPES.get(name, name) + + if 'n' not in args: + if is_expression: + if self.compiler.pagetag: + args = self.compiler.pagetag.filter_args.args + args + if self.compiler.default_filters: + args = self.compiler.default_filters + args + for e in args: + # if filter given as a function, get just the identifier portion + if e == 'n': + continue + m = re.match(r'(.+?)(\(.*\))', e) + if m: + ident, fargs = m.group(1, 2) + f = locate_encode(ident) + e = f + fargs + else: + e = locate_encode(e) + assert e is not None + target = "%s(%s)" % (e, target) + return target + + def visitExpression(self, node): + self.printer.start_source(node.lineno) + if len(node.escapes) or \ + ( + self.compiler.pagetag is not None and + len(self.compiler.pagetag.filter_args.args) + ) or \ + len(self.compiler.default_filters): + + s = self.create_filter_callable(node.escapes_code.args, + "%s" % node.text, True) + self.printer.writeline("__M_writer(%s)" % s) + else: + self.printer.writeline("__M_writer(%s)" % node.text) + + def visitControlLine(self, node): + if node.isend: + self.printer.writeline(None) + if node.has_loop_context: + self.printer.writeline('finally:') + self.printer.writeline("loop = __M_loop._exit()") + self.printer.writeline(None) + else: + self.printer.start_source(node.lineno) + if self.compiler.enable_loop and node.keyword == 'for': + text = mangle_mako_loop(node, self.printer) + else: + text = node.text + self.printer.writeline(text) + children = node.get_children() + # this covers the three situations where we want to insert a pass: + # 1) a ternary control line with no children, + # 2) a primary control line with nothing but its own ternary + # and end control lines, and + # 3) any control line with no content other than comments + if not children or ( + compat.all(isinstance(c, (parsetree.Comment, + parsetree.ControlLine)) + for c in children) and + compat.all((node.is_ternary(c.keyword) or c.isend) + for c in children + if isinstance(c, parsetree.ControlLine))): + self.printer.writeline("pass") + + def visitText(self, node): + self.printer.start_source(node.lineno) + self.printer.writeline("__M_writer(%s)" % repr(node.content)) + + def visitTextTag(self, node): + filtered = len(node.filter_args.args) > 0 + if filtered: + self.printer.writelines( + "__M_writer = context._push_writer()", + "try:", + ) + for n in node.nodes: + n.accept_visitor(self) + if filtered: + self.printer.writelines( + "finally:", + "__M_buf, __M_writer = context._pop_buffer_and_writer()", + "__M_writer(%s)" % + self.create_filter_callable( + node.filter_args.args, + "__M_buf.getvalue()", + False), + None + ) + + def visitCode(self, node): + if not node.ismodule: + self.printer.start_source(node.lineno) + self.printer.write_indented_block(node.text) + + if not self.in_def and len(self.identifiers.locally_assigned) > 0: + # if we are the "template" def, 
fudge locally + # declared/modified variables into the "__M_locals" dictionary, + # which is used for def calls within the same template, + # to simulate "enclosing scope" + self.printer.writeline( + '__M_locals_builtin_stored = __M_locals_builtin()') + self.printer.writeline( + '__M_locals.update(__M_dict_builtin([(__M_key,' + ' __M_locals_builtin_stored[__M_key]) for __M_key in' + ' [%s] if __M_key in __M_locals_builtin_stored]))' % + ','.join([repr(x) for x in node.declared_identifiers()])) + + def visitIncludeTag(self, node): + self.printer.start_source(node.lineno) + args = node.attributes.get('args') + if args: + self.printer.writeline( + "runtime._include_file(context, %s, _template_uri, %s)" % + (node.parsed_attributes['file'], args)) + else: + self.printer.writeline( + "runtime._include_file(context, %s, _template_uri)" % + (node.parsed_attributes['file'])) + + def visitNamespaceTag(self, node): + pass + + def visitDefTag(self, node): + pass + + def visitBlockTag(self, node): + if node.is_anonymous: + self.printer.writeline("%s()" % node.funcname) + else: + nameargs = node.get_argument_expressions(as_call=True) + nameargs += ['**pageargs'] + self.printer.writeline( + "if 'parent' not in context._data or " + "not hasattr(context._data['parent'], '%s'):" + % node.funcname) + self.printer.writeline( + "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))) + self.printer.writeline("\n") + + def visitCallNamespaceTag(self, node): + # TODO: we can put namespace-specific checks here, such + # as ensure the given namespace will be imported, + # pre-import the namespace, etc. + self.visitCallTag(node) + + def visitCallTag(self, node): + self.printer.writeline("def ccall(caller):") + export = ['body'] + callable_identifiers = self.identifiers.branch(node, nested=True) + body_identifiers = callable_identifiers.branch(node, nested=False) + # we want the 'caller' passed to ccall to be used + # for the body() function, but for other non-body() + # <%def>s within <%call> we want the current caller + # off the call stack (if any) + body_identifiers.add_declared('caller') + + self.identifier_stack.append(body_identifiers) + + class DefVisitor(object): + + def visitDefTag(s, node): + s.visitDefOrBase(node) + + def visitBlockTag(s, node): + s.visitDefOrBase(node) + + def visitDefOrBase(s, node): + self.write_inline_def(node, callable_identifiers, nested=False) + if not node.is_anonymous: + export.append(node.funcname) + # remove defs that are within the <%call> from the + # "closuredefs" defined in the body, so they dont render twice + if node.funcname in body_identifiers.closuredefs: + del body_identifiers.closuredefs[node.funcname] + + vis = DefVisitor() + for n in node.nodes: + n.accept_visitor(vis) + self.identifier_stack.pop() + + bodyargs = node.body_decl.get_argument_expressions() + self.printer.writeline("def body(%s):" % ','.join(bodyargs)) + + # TODO: figure out best way to specify + # buffering/nonbuffering (at call time would be better) + buffered = False + if buffered: + self.printer.writelines( + "context._push_buffer()", + "try:" + ) + self.write_variable_declares(body_identifiers) + self.identifier_stack.append(body_identifiers) + + for n in node.nodes: + n.accept_visitor(self) + self.identifier_stack.pop() + + self.write_def_finish(node, buffered, False, False, callstack=False) + self.printer.writelines( + None, + "return [%s]" % (','.join(export)), + None + ) + + self.printer.writelines( + # push on caller for nested call + "context.caller_stack.nextcaller = " + 
"runtime.Namespace('caller', context, " + "callables=ccall(__M_caller))", + "try:") + self.printer.start_source(node.lineno) + self.printer.writelines( + "__M_writer(%s)" % self.create_filter_callable( + [], node.expression, True), + "finally:", + "context.caller_stack.nextcaller = None", + None + ) + + +class _Identifiers(object): + + """tracks the status of identifier names as template code is rendered.""" + + def __init__(self, compiler, node=None, parent=None, nested=False): + if parent is not None: + # if we are the branch created in write_namespaces(), + # we don't share any context from the main body(). + if isinstance(node, parsetree.NamespaceTag): + self.declared = set() + self.topleveldefs = util.SetLikeDict() + else: + # things that have already been declared + # in an enclosing namespace (i.e. names we can just use) + self.declared = set(parent.declared).\ + union([c.name for c in parent.closuredefs.values()]).\ + union(parent.locally_declared).\ + union(parent.argument_declared) + + # if these identifiers correspond to a "nested" + # scope, it means whatever the parent identifiers + # had as undeclared will have been declared by that parent, + # and therefore we have them in our scope. + if nested: + self.declared = self.declared.union(parent.undeclared) + + # top level defs that are available + self.topleveldefs = util.SetLikeDict(**parent.topleveldefs) + else: + self.declared = set() + self.topleveldefs = util.SetLikeDict() + + self.compiler = compiler + + # things within this level that are referenced before they + # are declared (e.g. assigned to) + self.undeclared = set() + + # things that are declared locally. some of these things + # could be in the "undeclared" list as well if they are + # referenced before declared + self.locally_declared = set() + + # assignments made in explicit python blocks. + # these will be propagated to + # the context of local def calls. 
+ self.locally_assigned = set() + + # things that are declared in the argument + # signature of the def callable + self.argument_declared = set() + + # closure defs that are defined in this level + self.closuredefs = util.SetLikeDict() + + self.node = node + + if node is not None: + node.accept_visitor(self) + + illegal_names = self.compiler.reserved_names.intersection( + self.locally_declared) + if illegal_names: + raise exceptions.NameConflictError( + "Reserved words declared in template: %s" % + ", ".join(illegal_names)) + + def branch(self, node, **kwargs): + """create a new Identifiers for a new Node, with + this Identifiers as the parent.""" + + return _Identifiers(self.compiler, node, self, **kwargs) + + @property + def defs(self): + return set(self.topleveldefs.union(self.closuredefs).values()) + + def __repr__(self): + return "Identifiers(declared=%r, locally_declared=%r, "\ + "undeclared=%r, topleveldefs=%r, closuredefs=%r, "\ + "argumentdeclared=%r)" %\ + ( + list(self.declared), + list(self.locally_declared), + list(self.undeclared), + [c.name for c in self.topleveldefs.values()], + [c.name for c in self.closuredefs.values()], + self.argument_declared) + + def check_declared(self, node): + """update the state of this Identifiers with the undeclared + and declared identifiers of the given node.""" + + for ident in node.undeclared_identifiers(): + if ident != 'context' and\ + ident not in self.declared.union(self.locally_declared): + self.undeclared.add(ident) + for ident in node.declared_identifiers(): + self.locally_declared.add(ident) + + def add_declared(self, ident): + self.declared.add(ident) + if ident in self.undeclared: + self.undeclared.remove(ident) + + def visitExpression(self, node): + self.check_declared(node) + + def visitControlLine(self, node): + self.check_declared(node) + + def visitCode(self, node): + if not node.ismodule: + self.check_declared(node) + self.locally_assigned = self.locally_assigned.union( + node.declared_identifiers()) + + def visitNamespaceTag(self, node): + # only traverse into the sub-elements of a + # <%namespace> tag if we are the branch created in + # write_namespaces() + if self.node is node: + for n in node.nodes: + n.accept_visitor(self) + + def _check_name_exists(self, collection, node): + existing = collection.get(node.funcname) + collection[node.funcname] = node + if existing is not None and \ + existing is not node and \ + (node.is_block or existing.is_block): + raise exceptions.CompileException( + "%%def or %%block named '%s' already " + "exists in this template." 
% + node.funcname, **node.exception_kwargs) + + def visitDefTag(self, node): + if node.is_root() and not node.is_anonymous: + self._check_name_exists(self.topleveldefs, node) + elif node is not self.node: + self._check_name_exists(self.closuredefs, node) + + for ident in node.undeclared_identifiers(): + if ident != 'context' and \ + ident not in self.declared.union(self.locally_declared): + self.undeclared.add(ident) + + # visit defs only one level deep + if node is self.node: + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + + for n in node.nodes: + n.accept_visitor(self) + + def visitBlockTag(self, node): + if node is not self.node and not node.is_anonymous: + + if isinstance(self.node, parsetree.DefTag): + raise exceptions.CompileException( + "Named block '%s' not allowed inside of def '%s'" + % (node.name, self.node.name), **node.exception_kwargs) + elif isinstance(self.node, + (parsetree.CallTag, parsetree.CallNamespaceTag)): + raise exceptions.CompileException( + "Named block '%s' not allowed inside of <%%call> tag" + % (node.name, ), **node.exception_kwargs) + + for ident in node.undeclared_identifiers(): + if ident != 'context' and \ + ident not in self.declared.union(self.locally_declared): + self.undeclared.add(ident) + + if not node.is_anonymous: + self._check_name_exists(self.topleveldefs, node) + self.undeclared.add(node.funcname) + elif node is not self.node: + self._check_name_exists(self.closuredefs, node) + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + for n in node.nodes: + n.accept_visitor(self) + + def visitTextTag(self, node): + for ident in node.undeclared_identifiers(): + if ident != 'context' and \ + ident not in self.declared.union(self.locally_declared): + self.undeclared.add(ident) + + def visitIncludeTag(self, node): + self.check_declared(node) + + def visitPageTag(self, node): + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + self.check_declared(node) + + def visitCallNamespaceTag(self, node): + self.visitCallTag(node) + + def visitCallTag(self, node): + if node is self.node: + for ident in node.undeclared_identifiers(): + if ident != 'context' and \ + ident not in self.declared.union( + self.locally_declared): + self.undeclared.add(ident) + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + for n in node.nodes: + n.accept_visitor(self) + else: + for ident in node.undeclared_identifiers(): + if ident != 'context' and \ + ident not in self.declared.union( + self.locally_declared): + self.undeclared.add(ident) + + +_FOR_LOOP = re.compile( + r'^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*' + r'(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):' +) + + +def mangle_mako_loop(node, printer): + """converts a for loop into a context manager wrapped around a for loop + when access to the `loop` variable has been detected in the for loop body + """ + loop_variable = LoopVariable() + node.accept_visitor(loop_variable) + if loop_variable.detected: + node.nodes[-1].has_loop_context = True + match = _FOR_LOOP.match(node.text) + if match: + printer.writelines( + 'loop = __M_loop._enter(%s)' % match.group(2), + 'try:' + # 'with __M_loop(%s) as loop:' % match.group(2) + ) + text = 'for %s in loop:' % match.group(1) + else: + raise SyntaxError("Couldn't apply loop context: %s" % node.text) + else: + text = node.text + return text + + +class LoopVariable(object): + + """A node visitor which looks for the name 'loop' within undeclared + 
identifiers.""" + + def __init__(self): + self.detected = False + + def _loop_reference_detected(self, node): + if 'loop' in node.undeclared_identifiers(): + self.detected = True + else: + for n in node.get_children(): + n.accept_visitor(self) + + def visitControlLine(self, node): + self._loop_reference_detected(node) + + def visitCode(self, node): + self._loop_reference_detected(node) + + def visitExpression(self, node): + self._loop_reference_detected(node) diff --git a/server/www/packages/packages-common/mako/compat.py b/server/www/packages/packages-common/mako/compat.py new file mode 100644 index 0000000..db22b99 --- /dev/null +++ b/server/www/packages/packages-common/mako/compat.py @@ -0,0 +1,200 @@ +import sys +import time + +py3k = sys.version_info >= (3, 0) +py33 = sys.version_info >= (3, 3) +py2k = sys.version_info < (3,) +py26 = sys.version_info >= (2, 6) +jython = sys.platform.startswith('java') +win32 = sys.platform.startswith('win') +pypy = hasattr(sys, 'pypy_version_info') + +if py3k: + # create a "getargspec" from getfullargspec(), which is not deprecated + # in Py3K; getargspec() has started to emit warnings as of Py3.5. + # As of Py3.4, now they are trying to move from getfullargspec() + # to "signature()", but getfullargspec() is not deprecated, so stick + # with that for now. + + import collections + ArgSpec = collections.namedtuple( + "ArgSpec", + ["args", "varargs", "keywords", "defaults"]) + from inspect import getfullargspec as inspect_getfullargspec + + def inspect_getargspec(func): + return ArgSpec( + *inspect_getfullargspec(func)[0:4] + ) +else: + from inspect import getargspec as inspect_getargspec # noqa + + +if py3k: + from io import StringIO + import builtins as compat_builtins + from urllib.parse import quote_plus, unquote_plus + from html.entities import codepoint2name, name2codepoint + string_types = str, + binary_type = bytes + text_type = str + + from io import BytesIO as byte_buffer + + def u(s): + return s + + def b(s): + return s.encode("latin-1") + + def octal(lit): + return eval("0o" + lit) + +else: + import __builtin__ as compat_builtins # noqa + try: + from cStringIO import StringIO + except: + from StringIO import StringIO + + byte_buffer = StringIO + + from urllib import quote_plus, unquote_plus # noqa + from htmlentitydefs import codepoint2name, name2codepoint # noqa + string_types = basestring, # noqa + binary_type = str + text_type = unicode # noqa + + def u(s): + return unicode(s, "utf-8") # noqa + + def b(s): + return s + + def octal(lit): + return eval("0" + lit) + + +if py33: + from importlib import machinery + + def load_module(module_id, path): + return machinery.SourceFileLoader(module_id, path).load_module() +else: + import imp + + def load_module(module_id, path): + fp = open(path, 'rb') + try: + return imp.load_source(module_id, path, fp) + finally: + fp.close() + + +if py3k: + def reraise(tp, value, tb=None, cause=None): + if cause is not None: + value.__cause__ = cause + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value +else: + exec("def reraise(tp, value, tb=None, cause=None):\n" + " raise tp, value, tb\n") + + +def exception_as(): + return sys.exc_info()[1] + +try: + import threading + if py3k: + import _thread as thread + else: + import thread +except ImportError: + import dummy_threading as threading # noqa + if py3k: + import _dummy_thread as thread + else: + import dummy_thread as thread # noqa + +if win32 or jython: + time_func = time.clock +else: + time_func = time.time + +try: + from 
functools import partial
+except:
+    def partial(func, *args, **keywords):
+        def newfunc(*fargs, **fkeywords):
+            newkeywords = keywords.copy()
+            newkeywords.update(fkeywords)
+            return func(*(args + fargs), **newkeywords)
+        return newfunc
+
+
+all = all
+import json  # noqa
+
+
+def exception_name(exc):
+    return exc.__class__.__name__
+
+try:
+    from inspect import CO_VARKEYWORDS, CO_VARARGS
+
+    def inspect_func_args(fn):
+        if py3k:
+            co = fn.__code__
+        else:
+            co = fn.func_code
+
+        nargs = co.co_argcount
+        names = co.co_varnames
+        args = list(names[:nargs])
+
+        varargs = None
+        if co.co_flags & CO_VARARGS:
+            varargs = co.co_varnames[nargs]
+            nargs = nargs + 1
+        varkw = None
+        if co.co_flags & CO_VARKEYWORDS:
+            varkw = co.co_varnames[nargs]
+
+        if py3k:
+            return args, varargs, varkw, fn.__defaults__
+        else:
+            return args, varargs, varkw, fn.func_defaults
+except ImportError:
+    import inspect
+
+    def inspect_func_args(fn):
+        return inspect.getargspec(fn)
+
+if py3k:
+    def callable(fn):
+        return hasattr(fn, '__call__')
+else:
+    callable = callable
+
+
+################################################
+# cross-compatible metaclass implementation
+# Copyright (c) 2010-2012 Benjamin Peterson
+def with_metaclass(meta, base=object):
+    """Create a base class with a metaclass."""
+    return meta("%sBase" % meta.__name__, (base,), {})
+################################################
+
+
+def arg_stringname(func_arg):
+    """Gets the string name of a kwarg or vararg
+    In Python3.4 a function's args are
+    of _ast.arg type not _ast.name
+    """
+    if hasattr(func_arg, 'arg'):
+        return func_arg.arg
+    else:
+        return str(func_arg)
diff --git a/server/www/packages/packages-common/mako/exceptions.py b/server/www/packages/packages-common/mako/exceptions.py
new file mode 100644
index 0000000..84d2297
--- /dev/null
+++ b/server/www/packages/packages-common/mako/exceptions.py
@@ -0,0 +1,394 @@
+# mako/exceptions.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""exception classes"""
+
+import traceback
+import sys
+from mako import util, compat
+
+
+class MakoException(Exception):
+    pass
+
+
+class RuntimeException(MakoException):
+    pass
+
+
+def _format_filepos(lineno, pos, filename):
+    if filename is None:
+        return " at line: %d char: %d" % (lineno, pos)
+    else:
+        return " in file '%s' at line: %d char: %d" % (filename, lineno, pos)
+
+
+class CompileException(MakoException):
+
+    def __init__(self, message, source, lineno, pos, filename):
+        MakoException.__init__(
+            self,
+            message + _format_filepos(lineno, pos, filename))
+        self.lineno = lineno
+        self.pos = pos
+        self.filename = filename
+        self.source = source
+
+
+class SyntaxException(MakoException):
+
+    def __init__(self, message, source, lineno, pos, filename):
+        MakoException.__init__(
+            self,
+            message + _format_filepos(lineno, pos, filename))
+        self.lineno = lineno
+        self.pos = pos
+        self.filename = filename
+        self.source = source
+
+
+class UnsupportedError(MakoException):
+
+    """raised when a retired feature is used."""
+
+
+class NameConflictError(MakoException):
+
+    """raised when a reserved word is used inappropriately"""
+
+
+class TemplateLookupException(MakoException):
+    pass
+
+
+class TopLevelLookupException(TemplateLookupException):
+    pass
+
+
+class RichTraceback(object):
+
+    """Pulls the current exception from the ``sys`` traceback and extracts
+    Mako-specific template information.
+ + See the usage examples in :ref:`handling_exceptions`. + + """ + + def __init__(self, error=None, traceback=None): + self.source, self.lineno = "", 0 + + if error is None or traceback is None: + t, value, tback = sys.exc_info() + + if error is None: + error = value or t + + if traceback is None: + traceback = tback + + self.error = error + self.records = self._init(traceback) + + if isinstance(self.error, (CompileException, SyntaxException)): + self.source = self.error.source + self.lineno = self.error.lineno + self._has_source = True + + self._init_message() + + @property + def errorname(self): + return compat.exception_name(self.error) + + def _init_message(self): + """Find a unicode representation of self.error""" + try: + self.message = compat.text_type(self.error) + except UnicodeError: + try: + self.message = str(self.error) + except UnicodeEncodeError: + # Fallback to args as neither unicode nor + # str(Exception(u'\xe6')) work in Python < 2.6 + self.message = self.error.args[0] + if not isinstance(self.message, compat.text_type): + self.message = compat.text_type(self.message, 'ascii', 'replace') + + def _get_reformatted_records(self, records): + for rec in records: + if rec[6] is not None: + yield (rec[4], rec[5], rec[2], rec[6]) + else: + yield tuple(rec[0:4]) + + @property + def traceback(self): + """Return a list of 4-tuple traceback records (i.e. normal python + format) with template-corresponding lines remapped to the originating + template. + + """ + return list(self._get_reformatted_records(self.records)) + + @property + def reverse_records(self): + return reversed(self.records) + + @property + def reverse_traceback(self): + """Return the same data as traceback, except in reverse order. + """ + + return list(self._get_reformatted_records(self.reverse_records)) + + def _init(self, trcback): + """format a traceback from sys.exc_info() into 7-item tuples, + containing the regular four traceback tuple items, plus the original + template filename, the line number adjusted relative to the template + source, and code line from that line number of the template.""" + + import mako.template + mods = {} + rawrecords = traceback.extract_tb(trcback) + new_trcback = [] + for filename, lineno, function, line in rawrecords: + if not line: + line = '' + try: + (line_map, template_lines) = mods[filename] + except KeyError: + try: + info = mako.template._get_module_info(filename) + module_source = info.code + template_source = info.source + template_filename = info.template_filename or filename + except KeyError: + # A normal .py file (not a Template) + if not compat.py3k: + try: + fp = open(filename, 'rb') + encoding = util.parse_encoding(fp) + fp.close() + except IOError: + encoding = None + if encoding: + line = line.decode(encoding) + else: + line = line.decode('ascii', 'replace') + new_trcback.append((filename, lineno, function, line, + None, None, None, None)) + continue + + template_ln = 1 + + source_map = mako.template.ModuleInfo.\ + get_module_source_metadata( + module_source, full_line_map=True) + line_map = source_map['full_line_map'] + + template_lines = [line_ for line_ in + template_source.split("\n")] + mods[filename] = (line_map, template_lines) + + template_ln = line_map[lineno - 1] + + if template_ln <= len(template_lines): + template_line = template_lines[template_ln - 1] + else: + template_line = None + new_trcback.append((filename, lineno, function, + line, template_filename, template_ln, + template_line, template_source)) + if not self.source: + for l in 
range(len(new_trcback) - 1, 0, -1):
+                if new_trcback[l][5]:
+                    self.source = new_trcback[l][7]
+                    self.lineno = new_trcback[l][5]
+                    break
+            else:
+                if new_trcback:
+                    try:
+                        # A normal .py file (not a Template)
+                        fp = open(new_trcback[-1][0], 'rb')
+                        encoding = util.parse_encoding(fp)
+                        fp.seek(0)
+                        self.source = fp.read()
+                        fp.close()
+                        if encoding:
+                            self.source = self.source.decode(encoding)
+                    except IOError:
+                        self.source = ''
+                    self.lineno = new_trcback[-1][1]
+        return new_trcback
+
+
+def text_error_template(lookup=None):
+    """Provides a template that renders a stack trace in a similar format to
+    the Python interpreter, substituting source template filenames, line
+    numbers and code for that of the originating source template, as
+    applicable.
+
+    """
+    import mako.template
+    return mako.template.Template(r"""
+<%page args="error=None, traceback=None"/>
+<%!
+    from mako.exceptions import RichTraceback
+%>\
+<%
+    tback = RichTraceback(error=error, traceback=traceback)
+%>\
+Traceback (most recent call last):
+% for (filename, lineno, function, line) in tback.traceback:
+  File "${filename}", line ${lineno}, in ${function or '?'}
+    ${line | trim}
+% endfor
+${tback.errorname}: ${tback.message}
+""")
+
+
+def _install_pygments():
+    global syntax_highlight, pygments_html_formatter
+    from mako.ext.pygmentplugin import syntax_highlight  # noqa
+    from mako.ext.pygmentplugin import pygments_html_formatter  # noqa
+
+
+def _install_fallback():
+    global syntax_highlight, pygments_html_formatter
+    from mako.filters import html_escape
+    pygments_html_formatter = None
+
+    def syntax_highlight(filename='', language=None):
+        return html_escape
+
+
+def _install_highlighting():
+    try:
+        _install_pygments()
+    except ImportError:
+        _install_fallback()
+_install_highlighting()
+
+
+def html_error_template():
+    """Provides a template that renders a stack trace in an HTML format,
+    providing an excerpt of code as well as substituting source template
+    filenames, line numbers and code for that of the originating source
+    template, as applicable.
+
+    The template's default ``encoding_errors`` value is
+    ``'htmlentityreplace'``. The template has two options. With the
+    ``full`` option disabled, only a section of an HTML document is
+    returned. With the ``css`` option disabled, the default stylesheet
+    won't be included.
+
+    """
+    import mako.template
+    return mako.template.Template(r"""
+<%!
+    from mako.exceptions import RichTraceback, syntax_highlight,\
+            pygments_html_formatter
+%>
+<%page args="full=True, css=True, error=None, traceback=None"/>
+% if full:
+<html>
+<head>
+    <title>Mako Runtime Error</title>
+% endif
+% if css:
+    <style>
+        body { font-family:verdana; margin:10px 30px 10px 30px;}
+        .stacktrace { margin:5px 5px 5px 5px; }
+        .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
+        .nonhighlight { padding:0px; background-color:#DFDFDF; }
+        .sample { padding:10px; margin:10px 10px 10px 10px;
+                  font-family:monospace; }
+        .sampleline { padding:0px 10px 0px 10px; }
+        .sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
+        .location { font-size:80%; }
+        .highlightedrow { color:#FF0000; }
+    </style>
+% endif
+% if full:
+</head>
+<body>
+% endif
+
+<h2>Error !</h2>
+<%
+    tback = RichTraceback(error=error, traceback=traceback)
+    src = tback.source
+    line = tback.lineno
+    if src:
+        lines = src.split('\n')
+    else:
+        lines = None
+%>
+<h3>${tback.errorname}: ${tback.message|h}</h3>
+
+% if lines:
+    <div class="sample">
+    <div class="nonhighlight">
+% for index in range(max(0, line-4),min(len(lines), line+5)):
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = index + 1
+    %>
+    % if index + 1 == line:
+    <%
+       if pygments_html_formatter:
+           old_cssclass = pygments_html_formatter.cssclass
+           pygments_html_formatter.cssclass = 'error ' + old_cssclass
+    %>
+        ${lines[index] | syntax_highlight(language='mako')}
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.cssclass = old_cssclass
+    %>
+    % else:
+        ${lines[index] | syntax_highlight(language='mako')}
+    % endif
+% endfor
+    </div>
+    </div>
+% endif
+
+<div class="stacktrace">
+% for (filename, lineno, function, line) in tback.reverse_traceback:
+    <div class="location">${filename}, line ${lineno}:</div>
+    <div class="sourceline">
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = lineno
+    %>
+      <div class="sourceline">${line | syntax_highlight(filename)}</div>
+    </div>
+% endfor
+</div>
+
+% if full:
+</body>
+</html>
+% endif
+""", output_encoding=sys.getdefaultencoding(),
+        encoding_errors='htmlentityreplace')
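Both error templates above are designed to be rendered from inside an except block. A minimal usage sketch (the failing template and variable names here are illustrative, not part of this change):

    from mako.template import Template
    from mako import exceptions

    try:
        # 'wrld' is intentionally undefined, so rendering raises
        Template("hello ${wrld}").render()
    except Exception:
        # plain-text traceback, remapped to the template source lines
        print(exceptions.text_error_template().render())
        # or the full HTML page; returns bytes, encoded with the
        # 'htmlentityreplace' handler registered in mako/filters.py below
        html_page = exceptions.html_error_template().render()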
diff --git a/server/www/packages/packages-common/mako/ext/__init__.py b/server/www/packages/packages-common/mako/ext/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/server/www/packages/packages-common/mako/ext/autohandler.py b/server/www/packages/packages-common/mako/ext/autohandler.py
new file mode 100644
index 0000000..9ee780a
--- /dev/null
+++ b/server/www/packages/packages-common/mako/ext/autohandler.py
@@ -0,0 +1,68 @@
+# ext/autohandler.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""adds autohandler functionality to Mako templates.
+
+requires that the TemplateLookup class is used with templates.
+
+usage:
+
+<%!
+    from mako.ext.autohandler import autohandler
+%>
+<%inherit file="${autohandler(template, context)}"/>
+
+
+or with custom autohandler filename:
+
+<%!
+    from mako.ext.autohandler import autohandler
+%>
+<%inherit file="${autohandler(template, context, name='somefilename')}"/>
+
+"""
+
+import posixpath
+import os
+import re
+
+
+def autohandler(template, context, name='autohandler'):
+    lookup = context.lookup
+    _template_uri = template.module._template_uri
+    if not lookup.filesystem_checks:
+        try:
+            return lookup._uri_cache[(autohandler, _template_uri, name)]
+        except KeyError:
+            pass
+
+    tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
+    while len(tokens):
+        path = '/' + '/'.join(tokens)
+        if path != _template_uri and _file_exists(lookup, path):
+            if not lookup.filesystem_checks:
+                return lookup._uri_cache.setdefault(
+                    (autohandler, _template_uri, name), path)
+            else:
+                return path
+        if len(tokens) == 1:
+            break
+        tokens[-2:] = [name]
+
+    if not lookup.filesystem_checks:
+        return lookup._uri_cache.setdefault(
+            (autohandler, _template_uri, name), None)
+    else:
+        return None
+
+
+def _file_exists(lookup, path):
+    psub = re.sub(r'^/', '', path)
+    for d in lookup.directories:
+        if os.path.exists(d + '/' + psub):
+            return True
+    else:
+        return False
diff --git a/server/www/packages/packages-common/mako/ext/babelplugin.py b/server/www/packages/packages-common/mako/ext/babelplugin.py
new file mode 100644
index 0000000..53d62ba
--- /dev/null
+++ b/server/www/packages/packages-common/mako/ext/babelplugin.py
@@ -0,0 +1,50 @@
+# ext/babelplugin.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""gettext message extraction via Babel: http://babel.edgewall.org/"""
+from babel.messages.extract import extract_python
+from mako.ext.extract import MessageExtractor
+
+
+class BabelMakoExtractor(MessageExtractor):
+
+    def __init__(self, keywords, comment_tags, options):
+        self.keywords = keywords
+        self.options = options
+        self.config = {
+            'comment-tags': u' '.join(comment_tags),
+            'encoding': options.get('input_encoding',
+                                    options.get('encoding', None)),
+        }
+        super(BabelMakoExtractor, self).__init__()
+
+    def __call__(self, fileobj):
+        return self.process_file(fileobj)
+
+    def process_python(self, code, code_lineno, translator_strings):
+        comment_tags = self.config['comment-tags']
+        for lineno, funcname, messages, python_translator_comments \
+                in extract_python(code,
+                                  self.keywords, comment_tags, self.options):
+            yield (code_lineno + 
(lineno - 1), funcname, messages, + translator_strings + python_translator_comments) + + +def extract(fileobj, keywords, comment_tags, options): + """Extract messages from Mako templates. + + :param fileobj: the file-like object the messages should be extracted from + :param keywords: a list of keywords (i.e. function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :return: an iterator over ``(lineno, funcname, message, comments)`` tuples + :rtype: ``iterator`` + """ + extractor = BabelMakoExtractor(keywords, comment_tags, options) + for message in extractor(fileobj): + yield message diff --git a/server/www/packages/packages-common/mako/ext/beaker_cache.py b/server/www/packages/packages-common/mako/ext/beaker_cache.py new file mode 100644 index 0000000..c7c260d --- /dev/null +++ b/server/www/packages/packages-common/mako/ext/beaker_cache.py @@ -0,0 +1,76 @@ +"""Provide a :class:`.CacheImpl` for the Beaker caching system.""" + +from mako import exceptions + +from mako.cache import CacheImpl + +try: + from beaker import cache as beaker_cache +except: + has_beaker = False +else: + has_beaker = True + +_beaker_cache = None + + +class BeakerCacheImpl(CacheImpl): + + """A :class:`.CacheImpl` provided for the Beaker caching system. + + This plugin is used by default, based on the default + value of ``'beaker'`` for the ``cache_impl`` parameter of the + :class:`.Template` or :class:`.TemplateLookup` classes. + + """ + + def __init__(self, cache): + if not has_beaker: + raise exceptions.RuntimeException( + "Can't initialize Beaker plugin; Beaker is not installed.") + global _beaker_cache + if _beaker_cache is None: + if 'manager' in cache.template.cache_args: + _beaker_cache = cache.template.cache_args['manager'] + else: + _beaker_cache = beaker_cache.CacheManager() + super(BeakerCacheImpl, self).__init__(cache) + + def _get_cache(self, **kw): + expiretime = kw.pop('timeout', None) + if 'dir' in kw: + kw['data_dir'] = kw.pop('dir') + elif self.cache.template.module_directory: + kw['data_dir'] = self.cache.template.module_directory + + if 'manager' in kw: + kw.pop('manager') + + if kw.get('type') == 'memcached': + kw['type'] = 'ext:memcached' + + if 'region' in kw: + region = kw.pop('region') + cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw) + else: + cache = _beaker_cache.get_cache(self.cache.id, **kw) + cache_args = {'starttime': self.cache.starttime} + if expiretime: + cache_args['expiretime'] = expiretime + return cache, cache_args + + def get_or_create(self, key, creation_function, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, createfunc=creation_function, **kw) + + def put(self, key, value, **kw): + cache, kw = self._get_cache(**kw) + cache.put(key, value, **kw) + + def get(self, key, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, **kw) + + def invalidate(self, key, **kw): + cache, kw = self._get_cache(**kw) + cache.remove_value(key, **kw) diff --git a/server/www/packages/packages-common/mako/ext/extract.py b/server/www/packages/packages-common/mako/ext/extract.py new file mode 100644 index 0000000..8dd2e96 --- /dev/null +++ b/server/www/packages/packages-common/mako/ext/extract.py @@ -0,0 +1,108 @@ +import re +from mako import compat +from mako import lexer +from mako import parsetree + + +class MessageExtractor(object): + + def process_file(self, fileobj): + 
template_node = lexer.Lexer( + fileobj.read(), + input_encoding=self.config['encoding']).parse() + for extracted in self.extract_nodes(template_node.get_children()): + yield extracted + + def extract_nodes(self, nodes): + translator_comments = [] + in_translator_comments = False + input_encoding = self.config['encoding'] or 'ascii' + comment_tags = list( + filter(None, re.split(r'\s+', self.config['comment-tags']))) + + for node in nodes: + child_nodes = None + if in_translator_comments and \ + isinstance(node, parsetree.Text) and \ + not node.content.strip(): + # Ignore whitespace within translator comments + continue + + if isinstance(node, parsetree.Comment): + value = node.text.strip() + if in_translator_comments: + translator_comments.extend( + self._split_comment(node.lineno, value)) + continue + for comment_tag in comment_tags: + if value.startswith(comment_tag): + in_translator_comments = True + translator_comments.extend( + self._split_comment(node.lineno, value)) + continue + + if isinstance(node, parsetree.DefTag): + code = node.function_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.BlockTag): + code = node.body_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.CallTag): + code = node.code.code + child_nodes = node.nodes + elif isinstance(node, parsetree.PageTag): + code = node.body_decl.code + elif isinstance(node, parsetree.CallNamespaceTag): + code = node.expression + child_nodes = node.nodes + elif isinstance(node, parsetree.ControlLine): + if node.isend: + in_translator_comments = False + continue + code = node.text + elif isinstance(node, parsetree.Code): + in_translator_comments = False + code = node.code.code + elif isinstance(node, parsetree.Expression): + code = node.code.code + else: + continue + + # Comments don't apply unless they immediately preceed the message + if translator_comments and \ + translator_comments[-1][0] < node.lineno - 1: + translator_comments = [] + + translator_strings = [ + comment[1] for comment in translator_comments] + + if isinstance(code, compat.text_type): + code = code.encode(input_encoding, 'backslashreplace') + + used_translator_comments = False + # We add extra newline to work around a pybabel bug + # (see python-babel/babel#274, parse_encoding dies if the first + # input string of the input is non-ascii) + # Also, because we added it, we have to subtract one from + # node.lineno + code = compat.byte_buffer(compat.b('\n') + code) + + for message in self.process_python( + code, node.lineno - 1, translator_strings): + yield message + used_translator_comments = True + + if used_translator_comments: + translator_comments = [] + in_translator_comments = False + + if child_nodes: + for extracted in self.extract_nodes(child_nodes): + yield extracted + + @staticmethod + def _split_comment(lineno, comment): + """Return the multiline comment at lineno split into a list of + comment line numbers and the accompanying comment line""" + return [(lineno + index, line) for index, line in + enumerate(comment.splitlines())] diff --git a/server/www/packages/packages-common/mako/ext/linguaplugin.py b/server/www/packages/packages-common/mako/ext/linguaplugin.py new file mode 100644 index 0000000..46b0d6a --- /dev/null +++ b/server/www/packages/packages-common/mako/ext/linguaplugin.py @@ -0,0 +1,43 @@ +import io +from lingua.extractors import Extractor +from lingua.extractors import Message +from lingua.extractors import get_extractor +from mako.ext.extract import MessageExtractor +from mako import compat + + 
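The MessageExtractor base class above drives both the Babel entry point earlier in this diff and the Lingua extractor that follows. A rough sketch of the Babel hook in use (the template literal is illustrative, assuming Babel is installed):

    import io
    from mako.ext.babelplugin import extract

    src = io.BytesIO(b"## TRANSLATORS: greeting\n${_(u'Hello, world')}\n")
    for lineno, funcname, message, comments in extract(
            src, ['_'], ['TRANSLATORS:'], {}):
        print(lineno, funcname, message, comments)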
+class LinguaMakoExtractor(Extractor, MessageExtractor): + + '''Mako templates''' + extensions = ['.mako'] + default_config = { + 'encoding': 'utf-8', + 'comment-tags': '', + } + + def __call__(self, filename, options, fileobj=None): + self.options = options + self.filename = filename + self.python_extractor = get_extractor('x.py') + if fileobj is None: + fileobj = open(filename, 'rb') + return self.process_file(fileobj) + + def process_python(self, code, code_lineno, translator_strings): + source = code.getvalue().strip() + if source.endswith(compat.b(':')): + if source in (compat.b('try:'), compat.b('else:')) or source.startswith(compat.b('except')): + source = compat.b('') # Ignore try/except and else + elif source.startswith(compat.b('elif')): + source = source[2:] # Replace "elif" with "if" + source += compat.b('pass') + code = io.BytesIO(source) + for msg in self.python_extractor( + self.filename, self.options, code, code_lineno -1): + if translator_strings: + msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural, + msg.flags, + compat.u(' ').join( + translator_strings + [msg.comment]), + msg.tcomment, msg.location) + yield msg diff --git a/server/www/packages/packages-common/mako/ext/preprocessors.py b/server/www/packages/packages-common/mako/ext/preprocessors.py new file mode 100644 index 0000000..5624f70 --- /dev/null +++ b/server/www/packages/packages-common/mako/ext/preprocessors.py @@ -0,0 +1,20 @@ +# ext/preprocessors.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""preprocessing functions, used with the 'preprocessor' +argument on Template, TemplateLookup""" + +import re + + +def convert_comments(text): + """preprocess old style comments. 
+
+    example:
+
+    from mako.ext.preprocessors import convert_comments
+    t = Template(..., preprocessor=convert_comments)"""
+    return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
diff --git a/server/www/packages/packages-common/mako/ext/pygmentplugin.py b/server/www/packages/packages-common/mako/ext/pygmentplugin.py
new file mode 100644
index 0000000..1121c5d
--- /dev/null
+++ b/server/www/packages/packages-common/mako/ext/pygmentplugin.py
@@ -0,0 +1,127 @@
+# ext/pygmentplugin.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from pygments.lexers.web import \
+    HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
+from pygments.lexers.agile import PythonLexer, Python3Lexer
+from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
+    include, using
+from pygments.token import \
+    Text, Comment, Operator, Keyword, Name, String, Other
+from pygments.formatters.html import HtmlFormatter
+from pygments import highlight
+from mako import compat
+
+
+class MakoLexer(RegexLexer):
+    name = 'Mako'
+    aliases = ['mako']
+    filenames = ['*.mao']
+
+    tokens = {
+        'root': [
+            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
+             bygroups(Text, Comment.Preproc, Keyword, Other)),
+            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
+             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
+            (r'(\s*)(##[^\n]*)(\n|\Z)',
+             bygroups(Text, Comment.Preproc, Other)),
+            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
+            (r'(<%)([\w\.\:]+)',
+             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
+            (r'(</%)([\w\.\:]+)(>)',
+             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
+            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
+            (r'(<%(?:!?))(.*?)(%>)(?s)',
+             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+            (r'(\$\{)(.*?)(\})',
+             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+            (r'''(?sx)
+                (.+?)                # anything, followed by:
+                (?:
+                 (?<=\n)(?=%(?!%)|\#\#) |  # an eval or comment line
+                 (?=\#\*) |          # multiline comment
+                 (?=</?%) |          # a python block
+                                     # call start or end
+                 (?=\$\{) |          # a substitution
+                 (?<=\n)(?=\s*%) |
+                                     # - don't consume
+                 (\\\r?\n) |         # an escaped newline
+                 \Z                  # end of string
+                )''', bygroups(Other, Operator)),
+            (r'\s+', Text),
+        ],
+        'ondeftags': [
+            (r'<%', Comment.Preproc),
+            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
+            include('tag'),
+        ],
+        'tag': [
+            (r'((?:\w+)\s*=)\s*(")', bygroups(Name.Attribute, String),
+             'attr'),
+            (r'/?\s*>', Comment.Preproc, '#pop'),
+            (r'\s+', Text),
+        ],
+        'attr': [
+            ('".*?"', String, '#pop'),
+            ("'.*?'", String, '#pop'),
+            (r'[^\s>]+', String, '#pop'),
+        ],
+    }
+
+
+class MakoHtmlLexer(DelegatingLexer):
+    name = 'HTML+Mako'
+    aliases = ['html+mako']
+
+    def __init__(self, **options):
+        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
+                                            **options)
+
+
+class MakoXmlLexer(DelegatingLexer):
+    name = 'XML+Mako'
+    aliases = ['xml+mako']
+
+    def __init__(self, **options):
+        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
+                                           **options)
+
+
+class MakoJavascriptLexer(DelegatingLexer):
+    name = 'JavaScript+Mako'
+    aliases = ['js+mako', 'javascript+mako']
+
+    def __init__(self, **options):
+        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
+                                                  MakoLexer, **options)
+
+
+class MakoCssLexer(DelegatingLexer):
+    name = 'CSS+Mako'
+    aliases = ['css+mako']
+
+    def __init__(self, **options):
+        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
+                                           **options)
+
+
+pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
+                                        linenos=True)
+
+
+def syntax_highlight(filename='', language=None):
+    mako_lexer = MakoLexer()
+    if compat.py3k:
+        python_lexer = Python3Lexer()
+    else:
+        python_lexer = PythonLexer()
+    if filename.startswith('memory:') or language == 'mako':
+        return lambda string: highlight(string, mako_lexer,
+                                        pygments_html_formatter)
+    return lambda string: highlight(string, python_lexer,
+                                    pygments_html_formatter)
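The lexers and formatter above are what html_error_template() pulls in through _install_pygments(); they can equally be used with the stock Pygments API. A sketch, assuming Pygments is installed (the template snippet is illustrative):

    from pygments import highlight
    from mako.ext.pygmentplugin import MakoHtmlLexer, pygments_html_formatter

    source = "% if user:\n  Hello ${user.name}!\n% endif\n"
    print(highlight(source, MakoHtmlLexer(), pygments_html_formatter))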
diff --git a/server/www/packages/packages-common/mako/ext/turbogears.py b/server/www/packages/packages-common/mako/ext/turbogears.py
new file mode 100644
index 0000000..2e7d039
--- /dev/null
+++ b/server/www/packages/packages-common/mako/ext/turbogears.py
@@ -0,0 +1,58 @@
+# ext/turbogears.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from mako import compat
+from mako.lookup import TemplateLookup
+from mako.template import Template
+
+
+class TGPlugin(object):
+
+    """TurboGears compatible Template Plugin."""
+
+    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
+        self.extra_vars_func = extra_vars_func
+        self.extension = extension
+        if not options:
+            options = {}
+
+        # Pull the options out and initialize the lookup
+        lookup_options = {}
+        for k, v in options.items():
+            if k.startswith('mako.'):
+                lookup_options[k[5:]] = v
+            elif k in ['directories', 'filesystem_checks', 'module_directory']:
+                lookup_options[k] = v
+        self.lookup = TemplateLookup(**lookup_options)
+
+        self.tmpl_options = {}
+        # transfer lookup args to template args, based on those available
+        # in getargspec
+        for kw in compat.inspect_getargspec(Template.__init__)[0]:
+            if kw in lookup_options:
+                self.tmpl_options[kw] = lookup_options[kw]
+
+    def load_template(self, templatename, template_string=None):
+        """Loads a template from a file or a string"""
+        if template_string is not None:
+            return Template(template_string, **self.tmpl_options)
+        # Translate TG dot notation to normal / template path
+        if '/' not in templatename:
+            templatename = '/' + templatename.replace('.', '/') + '.' +\
+                self.extension
+
+        # Lookup template
+        return self.lookup.get_template(templatename)
+
+    def render(self, info, format="html", fragment=False, template=None):
+        if isinstance(template, compat.string_types):
+            template = self.load_template(template)
+
+        # Load extra vars func if provided
+        if self.extra_vars_func:
+            info.update(self.extra_vars_func())
+
+        return template.render(**info)
diff --git a/server/www/packages/packages-common/mako/filters.py b/server/www/packages/packages-common/mako/filters.py
new file mode 100644
index 0000000..525aeb8
--- /dev/null
+++ b/server/www/packages/packages-common/mako/filters.py
@@ -0,0 +1,209 @@
+# mako/filters.py
+# Copyright (C) 2006-2015 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+import re
+import codecs
+
+from mako.compat import quote_plus, unquote_plus, codepoint2name, \
+    name2codepoint
+
+from mako import compat
+
+xml_escapes = {
+    '&': '&amp;',
+    '>': '&gt;',
+    '<': '&lt;',
+    '"': '&#34;',   # also &quot; in html-only
+    "'": '&#39;'    # also &apos; in html-only
+}
+
+# XXX: &quot; is valid in HTML and XML
+#      &apos; is not valid HTML, but is valid XML
+
+
+def legacy_html_escape(s):
+    """legacy HTML escape for non-unicode mode."""
+    s = s.replace("&", "&amp;")
+    s = s.replace(">", "&gt;")
+    s = s.replace("<", "&lt;")
+    s = s.replace('"', "&#34;")
+    s = s.replace("'", "&#39;")
+    return s
+
+
+try:
+    import markupsafe
+    html_escape = markupsafe.escape
+except ImportError:
+    html_escape = legacy_html_escape
+
+
+def xml_escape(string):
+    return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
+
+
+def url_escape(string):
+    # convert into a list of octets
+    string = string.encode("utf8")
+    return quote_plus(string)
+
+
+def legacy_url_escape(string):
+    # convert into a list of octets
+    return quote_plus(string)
+
+
+def url_unescape(string):
+    text = unquote_plus(string)
+    if not is_ascii_str(text):
+        text = text.decode("utf8")
+    return text
+
+
+def trim(string):
+    return string.strip()
+
+
+class Decode(object):
+
+    def __getattr__(self, key):
+        def decode(x):
+            if isinstance(x, compat.text_type):
+                return x
+            elif not isinstance(x, compat.binary_type):
+                return decode(str(x))
+            else:
+                return compat.text_type(x, encoding=key)
+        return decode
+decode = Decode()
+
+
+_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
+
+
+def is_ascii_str(text):
+    return isinstance(text, str) and _ASCII_re.match(text)
+
+################################################################
+
+
+class XMLEntityEscaper(object):
+
+    def __init__(self, codepoint2name, name2codepoint):
+        self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
+                                      for c, n in codepoint2name.items()])
+        self.name2codepoint = name2codepoint
+
+    def escape_entities(self, text):
+        """Replace characters with their character entity references.
+
+        Only characters corresponding to a named entity are replaced.
+        """
+        return compat.text_type(text).translate(self.codepoint2entity)
+
+    def __escape(self, m):
+        codepoint = ord(m.group())
+        try:
+            return self.codepoint2entity[codepoint]
+        except (KeyError, IndexError):
+            return '&#x%X;' % codepoint
+
+    __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
+
+    def escape(self, text):
+        """Replace characters with their character references.
+
+        Replace characters by their named entity references.
+        Non-ASCII characters, if they do not have a named entity reference,
+        are replaced by numerical character references.
+
+        The return value is guaranteed to be ASCII.
+        """
+        return self.__escapable.sub(self.__escape, compat.text_type(text)
+                                    ).encode('ascii')
+
+    # XXX: This regexp will not match all valid XML entity names__.
+    # (It punts on details involving involving CombiningChars and Extenders.)
+    #
+    # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
+    __characterrefs = re.compile(r'''& (?:
+                                          \#(\d+)
+                                          | \#x([\da-f]+)
+                                          | ( (?!\d) [:\w] [-.:\w]+ )
+                                          ) ;''',
+                                 re.X | re.UNICODE)
+
+    def __unescape(self, m):
+        dval, hval, name = m.groups()
+        if dval:
+            codepoint = int(dval)
+        elif hval:
+            codepoint = int(hval, 16)
+        else:
+            codepoint = self.name2codepoint.get(name, 0xfffd)
+            # U+FFFD = "REPLACEMENT CHARACTER"
+        if codepoint < 128:
+            return chr(codepoint)
+        return chr(codepoint)
+
+    def unescape(self, text):
+        """Unescape character references.
+
+        All character references (both entity references and numerical
+        character references) are unescaped.
+        """
+        return self.__characterrefs.sub(self.__unescape, text)
+
+
+_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
+
+html_entities_escape = _html_entities_escaper.escape_entities
+html_entities_unescape = _html_entities_escaper.unescape
+
+
+def htmlentityreplace_errors(ex):
+    """An encoding error handler.
+
+    This python `codecs`_ error handler replaces unencodable
+    characters with HTML entities, or, if no HTML entity exists for
+    the character, XML character references.
+
+    >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+    'The cost was &euro;12.'
+    """
+    if isinstance(ex, UnicodeEncodeError):
+        # Handle encoding errors
+        bad_text = ex.object[ex.start:ex.end]
+        text = _html_entities_escaper.escape(bad_text)
+        return (compat.text_type(text), ex.end)
+    raise ex
+
+codecs.register_error('htmlentityreplace', htmlentityreplace_errors)
+
+
+# TODO: options to make this dynamic per-compilation will be added in a later
+# release
+DEFAULT_ESCAPES = {
+    'x': 'filters.xml_escape',
+    'h': 'filters.html_escape',
+    'u': 'filters.url_escape',
+    'trim': 'filters.trim',
+    'entity': 'filters.html_entities_escape',
+    'unicode': 'unicode',
+    'decode': 'decode',
+    'str': 'str',
+    'n': 'n'
+}
+
+if compat.py3k:
+    DEFAULT_ESCAPES.update({
+        'unicode': 'str'
+    })
+
+NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
+NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape'
+NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape'
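These functions are the targets of the expression-filter shorthands in DEFAULT_ESCAPES above: ${msg | h} compiles to a call through filters.html_escape, ${msg | u} to url_escape, and so on. A quick sketch (values illustrative):

    from mako.template import Template

    print(Template("${msg | h}").render(msg='<b>unsafe</b>'))
    # the codec error handler registered above:
    print(u'caf\u00e9'.encode('ascii', 'htmlentityreplace'))  # b'caf&eacute;'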
+ """ + return self.__escapable.sub(self.__escape, compat.text_type(text) + ).encode('ascii') + + # XXX: This regexp will not match all valid XML entity names__. + # (It punts on details involving involving CombiningChars and Extenders.) + # + # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef + __characterrefs = re.compile(r'''& (?: + \#(\d+) + | \#x([\da-f]+) + | ( (?!\d) [:\w] [-.:\w]+ ) + ) ;''', + re.X | re.UNICODE) + + def __unescape(self, m): + dval, hval, name = m.groups() + if dval: + codepoint = int(dval) + elif hval: + codepoint = int(hval, 16) + else: + codepoint = self.name2codepoint.get(name, 0xfffd) + # U+FFFD = "REPLACEMENT CHARACTER" + if codepoint < 128: + return chr(codepoint) + return chr(codepoint) + + def unescape(self, text): + """Unescape character references. + + All character references (both entity references and numerical + character references) are unescaped. + """ + return self.__characterrefs.sub(self.__unescape, text) + + +_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint) + +html_entities_escape = _html_entities_escaper.escape_entities +html_entities_unescape = _html_entities_escaper.unescape + + +def htmlentityreplace_errors(ex): + """An encoding error handler. + + This python `codecs`_ error handler replaces unencodable + characters with HTML entities, or, if no HTML entity exists for + the character, XML character references. + + >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace') + 'The cost was €12.' + """ + if isinstance(ex, UnicodeEncodeError): + # Handle encoding errors + bad_text = ex.object[ex.start:ex.end] + text = _html_entities_escaper.escape(bad_text) + return (compat.text_type(text), ex.end) + raise ex + +codecs.register_error('htmlentityreplace', htmlentityreplace_errors) + + +# TODO: options to make this dynamic per-compilation will be added in a later +# release +DEFAULT_ESCAPES = { + 'x': 'filters.xml_escape', + 'h': 'filters.html_escape', + 'u': 'filters.url_escape', + 'trim': 'filters.trim', + 'entity': 'filters.html_entities_escape', + 'unicode': 'unicode', + 'decode': 'decode', + 'str': 'str', + 'n': 'n' +} + +if compat.py3k: + DEFAULT_ESCAPES.update({ + 'unicode': 'str' + }) + +NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy() +NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape' +NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape' diff --git a/server/www/packages/packages-common/mako/lexer.py b/server/www/packages/packages-common/mako/lexer.py new file mode 100644 index 0000000..2fa08e4 --- /dev/null +++ b/server/www/packages/packages-common/mako/lexer.py @@ -0,0 +1,443 @@ +# mako/lexer.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides the Lexer class for parsing template strings into parse trees.""" + +import re +import codecs +from mako import parsetree, exceptions, compat +from mako.pygen import adjust_whitespace + +_regexp_cache = {} + + +class Lexer(object): + + def __init__(self, text, filename=None, + disable_unicode=False, + input_encoding=None, preprocessor=None): + self.text = text + self.filename = filename + self.template = parsetree.TemplateNode(self.filename) + self.matched_lineno = 1 + self.matched_charpos = 0 + self.lineno = 1 + self.match_position = 0 + self.tag = [] + self.control_line = [] + self.ternary_stack = [] + self.disable_unicode = disable_unicode + self.encoding = input_encoding + + if compat.py3k 
+        if compat.py3k and disable_unicode:
+            raise exceptions.UnsupportedError(
+                "Mako for Python 3 does not "
+                "support disabling Unicode")
+
+        if preprocessor is None:
+            self.preprocessor = []
+        elif not hasattr(preprocessor, '__iter__'):
+            self.preprocessor = [preprocessor]
+        else:
+            self.preprocessor = preprocessor
+
+    @property
+    def exception_kwargs(self):
+        return {'source': self.text,
+                'lineno': self.matched_lineno,
+                'pos': self.matched_charpos,
+                'filename': self.filename}
+
+    def match(self, regexp, flags=None):
+        """compile the given regexp, cache the reg, and call match_reg()."""
+
+        try:
+            reg = _regexp_cache[(regexp, flags)]
+        except KeyError:
+            if flags:
+                reg = re.compile(regexp, flags)
+            else:
+                reg = re.compile(regexp)
+            _regexp_cache[(regexp, flags)] = reg
+
+        return self.match_reg(reg)
+
+    def match_reg(self, reg):
+        """match the given regular expression object to the current text
+        position.
+
+        if a match occurs, update the current text and line position.
+
+        """
+
+        mp = self.match_position
+
+        match = reg.match(self.text, self.match_position)
+        if match:
+            (start, end) = match.span()
+            if end == start:
+                self.match_position = end + 1
+            else:
+                self.match_position = end
+            self.matched_lineno = self.lineno
+            lines = re.findall(r"\n", self.text[mp:self.match_position])
+            cp = mp - 1
+            while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'):
+                cp -= 1
+            self.matched_charpos = mp - cp
+            self.lineno += len(lines)
+            # print "MATCHED:", match.group(0), "LINE START:",
+            # self.matched_lineno, "LINE END:", self.lineno
+        # print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
+        # (match and "TRUE" or "FALSE")
+        return match
+
+    def parse_until_text(self, *text):
+        startpos = self.match_position
+        text_re = r'|'.join(text)
+        brace_level = 0
+        while True:
+            match = self.match(r'#.*\n')
+            if match:
+                continue
+            match = self.match(r'(\"\"\"|\'\'\'|\"|\')((?<!\\)\\\1|.)*?\1',
+                               re.S)
+            if match:
+                continue
+            match = self.match(r'(%s)' % text_re)
+            if match:
+                if match.group(1) == '}' and brace_level > 
0: + brace_level -= 1 + continue + return \ + self.text[startpos: + self.match_position - len(match.group(1))],\ + match.group(1) + match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) + if match: + brace_level += match.group(1).count('{') + brace_level -= match.group(1).count('}') + continue + raise exceptions.SyntaxException( + "Expected: %s" % + ','.join(text), + **self.exception_kwargs) + + def append_node(self, nodecls, *args, **kwargs): + kwargs.setdefault('source', self.text) + kwargs.setdefault('lineno', self.matched_lineno) + kwargs.setdefault('pos', self.matched_charpos) + kwargs['filename'] = self.filename + node = nodecls(*args, **kwargs) + if len(self.tag): + self.tag[-1].nodes.append(node) + else: + self.template.nodes.append(node) + # build a set of child nodes for the control line + # (used for loop variable detection) + # also build a set of child nodes on ternary control lines + # (used for determining if a pass needs to be auto-inserted + if self.control_line: + control_frame = self.control_line[-1] + control_frame.nodes.append(node) + if not (isinstance(node, parsetree.ControlLine) and + control_frame.is_ternary(node.keyword)): + if self.ternary_stack and self.ternary_stack[-1]: + self.ternary_stack[-1][-1].nodes.append(node) + if isinstance(node, parsetree.Tag): + if len(self.tag): + node.parent = self.tag[-1] + self.tag.append(node) + elif isinstance(node, parsetree.ControlLine): + if node.isend: + self.control_line.pop() + self.ternary_stack.pop() + elif node.is_primary: + self.control_line.append(node) + self.ternary_stack.append([]) + elif self.control_line and \ + self.control_line[-1].is_ternary(node.keyword): + self.ternary_stack[-1].append(node) + elif self.control_line and \ + not self.control_line[-1].is_ternary(node.keyword): + raise exceptions.SyntaxException( + "Keyword '%s' not a legal ternary for keyword '%s'" % + (node.keyword, self.control_line[-1].keyword), + **self.exception_kwargs) + + _coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n') + + def decode_raw_stream(self, text, decode_raw, known_encoding, filename): + """given string/unicode or bytes/string, determine encoding + from magic encoding comment, return body as unicode + or raw if decode_raw=False + + """ + if isinstance(text, compat.text_type): + m = self._coding_re.match(text) + encoding = m and m.group(1) or known_encoding or 'ascii' + return encoding, text + + if text.startswith(codecs.BOM_UTF8): + text = text[len(codecs.BOM_UTF8):] + parsed_encoding = 'utf-8' + m = self._coding_re.match(text.decode('utf-8', 'ignore')) + if m is not None and m.group(1) != 'utf-8': + raise exceptions.CompileException( + "Found utf-8 BOM in file, with conflicting " + "magic encoding comment of '%s'" % m.group(1), + text.decode('utf-8', 'ignore'), + 0, 0, filename) + else: + m = self._coding_re.match(text.decode('utf-8', 'ignore')) + if m: + parsed_encoding = m.group(1) + else: + parsed_encoding = known_encoding or 'ascii' + + if decode_raw: + try: + text = text.decode(parsed_encoding) + except UnicodeDecodeError: + raise exceptions.CompileException( + "Unicode decode operation of encoding '%s' failed" % + parsed_encoding, + text.decode('utf-8', 'ignore'), + 0, 0, filename) + + return parsed_encoding, text + + def parse(self): + self.encoding, self.text = self.decode_raw_stream( + self.text, + not self.disable_unicode, + self.encoding, + self.filename) + + for preproc in self.preprocessor: + self.text = preproc(self.text) + + # push the match marker past the + # encoding comment. 
+        self.match_reg(self._coding_re)
+
+        self.textlength = len(self.text)
+
+        while (True):
+            if self.match_position > self.textlength:
+                break
+
+            if self.match_end():
+                break
+            if self.match_expression():
+                continue
+            if self.match_control_line():
+                continue
+            if self.match_comment():
+                continue
+            if self.match_tag_start():
+                continue
+            if self.match_tag_end():
+                continue
+            if self.match_python_block():
+                continue
+            if self.match_text():
+                continue
+
+            if self.match_position > self.textlength:
+                break
+            raise exceptions.CompileException("assertion failed")
+
+        if len(self.tag):
+            raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
+                                             self.tag[-1].keyword,
+                                             **self.exception_kwargs)
+        if len(self.control_line):
+            raise exceptions.SyntaxException(
+                "Unterminated control keyword: '%s'" %
+                self.control_line[-1].keyword,
+                self.text,
+                self.control_line[-1].lineno,
+                self.control_line[-1].pos, self.filename)
+        return self.template
+
+    def match_tag_start(self):
+        match = self.match(r'''
+            \<%     # opening tag
+
+            ([\w\.\:]+)   # keyword
+
+            ((?:\s+\w+|\s*=\s*|".*?"|'.*?')*)  # attrname, = \
+                                               #        sign, string expression
+
+            \s*     # more whitespace
+
+            (/)?>   # closing
+
+            ''',
+
+            re.I | re.S | re.X)
+
+        if match:
+            keyword, attr, isend = match.groups()
+            self.keyword = keyword
+            attributes = {}
+            if attr:
+                for att in re.findall(
+                        r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
+                    key, val1, val2 = att
+                    text = val1 or val2
+                    text = text.replace('\r\n', '\n')
+                    attributes[key] = text
+            self.append_node(parsetree.Tag, keyword, attributes)
+            if isend:
+                self.tag.pop()
+            else:
+                if keyword == 'text':
+                    match = self.match(r'(.*?)(?=\</%text>)', re.S)
+                    if not match:
+                        raise exceptions.SyntaxException(
+                            "Unclosed tag: <%%%s>" %
+                            self.tag[-1].keyword,
+                            **self.exception_kwargs)
+                    self.append_node(parsetree.Text, match.group(1))
+                    return self.match_tag_end()
+            return True
+        else:
+            return False
+
+    def match_tag_end(self):
+        match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
+        if match:
+            if not len(self.tag):
+                raise exceptions.SyntaxException(
+                    "Closing tag without opening tag: </%%%s>" %
+                    match.group(1),
+                    **self.exception_kwargs)
+            elif self.tag[-1].keyword != match.group(1):
+                raise exceptions.SyntaxException(
+                    "Closing tag </%%%s> does not match tag: <%%%s>" %
+                    (match.group(1), self.tag[-1].keyword),
+                    **self.exception_kwargs)
+            self.tag.pop()
+            return True
+        else:
+            return False
+
+    def match_end(self):
+        match = self.match(r'\Z', re.S)
+        if match:
+            string = match.group()
+            if string:
+                return string
+            else:
+                return True
+        else:
+            return False
+
+    def match_text(self):
+        match = self.match(r"""
+                (.*?)         # anything, followed by:
+                (
+                 (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
+                                             # comment preceded by a
+                                             # consumed newline and whitespace
+                 |
+                 (?=\${)      # an expression
+                 |
+                 (?=</?[%&])  # a substitution or block or call start or end
+                              # - don't consume
+                 |
+                 (\\\r?\n)    # an escaped newline - throw away
+                 |
+                 \Z           # end of string
+                )""", re.X | re.S)
+
+        if match:
+            text = match.group(1)
+            if text:
+                self.append_node(parsetree.Text, text)
+            return True
+        else:
+            return False
+
+    def match_python_block(self):
+        match = self.match(r"<%(!)?")
+        if match:
+            line, pos = self.matched_lineno, self.matched_charpos
+            text, end = self.parse_until_text(r'%>')
+            # the trailing newline helps
+            # compiler.parse() not complain about indentation
+            text = adjust_whitespace(text) + "\n"
+            self.append_node(
+                parsetree.Code,
+                text,
+                match.group(1) == '!', lineno=line, pos=pos)
+            return True
+        else:
+            return False
+
+    def match_expression(self):
+        match = self.match(r"\${")
+        if match:
+            line, pos = self.matched_lineno, self.matched_charpos
+            text, end = self.parse_until_text(r'\|', r'}')
+            if end == '|':
+                escapes, end = self.parse_until_text(r'}')
+            else:
+                escapes = ""
+            text = text.replace('\r\n', '\n')
+            self.append_node(
+                parsetree.Expression,
+                text, escapes.strip(),
+                lineno=line, pos=pos)
+            return True
+        else:
+            return False
+
+    def match_control_line(self):
+        match = self.match(
+            r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
+            r"(?:\r?\n|\Z)", re.M)
+        if match:
+            operator = match.group(1)
+            text = match.group(2)
+            if operator == '%':
+                m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
+                if not m2:
+                    raise exceptions.SyntaxException(
+                        "Invalid control line: '%s'" %
+                        text,
+                        **self.exception_kwargs)
+                isend, keyword = m2.group(1, 2)
+                isend = (isend is not None)
+
+                if isend:
+                    if not len(self.control_line):
+                        raise exceptions.SyntaxException(
+                            "No starting keyword '%s' for '%s'" %
+                            (keyword, text),
+                            **self.exception_kwargs)
+                    elif self.control_line[-1].keyword != keyword:
+                        raise exceptions.SyntaxException(
+                            "Keyword '%s' doesn't match keyword '%s'" %
+                            (text, self.control_line[-1].keyword),
+                            **self.exception_kwargs)
+                self.append_node(parsetree.ControlLine, keyword, isend, text)
+            else:
+                self.append_node(parsetree.Comment, text)
+            return True
+        else:
+            return False
+
+    def match_comment(self):
+        """matches the multiline version of a comment"""
+        match = self.match(r"<%doc>(.*?)</%doc>", re.S)
+        if match:
+            self.append_node(parsetree.Comment, match.group(1))
+            return True
+        else:
+            return False
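The Lexer can be exercised on its own; parse() returns the TemplateNode that Mako's code-generation stage consumes. A small sketch (template text illustrative):

    from mako.lexer import Lexer

    tree = Lexer("% if name:\n  Hello ${name | h}!\n% endif\n").parse()
    for node in tree.get_children():
        print(type(node).__name__, getattr(node, 'keyword', ''))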
+ + """ + try: + self.get_template(uri) + return True + except exceptions.TemplateLookupException: + return False + + def get_template(self, uri, relativeto=None): + """Return a :class:`.Template` object corresponding to the given + ``uri``. + + The default implementation raises + :class:`.NotImplementedError`. Implementations should + raise :class:`.TemplateLookupException` if the given ``uri`` + cannot be resolved. + + :param uri: String URI of the template to be resolved. + :param relativeto: if present, the given ``uri`` is assumed to + be relative to this URI. + + """ + raise NotImplementedError() + + def filename_to_uri(self, uri, filename): + """Convert the given ``filename`` to a URI relative to + this :class:`.TemplateCollection`.""" + + return uri + + def adjust_uri(self, uri, filename): + """Adjust the given ``uri`` based on the calling ``filename``. + + When this method is called from the runtime, the + ``filename`` parameter is taken directly to the ``filename`` + attribute of the calling template. Therefore a custom + :class:`.TemplateCollection` subclass can place any string + identifier desired in the ``filename`` parameter of the + :class:`.Template` objects it constructs and have them come back + here. + + """ + return uri + + +class TemplateLookup(TemplateCollection): + + """Represent a collection of templates that locates template source files + from the local filesystem. + + The primary argument is the ``directories`` argument, the list of + directories to search: + + .. sourcecode:: python + + lookup = TemplateLookup(["/path/to/templates"]) + some_template = lookup.get_template("/admin_index.mako") + + The :class:`.TemplateLookup` can also be given :class:`.Template` objects + programatically using :meth:`.put_string` or :meth:`.put_template`: + + .. sourcecode:: python + + lookup = TemplateLookup() + lookup.put_string("base.html", ''' + ${self.next()} + ''') + lookup.put_string("hello.html", ''' + <%include file='base.html'/> + + Hello, world ! + ''') + + + :param directories: A list of directory names which will be + searched for a particular template URI. The URI is appended + to each directory and the filesystem checked. + + :param collection_size: Approximate size of the collection used + to store templates. If left at its default of ``-1``, the size + is unbounded, and a plain Python dictionary is used to + relate URI strings to :class:`.Template` instances. + Otherwise, a least-recently-used cache object is used which + will maintain the size of the collection approximately to + the number given. + + :param filesystem_checks: When at its default value of ``True``, + each call to :meth:`.TemplateLookup.get_template()` will + compare the filesystem last modified time to the time in + which an existing :class:`.Template` object was created. + This allows the :class:`.TemplateLookup` to regenerate a + new :class:`.Template` whenever the original source has + been updated. Set this to ``False`` for a very minor + performance increase. + + :param modulename_callable: A callable which, when present, + is passed the path of the source file as well as the + requested URI, and then returns the full path of the + generated Python module file. This is used to inject + alternate schemes for Python module location. If left at + its default of ``None``, the built in system of generation + based on ``module_directory`` plus ``uri`` is used. + + All other keyword parameters available for + :class:`.Template` are mirrored here. 
When new + :class:`.Template` objects are created, the keywords + established with this :class:`.TemplateLookup` are passed on + to each new :class:`.Template`. + + """ + + def __init__(self, + directories=None, + module_directory=None, + filesystem_checks=True, + collection_size=-1, + format_exceptions=False, + error_handler=None, + disable_unicode=False, + bytestring_passthrough=False, + output_encoding=None, + encoding_errors='strict', + + cache_args=None, + cache_impl='beaker', + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + + modulename_callable=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + input_encoding=None, + preprocessor=None, + lexer_cls=None): + + self.directories = [posixpath.normpath(d) for d in + util.to_list(directories, ()) + ] + self.module_directory = module_directory + self.modulename_callable = modulename_callable + self.filesystem_checks = filesystem_checks + self.collection_size = collection_size + + if cache_args is None: + cache_args = {} + # transfer deprecated cache_* args + if cache_dir: + cache_args.setdefault('dir', cache_dir) + if cache_url: + cache_args.setdefault('url', cache_url) + if cache_type: + cache_args.setdefault('type', cache_type) + + self.template_args = { + 'format_exceptions': format_exceptions, + 'error_handler': error_handler, + 'disable_unicode': disable_unicode, + 'bytestring_passthrough': bytestring_passthrough, + 'output_encoding': output_encoding, + 'cache_impl': cache_impl, + 'encoding_errors': encoding_errors, + 'input_encoding': input_encoding, + 'module_directory': module_directory, + 'module_writer': module_writer, + 'cache_args': cache_args, + 'cache_enabled': cache_enabled, + 'default_filters': default_filters, + 'buffer_filters': buffer_filters, + 'strict_undefined': strict_undefined, + 'imports': imports, + 'future_imports': future_imports, + 'enable_loop': enable_loop, + 'preprocessor': preprocessor, + 'lexer_cls': lexer_cls + } + + if collection_size == -1: + self._collection = {} + self._uri_cache = {} + else: + self._collection = util.LRUCache(collection_size) + self._uri_cache = util.LRUCache(collection_size) + self._mutex = threading.Lock() + + def get_template(self, uri): + """Return a :class:`.Template` object corresponding to the given + ``uri``. + + .. note:: The ``relativeto`` argument is not supported here at + the moment. + + """ + + try: + if self.filesystem_checks: + return self._check(uri, self._collection[uri]) + else: + return self._collection[uri] + except KeyError: + u = re.sub(r'^\/+', '', uri) + for dir in self.directories: + # make sure the path seperators are posix - os.altsep is empty + # on POSIX and cannot be used. 
+ dir = dir.replace(os.path.sep, posixpath.sep) + srcfile = posixpath.normpath(posixpath.join(dir, u)) + if os.path.isfile(srcfile): + return self._load(srcfile, uri) + else: + raise exceptions.TopLevelLookupException( + "Cant locate template for uri %r" % uri) + + def adjust_uri(self, uri, relativeto): + """Adjust the given ``uri`` based on the given relative URI.""" + + key = (uri, relativeto) + if key in self._uri_cache: + return self._uri_cache[key] + + if uri[0] != '/': + if relativeto is not None: + v = self._uri_cache[key] = posixpath.join( + posixpath.dirname(relativeto), uri) + else: + v = self._uri_cache[key] = '/' + uri + else: + v = self._uri_cache[key] = uri + return v + + def filename_to_uri(self, filename): + """Convert the given ``filename`` to a URI relative to + this :class:`.TemplateCollection`.""" + + try: + return self._uri_cache[filename] + except KeyError: + value = self._relativeize(filename) + self._uri_cache[filename] = value + return value + + def _relativeize(self, filename): + """Return the portion of a filename that is 'relative' + to the directories in this lookup. + + """ + + filename = posixpath.normpath(filename) + for dir in self.directories: + if filename[0:len(dir)] == dir: + return filename[len(dir):] + else: + return None + + def _load(self, filename, uri): + self._mutex.acquire() + try: + try: + # try returning from collection one + # more time in case concurrent thread already loaded + return self._collection[uri] + except KeyError: + pass + try: + if self.modulename_callable is not None: + module_filename = self.modulename_callable(filename, uri) + else: + module_filename = None + self._collection[uri] = template = Template( + uri=uri, + filename=posixpath.normpath(filename), + lookup=self, + module_filename=module_filename, + **self.template_args) + return template + except: + # if compilation fails etc, ensure + # template is removed from collection, + # re-raise + self._collection.pop(uri, None) + raise + finally: + self._mutex.release() + + def _check(self, uri, template): + if template.filename is None: + return template + + try: + template_stat = os.stat(template.filename) + if template.module._modified_time < \ + template_stat[stat.ST_MTIME]: + self._collection.pop(uri, None) + return self._load(template.filename, uri) + else: + return template + except OSError: + self._collection.pop(uri, None) + raise exceptions.TemplateLookupException( + "Cant locate template for uri %r" % uri) + + def put_string(self, uri, text): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given string of + ``text``. + + """ + self._collection[uri] = Template( + text, + lookup=self, + uri=uri, + **self.template_args) + + def put_template(self, uri, template): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given + :class:`.Template` object. 
+ + """ + self._collection[uri] = template diff --git a/server/www/packages/packages-common/mako/parsetree.py b/server/www/packages/packages-common/mako/parsetree.py new file mode 100644 index 0000000..e7af4bc --- /dev/null +++ b/server/www/packages/packages-common/mako/parsetree.py @@ -0,0 +1,616 @@ +# mako/parsetree.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""defines the parse tree components for Mako templates.""" + +from mako import exceptions, ast, util, filters, compat +import re + + +class Node(object): + + """base class for a Node in the parse tree.""" + + def __init__(self, source, lineno, pos, filename): + self.source = source + self.lineno = lineno + self.pos = pos + self.filename = filename + + @property + def exception_kwargs(self): + return {'source': self.source, 'lineno': self.lineno, + 'pos': self.pos, 'filename': self.filename} + + def get_children(self): + return [] + + def accept_visitor(self, visitor): + def traverse(node): + for n in node.get_children(): + n.accept_visitor(visitor) + + method = getattr(visitor, "visit" + self.__class__.__name__, traverse) + method(self) + + +class TemplateNode(Node): + + """a 'container' node that stores the overall collection of nodes.""" + + def __init__(self, filename): + super(TemplateNode, self).__init__('', 0, 0, filename) + self.nodes = [] + self.page_attributes = {} + + def get_children(self): + return self.nodes + + def __repr__(self): + return "TemplateNode(%s, %r)" % ( + util.sorted_dict_repr(self.page_attributes), + self.nodes) + + +class ControlLine(Node): + + """defines a control line, a line-oriented python line or end tag. + + e.g.:: + + % if foo: + (markup) + % endif + + """ + + has_loop_context = False + + def __init__(self, keyword, isend, text, **kwargs): + super(ControlLine, self).__init__(**kwargs) + self.text = text + self.keyword = keyword + self.isend = isend + self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with'] + self.nodes = [] + if self.isend: + self._declared_identifiers = [] + self._undeclared_identifiers = [] + else: + code = ast.PythonFragment(text, **self.exception_kwargs) + self._declared_identifiers = code.declared_identifiers + self._undeclared_identifiers = code.undeclared_identifiers + + def get_children(self): + return self.nodes + + def declared_identifiers(self): + return self._declared_identifiers + + def undeclared_identifiers(self): + return self._undeclared_identifiers + + def is_ternary(self, keyword): + """return true if the given keyword is a ternary keyword + for this ControlLine""" + + return keyword in { + 'if': set(['else', 'elif']), + 'try': set(['except', 'finally']), + 'for': set(['else']) + }.get(self.keyword, []) + + def __repr__(self): + return "ControlLine(%r, %r, %r, %r)" % ( + self.keyword, + self.text, + self.isend, + (self.lineno, self.pos) + ) + + +class Text(Node): + + """defines plain text in the template.""" + + def __init__(self, content, **kwargs): + super(Text, self).__init__(**kwargs) + self.content = content + + def __repr__(self): + return "Text(%r, %r)" % (self.content, (self.lineno, self.pos)) + + +class Code(Node): + + """defines a Python code block, either inline or module level. + + e.g.:: + + inline: + <% + x = 12 + %> + + module level: + <%! 
+            import logger
+        %>
+
+    """
+
+    def __init__(self, text, ismodule, **kwargs):
+        super(Code, self).__init__(**kwargs)
+        self.text = text
+        self.ismodule = ismodule
+        self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+    def declared_identifiers(self):
+        return self.code.declared_identifiers
+
+    def undeclared_identifiers(self):
+        return self.code.undeclared_identifiers
+
+    def __repr__(self):
+        return "Code(%r, %r, %r)" % (
+            self.text,
+            self.ismodule,
+            (self.lineno, self.pos)
+        )
+
+
+class Comment(Node):
+
+    """defines a comment line.
+
+    # this is a comment
+
+    """
+
+    def __init__(self, text, **kwargs):
+        super(Comment, self).__init__(**kwargs)
+        self.text = text
+
+    def __repr__(self):
+        return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
+
+
+class Expression(Node):
+
+    """defines an inline expression.
+
+    ${x+y}
+
+    """
+
+    def __init__(self, text, escapes, **kwargs):
+        super(Expression, self).__init__(**kwargs)
+        self.text = text
+        self.escapes = escapes
+        self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
+        self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+    def declared_identifiers(self):
+        return []
+
+    def undeclared_identifiers(self):
+        # TODO: make the "filter" shortcut list configurable at parse/gen time
+        return self.code.undeclared_identifiers.union(
+            self.escapes_code.undeclared_identifiers.difference(
+                set(filters.DEFAULT_ESCAPES.keys())
+            )
+        ).difference(self.code.declared_identifiers)
+
+    def __repr__(self):
+        return "Expression(%r, %r, %r)" % (
+            self.text,
+            self.escapes_code.args,
+            (self.lineno, self.pos)
+        )
+
+
+class _TagMeta(type):
+
+    """metaclass to allow Tag to produce a subclass according to
+    its keyword"""
+
+    _classmap = {}
+
+    def __init__(cls, clsname, bases, dict):
+        if getattr(cls, '__keyword__', None) is not None:
+            cls._classmap[cls.__keyword__] = cls
+        super(_TagMeta, cls).__init__(clsname, bases, dict)
+
+    def __call__(cls, keyword, attributes, **kwargs):
+        if ":" in keyword:
+            ns, defname = keyword.split(':')
+            return type.__call__(CallNamespaceTag, ns, defname,
+                                 attributes, **kwargs)
+
+        try:
+            cls = _TagMeta._classmap[keyword]
+        except KeyError:
+            raise exceptions.CompileException(
+                "No such tag: '%s'" % keyword,
+                source=kwargs['source'],
+                lineno=kwargs['lineno'],
+                pos=kwargs['pos'],
+                filename=kwargs['filename']
+            )
+        return type.__call__(cls, keyword, attributes, **kwargs)
+
+
+class Tag(compat.with_metaclass(_TagMeta, Node)):
+
+    """abstract base class for tags.
+
+    <%sometag/>
+
+    <%someothertag>
+        stuff
+    </%someothertag>
+
+    """
+    __keyword__ = None
+
+    def __init__(self, keyword, attributes, expressions,
+                 nonexpressions, required, **kwargs):
+        """construct a new Tag instance.
+
+        this constructor not called directly, and is only called
+        by subclasses.
+ + :param keyword: the tag keyword + + :param attributes: raw dictionary of attribute key/value pairs + + :param expressions: a set of identifiers that are legal attributes, + which can also contain embedded expressions + + :param nonexpressions: a set of identifiers that are legal + attributes, which cannot contain embedded expressions + + :param \**kwargs: + other arguments passed to the Node superclass (lineno, pos) + + """ + super(Tag, self).__init__(**kwargs) + self.keyword = keyword + self.attributes = attributes + self._parse_attributes(expressions, nonexpressions) + missing = [r for r in required if r not in self.parsed_attributes] + if len(missing): + raise exceptions.CompileException( + "Missing attribute(s): %s" % + ",".join([repr(m) for m in missing]), + **self.exception_kwargs) + self.parent = None + self.nodes = [] + + def is_root(self): + return self.parent is None + + def get_children(self): + return self.nodes + + def _parse_attributes(self, expressions, nonexpressions): + undeclared_identifiers = set() + self.parsed_attributes = {} + for key in self.attributes: + if key in expressions: + expr = [] + for x in re.compile(r'(\${.+?})', + re.S).split(self.attributes[key]): + m = re.compile(r'^\${(.+?)}$', re.S).match(x) + if m: + code = ast.PythonCode(m.group(1).rstrip(), + **self.exception_kwargs) + # we aren't discarding "declared_identifiers" here, + # which we do so that list comprehension-declared + # variables aren't counted. As yet can't find a + # condition that requires it here. + undeclared_identifiers = \ + undeclared_identifiers.union( + code.undeclared_identifiers) + expr.append('(%s)' % m.group(1)) + else: + if x: + expr.append(repr(x)) + self.parsed_attributes[key] = " + ".join(expr) or repr('') + elif key in nonexpressions: + if re.search(r'\${.+?}', self.attributes[key]): + raise exceptions.CompileException( + "Attibute '%s' in tag '%s' does not allow embedded " + "expressions" % (key, self.keyword), + **self.exception_kwargs) + self.parsed_attributes[key] = repr(self.attributes[key]) + else: + raise exceptions.CompileException( + "Invalid attribute for tag '%s': '%s'" % + (self.keyword, key), + **self.exception_kwargs) + self.expression_undeclared_identifiers = undeclared_identifiers + + def declared_identifiers(self): + return [] + + def undeclared_identifiers(self): + return self.expression_undeclared_identifiers + + def __repr__(self): + return "%s(%r, %s, %r, %r)" % (self.__class__.__name__, + self.keyword, + util.sorted_dict_repr(self.attributes), + (self.lineno, self.pos), + self.nodes + ) + + +class IncludeTag(Tag): + __keyword__ = 'include' + + def __init__(self, keyword, attributes, **kwargs): + super(IncludeTag, self).__init__( + keyword, + attributes, + ('file', 'import', 'args'), + (), ('file',), **kwargs) + self.page_args = ast.PythonCode( + "__DUMMY(%s)" % attributes.get('args', ''), + **self.exception_kwargs) + + def declared_identifiers(self): + return [] + + def undeclared_identifiers(self): + identifiers = self.page_args.undeclared_identifiers.\ + difference(set(["__DUMMY"])).\ + difference(self.page_args.declared_identifiers) + return identifiers.union(super(IncludeTag, self). 
+ undeclared_identifiers()) + + +class NamespaceTag(Tag): + __keyword__ = 'namespace' + + def __init__(self, keyword, attributes, **kwargs): + super(NamespaceTag, self).__init__( + keyword, attributes, + ('file',), + ('name', 'inheritable', + 'import', 'module'), + (), **kwargs) + + self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self)))) + if 'name' not in attributes and 'import' not in attributes: + raise exceptions.CompileException( + "'name' and/or 'import' attributes are required " + "for <%namespace>", + **self.exception_kwargs) + if 'file' in attributes and 'module' in attributes: + raise exceptions.CompileException( + "<%namespace> may only have one of 'file' or 'module'", + **self.exception_kwargs + ) + + def declared_identifiers(self): + return [] + + +class TextTag(Tag): + __keyword__ = 'text' + + def __init__(self, keyword, attributes, **kwargs): + super(TextTag, self).__init__( + keyword, + attributes, (), + ('filter'), (), **kwargs) + self.filter_args = ast.ArgumentList( + attributes.get('filter', ''), + **self.exception_kwargs) + + def undeclared_identifiers(self): + return self.filter_args.\ + undeclared_identifiers.\ + difference(filters.DEFAULT_ESCAPES.keys()).union( + self.expression_undeclared_identifiers + ) + + +class DefTag(Tag): + __keyword__ = 'def' + + def __init__(self, keyword, attributes, **kwargs): + expressions = ['buffered', 'cached'] + [ + c for c in attributes if c.startswith('cache_')] + + super(DefTag, self).__init__( + keyword, + attributes, + expressions, + ('name', 'filter', 'decorator'), + ('name',), + **kwargs) + name = attributes['name'] + if re.match(r'^[\w_]+$', name): + raise exceptions.CompileException( + "Missing parenthesis in %def", + **self.exception_kwargs) + self.function_decl = ast.FunctionDecl("def " + name + ":pass", + **self.exception_kwargs) + self.name = self.function_decl.funcname + self.decorator = attributes.get('decorator', '') + self.filter_args = ast.ArgumentList( + attributes.get('filter', ''), + **self.exception_kwargs) + + is_anonymous = False + is_block = False + + @property + def funcname(self): + return self.function_decl.funcname + + def get_argument_expressions(self, **kw): + return self.function_decl.get_argument_expressions(**kw) + + def declared_identifiers(self): + return self.function_decl.allargnames + + def undeclared_identifiers(self): + res = [] + for c in self.function_decl.defaults: + res += list(ast.PythonCode(c, **self.exception_kwargs). + undeclared_identifiers) + return set(res).union( + self.filter_args. + undeclared_identifiers. 
+ difference(filters.DEFAULT_ESCAPES.keys()) + ).union( + self.expression_undeclared_identifiers + ).difference( + self.function_decl.allargnames + ) + + +class BlockTag(Tag): + __keyword__ = 'block' + + def __init__(self, keyword, attributes, **kwargs): + expressions = ['buffered', 'cached', 'args'] + [ + c for c in attributes if c.startswith('cache_')] + + super(BlockTag, self).__init__( + keyword, + attributes, + expressions, + ('name', 'filter', 'decorator'), + (), + **kwargs) + name = attributes.get('name') + if name and not re.match(r'^[\w_]+$', name): + raise exceptions.CompileException( + "%block may not specify an argument signature", + **self.exception_kwargs) + if not name and attributes.get('args', None): + raise exceptions.CompileException( + "Only named %blocks may specify args", + **self.exception_kwargs + ) + self.body_decl = ast.FunctionArgs(attributes.get('args', ''), + **self.exception_kwargs) + + self.name = name + self.decorator = attributes.get('decorator', '') + self.filter_args = ast.ArgumentList( + attributes.get('filter', ''), + **self.exception_kwargs) + + is_block = True + + @property + def is_anonymous(self): + return self.name is None + + @property + def funcname(self): + return self.name or "__M_anon_%d" % (self.lineno, ) + + def get_argument_expressions(self, **kw): + return self.body_decl.get_argument_expressions(**kw) + + def declared_identifiers(self): + return self.body_decl.allargnames + + def undeclared_identifiers(self): + return (self.filter_args. + undeclared_identifiers. + difference(filters.DEFAULT_ESCAPES.keys()) + ).union(self.expression_undeclared_identifiers) + + +class CallTag(Tag): + __keyword__ = 'call' + + def __init__(self, keyword, attributes, **kwargs): + super(CallTag, self).__init__(keyword, attributes, + ('args'), ('expr',), ('expr',), **kwargs) + self.expression = attributes['expr'] + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs(attributes.get('args', ''), + **self.exception_kwargs) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.\ + difference(self.code.declared_identifiers) + + +class CallNamespaceTag(Tag): + + def __init__(self, namespace, defname, attributes, **kwargs): + super(CallNamespaceTag, self).__init__( + namespace + ":" + defname, + attributes, + tuple(attributes.keys()) + ('args', ), + (), + (), + **kwargs) + + self.expression = "%s.%s(%s)" % ( + namespace, + defname, + ",".join(["%s=%s" % (k, v) for k, v in + self.parsed_attributes.items() + if k != 'args']) + ) + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get('args', ''), + **self.exception_kwargs) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.\ + difference(self.code.declared_identifiers) + + +class InheritTag(Tag): + __keyword__ = 'inherit' + + def __init__(self, keyword, attributes, **kwargs): + super(InheritTag, self).__init__( + keyword, attributes, + ('file',), (), ('file',), **kwargs) + + +class PageTag(Tag): + __keyword__ = 'page' + + def __init__(self, keyword, attributes, **kwargs): + expressions = \ + ['cached', 'args', 'expression_filter', 'enable_loop'] + \ + [c for c in attributes if c.startswith('cache_')] + + super(PageTag, 
self).__init__( + keyword, + attributes, + expressions, + (), + (), + **kwargs) + self.body_decl = ast.FunctionArgs(attributes.get('args', ''), + **self.exception_kwargs) + self.filter_args = ast.ArgumentList( + attributes.get('expression_filter', ''), + **self.exception_kwargs) + + def declared_identifiers(self): + return self.body_decl.allargnames diff --git a/server/www/packages/packages-common/mako/pygen.py b/server/www/packages/packages-common/mako/pygen.py new file mode 100644 index 0000000..5d87bbd --- /dev/null +++ b/server/www/packages/packages-common/mako/pygen.py @@ -0,0 +1,303 @@ +# mako/pygen.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""utilities for generating and formatting literal Python code.""" + +import re +from mako import exceptions + + +class PythonPrinter(object): + + def __init__(self, stream): + # indentation counter + self.indent = 0 + + # a stack storing information about why we incremented + # the indentation counter, to help us determine if we + # should decrement it + self.indent_detail = [] + + # the string of whitespace multiplied by the indent + # counter to produce a line + self.indentstring = " " + + # the stream we are writing to + self.stream = stream + + # current line number + self.lineno = 1 + + # a list of lines that represents a buffered "block" of code, + # which can be later printed relative to an indent level + self.line_buffer = [] + + self.in_indent_lines = False + + self._reset_multi_line_flags() + + # mapping of generated python lines to template + # source lines + self.source_map = {} + + def _update_lineno(self, num): + self.lineno += num + + def start_source(self, lineno): + if self.lineno not in self.source_map: + self.source_map[self.lineno] = lineno + + def write_blanks(self, num): + self.stream.write("\n" * num) + self._update_lineno(num) + + def write_indented_block(self, block): + """print a line or lines of python which already contain indentation. + + The indentation of the total block of lines will be adjusted to that of + the current indent level.""" + self.in_indent_lines = False + for l in re.split(r'\r?\n', block): + self.line_buffer.append(l) + self._update_lineno(1) + + def writelines(self, *lines): + """print a series of lines of python.""" + for line in lines: + self.writeline(line) + + def writeline(self, line): + """print a line of python, indenting it according to the current + indent level. + + this also adjusts the indentation counter according to the + content of the line. + + """ + + if not self.in_indent_lines: + self._flush_adjusted_lines() + self.in_indent_lines = True + + if ( + line is None or + re.match(r"^\s*#", line) or + re.match(r"^\s*$", line) + ): + hastext = False + else: + hastext = True + + is_comment = line and len(line) and line[0] == '#' + + # see if this line should decrease the indentation level + if ( + not is_comment and + (not hastext or self._is_unindentor(line)) + ): + + if self.indent > 0: + self.indent -= 1 + # if the indent_detail stack is empty, the user + # probably put extra closures - the resulting + # module wont compile. 
+ if len(self.indent_detail) == 0: + raise exceptions.SyntaxException( + "Too many whitespace closures") + self.indent_detail.pop() + + if line is None: + return + + # write the line + self.stream.write(self._indent_line(line) + "\n") + self._update_lineno(len(line.split("\n"))) + + # see if this line should increase the indentation level. + # note that a line can both decrease (before printing) and + # then increase (after printing) the indentation level. + + if re.search(r":[ \t]*(?:#.*)?$", line): + # increment indentation count, and also + # keep track of what the keyword was that indented us, + # if it is a python compound statement keyword + # where we might have to look for an "unindent" keyword + match = re.match(r"^\s*(if|try|elif|while|for|with)", line) + if match: + # it's a "compound" keyword, so we will check for "unindentors" + indentor = match.group(1) + self.indent += 1 + self.indent_detail.append(indentor) + else: + indentor = None + # it's not a "compound" keyword, but let's also + # test for valid Python keywords that might be indenting us, + # else assume it's a non-indenting line + m2 = re.match(r"^\s*(def|class|else|elif|except|finally)", + line) + if m2: + self.indent += 1 + self.indent_detail.append(indentor) + + def close(self): + """close this printer, flushing any remaining lines.""" + self._flush_adjusted_lines() + + def _is_unindentor(self, line): + """return true if the given line is an 'unindentor', + relative to the last 'indent' event received. + + """ + + # no indentation detail has been pushed on; return False + if len(self.indent_detail) == 0: + return False + + indentor = self.indent_detail[-1] + + # the last indent keyword we grabbed is not a + # compound statement keyword; return False + if indentor is None: + return False + + # if the current line doesn't have one of the "unindentor" keywords, + # return False + match = re.match(r"^\s*(else|elif|except|finally).*\:", line) + if not match: + return False + + # whitespace matches up, we have a compound indentor, + # and this line has an unindentor, this + # is probably good enough + return True + + # should we decide that it's not good enough, here's + # more stuff to check. + # keyword = match.group(1) + + # match the original indent keyword + # for crit in [ + # (r'if|elif', r'else|elif'), + # (r'try', r'except|finally|else'), + # (r'while|for', r'else'), + # ]: + # if re.match(crit[0], indentor) and re.match(crit[1], keyword): + # return True + + # return False + + def _indent_line(self, line, stripspace=''): + """indent the given line according to the current indent level. + + stripspace is a string of space that will be truncated from the + start of the line before indenting.""" + + return re.sub(r"^%s" % stripspace, self.indentstring + * self.indent, line) + + def _reset_multi_line_flags(self): + """reset the flags which would indicate we are in a backslashed + or triple-quoted section.""" + + self.backslashed, self.triplequoted = False, False + + def _in_multi_line(self, line): + """return true if the given line is part of a multi-line block, + via backslash or triple-quote.""" + + # we are only looking for explicitly joined lines here, not + # implicit ones (i.e. brackets, braces etc.).
this is just to + # guard against the possibility of modifying the space inside of + # a literal multiline string with unfortunately placed + # whitespace + + current_state = (self.backslashed or self.triplequoted) + + if re.search(r"\\$", line): + self.backslashed = True + else: + self.backslashed = False + + triples = len(re.findall(r"\"\"\"|\'\'\'", line)) + if triples == 1 or triples % 2 != 0: + self.triplequoted = not self.triplequoted + + return current_state + + def _flush_adjusted_lines(self): + stripspace = None + self._reset_multi_line_flags() + + for entry in self.line_buffer: + if self._in_multi_line(entry): + self.stream.write(entry + "\n") + else: + entry = entry.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry): + stripspace = re.match(r"^([ \t]*)", entry).group(1) + self.stream.write(self._indent_line(entry, stripspace) + "\n") + + self.line_buffer = [] + self._reset_multi_line_flags() + + +def adjust_whitespace(text): + """remove the left-whitespace margin of a block of Python code.""" + + state = [False, False] + (backslashed, triplequoted) = (0, 1) + + def in_multi_line(line): + start_state = (state[backslashed] or state[triplequoted]) + + if re.search(r"\\$", line): + state[backslashed] = True + else: + state[backslashed] = False + + def match(reg, t): + m = re.match(reg, t) + if m: + return m, t[len(m.group(0)):] + else: + return None, t + + while line: + if state[triplequoted]: + m, line = match(r"%s" % state[triplequoted], line) + if m: + state[triplequoted] = False + else: + m, line = match(r".*?(?=%s|$)" % state[triplequoted], line) + else: + m, line = match(r'#', line) + if m: + return start_state + + m, line = match(r"\"\"\"|\'\'\'", line) + if m: + state[triplequoted] = m.group(0) + continue + + m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line) + + return start_state + + def _indent_line(line, stripspace=''): + return re.sub(r"^%s" % stripspace, '', line) + + lines = [] + stripspace = None + + for line in re.split(r'\r?\n', text): + if in_multi_line(line): + lines.append(line) + else: + line = line.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", line): + stripspace = re.match(r"^([ \t]*)", line).group(1) + lines.append(_indent_line(line, stripspace)) + return "\n".join(lines) diff --git a/server/www/packages/packages-common/mako/pyparser.py b/server/www/packages/packages-common/mako/pyparser.py new file mode 100644 index 0000000..96e5335 --- /dev/null +++ b/server/www/packages/packages-common/mako/pyparser.py @@ -0,0 +1,233 @@ +# mako/pyparser.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Handles parsing of Python code. + +Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler +module is used. 
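(A minimal sketch of the identifier analysis this parser drives, assuming Mako's internal mako.ast.PythonCode wrapper, which runs the FindIdentifiers visitor defined below; illustrative only, not part of the committed file.)

    from mako import ast

    # PythonCode parses the fragment and walks it with FindIdentifiers; the
    # keyword arguments are only used to decorate a SyntaxException raised
    # on malformed input.
    code = ast.PythonCode("y = x + 1", lineno=1, pos=1, filename="<memory>")
    print(code.declared_identifiers)    # {'y'}
    print(code.undeclared_identifiers)  # {'x'}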
+""" + +from mako import exceptions, util, compat +from mako.compat import arg_stringname +import operator + +if compat.py3k: + # words that cannot be assigned to (notably + # smaller than the total keys in __builtins__) + reserved = set(['True', 'False', 'None', 'print']) + + # the "id" attribute on a function node + arg_id = operator.attrgetter('arg') +else: + # words that cannot be assigned to (notably + # smaller than the total keys in __builtins__) + reserved = set(['True', 'False', 'None']) + + # the "id" attribute on a function node + arg_id = operator.attrgetter('id') + +import _ast +util.restore__ast(_ast) +from mako import _ast_util + + +def parse(code, mode='exec', **exception_kwargs): + """Parse an expression into AST""" + + try: + return _ast_util.parse(code, '', mode) + except Exception: + raise exceptions.SyntaxException( + "(%s) %s (%r)" % ( + compat.exception_as().__class__.__name__, + compat.exception_as(), + code[0:50] + ), **exception_kwargs) + + +class FindIdentifiers(_ast_util.NodeVisitor): + + def __init__(self, listener, **exception_kwargs): + self.in_function = False + self.in_assign_targets = False + self.local_ident_stack = set() + self.listener = listener + self.exception_kwargs = exception_kwargs + + def _add_declared(self, name): + if not self.in_function: + self.listener.declared_identifiers.add(name) + else: + self.local_ident_stack.add(name) + + def visit_ClassDef(self, node): + self._add_declared(node.name) + + def visit_Assign(self, node): + + # flip around the visiting of Assign so the expression gets + # evaluated first, in the case of a clause like "x=x+5" (x + # is undeclared) + + self.visit(node.value) + in_a = self.in_assign_targets + self.in_assign_targets = True + for n in node.targets: + self.visit(n) + self.in_assign_targets = in_a + + if compat.py3k: + + # ExceptHandler is in Python 2, but this block only works in + # Python 3 (and is required there) + + def visit_ExceptHandler(self, node): + if node.name is not None: + self._add_declared(node.name) + if node.type is not None: + self.visit(node.type) + for statement in node.body: + self.visit(statement) + + def visit_Lambda(self, node, *args): + self._visit_function(node, True) + + def visit_FunctionDef(self, node): + self._add_declared(node.name) + self._visit_function(node, False) + + def _expand_tuples(self, args): + for arg in args: + if isinstance(arg, _ast.Tuple): + for n in arg.elts: + yield n + else: + yield arg + + def _visit_function(self, node, islambda): + + # push function state onto stack. dont log any more + # identifiers as "declared" until outside of the function, + # but keep logging identifiers as "undeclared". 
track + # argument names in each function header so they aren't + # counted as "undeclared" + + inf = self.in_function + self.in_function = True + + local_ident_stack = self.local_ident_stack + self.local_ident_stack = local_ident_stack.union([ + arg_id(arg) for arg in self._expand_tuples(node.args.args) + ]) + if islambda: + self.visit(node.body) + else: + for n in node.body: + self.visit(n) + self.in_function = inf + self.local_ident_stack = local_ident_stack + + def visit_For(self, node): + + # flip around visit + + self.visit(node.iter) + self.visit(node.target) + for statement in node.body: + self.visit(statement) + for statement in node.orelse: + self.visit(statement) + + def visit_Name(self, node): + if isinstance(node.ctx, _ast.Store): + # this is equivalent to visit_AssName in + # compiler + self._add_declared(node.id) + elif node.id not in reserved and node.id \ + not in self.listener.declared_identifiers and node.id \ + not in self.local_ident_stack: + self.listener.undeclared_identifiers.add(node.id) + + def visit_Import(self, node): + for name in node.names: + if name.asname is not None: + self._add_declared(name.asname) + else: + self._add_declared(name.name.split('.')[0]) + + def visit_ImportFrom(self, node): + for name in node.names: + if name.asname is not None: + self._add_declared(name.asname) + else: + if name.name == '*': + raise exceptions.CompileException( + "'import *' is not supported, since all identifier " + "names must be explicitly declared. Please use the " + "form 'from <modulename> import <name1>, <name2>, " + "...' instead.", **self.exception_kwargs) + self._add_declared(name.name) + + + class FindTuple(_ast_util.NodeVisitor): + + def __init__(self, listener, code_factory, **exception_kwargs): + self.listener = listener + self.exception_kwargs = exception_kwargs + self.code_factory = code_factory + + def visit_Tuple(self, node): + for n in node.elts: + p = self.code_factory(n, **self.exception_kwargs) + self.listener.codeargs.append(p) + self.listener.args.append(ExpressionGenerator(n).value()) + self.listener.declared_identifiers = \ + self.listener.declared_identifiers.union( + p.declared_identifiers) + self.listener.undeclared_identifiers = \ + self.listener.undeclared_identifiers.union( + p.undeclared_identifiers) + + + class ParseFunc(_ast_util.NodeVisitor): + + def __init__(self, listener, **exception_kwargs): + self.listener = listener + self.exception_kwargs = exception_kwargs + + def visit_FunctionDef(self, node): + self.listener.funcname = node.name + + argnames = [arg_id(arg) for arg in node.args.args] + if node.args.vararg: + argnames.append(arg_stringname(node.args.vararg)) + + if compat.py2k: + # kw-only args don't exist in Python 2 + kwargnames = [] + else: + kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs] + if node.args.kwarg: + kwargnames.append(arg_stringname(node.args.kwarg)) + self.listener.argnames = argnames + self.listener.defaults = node.args.defaults # ast + self.listener.kwargnames = kwargnames + if compat.py2k: + self.listener.kwdefaults = [] + else: + self.listener.kwdefaults = node.args.kw_defaults + self.listener.varargs = node.args.vararg + self.listener.kwargs = node.args.kwarg + + + class ExpressionGenerator(object): + + def __init__(self, astnode): + self.generator = _ast_util.SourceGenerator(' ' * 4) + self.generator.visit(astnode) + + def value(self): + return ''.join(self.generator.result) diff --git a/server/www/packages/packages-common/mako/runtime.py b/server/www/packages/packages-common/mako/runtime.py new file mode 100644 index
0000000..8d2f4a9 --- /dev/null +++ b/server/www/packages/packages-common/mako/runtime.py @@ -0,0 +1,909 @@ +# mako/runtime.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides runtime services for templates, including Context, +Namespace, and various helper functions.""" + +from mako import exceptions, util, compat +from mako.compat import compat_builtins +import sys + + +class Context(object): + + """Provides runtime namespace, output buffer, and various + callstacks for templates. + + See :ref:`runtime_toplevel` for detail on the usage of + :class:`.Context`. + + """ + + def __init__(self, buffer, **data): + self._buffer_stack = [buffer] + + self._data = data + + self._kwargs = data.copy() + self._with_template = None + self._outputting_as_unicode = None + self.namespaces = {} + + # "capture" function which proxies to the + # generic "capture" function + self._data['capture'] = compat.partial(capture, self) + + # "caller" stack used by def calls with content + self.caller_stack = self._data['caller'] = CallerStack() + + def _set_with_template(self, t): + self._with_template = t + illegal_names = t.reserved_names.intersection(self._data) + if illegal_names: + raise exceptions.NameConflictError( + "Reserved words passed to render(): %s" % + ", ".join(illegal_names)) + + @property + def lookup(self): + """Return the :class:`.TemplateLookup` associated + with this :class:`.Context`. + + """ + return self._with_template.lookup + + @property + def kwargs(self): + """Return the dictionary of top level keyword arguments associated + with this :class:`.Context`. + + This dictionary only includes the top-level arguments passed to + :meth:`.Template.render`. It does not include names produced within + the template execution such as local variable names or special names + such as ``self``, ``next``, etc. + + The purpose of this dictionary is primarily for the case that + a :class:`.Template` accepts arguments via its ``<%page>`` tag, + which are normally expected to be passed via :meth:`.Template.render`, + except the template is being called in an inheritance context, + using the ``body()`` method. :attr:`.Context.kwargs` can then be + used to propagate these arguments to the inheriting template:: + + ${next.body(**context.kwargs)} + + """ + return self._kwargs.copy() + + def push_caller(self, caller): + """Push a ``caller`` callable onto the callstack for + this :class:`.Context`.""" + + self.caller_stack.append(caller) + + def pop_caller(self): + """Pop a ``caller`` callable onto the callstack for this + :class:`.Context`.""" + + del self.caller_stack[-1] + + def keys(self): + """Return a list of all names established in this :class:`.Context`.""" + + return list(self._data.keys()) + + def __getitem__(self, key): + if key in self._data: + return self._data[key] + else: + return compat_builtins.__dict__[key] + + def _push_writer(self): + """push a capturing buffer onto this Context and return + the new writer function.""" + + buf = util.FastEncodingBuffer() + self._buffer_stack.append(buf) + return buf.write + + def _pop_buffer_and_writer(self): + """pop the most recent capturing buffer from this Context + and return the current writer after the pop. 
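(This buffer stack is what makes output capturing work; a minimal sketch of the public entry point, runtime.capture, defined further down in this file. Illustrative only, not part of the committed file; the def name greet is made up.)

    from mako.template import Template

    t = Template(
        '<%def name="greet(name)">hello ${name}</%def>'
        '${capture(greet, "world").upper()}')
    # capture() pushes a buffer, invokes greet, pops the buffer, and hands
    # the captured text back to the expression as a plain string.
    print(t.render())  # HELLO WORLD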
+ + """ + + buf = self._buffer_stack.pop() + return buf, self._buffer_stack[-1].write + + def _push_buffer(self): + """push a capturing buffer onto this Context.""" + + self._push_writer() + + def _pop_buffer(self): + """pop the most recent capturing buffer from this Context.""" + + return self._buffer_stack.pop() + + def get(self, key, default=None): + """Return a value from this :class:`.Context`.""" + + return self._data.get(key, compat_builtins.__dict__.get(key, default)) + + def write(self, string): + """Write a string to this :class:`.Context` object's + underlying output buffer.""" + + self._buffer_stack[-1].write(string) + + def writer(self): + """Return the current writer function.""" + + return self._buffer_stack[-1].write + + def _copy(self): + c = Context.__new__(Context) + c._buffer_stack = self._buffer_stack + c._data = self._data.copy() + c._kwargs = self._kwargs + c._with_template = self._with_template + c._outputting_as_unicode = self._outputting_as_unicode + c.namespaces = self.namespaces + c.caller_stack = self.caller_stack + return c + + def _locals(self, d): + """Create a new :class:`.Context` with a copy of this + :class:`.Context`'s current state, + updated with the given dictionary. + + The :attr:`.Context.kwargs` collection remains + unaffected. + + + """ + + if not d: + return self + c = self._copy() + c._data.update(d) + return c + + def _clean_inheritance_tokens(self): + """create a new copy of this :class:`.Context`. with + tokens related to inheritance state removed.""" + + c = self._copy() + x = c._data + x.pop('self', None) + x.pop('parent', None) + x.pop('next', None) + return c + + +class CallerStack(list): + + def __init__(self): + self.nextcaller = None + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + return len(self) and self._get_caller() and True or False + + def _get_caller(self): + # this method can be removed once + # codegen MAGIC_NUMBER moves past 7 + return self[-1] + + def __getattr__(self, key): + return getattr(self._get_caller(), key) + + def _push_frame(self): + frame = self.nextcaller or None + self.append(frame) + self.nextcaller = None + return frame + + def _pop_frame(self): + self.nextcaller = self.pop() + + +class Undefined(object): + + """Represents an undefined value in a template. + + All template modules have a constant value + ``UNDEFINED`` present which is an instance of this + object. + + """ + + def __str__(self): + raise NameError("Undefined") + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + return False + +UNDEFINED = Undefined() +STOP_RENDERING = "" + + +class LoopStack(object): + + """a stack for LoopContexts that implements the context manager protocol + to automatically pop off the top of the stack on context exit + """ + + def __init__(self): + self.stack = [] + + def _enter(self, iterable): + self._push(iterable) + return self._top + + def _exit(self): + self._pop() + return self._top + + @property + def _top(self): + if self.stack: + return self.stack[-1] + else: + return self + + def _pop(self): + return self.stack.pop() + + def _push(self, iterable): + new = LoopContext(iterable) + if self.stack: + new.parent = self.stack[-1] + return self.stack.append(new) + + def __getattr__(self, key): + raise exceptions.RuntimeException("No loop context is established") + + def __iter__(self): + return iter(self._top) + + +class LoopContext(object): + + """A magic loop variable. + Automatically accessible in any ``% for`` block. 
+ + See the section :ref:`loop_context` for usage + notes. + + :attr:`parent` -> :class:`.LoopContext` or ``None`` + The parent loop, if one exists. + :attr:`index` -> `int` + The 0-based iteration count. + :attr:`reverse_index` -> `int` + The number of iterations remaining. + :attr:`first` -> `bool` + ``True`` on the first iteration, ``False`` otherwise. + :attr:`last` -> `bool` + ``True`` on the last iteration, ``False`` otherwise. + :attr:`even` -> `bool` + ``True`` when ``index`` is even. + :attr:`odd` -> `bool` + ``True`` when ``index`` is odd. + """ + + def __init__(self, iterable): + self._iterable = iterable + self.index = 0 + self.parent = None + + def __iter__(self): + for i in self._iterable: + yield i + self.index += 1 + + @util.memoized_instancemethod + def __len__(self): + return len(self._iterable) + + @property + def reverse_index(self): + return len(self) - self.index - 1 + + @property + def first(self): + return self.index == 0 + + @property + def last(self): + return self.index == len(self) - 1 + + @property + def even(self): + return not self.odd + + @property + def odd(self): + return bool(self.index % 2) + + def cycle(self, *values): + """Cycle through values as the loop progresses. + """ + if not values: + raise ValueError("You must provide values to cycle through") + return values[self.index % len(values)] + + +class _NSAttr(object): + + def __init__(self, parent): + self.__parent = parent + + def __getattr__(self, key): + ns = self.__parent + while ns: + if hasattr(ns.module, key): + return getattr(ns.module, key) + else: + ns = ns.inherits + raise AttributeError(key) + + +class Namespace(object): + + """Provides access to collections of rendering methods, which + can be local, from other templates, or from imported modules. + + To access a particular rendering method referenced by a + :class:`.Namespace`, use plain attribute access: + + .. sourcecode:: mako + + ${some_namespace.foo(x, y, z)} + + :class:`.Namespace` also contains several built-in attributes + described here. + + """ + + def __init__(self, name, context, + callables=None, inherits=None, + populate_self=True, calling_uri=None): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = dict([(c.__name__, c) for c in callables]) + + callables = () + + module = None + """The Python module referenced by this :class:`.Namespace`. + + If the namespace references a :class:`.Template`, then + this module is the equivalent of ``template.module``, + i.e. the generated module for the template. + + """ + + template = None + """The :class:`.Template` object referenced by this + :class:`.Namespace`, if any. + + """ + + context = None + """The :class:`.Context` object for this :class:`.Namespace`. + + Namespaces are often created with copies of contexts that + contain slightly different data, particularly in inheritance + scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one + can traverse an entire chain of templates that inherit from + one-another. + + """ + + filename = None + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + + If this is a pure module-based + :class:`.Namespace`, this evaluates to ``module.__file__``. If a + template-based namespace, it evaluates to the original + template file location. + + """ + + uri = None + """The URI for this :class:`.Namespace`'s template. + + I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. 
+ + This is the equivalent of :attr:`.Template.uri`. + + """ + + _templateuri = None + + @util.memoized_property + def attr(self): + """Access module level attributes by name. + + This accessor allows templates to supply "scalar" + attributes which are particularly handy in inheritance + relationships. + + .. seealso:: + + :ref:`inheritance_attr` + + :ref:`namespace_attr_for_includes` + + """ + return _NSAttr(self) + + def get_namespace(self, uri): + """Return a :class:`.Namespace` corresponding to the given ``uri``. + + If the given ``uri`` is a relative URI (i.e. it does not + contain a leading slash ``/``), the ``uri`` is adjusted to + be relative to the ``uri`` of the namespace itself. This + method is therefore mostly useful off of the built-in + ``local`` namespace, described in :ref:`namespace_local`. + + In + most cases, a template wouldn't need this function, and + should instead use the ``<%namespace>`` tag to load + namespaces. However, since all ``<%namespace>`` tags are + evaluated before the body of a template ever runs, + this method can be used to locate namespaces using + expressions that were generated within the body code of + the template, or to conditionally use a particular + namespace. + + """ + key = (self, uri) + if key in self.context.namespaces: + return self.context.namespaces[key] + else: + ns = TemplateNamespace(uri, self.context._copy(), + templateuri=uri, + calling_uri=self._templateuri) + self.context.namespaces[key] = ns + return ns + + def get_template(self, uri): + """Return a :class:`.Template` from the given ``uri``. + + The ``uri`` resolution is relative to the ``uri`` of this + :class:`.Namespace` object's :class:`.Template`. + + """ + return _lookup_template(self.context, uri, self._templateuri) + + def get_cached(self, key, **kwargs): + """Return a value from the :class:`.Cache` referenced by this + :class:`.Namespace` object's :class:`.Template`. + + The advantage to this method versus direct access to the + :class:`.Cache` is that the configuration parameters + declared in ``<%page>`` take effect here, thereby calling + up the same configured backend as that configured + by ``<%page>``. + + """ + + return self.cache.get(key, **kwargs) + + @property + def cache(self): + """Return the :class:`.Cache` object referenced + by this :class:`.Namespace` object's + :class:`.Template`. 
+ + """ + return self.template.cache + + def include_file(self, uri, **kwargs): + """Include a file at the given ``uri``.""" + + _include_file(self.context, uri, self._templateuri, **kwargs) + + def _populate(self, d, l): + for ident in l: + if ident == '*': + for (k, v) in self._get_star(): + d[k] = v + else: + d[ident] = getattr(self, ident) + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % + (self.name, key)) + setattr(self, key, val) + return val + + +class TemplateNamespace(Namespace): + + """A :class:`.Namespace` specific to a :class:`.Template` instance.""" + + def __init__(self, name, context, template=None, templateuri=None, + callables=None, inherits=None, + populate_self=True, calling_uri=None): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = dict([(c.__name__, c) for c in callables]) + + if templateuri is not None: + self.template = _lookup_template(context, templateuri, + calling_uri) + self._templateuri = self.template.module._template_uri + elif template is not None: + self.template = template + self._templateuri = template.module._template_uri + else: + raise TypeError("'template' argument is required.") + + if populate_self: + lclcallable, lclcontext = \ + _populate_self_namespace(context, self.template, + self_ns=self) + + @property + def module(self): + """The Python module referenced by this :class:`.Namespace`. + + If the namespace references a :class:`.Template`, then + this module is the equivalent of ``template.module``, + i.e. the generated module for the template. + + """ + return self.template.module + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.template.filename + + @property + def uri(self): + """The URI for this :class:`.Namespace`'s template. + + I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. + + This is the equivalent of :attr:`.Template.uri`. 
+ + """ + return self.template.uri + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def get(key): + callable_ = self.template._get_def_callable(key) + return compat.partial(callable_, self.context) + for k in self.template.module._exports: + yield (k, get(k)) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.template.has_def(key): + callable_ = self.template._get_def_callable(key) + val = compat.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % + (self.name, key)) + setattr(self, key, val) + return val + + +class ModuleNamespace(Namespace): + + """A :class:`.Namespace` specific to a Python module instance.""" + + def __init__(self, name, context, module, + callables=None, inherits=None, + populate_self=True, calling_uri=None): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = dict([(c.__name__, c) for c in callables]) + + mod = __import__(module) + for token in module.split('.')[1:]: + mod = getattr(mod, token) + self.module = mod + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.module.__file__ + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + for key in dir(self.module): + if key[0] != '_': + callable_ = getattr(self.module, key) + if compat.callable(callable_): + yield key, compat.partial(callable_, self.context) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif hasattr(self.module, key): + callable_ = getattr(self.module, key) + val = compat.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % + (self.name, key)) + setattr(self, key, val) + return val + + +def supports_caller(func): + """Apply a caller_stack compatibility decorator to a plain + Python function. + + See the example in :ref:`namespaces_python_modules`. + + """ + + def wrap_stackframe(context, *args, **kwargs): + context.caller_stack._push_frame() + try: + return func(context, *args, **kwargs) + finally: + context.caller_stack._pop_frame() + return wrap_stackframe + + +def capture(context, callable_, *args, **kwargs): + """Execute the given template def, capturing the output into + a buffer. + + See the example in :ref:`namespaces_python_modules`. + + """ + + if not compat.callable(callable_): + raise exceptions.RuntimeException( + "capture() function expects a callable as " + "its argument (i.e. 
capture(func, *args, **kwargs))" + ) + context._push_buffer() + try: + callable_(*args, **kwargs) + finally: + buf = context._pop_buffer() + return buf.getvalue() + + +def _decorate_toplevel(fn): + def decorate_render(render_fn): + def go(context, *args, **kw): + def y(*args, **kw): + return render_fn(context, *args, **kw) + try: + y.__name__ = render_fn.__name__[7:] + except TypeError: + # < Python 2.4 + pass + return fn(y)(context, *args, **kw) + return go + return decorate_render + + +def _decorate_inline(context, fn): + def decorate_render(render_fn): + dec = fn(render_fn) + + def go(*args, **kw): + return dec(context, *args, **kw) + return go + return decorate_render + + +def _include_file(context, uri, calling_uri, **kwargs): + """locate the template from the given uri and include it in + the current output.""" + + template = _lookup_template(context, uri, calling_uri) + (callable_, ctx) = _populate_self_namespace( + context._clean_inheritance_tokens(), + template) + callable_(ctx, **_kwargs_for_include(callable_, context._data, **kwargs)) + + +def _inherit_from(context, uri, calling_uri): + """called by the _inherit method in template modules to set + up the inheritance chain at the start of a template's + execution.""" + + if uri is None: + return None + template = _lookup_template(context, uri, calling_uri) + self_ns = context['self'] + ih = self_ns + while ih.inherits is not None: + ih = ih.inherits + lclcontext = context._locals({'next': ih}) + ih.inherits = TemplateNamespace("self:%s" % template.uri, + lclcontext, + template=template, + populate_self=False) + context._data['parent'] = lclcontext._data['local'] = ih.inherits + callable_ = getattr(template.module, '_mako_inherit', None) + if callable_ is not None: + ret = callable_(template, lclcontext) + if ret: + return ret + + gen_ns = getattr(template.module, '_mako_generate_namespaces', None) + if gen_ns is not None: + gen_ns(context) + return (template.callable_, lclcontext) + + +def _lookup_template(context, uri, relativeto): + lookup = context._with_template.lookup + if lookup is None: + raise exceptions.TemplateLookupException( + "Template '%s' has no TemplateLookup associated" % + context._with_template.uri) + uri = lookup.adjust_uri(uri, relativeto) + try: + return lookup.get_template(uri) + except exceptions.TopLevelLookupException: + raise exceptions.TemplateLookupException(str(compat.exception_as())) + + +def _populate_self_namespace(context, template, self_ns=None): + if self_ns is None: + self_ns = TemplateNamespace('self:%s' % template.uri, + context, template=template, + populate_self=False) + context._data['self'] = context._data['local'] = self_ns + if hasattr(template.module, '_mako_inherit'): + ret = template.module._mako_inherit(template, context) + if ret: + return ret + return (template.callable_, context) + + +def _render(template, callable_, args, data, as_unicode=False): + """create a Context and return the string + output of the given template and template callable.""" + + if as_unicode: + buf = util.FastEncodingBuffer(as_unicode=True) + elif template.bytestring_passthrough: + buf = compat.StringIO() + else: + buf = util.FastEncodingBuffer( + as_unicode=as_unicode, + encoding=template.output_encoding, + errors=template.encoding_errors) + context = Context(buf, **data) + context._outputting_as_unicode = as_unicode + context._set_with_template(template) + + _render_context(template, callable_, context, *args, + **_kwargs_for_callable(callable_, data)) + return context._pop_buffer().getvalue() + + +def 
_kwargs_for_callable(callable_, data): + argspec = compat.inspect_func_args(callable_) + # for normal pages, **pageargs is usually present + if argspec[2]: + return data + + # for rendering defs from the top level, figure out the args + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + kwargs = {} + for arg in namedargs: + if arg != 'context' and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _kwargs_for_include(callable_, data, **kwargs): + argspec = compat.inspect_func_args(callable_) + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + for arg in namedargs: + if arg != 'context' and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _render_context(tmpl, callable_, context, *args, **kwargs): + import mako.template as template + # create polymorphic 'self' namespace for this + # template with possibly updated context + if not isinstance(tmpl, template.DefTemplate): + # if main render method, call from the base of the inheritance stack + (inherit, lclcontext) = _populate_self_namespace(context, tmpl) + _exec_template(inherit, lclcontext, args=args, kwargs=kwargs) + else: + # otherwise, call the actual rendering method specified + (inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent) + _exec_template(callable_, context, args=args, kwargs=kwargs) + + +def _exec_template(callable_, context, args=None, kwargs=None): + """execute a rendering callable given the callable, a + Context, and optional explicit arguments + + the contextual Template will be located if it exists, and + the error handling options specified on that Template will + be interpreted here. + """ + template = context._with_template + if template is not None and \ + (template.format_exceptions or template.error_handler): + try: + callable_(context, *args, **kwargs) + except Exception: + _render_error(template, context, compat.exception_as()) + except: + e = sys.exc_info()[0] + _render_error(template, context, e) + else: + callable_(context, *args, **kwargs) + + +def _render_error(template, context, error): + if template.error_handler: + result = template.error_handler(context, error) + if not result: + compat.reraise(*sys.exc_info()) + else: + error_template = exceptions.html_error_template() + if context._outputting_as_unicode: + context._buffer_stack[:] = [ + util.FastEncodingBuffer(as_unicode=True)] + else: + context._buffer_stack[:] = [util.FastEncodingBuffer( + error_template.output_encoding, + error_template.encoding_errors)] + + context._set_with_template(error_template) + error_template.render_context(context, error=error) diff --git a/server/www/packages/packages-common/mako/template.py b/server/www/packages/packages-common/mako/template.py new file mode 100644 index 0000000..facb7e5 --- /dev/null +++ b/server/www/packages/packages-common/mako/template.py @@ -0,0 +1,718 @@ +# mako/template.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provides the Template class, a facade for parsing, generating and executing +template strings, as well as template runtime operations.""" + +from mako.lexer import Lexer +from mako import runtime, util, exceptions, codegen, cache, compat +import os +import re +import shutil +import stat +import sys +import tempfile +import types +import weakref + + +class Template(object): + + """Represents a compiled template. 
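(The shortest round trip through this class; illustrative only, not part of the committed file.)

    from mako.template import Template

    t = Template("hello, ${name}!")   # lexing and code generation happen here
    assert t.render(name="world") == "hello, world!"
    print(t.code)                     # source of the generated Python module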
+ + :class:`.Template` includes a reference to the original + template source (via the :attr:`.source` attribute) + as well as the source code of the + generated Python module (i.e. the :attr:`.code` attribute), + as well as a reference to an actual Python module. + + :class:`.Template` is constructed using either a literal string + representing the template text, or a filename representing a filesystem + path to a source file. + + :param text: textual template source. This argument is mutually + exclusive versus the ``filename`` parameter. + + :param filename: filename of the source template. This argument is + mutually exclusive versus the ``text`` parameter. + + :param buffer_filters: string list of filters to be applied + to the output of ``%def``\ s which are buffered, cached, or otherwise + filtered, after all filters + defined with the ``%def`` itself have been applied. Allows the + creation of default expression filters that let the output + of return-valued ``%def``\ s "opt out" of that filtering via + passing special attributes or objects. + + :param bytestring_passthrough: When ``True``, and ``output_encoding`` is + set to ``None``, and :meth:`.Template.render` is used to render, + the `StringIO` or `cStringIO` buffer will be used instead of the + default "fast" buffer. This allows raw bytestrings in the + output stream, such as in expressions, to pass straight + through to the buffer. This flag is forced + to ``True`` if ``disable_unicode`` is also configured. + + .. versionadded:: 0.4 + Added to provide the same behavior as that of the previous series. + + :param cache_args: Dictionary of cache configuration arguments that + will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`. + + :param cache_dir: + + .. deprecated:: 0.6 + Use the ``'dir'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. + + :param cache_enabled: Boolean flag which enables caching of this + template. See :ref:`caching_toplevel`. + + :param cache_impl: String name of a :class:`.CacheImpl` caching + implementation to use. Defaults to ``'beaker'``. + + :param cache_type: + + .. deprecated:: 0.6 + Use the ``'type'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. + + :param cache_url: + + .. deprecated:: 0.6 + Use the ``'url'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. + + :param default_filters: List of string filter names that will + be applied to all expressions. See :ref:`filtering_default_filters`. + + :param disable_unicode: Disables all awareness of Python Unicode + objects. See :ref:`unicode_disabled`. + + :param enable_loop: When ``True``, enable the ``loop`` context variable. + This can be set to ``False`` to support templates that may + be making usage of the name "``loop``". Individual templates can + re-enable the "loop" context by placing the directive + ``enable_loop="True"`` inside the ``<%page>`` tag -- see + :ref:`migrating_loop`. + + :param encoding_errors: Error parameter passed to ``encode()`` when + string encoding is performed. See :ref:`usage_unicode`. + + :param error_handler: Python callable which is called whenever + compile or runtime exceptions occur. The callable is passed + the current context as well as the exception. If the + callable returns ``True``, the exception is considered to + be handled, else it is re-raised after the function + completes. Is used to provide custom error-rendering + functions. 
+ + :param format_exceptions: if ``True``, exceptions which occur during + the render phase of this template will be caught and + formatted into an HTML error page, which then becomes the + rendered result of the :meth:`.render` call. Otherwise, + runtime exceptions are propagated outwards. + + :param imports: String list of Python statements, typically individual + "import" lines, which will be placed into the module level + preamble of all generated Python modules. See the example + in :ref:`filtering_default_filters`. + + :param future_imports: String list of names to import from `__future__`. + These will be concatenated into a comma-separated string and inserted + into the beginning of the template, e.g. ``future_imports=['FOO', + 'BAR']`` results in ``from __future__ import FOO, BAR``. If you're + interested in using features like the new division operator, you must + use future_imports to convey that to the renderer, as otherwise the + import will not appear as the first executed statement in the generated + code and will therefore not have the desired effect. + + :param input_encoding: Encoding of the template's source code. Can + be used in lieu of the coding comment. See + :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for + details on source encoding. + + :param lookup: a :class:`.TemplateLookup` instance that will be used + for all file lookups via the ``<%namespace>``, + ``<%include>``, and ``<%inherit>`` tags. See + :ref:`usage_templatelookup`. + + :param module_directory: Filesystem location where generated + Python module files will be placed. + + :param module_filename: Overrides the filename of the generated + Python module file. For advanced usage only. + + :param module_writer: A callable which overrides how the Python + module is written entirely. The callable is passed the + encoded source content of the module and the destination + path to be written to. The default behavior of module writing + uses a tempfile in conjunction with a file move in order + to make the operation atomic. So a user-defined module + writing function that mimics the default behavior would be: + + .. sourcecode:: python + + import tempfile + import os + import shutil + + def module_writer(source, outputpath): + (dest, name) = \\ + tempfile.mkstemp( + dir=os.path.dirname(outputpath) + ) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + from mako.template import Template + mytemplate = Template( + filename="admin_index.mako", + module_directory="/path/to/modules", + module_writer=module_writer + ) + + The function is provided for unusual configurations where + certain platform-specific permissions or other special + steps are needed. + + :param output_encoding: The encoding to use when :meth:`.render` + is called. + See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`. + + :param preprocessor: Python callable which will be passed + the full template source before it is parsed. The return + result of the callable will be used as the template source + code. + + :param lexer_cls: A :class:`.Lexer` class used to parse + the template. The :class:`.Lexer` class is used by + default. + + .. versionadded:: 0.7.4 + + :param strict_undefined: Replaces the automatic usage of + ``UNDEFINED`` for any undeclared variables not located in + the :class:`.Context` with an immediate raise of + ``NameError``. The advantage is immediate reporting of + missing variables which include the name. + + ..
versionadded:: 0.3.6 + + :param uri: string URI or other identifier for this template. + If not provided, the ``uri`` is generated from the filesystem + path, or from the in-memory identity of a non-file-based + template. The primary usage of the ``uri`` is to provide a key + within :class:`.TemplateLookup`, as well as to generate the + file path of the generated Python module file, if + ``module_directory`` is specified. + + """ + + lexer_cls = Lexer + + def __init__(self, + text=None, + filename=None, + uri=None, + format_exceptions=False, + error_handler=None, + lookup=None, + output_encoding=None, + encoding_errors='strict', + module_directory=None, + cache_args=None, + cache_impl='beaker', + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + module_filename=None, + input_encoding=None, + disable_unicode=False, + module_writer=None, + bytestring_passthrough=False, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + preprocessor=None, + lexer_cls=None): + if uri: + self.module_id = re.sub(r'\W', "_", uri) + self.uri = uri + elif filename: + self.module_id = re.sub(r'\W', "_", filename) + drive, path = os.path.splitdrive(filename) + path = os.path.normpath(path).replace(os.path.sep, "/") + self.uri = path + else: + self.module_id = "memory:" + hex(id(self)) + self.uri = self.module_id + + u_norm = self.uri + if u_norm.startswith("/"): + u_norm = u_norm[1:] + u_norm = os.path.normpath(u_norm) + if u_norm.startswith(".."): + raise exceptions.TemplateLookupException( + "Template uri \"%s\" is invalid - " + "it cannot be relative outside " + "of the root path." % self.uri) + + self.input_encoding = input_encoding + self.output_encoding = output_encoding + self.encoding_errors = encoding_errors + self.disable_unicode = disable_unicode + self.bytestring_passthrough = bytestring_passthrough or disable_unicode + self.enable_loop = enable_loop + self.strict_undefined = strict_undefined + self.module_writer = module_writer + + if compat.py3k and disable_unicode: + raise exceptions.UnsupportedError( + "Mako for Python 3 does not " + "support disabling Unicode") + elif output_encoding and disable_unicode: + raise exceptions.UnsupportedError( + "output_encoding must be set to " + "None when disable_unicode is used.") + if default_filters is None: + if compat.py3k or self.disable_unicode: + self.default_filters = ['str'] + else: + self.default_filters = ['unicode'] + else: + self.default_filters = default_filters + self.buffer_filters = buffer_filters + + self.imports = imports + self.future_imports = future_imports + self.preprocessor = preprocessor + + if lexer_cls is not None: + self.lexer_cls = lexer_cls + + # if plain text, compile code in memory only + if text is not None: + (code, module) = _compile_text(self, text, filename) + self._code = code + self._source = text + ModuleInfo(module, None, self, filename, code, text) + elif filename is not None: + # if template filename and a module directory, load + # a filesystem-based module file, generating if needed + if module_filename is not None: + path = module_filename + elif module_directory is not None: + path = os.path.abspath( + os.path.join( + os.path.normpath(module_directory), + u_norm + ".py" + ) + ) + else: + path = None + module = self._compile_from_file(path, filename) + else: + raise exceptions.RuntimeException( + "Template requires text or filename") + + self.module = module + self.filename = filename + self.callable_ = 
self.module.render_body + self.format_exceptions = format_exceptions + self.error_handler = error_handler + self.lookup = lookup + + self.module_directory = module_directory + + self._setup_cache_args( + cache_impl, cache_enabled, cache_args, + cache_type, cache_dir, cache_url + ) + + @util.memoized_property + def reserved_names(self): + if self.enable_loop: + return codegen.RESERVED_NAMES + else: + return codegen.RESERVED_NAMES.difference(['loop']) + + def _setup_cache_args(self, + cache_impl, cache_enabled, cache_args, + cache_type, cache_dir, cache_url): + self.cache_impl = cache_impl + self.cache_enabled = cache_enabled + if cache_args: + self.cache_args = cache_args + else: + self.cache_args = {} + + # transfer deprecated cache_* args + if cache_type: + self.cache_args['type'] = cache_type + if cache_dir: + self.cache_args['dir'] = cache_dir + if cache_url: + self.cache_args['url'] = cache_url + + def _compile_from_file(self, path, filename): + if path is not None: + util.verify_directory(os.path.dirname(path)) + filemtime = os.stat(filename)[stat.ST_MTIME] + if not os.path.exists(path) or \ + os.stat(path)[stat.ST_MTIME] < filemtime: + data = util.read_file(filename) + _compile_module_file( + self, + data, + filename, + path, + self.module_writer) + module = compat.load_module(self.module_id, path) + del sys.modules[self.module_id] + if module._magic_number != codegen.MAGIC_NUMBER: + data = util.read_file(filename) + _compile_module_file( + self, + data, + filename, + path, + self.module_writer) + module = compat.load_module(self.module_id, path) + del sys.modules[self.module_id] + ModuleInfo(module, path, self, filename, None, None) + else: + # template filename and no module directory, compile code + # in memory + data = util.read_file(filename) + code, module = _compile_text( + self, + data, + filename) + self._source = None + self._code = code + ModuleInfo(module, None, self, filename, code, None) + return module + + @property + def source(self): + """Return the template source code for this :class:`.Template`.""" + + return _get_module_info_from_callable(self.callable_).source + + @property + def code(self): + """Return the module source code for this :class:`.Template`.""" + + return _get_module_info_from_callable(self.callable_).code + + @util.memoized_property + def cache(self): + return cache.Cache(self) + + @property + def cache_dir(self): + return self.cache_args['dir'] + + @property + def cache_url(self): + return self.cache_args['url'] + + @property + def cache_type(self): + return self.cache_args['type'] + + def render(self, *args, **data): + """Render the output of this template as a string. + + If the template specifies an output encoding, the string + will be encoded accordingly, else the output is raw (raw + output uses `cStringIO` and can't handle multibyte + characters). A :class:`.Context` object is created corresponding + to the given data. Arguments that are explicitly declared + by this template's internal rendering method are also + pulled from the given ``*args``, ``**data`` members. + + """ + return runtime._render(self, self.callable_, args, data) + + def render_unicode(self, *args, **data): + """Render the output of this template as a unicode object.""" + + return runtime._render(self, + self.callable_, + args, + data, + as_unicode=True) + + def render_context(self, context, *args, **kwargs): + """Render this :class:`.Template` with the given context. + + The data is written to the context's buffer. 
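(A minimal sketch of rendering through an explicit Context and buffer instead of going through render(); illustrative only, not part of the committed file.)

    from io import StringIO
    from mako.runtime import Context
    from mako.template import Template

    buf = StringIO()
    ctx = Context(buf, name="world")   # output buffer plus template data
    Template("hello, ${name}!").render_context(ctx)
    assert buf.getvalue() == "hello, world!"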
+ + """ + if getattr(context, '_with_template', None) is None: + context._set_with_template(self) + runtime._render_context(self, + self.callable_, + context, + *args, + **kwargs) + + def has_def(self, name): + return hasattr(self.module, "render_%s" % name) + + def get_def(self, name): + """Return a def of this template as a :class:`.DefTemplate`.""" + + return DefTemplate(self, getattr(self.module, "render_%s" % name)) + + def _get_def_callable(self, name): + return getattr(self.module, "render_%s" % name) + + @property + def last_modified(self): + return self.module._modified_time + + +class ModuleTemplate(Template): + + """A Template which is constructed given an existing Python module. + + e.g.:: + + t = Template("this is a template") + f = file("mymodule.py", "w") + f.write(t.code) + f.close() + + import mymodule + + t = ModuleTemplate(mymodule) + print t.render() + + """ + + def __init__(self, module, + module_filename=None, + template=None, + template_filename=None, + module_source=None, + template_source=None, + output_encoding=None, + encoding_errors='strict', + disable_unicode=False, + bytestring_passthrough=False, + format_exceptions=False, + error_handler=None, + lookup=None, + cache_args=None, + cache_impl='beaker', + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + ): + self.module_id = re.sub(r'\W', "_", module._template_uri) + self.uri = module._template_uri + self.input_encoding = module._source_encoding + self.output_encoding = output_encoding + self.encoding_errors = encoding_errors + self.disable_unicode = disable_unicode + self.bytestring_passthrough = bytestring_passthrough or disable_unicode + self.enable_loop = module._enable_loop + + if compat.py3k and disable_unicode: + raise exceptions.UnsupportedError( + "Mako for Python 3 does not " + "support disabling Unicode") + elif output_encoding and disable_unicode: + raise exceptions.UnsupportedError( + "output_encoding must be set to " + "None when disable_unicode is used.") + + self.module = module + self.filename = template_filename + ModuleInfo(module, + module_filename, + self, + template_filename, + module_source, + template_source) + + self.callable_ = self.module.render_body + self.format_exceptions = format_exceptions + self.error_handler = error_handler + self.lookup = lookup + self._setup_cache_args( + cache_impl, cache_enabled, cache_args, + cache_type, cache_dir, cache_url + ) + + +class DefTemplate(Template): + + """A :class:`.Template` which represents a callable def in a parent + template.""" + + def __init__(self, parent, callable_): + self.parent = parent + self.callable_ = callable_ + self.output_encoding = parent.output_encoding + self.module = parent.module + self.encoding_errors = parent.encoding_errors + self.format_exceptions = parent.format_exceptions + self.error_handler = parent.error_handler + self.enable_loop = parent.enable_loop + self.lookup = parent.lookup + self.bytestring_passthrough = parent.bytestring_passthrough + + def get_def(self, name): + return self.parent.get_def(name) + + +class ModuleInfo(object): + + """Stores information about a module currently loaded into + memory, provides reverse lookups of template source, module + source code based on a module's identifier. 
+ + """ + _modules = weakref.WeakValueDictionary() + + def __init__(self, + module, + module_filename, + template, + template_filename, + module_source, + template_source): + self.module = module + self.module_filename = module_filename + self.template_filename = template_filename + self.module_source = module_source + self.template_source = template_source + self._modules[module.__name__] = template._mmarker = self + if module_filename: + self._modules[module_filename] = self + + @classmethod + def get_module_source_metadata(cls, module_source, full_line_map=False): + source_map = re.search( + r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", + module_source, re.S).group(1) + source_map = compat.json.loads(source_map) + source_map['line_map'] = dict( + (int(k), int(v)) + for k, v in source_map['line_map'].items()) + if full_line_map: + f_line_map = source_map['full_line_map'] = [] + line_map = source_map['line_map'] + + curr_templ_line = 1 + for mod_line in range(1, max(line_map)): + if mod_line in line_map: + curr_templ_line = line_map[mod_line] + f_line_map.append(curr_templ_line) + return source_map + + @property + def code(self): + if self.module_source is not None: + return self.module_source + else: + return util.read_python_file(self.module_filename) + + @property + def source(self): + if self.template_source is not None: + if self.module._source_encoding and \ + not isinstance(self.template_source, compat.text_type): + return self.template_source.decode( + self.module._source_encoding) + else: + return self.template_source + else: + data = util.read_file(self.template_filename) + if self.module._source_encoding: + return data.decode(self.module._source_encoding) + else: + return data + + +def _compile(template, text, filename, generate_magic_comment): + lexer = template.lexer_cls(text, + filename, + disable_unicode=template.disable_unicode, + input_encoding=template.input_encoding, + preprocessor=template.preprocessor) + node = lexer.parse() + source = codegen.compile(node, + template.uri, + filename, + default_filters=template.default_filters, + buffer_filters=template.buffer_filters, + imports=template.imports, + future_imports=template.future_imports, + source_encoding=lexer.encoding, + generate_magic_comment=generate_magic_comment, + disable_unicode=template.disable_unicode, + strict_undefined=template.strict_undefined, + enable_loop=template.enable_loop, + reserved_names=template.reserved_names) + return source, lexer + + +def _compile_text(template, text, filename): + identifier = template.module_id + source, lexer = _compile(template, text, filename, + generate_magic_comment=template.disable_unicode) + + cid = identifier + if not compat.py3k and isinstance(cid, compat.text_type): + cid = cid.encode() + module = types.ModuleType(cid) + code = compile(source, cid, 'exec') + + # this exec() works for 2.4->3.3. + exec(code, module.__dict__, module.__dict__) + return (source, module) + + +def _compile_module_file(template, text, filename, outputpath, module_writer): + source, lexer = _compile(template, text, filename, + generate_magic_comment=True) + + if isinstance(source, compat.text_type): + source = source.encode(lexer.encoding or 'ascii') + + if module_writer: + module_writer(source, outputpath) + else: + # make tempfiles in the same location as the ultimate + # location. this ensures they're on the same filesystem, + # avoiding synchronization issues. 
+ (dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath)) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + +def _get_module_info_from_callable(callable_): + if compat.py3k: + return _get_module_info(callable_.__globals__['__name__']) + else: + return _get_module_info(callable_.func_globals['__name__']) + + +def _get_module_info(filename): + return ModuleInfo._modules[filename] diff --git a/server/www/packages/packages-common/mako/util.py b/server/www/packages/packages-common/mako/util.py new file mode 100644 index 0000000..c7dad65 --- /dev/null +++ b/server/www/packages/packages-common/mako/util.py @@ -0,0 +1,382 @@ +# mako/util.py +# Copyright (C) 2006-2015 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import re +import collections +import codecs +import os +from mako import compat +import operator + + +def update_wrapper(decorated, fn): + decorated.__wrapped__ = fn + decorated.__name__ = fn.__name__ + return decorated + + +class PluginLoader(object): + + def __init__(self, group): + self.group = group + self.impls = {} + + def load(self, name): + if name in self.impls: + return self.impls[name]() + else: + import pkg_resources + for impl in pkg_resources.iter_entry_points( + self.group, + name): + self.impls[name] = impl.load + return impl.load() + else: + from mako import exceptions + raise exceptions.RuntimeException( + "Can't load plugin %s %s" % + (self.group, name)) + + def register(self, name, modulepath, objname): + def load(): + mod = __import__(modulepath) + for token in modulepath.split(".")[1:]: + mod = getattr(mod, token) + return getattr(mod, objname) + self.impls[name] = load + + +def verify_directory(dir): + """create and/or verify a filesystem directory.""" + + tries = 0 + + while not os.path.exists(dir): + try: + tries += 1 + os.makedirs(dir, compat.octal("0775")) + except: + if tries > 5: + raise + + +def to_list(x, default=None): + if x is None: + return default + if not isinstance(x, (list, tuple)): + return [x] + else: + return x + + +class memoized_property(object): + + """A read-only @property that is only evaluated once.""" + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + obj.__dict__[self.__name__] = result = self.fget(obj) + return result + + +class memoized_instancemethod(object): + + """Decorate a method memoize its return value. + + Best applied to no-arg methods: memoization is not sensitive to + argument values, and will always return the same value even when + called with different arguments. + + """ + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + + def oneshot(*args, **kw): + result = self.fget(obj, *args, **kw) + memo = lambda *a, **kw: result + memo.__name__ = self.__name__ + memo.__doc__ = self.__doc__ + obj.__dict__[self.__name__] = memo + return result + oneshot.__name__ = self.__name__ + oneshot.__doc__ = self.__doc__ + return oneshot + + +class SetLikeDict(dict): + + """a dictionary that has some setlike methods on it""" + + def union(self, other): + """produce a 'union' of this dict and another (at the key level). 
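A small sketch of how the memoized_property descriptor above behaves (the Example class is illustrative):

    from mako.util import memoized_property

    class Example(object):
        @memoized_property
        def answer(self):
            # runs once; the result then replaces the descriptor in the
            # instance __dict__, so later reads never call this method
            print("computing...")
            return 42

    e = Example()
    e.answer  # prints "computing..." and returns 42
    e.answer  # returns 42 straight from e.__dict__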
+ + values in the second dict take precedence over that of the first""" + x = SetLikeDict(**self) + x.update(other) + return x + + +class FastEncodingBuffer(object): + + """a very rudimentary buffer that is faster than StringIO, + but doesn't crash on unicode data like cStringIO.""" + + def __init__(self, encoding=None, errors='strict', as_unicode=False): + self.data = collections.deque() + self.encoding = encoding + if as_unicode: + self.delim = compat.u('') + else: + self.delim = '' + self.as_unicode = as_unicode + self.errors = errors + self.write = self.data.append + + def truncate(self): + self.data = collections.deque() + self.write = self.data.append + + def getvalue(self): + if self.encoding: + return self.delim.join(self.data).encode(self.encoding, + self.errors) + else: + return self.delim.join(self.data) + + +class LRUCache(dict): + + """A dictionary-like object that stores a limited number of items, + discarding lesser used items periodically. + + this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based + paradigm so that synchronization is not really needed. the size management + is inexact. + """ + + class _Item(object): + + def __init__(self, key, value): + self.key = key + self.value = value + self.timestamp = compat.time_func() + + def __repr__(self): + return repr(self.value) + + def __init__(self, capacity, threshold=.5): + self.capacity = capacity + self.threshold = threshold + + def __getitem__(self, key): + item = dict.__getitem__(self, key) + item.timestamp = compat.time_func() + return item.value + + def values(self): + return [i.value for i in dict.values(self)] + + def setdefault(self, key, value): + if key in self: + return self[key] + else: + self[key] = value + return value + + def __setitem__(self, key, value): + item = dict.get(self, key) + if item is None: + item = self._Item(key, value) + dict.__setitem__(self, key, item) + else: + item.value = value + self._manage_size() + + def _manage_size(self): + while len(self) > self.capacity + self.capacity * self.threshold: + bytime = sorted(dict.values(self), + key=operator.attrgetter('timestamp'), reverse=True) + for item in bytime[self.capacity:]: + try: + del self[item.key] + except KeyError: + # if we couldn't find a key, most likely some other thread + # broke in on us. loop around and try again + break + +# Regexp to match python magic encoding line +_PYTHON_MAGIC_COMMENT_re = re.compile( + r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)', + re.VERBOSE) + + +def parse_encoding(fp): + """Deduce the encoding of a Python source file (binary mode) from magic + comment. + + It does this in the same way as the `Python interpreter`__ + + .. __: http://docs.python.org/ref/encodings.html + + The ``fp`` argument should be a seekable file object in binary mode. + """ + pos = fp.tell() + fp.seek(0) + try: + line1 = fp.readline() + has_bom = line1.startswith(codecs.BOM_UTF8) + if has_bom: + line1 = line1[len(codecs.BOM_UTF8):] + + m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore')) + if not m: + try: + import parser + parser.suite(line1.decode('ascii', 'ignore')) + except (ImportError, SyntaxError): + # Either it's a real syntax error, in which case the source + # is not valid python source, or line2 is a continuation of + # line1, in which case we don't want to scan line2 for a magic + # comment. 
+ pass + else: + line2 = fp.readline() + m = _PYTHON_MAGIC_COMMENT_re.match( + line2.decode('ascii', 'ignore')) + + if has_bom: + if m: + raise SyntaxError( + "python refuses to compile code with both a UTF8" + " byte-order-mark and a magic encoding comment") + return 'utf_8' + elif m: + return m.group(1) + else: + return None + finally: + fp.seek(pos) + + +def sorted_dict_repr(d): + """repr() a dictionary with the keys in order. + + Used by the lexer unit test to compare parse trees based on strings. + + """ + keys = list(d.keys()) + keys.sort() + return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}" + + +def restore__ast(_ast): + """Attempt to restore the required classes to the _ast module if it + appears to be missing them + """ + if hasattr(_ast, 'AST'): + return + _ast.PyCF_ONLY_AST = 2 << 9 + m = compile("""\ +def foo(): pass +class Bar(object): pass +if False: pass +baz = 'mako' +1 + 2 - 3 * 4 / 5 +6 // 7 % 8 << 9 >> 10 +11 & 12 ^ 13 | 14 +15 and 16 or 17 +-baz + (not +18) - ~17 +baz and 'foo' or 'bar' +(mako is baz == baz) is not baz != mako +mako > baz < mako >= baz <= mako +mako in baz not in mako""", '', 'exec', _ast.PyCF_ONLY_AST) + _ast.Module = type(m) + + for cls in _ast.Module.__mro__: + if cls.__name__ == 'mod': + _ast.mod = cls + elif cls.__name__ == 'AST': + _ast.AST = cls + + _ast.FunctionDef = type(m.body[0]) + _ast.ClassDef = type(m.body[1]) + _ast.If = type(m.body[2]) + + _ast.Name = type(m.body[3].targets[0]) + _ast.Store = type(m.body[3].targets[0].ctx) + _ast.Str = type(m.body[3].value) + + _ast.Sub = type(m.body[4].value.op) + _ast.Add = type(m.body[4].value.left.op) + _ast.Div = type(m.body[4].value.right.op) + _ast.Mult = type(m.body[4].value.right.left.op) + + _ast.RShift = type(m.body[5].value.op) + _ast.LShift = type(m.body[5].value.left.op) + _ast.Mod = type(m.body[5].value.left.left.op) + _ast.FloorDiv = type(m.body[5].value.left.left.left.op) + + _ast.BitOr = type(m.body[6].value.op) + _ast.BitXor = type(m.body[6].value.left.op) + _ast.BitAnd = type(m.body[6].value.left.left.op) + + _ast.Or = type(m.body[7].value.op) + _ast.And = type(m.body[7].value.values[0].op) + + _ast.Invert = type(m.body[8].value.right.op) + _ast.Not = type(m.body[8].value.left.right.op) + _ast.UAdd = type(m.body[8].value.left.right.operand.op) + _ast.USub = type(m.body[8].value.left.left.op) + + _ast.Or = type(m.body[9].value.op) + _ast.And = type(m.body[9].value.values[0].op) + + _ast.IsNot = type(m.body[10].value.ops[0]) + _ast.NotEq = type(m.body[10].value.ops[1]) + _ast.Is = type(m.body[10].value.left.ops[0]) + _ast.Eq = type(m.body[10].value.left.ops[1]) + + _ast.Gt = type(m.body[11].value.ops[0]) + _ast.Lt = type(m.body[11].value.ops[1]) + _ast.GtE = type(m.body[11].value.ops[2]) + _ast.LtE = type(m.body[11].value.ops[3]) + + _ast.In = type(m.body[12].value.ops[0]) + _ast.NotIn = type(m.body[12].value.ops[1]) + + +def read_file(path, mode='rb'): + fp = open(path, mode) + try: + data = fp.read() + return data + finally: + fp.close() + + +def read_python_file(path): + fp = open(path, "rb") + try: + encoding = parse_encoding(fp) + data = fp.read() + if encoding: + data = data.decode(encoding) + return data + finally: + fp.close() diff --git a/server/www/packages/packages-common/pymemcache/__init__.py b/server/www/packages/packages-common/pymemcache/__init__.py new file mode 100644 index 0000000..5b8f37a --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/__init__.py @@ -0,0 +1 @@ +__version__ = '1.3.5' diff --git 
a/server/www/packages/packages-common/pymemcache/client/__init__.py b/server/www/packages/packages-common/pymemcache/client/__init__.py new file mode 100644 index 0000000..a6ff93b --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/client/__init__.py @@ -0,0 +1,12 @@ +# API Backwards compatibility + +from pymemcache.client.base import Client # noqa +from pymemcache.client.base import PooledClient # noqa + +from pymemcache.exceptions import MemcacheError # noqa +from pymemcache.exceptions import MemcacheClientError # noqa +from pymemcache.exceptions import MemcacheUnknownCommandError # noqa +from pymemcache.exceptions import MemcacheIllegalInputError # noqa +from pymemcache.exceptions import MemcacheServerError # noqa +from pymemcache.exceptions import MemcacheUnknownError # noqa +from pymemcache.exceptions import MemcacheUnexpectedCloseError # noqa diff --git a/server/www/packages/packages-common/pymemcache/client/base.py b/server/www/packages/packages-common/pymemcache/client/base.py new file mode 100644 index 0000000..4eab540 --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/client/base.py @@ -0,0 +1,1095 @@ +# Copyright 2012 Pinterest.com +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__author__ = "Charles Gordon" + +import errno +import socket +import six + +from pymemcache import pool + +from pymemcache.exceptions import ( + MemcacheClientError, + MemcacheUnknownCommandError, + MemcacheIllegalInputError, + MemcacheServerError, + MemcacheUnknownError, + MemcacheUnexpectedCloseError +) + + +RECV_SIZE = 4096 +VALID_STORE_RESULTS = { + b'set': (b'STORED',), + b'add': (b'STORED', b'NOT_STORED'), + b'replace': (b'STORED', b'NOT_STORED'), + b'append': (b'STORED', b'NOT_STORED'), + b'prepend': (b'STORED', b'NOT_STORED'), + b'cas': (b'STORED', b'EXISTS', b'NOT_FOUND'), +} + + +# Some of the values returned by the "stats" command +# need mapping into native Python types +STAT_TYPES = { + # General stats + b'version': six.binary_type, + b'rusage_user': lambda value: float(value.replace(b':', b'.')), + b'rusage_system': lambda value: float(value.replace(b':', b'.')), + b'hash_is_expanding': lambda value: int(value) != 0, + b'slab_reassign_running': lambda value: int(value) != 0, + + # Settings stats + b'inter': six.binary_type, + b'evictions': lambda value: value == b'on', + b'growth_factor': float, + b'stat_key_prefix': six.binary_type, + b'umask': lambda value: int(value, 8), + b'detail_enabled': lambda value: int(value) != 0, + b'cas_enabled': lambda value: int(value) != 0, + b'auth_enabled_sasl': lambda value: value == b'yes', + b'maxconns_fast': lambda value: int(value) != 0, + b'slab_reassign': lambda value: int(value) != 0, + b'slab_automove': lambda value: int(value) != 0, +} + +# Common helper functions. 
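The _check_key() helper defined just below enforces memcached's key rules (ASCII, no spaces, at most 250 bytes); a sketch of its contract, with results inferred from that code (it is a private helper, imported here only for illustration):

    from pymemcache.client.base import _check_key
    from pymemcache.exceptions import MemcacheIllegalInputError

    _check_key(u"user:1", key_prefix=b"app-")  # -> b'app-user:1'
    try:
        _check_key(u"has space")               # spaces are rejected
    except MemcacheIllegalInputError:
        pass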
+ + +def _check_key(key, key_prefix=b''): + """Checks key and add key_prefix.""" + if isinstance(key, six.text_type): + try: + key = key.encode('ascii') + except UnicodeEncodeError: + raise MemcacheIllegalInputError("No ascii key: %r" % (key,)) + key = key_prefix + key + if b' ' in key: + raise MemcacheIllegalInputError("Key contains spaces: %r" % (key,)) + if len(key) > 250: + raise MemcacheIllegalInputError("Key is too long: %r" % (key,)) + return key + + +class Client(object): + """ + A client for a single memcached server. + + *Keys and Values* + + Keys must have a __str__() method which should return a str with no more + than 250 ASCII characters and no whitespace or control characters. Unicode + strings must be encoded (as UTF-8, for example) unless they consist only + of ASCII characters that are neither whitespace nor control characters. + + Values must have a __str__() method to convert themselves to a byte + string. Unicode objects can be a problem since str() on a Unicode object + will attempt to encode it as ASCII (which will fail if the value contains + code points larger than U+127). You can fix this with a serializer or by + just calling encode on the string (using UTF-8, for instance). + + If you intend to use anything but str as a value, it is a good idea to use + a serializer and deserializer. The pymemcache.serde library has some + already implemented serializers, including one that is compatible with + the python-memcache library. + + *Serialization and Deserialization* + + The constructor takes two optional functions, one for "serialization" of + values, and one for "deserialization". The serialization function takes + two arguments, a key and a value, and returns a tuple of two elements, the + serialized value, and an integer in the range 0-65535 (the "flags"). The + deserialization function takes three parameters, a key, value and flags + and returns the deserialized value. + + Here is an example using JSON for non-str values: + + .. code-block:: python + + def serialize_json(key, value): + if type(value) == str: + return value, 1 + return json.dumps(value), 2 + + def deserialize_json(key, value, flags): + if flags == 1: + return value + + if flags == 2: + return json.loads(value) + + raise Exception("Unknown flags for value: {0}".format(flags)) + + *Error Handling* + + All of the methods in this class that talk to memcached can throw one of + the following exceptions: + + * MemcacheUnknownCommandError + * MemcacheClientError + * MemcacheServerError + * MemcacheUnknownError + * MemcacheUnexpectedCloseError + * MemcacheIllegalInputError + * socket.timeout + * socket.error + + Instances of this class maintain a persistent connection to memcached + which is terminated when any of these exceptions are raised. The next + call to a method on the object will result in a new connection being made + to memcached. + """ + + def __init__(self, + server, + serializer=None, + deserializer=None, + connect_timeout=None, + timeout=None, + no_delay=False, + ignore_exc=False, + socket_module=socket, + key_prefix=b'', + default_noreply=True): + """ + Constructor. + + Args: + server: tuple(hostname, port) + serializer: optional function, see notes in the class docs. + deserializer: optional function, see notes in the class docs. + connect_timeout: optional float, seconds to wait for a connection to + the memcached server. Defaults to "forever" (uses the underlying + default socket timeout, which can be very long).
+ timeout: optional float, seconds to wait for send or recv calls on + the socket connected to memcached. Defaults to "forever" (uses the + underlying default socket timeout, which can be very long). + no_delay: optional bool, set the TCP_NODELAY flag, which may help + with performance in some cases. Defaults to False. + ignore_exc: optional bool, True to cause the "get", "gets", + "get_many" and "gets_many" calls to treat any errors as cache + misses. Defaults to False. + socket_module: socket module to use, e.g. gevent.socket. Defaults to + the standard library's socket module. + key_prefix: Prefix of key. You can use this as namespace. Defaults + to b''. + default_noreply: bool, the default value for 'noreply' as passed to + store commands (except from cas, incr, and decr, which default to + False). + + Notes: + The constructor does not make a connection to memcached. The first + call to a method on the object will do that. + """ + self.server = server + self.serializer = serializer + self.deserializer = deserializer + self.connect_timeout = connect_timeout + self.timeout = timeout + self.no_delay = no_delay + self.ignore_exc = ignore_exc + self.socket_module = socket_module + self.sock = None + if isinstance(key_prefix, six.text_type): + key_prefix = key_prefix.encode('ascii') + if not isinstance(key_prefix, bytes): + raise TypeError("key_prefix should be bytes.") + self.key_prefix = key_prefix + self.default_noreply = default_noreply + + def check_key(self, key): + """Checks key and add key_prefix.""" + return _check_key(key, key_prefix=self.key_prefix) + + def _connect(self): + sock = self.socket_module.socket(self.socket_module.AF_INET, + self.socket_module.SOCK_STREAM) + sock.settimeout(self.connect_timeout) + sock.connect(self.server) + sock.settimeout(self.timeout) + if self.no_delay: + sock.setsockopt(self.socket_module.IPPROTO_TCP, + self.socket_module.TCP_NODELAY, 1) + self.sock = sock + + def close(self): + """Close the connection to memcached, if it is open. The next call to a + method that requires a connection will re-open it.""" + if self.sock is not None: + try: + self.sock.close() + except Exception: + pass + self.sock = None + + def set(self, key, value, expire=0, noreply=None): + """ + The memcached "set" command. + + Args: + key: str, see class docs for details. + value: str, see class docs for details. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + If no exception is raised, always returns True. If an exception is + raised, the set may or may not have occurred. If noreply is True, + then a successful return does not guarantee a successful set. + """ + if noreply is None: + noreply = self.default_noreply + return self._store_cmd(b'set', key, expire, noreply, value) + + def set_many(self, values, expire=0, noreply=None): + """ + A convenience function for setting multiple values. + + Args: + values: dict(str, str), a dict of keys and values, see class docs + for details. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + If no exception is raised, always returns True. Otherwise all, some + or none of the keys have been successfully set. 
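A minimal usage sketch for the storage commands documented above (the server address and keys are illustrative):

    from pymemcache.client.base import Client

    client = Client(("127.0.0.1", 11211))
    client.set("some_key", "some_value", expire=60)
    client.set_many({"k1": "v1", "k2": "v2"})   # loops over set() internally
    print(client.get("some_key"))               # -> b'some_value'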
If noreply is True + then a successful return does not guarantee that any keys were + successfully set (just that the keys were successfully sent). + """ + + # TODO: make this more performant by sending all the values first, then + # waiting for all the responses. + for key, value in six.iteritems(values): + self.set(key, value, expire, noreply) + return True + + set_multi = set_many + + def add(self, key, value, expire=0, noreply=None): + """ + The memcached "add" command. + + Args: + key: str, see class docs for details. + value: str, see class docs for details. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + If noreply is True, the return value is always True. Otherwise the + return value is True if the value was stored, and False if it was + not (because the key already existed). + """ + if noreply is None: + noreply = self.default_noreply + return self._store_cmd(b'add', key, expire, noreply, value) + + def replace(self, key, value, expire=0, noreply=None): + """ + The memcached "replace" command. + + Args: + key: str, see class docs for details. + value: str, see class docs for details. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + If noreply is True, always returns True. Otherwise returns True if + the value was stored and False if it wasn't (because the key didn't + already exist). + """ + if noreply is None: + noreply = self.default_noreply + return self._store_cmd(b'replace', key, expire, noreply, value) + + def append(self, key, value, expire=0, noreply=None): + """ + The memcached "append" command. + + Args: + key: str, see class docs for details. + value: str, see class docs for details. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + True. + """ + if noreply is None: + noreply = self.default_noreply + return self._store_cmd(b'append', key, expire, noreply, value) + + def prepend(self, key, value, expire=0, noreply=None): + """ + The memcached "prepend" command. + + Args: + key: str, see class docs for details. + value: str, see class docs for details. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + True. + """ + if noreply is None: + noreply = self.default_noreply + return self._store_cmd(b'prepend', key, expire, noreply, value) + + def cas(self, key, value, cas, expire=0, noreply=False): + """ + The memcached "cas" command. + + Args: + key: str, see class docs for details. + value: str, see class docs for details. + cas: int or str that only contains the characters '0'-'9'. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, False to wait for the reply (the default). + + Returns: + If noreply is True, always returns True.
Otherwise returns None if + the key didn't exist, False if it existed but had a different cas + value and True if it existed and was changed. + """ + return self._store_cmd(b'cas', key, expire, noreply, value, cas) + + def get(self, key): + """ + The memcached "get" command, but only for one key, as a convenience. + + Args: + key: str, see class docs for details. + + Returns: + The value for the key, or None if the key wasn't found. + """ + return self._fetch_cmd(b'get', [key], False).get(key, None) + + def get_many(self, keys): + """ + The memcached "get" command. + + Args: + keys: list(str), see class docs for details. + + Returns: + A dict in which the keys are elements of the "keys" argument list + and the values are values from the cache. The dict may contain all, + some or none of the given keys. + """ + if not keys: + return {} + + return self._fetch_cmd(b'get', keys, False) + + get_multi = get_many + + def gets(self, key): + """ + The memcached "gets" command for one key, as a convenience. + + Args: + key: str, see class docs for details. + + Returns: + A tuple of (value, cas), or (None, None) if the key was not found. + """ + return self._fetch_cmd(b'gets', [key], True).get(key, (None, None)) + + def gets_many(self, keys): + """ + The memcached "gets" command. + + Args: + keys: list(str), see class docs for details. + + Returns: + A dict in which the keys are elements of the "keys" argument list and + the values are tuples of (value, cas) from the cache. The dict may + contain all, some or none of the given keys. + """ + if not keys: + return {} + + return self._fetch_cmd(b'gets', keys, True) + + def delete(self, key, noreply=None): + """ + The memcached "delete" command. + + Args: + key: str, see class docs for details. + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + If noreply is True, always returns True. Otherwise returns True if + the key was deleted, and False if it wasn't found. + """ + if noreply is None: + noreply = self.default_noreply + cmd = b'delete ' + self.check_key(key) + if noreply: + cmd += b' noreply' + cmd += b'\r\n' + result = self._misc_cmd(cmd, b'delete', noreply) + if noreply: + return True + return result == b'DELETED' + + def delete_many(self, keys, noreply=None): + """ + A convenience function to delete multiple keys. + + Args: + keys: list(str), the list of keys to delete. + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + True. If an exception is raised then all, some or none of the keys + may have been deleted. Otherwise all the keys have been sent to + memcache for deletion and if noreply is False, they have been + acknowledged by memcache. + """ + if not keys: + return True + + if noreply is None: + noreply = self.default_noreply + + # TODO: make this more performant by sending all keys first, then + # waiting for all values. + for key in keys: + self.delete(key, noreply) + + return True + + delete_multi = delete_many + + def incr(self, key, value, noreply=False): + """ + The memcached "incr" command. + + Args: + key: str, see class docs for details. + value: int, the amount by which to increment the value. + noreply: optional bool, False to wait for the reply (the default). + + Returns: + If noreply is True, always returns None. Otherwise returns the new + value of the key, or None if the key wasn't found.
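The gets()/cas() pair above supports optimistic, check-and-set updates; a sketch (key name and server address illustrative; the cas token returned by gets() is passed back verbatim):

    from pymemcache.client.base import Client

    client = Client(("127.0.0.1", 11211))
    value, cas_id = client.gets("counter")
    if value is not None:
        ok = client.cas("counter", int(value) + 1, cas_id)
        # ok: True = stored, False = another writer got there first,
        # None = the key disappeared in the meantime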
+ """ + key = self.check_key(key) + cmd = b'incr ' + key + b' ' + six.text_type(value).encode('ascii') + if noreply: + cmd += b' noreply' + cmd += b'\r\n' + result = self._misc_cmd(cmd, b'incr', noreply) + if noreply: + return None + if result == b'NOT_FOUND': + return None + return int(result) + + def decr(self, key, value, noreply=False): + """ + The memcached "decr" command. + + Args: + key: str, see class docs for details. + value: int, the amount by which to increment the value. + noreply: optional bool, False to wait for the reply (the default). + + Returns: + If noreply is True, always returns None. Otherwise returns the new + value of the key, or None if the key wasn't found. + """ + key = self.check_key(key) + cmd = b'decr ' + key + b' ' + six.text_type(value).encode('ascii') + if noreply: + cmd += b' noreply' + cmd += b'\r\n' + result = self._misc_cmd(cmd, b'decr', noreply) + if noreply: + return None + if result == b'NOT_FOUND': + return None + return int(result) + + def touch(self, key, expire=0, noreply=None): + """ + The memcached "touch" command. + + Args: + key: str, see class docs for details. + expire: optional int, number of seconds until the item is expired + from the cache, or zero for no expiry (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + True if the expiration time was updated, False if the key wasn't + found. + """ + if noreply is None: + noreply = self.default_noreply + key = self.check_key(key) + cmd = b'touch ' + key + b' ' + six.text_type(expire).encode('ascii') + if noreply: + cmd += b' noreply' + cmd += b'\r\n' + result = self._misc_cmd(cmd, b'touch', noreply) + if noreply: + return True + return result == b'TOUCHED' + + def stats(self, *args): + """ + The memcached "stats" command. + + The returned keys depend on what the "stats" command returns. + A best effort is made to convert values to appropriate Python + types, defaulting to strings when a conversion cannot be made. + + Args: + *arg: extra string arguments to the "stats" command. See the + memcached protocol documentation for more information. + + Returns: + A dict of the returned stats. + """ + result = self._fetch_cmd(b'stats', args, False) + + for key, value in six.iteritems(result): + converter = STAT_TYPES.get(key, int) + try: + result[key] = converter(value) + except Exception: + pass + + return result + + def version(self): + """ + The memcached "version" command. + + Returns: + A string of the memcached version. + """ + cmd = b"version\r\n" + result = self._misc_cmd(cmd, b'version', False) + + if not result.startswith(b'VERSION '): + raise MemcacheUnknownError("Received unexpected response: %s" % (result, )) + + return result[8:] + + def flush_all(self, delay=0, noreply=None): + """ + The memcached "flush_all" command. + + Args: + delay: optional int, the number of seconds to wait before flushing, + or zero to flush immediately (the default). + noreply: optional bool, True to not wait for the reply (defaults to + self.default_noreply). + + Returns: + True. + """ + if noreply is None: + noreply = self.default_noreply + cmd = b'flush_all ' + six.text_type(delay).encode('ascii') + if noreply: + cmd += b' noreply' + cmd += b'\r\n' + result = self._misc_cmd(cmd, b'flush_all', noreply) + if noreply: + return True + return result == b'OK' + + def quit(self): + """ + The memcached "quit" command. + + This will close the connection with memcached. 
Calling any other + method on this object will re-open the connection, so this object can + be re-used after quit. + """ + cmd = b"quit\r\n" + self._misc_cmd(cmd, b'quit', True) + self.close() + + def _raise_errors(self, line, name): + if line.startswith(b'ERROR'): + raise MemcacheUnknownCommandError(name) + + if line.startswith(b'CLIENT_ERROR'): + error = line[line.find(b' ') + 1:] + raise MemcacheClientError(error) + + if line.startswith(b'SERVER_ERROR'): + error = line[line.find(b' ') + 1:] + raise MemcacheServerError(error) + + def _fetch_cmd(self, name, keys, expect_cas): + checked_keys = dict((self.check_key(k), k) for k in keys) + cmd = name + b' ' + b' '.join(checked_keys) + b'\r\n' + + try: + if not self.sock: + self._connect() + + self.sock.sendall(cmd) + + buf = b'' + result = {} + while True: + buf, line = _readline(self.sock, buf) + self._raise_errors(line, name) + if line == b'END': + return result + elif line.startswith(b'VALUE'): + if expect_cas: + _, key, flags, size, cas = line.split() + else: + try: + _, key, flags, size = line.split() + except Exception as e: + raise ValueError("Unable to parse line %s: %s" + % (line, str(e))) + + buf, value = _readvalue(self.sock, buf, int(size)) + key = checked_keys[key] + + if self.deserializer: + value = self.deserializer(key, value, int(flags)) + + if expect_cas: + result[key] = (value, cas) + else: + result[key] = value + elif name == b'stats' and line.startswith(b'STAT'): + _, key, value = line.split() + result[key] = value + else: + raise MemcacheUnknownError(line[:32]) + except Exception: + self.close() + if self.ignore_exc: + return {} + raise + + def _store_cmd(self, name, key, expire, noreply, data, cas=None): + key = self.check_key(key) + if not self.sock: + self._connect() + + if self.serializer: + data, flags = self.serializer(key, data) + else: + flags = 0 + + if not isinstance(data, six.binary_type): + try: + data = six.text_type(data).encode('ascii') + except UnicodeEncodeError as e: + raise MemcacheIllegalInputError(str(e)) + + extra = b'' + if cas is not None: + extra += b' ' + cas + if noreply: + extra += b' noreply' + + cmd = (name + b' ' + key + b' ' + six.text_type(flags).encode('ascii') + + b' ' + six.text_type(expire).encode('ascii') + + b' ' + six.text_type(len(data)).encode('ascii') + extra + + b'\r\n' + data + b'\r\n') + + try: + self.sock.sendall(cmd) + + if noreply: + return True + + buf = b'' + buf, line = _readline(self.sock, buf) + self._raise_errors(line, name) + + if line in VALID_STORE_RESULTS[name]: + if line == b'STORED': + return True + if line == b'NOT_STORED': + return False + if line == b'NOT_FOUND': + return None + if line == b'EXISTS': + return False + else: + raise MemcacheUnknownError(line[:32]) + except Exception: + self.close() + raise + + def _misc_cmd(self, cmd, cmd_name, noreply): + if not self.sock: + self._connect() + + try: + self.sock.sendall(cmd) + + if noreply: + return + + _, line = _readline(self.sock, b'') + self._raise_errors(line, cmd_name) + + return line + except Exception: + self.close() + raise + + def __setitem__(self, key, value): + self.set(key, value, noreply=True) + + def __getitem__(self, key): + value = self.get(key) + if value is None: + raise KeyError + return value + + def __delitem__(self, key): + self.delete(key, noreply=True) + + +class PooledClient(object): + """A thread-safe pool of clients (with the same client api). 
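A sketch of the pooled variant introduced above; it mirrors the Client API, checking a Client out of the pool per call and destroying it on failure (arguments illustrative):

    from pymemcache.client.base import PooledClient

    client = PooledClient(("127.0.0.1", 11211), max_pool_size=4)
    client.set("k", "v")
    print(client.get("k"))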
+ + Args: + max_pool_size: maximum pool size to use (going above this amount + triggers a runtime error), by default this is 2147483648L + when not provided (or None). + lock_generator: a callback/type that takes no arguments that will + be called to create a lock or semaphore that can + protect the pool from concurrent access (for example an + eventlet lock or semaphore could be used instead) + + Further arguments are interpreted as for :py:class:`.Client` constructor. + """ + + def __init__(self, + server, + serializer=None, + deserializer=None, + connect_timeout=None, + timeout=None, + no_delay=False, + ignore_exc=False, + socket_module=socket, + key_prefix=b'', + max_pool_size=None, + lock_generator=None): + self.server = server + self.serializer = serializer + self.deserializer = deserializer + self.connect_timeout = connect_timeout + self.timeout = timeout + self.no_delay = no_delay + self.ignore_exc = ignore_exc + self.socket_module = socket_module + if isinstance(key_prefix, six.text_type): + key_prefix = key_prefix.encode('ascii') + if not isinstance(key_prefix, bytes): + raise TypeError("key_prefix should be bytes.") + self.key_prefix = key_prefix + self.client_pool = pool.ObjectPool( + self._create_client, + after_remove=lambda client: client.close(), + max_size=max_pool_size, + lock_generator=lock_generator) + + def check_key(self, key): + """Checks key and add key_prefix.""" + return _check_key(key, key_prefix=self.key_prefix) + + def _create_client(self): + client = Client(self.server, + serializer=self.serializer, + deserializer=self.deserializer, + connect_timeout=self.connect_timeout, + timeout=self.timeout, + no_delay=self.no_delay, + # We need to know when it fails *always* so that we + # can remove/destroy it from the pool... + ignore_exc=False, + socket_module=self.socket_module, + key_prefix=self.key_prefix) + return client + + def close(self): + self.client_pool.clear() + + def set(self, key, value, expire=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.set(key, value, expire=expire, noreply=noreply) + + def set_many(self, values, expire=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.set_many(values, expire=expire, noreply=noreply) + + set_multi = set_many + + def replace(self, key, value, expire=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.replace(key, value, expire=expire, noreply=noreply) + + def append(self, key, value, expire=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.append(key, value, expire=expire, noreply=noreply) + + def prepend(self, key, value, expire=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.prepend(key, value, expire=expire, noreply=noreply) + + def cas(self, key, value, cas, expire=0, noreply=False): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.cas(key, value, cas, + expire=expire, noreply=noreply) + + def get(self, key): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + try: + return client.get(key) + except Exception: + if self.ignore_exc: + return None + else: + raise + + def get_many(self, keys): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + try: + return client.get_many(keys) + except Exception: + if self.ignore_exc: + return {} + else: + raise + +
get_multi = get_many + + def gets(self, key): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + try: + return client.gets(key) + except Exception: + if self.ignore_exc: + return (None, None) + else: + raise + + def gets_many(self, keys): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + try: + return client.gets_many(keys) + except Exception: + if self.ignore_exc: + return {} + else: + raise + + def delete(self, key, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.delete(key, noreply=noreply) + + def delete_many(self, keys, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.delete_many(keys, noreply=noreply) + + delete_multi = delete_many + + def add(self, key, value, expire=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.add(key, value, expire=expire, noreply=noreply) + + def incr(self, key, value, noreply=False): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.incr(key, value, noreply=noreply) + + def decr(self, key, value, noreply=False): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.decr(key, value, noreply=noreply) + + def touch(self, key, expire=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.touch(key, expire=expire, noreply=noreply) + + def stats(self, *args): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + try: + return client.stats(*args) + except Exception: + if self.ignore_exc: + return {} + else: + raise + + def version(self): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.version() + + def flush_all(self, delay=0, noreply=True): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + return client.flush_all(delay=delay, noreply=noreply) + + def quit(self): + with self.client_pool.get_and_release(destroy_on_fail=True) as client: + try: + client.quit() + finally: + self.client_pool.destroy(client) + + def __setitem__(self, key, value): + self.set(key, value, noreply=True) + + def __getitem__(self, key): + value = self.get(key) + if value is None: + raise KeyError + return value + + def __delitem__(self, key): + self.delete(key, noreply=True) + + +def _readline(sock, buf): + """Read line of text from the socket. + + Read a line of text (delimited by "\r\n") from the socket, and + return that line along with any trailing characters read from the + socket. + + Args: + sock: Socket object, should be connected. + buf: String, zero or more characters, returned from an earlier + call to _readline or _readvalue (pass an empty string on the + first call). + + Returns: + A tuple of (buf, line) where line is the full line read from the + socket (minus the "\r\n" characters) and buf is any trailing + characters read after the "\r\n" was found (which may be an empty + string). + + """ + chunks = [] + last_char = b'' + + while True: + # We're reading in chunks, so "\r\n" could appear in one chunk, + # or across the boundary of two chunks, so we check for both + # cases. + + # This case must appear first, since the buffer could have + # later \r\n characters in it and we want to get the first \r\n. + if last_char == b'\r' and buf[0:1] == b'\n': + # Strip the last character from the last chunk. 
chunks[-1] = chunks[-1][:-1] + return buf[1:], b''.join(chunks) + elif buf.find(b'\r\n') != -1: + before, sep, after = buf.partition(b"\r\n") + chunks.append(before) + return after, b''.join(chunks) + + if buf: + chunks.append(buf) + last_char = buf[-1:] + + buf = _recv(sock, RECV_SIZE) + if not buf: + raise MemcacheUnexpectedCloseError() + + +def _readvalue(sock, buf, size): + """Read specified amount of bytes from the socket. + + Read size bytes, followed by the "\r\n" characters, from the socket, + and return those bytes and any trailing bytes read after the "\r\n". + + Args: + sock: Socket object, should be connected. + buf: String, zero or more characters, returned from an earlier + call to _readline or _readvalue (pass an empty string on the + first call). + size: Integer, number of bytes to read from the socket. + + Returns: + A tuple of (buf, value) where value is the bytes read from the + socket (there will be exactly size bytes) and buf is trailing + characters read after the "\r\n" following the bytes (but not + including the \r\n). + + """ + chunks = [] + rlen = size + 2 + while rlen - len(buf) > 0: + if buf: + rlen -= len(buf) + chunks.append(buf) + buf = _recv(sock, RECV_SIZE) + if not buf: + raise MemcacheUnexpectedCloseError() + + # Now we need to remove the \r\n from the end. There are two cases we care + # about: the \r\n is all in the last buffer, or only the \n is in the last + # buffer, and we need to remove the \r from the penultimate buffer. + + if rlen == 1: + # replace the last chunk with the same string minus the last character, + # which is always '\r' in this case. + chunks[-1] = chunks[-1][:-1] + else: + # Just remove the "\r\n" from the latest chunk + chunks.append(buf[:rlen - 2]) + + return buf[rlen:], b''.join(chunks) + + +def _recv(sock, size): + """sock.recv() with retry on EINTR""" + while True: + try: + return sock.recv(size) + except IOError as e: + if e.errno != errno.EINTR: + raise diff --git a/server/www/packages/packages-common/pymemcache/client/hash.py b/server/www/packages/packages-common/pymemcache/client/hash.py new file mode 100644 index 0000000..55b6287 --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/client/hash.py @@ -0,0 +1,333 @@ +import socket +import time +import logging + +from pymemcache.client.base import Client, PooledClient, _check_key +from pymemcache.client.rendezvous import RendezvousHash + +logger = logging.getLogger(__name__) + + +class HashClient(object): + """ + A client for communicating with a cluster of memcached servers + """ + def __init__( + self, + servers, + hasher=RendezvousHash, + serializer=None, + deserializer=None, + connect_timeout=None, + timeout=None, + no_delay=False, + socket_module=socket, + key_prefix=b'', + max_pool_size=None, + lock_generator=None, + retry_attempts=2, + retry_timeout=1, + dead_timeout=60, + use_pooling=False, + ignore_exc=False, + ): + """ + Constructor. + + Args: + servers: list(tuple(hostname, port)) + hasher: optional class with three functions ``get_node``, ``add_node``, + and ``remove_node`` + defaults to Rendezvous (HRW) hash. + + use_pooling: use py:class:`.PooledClient` as the default underlying + class. ``max_pool_size`` and ``lock_generator`` can + be used with this. default: False + + retry_attempts: Number of times a client should be tried before it + is marked dead and removed from the pool. + retry_timeout (float): Time in seconds that should pass between retry + attempts.
+ dead_timeout (float): Time in seconds before attempting to add a node + back in the pool. + + Further arguments are interpreted as for :py:class:`.Client` + constructor. + + The default ``hasher`` uses a pure Python implementation whose + performance can be significantly improved by switching to a C based + version. We recommend using ``python-clandestined`` if having a C + dependency is acceptable. + """ + self.clients = {} + self.retry_attempts = retry_attempts + self.retry_timeout = retry_timeout + self.dead_timeout = dead_timeout + self.use_pooling = use_pooling + self.key_prefix = key_prefix + self.ignore_exc = ignore_exc + self._failed_clients = {} + self._dead_clients = {} + self._last_dead_check_time = time.time() + + self.hasher = hasher() + + self.default_kwargs = { + 'connect_timeout': connect_timeout, + 'timeout': timeout, + 'no_delay': no_delay, + 'socket_module': socket_module, + 'key_prefix': key_prefix, + 'serializer': serializer, + 'deserializer': deserializer, + } + + if use_pooling is True: + self.default_kwargs.update({ + 'max_pool_size': max_pool_size, + 'lock_generator': lock_generator + }) + + for server, port in servers: + self.add_server(server, port) + + def add_server(self, server, port): + key = '%s:%s' % (server, port) + + if self.use_pooling: + client = PooledClient( + (server, port), + **self.default_kwargs + ) + else: + client = Client((server, port), **self.default_kwargs) + + self.clients[key] = client + self.hasher.add_node(key) + + def remove_server(self, server, port): + dead_time = time.time() + self._failed_clients.pop((server, port)) + self._dead_clients[(server, port)] = dead_time + key = '%s:%s' % (server, port) + self.hasher.remove_node(key) + + def _get_client(self, key): + _check_key(key, self.key_prefix) + if len(self._dead_clients) > 0: + current_time = time.time() + ldc = self._last_dead_check_time + # we have dead clients and we have reached the + # retry timeout + if current_time - ldc > self.dead_timeout: + for server, dead_time in self._dead_clients.items(): + if current_time - dead_time > self.dead_timeout: + logger.debug( + 'bringing server back into rotation %s', + server + ) + self.add_server(*server) + self._last_dead_check_time = current_time + + server = self.hasher.get_node(key) + # We've run out of servers to try + if server is None: + if self.ignore_exc is True: + return + raise Exception('All servers seem to be down right now') + + client = self.clients[server] + return client + + def _safely_run_func(self, client, func, default_val, *args, **kwargs): + try: + if client.server in self._failed_clients: + # This server is currently failing, lets check if it is in + # retry or marked as dead + failed_metadata = self._failed_clients[client.server] + + # we haven't tried our max amount yet, if it has been enough + # time lets just retry using it + if failed_metadata['attempts'] < self.retry_attempts: + failed_time = failed_metadata['failed_time'] + if time.time() - failed_time > self.retry_timeout: + logger.debug( + 'retrying failed server: %s', client.server + ) + result = func(*args, **kwargs) + # we were successful, lets remove it from the failed + # clients + self._failed_clients.pop(client.server) + return result + return default_val + else: + # We've reached our max retry attempts, we need to mark + # the server as dead + logger.debug('marking server as dead: %s', client.server) + self.remove_server(*client.server) + + result = func(*args, **kwargs) + return result + + # Connecting to the server failed, so we should enter
+ # retry mode + except socket.error: + # This client has never failed, lets mark it for failure + if ( + client.server not in self._failed_clients and + self.retry_attempts > 0 + ): + self._failed_clients[client.server] = { + 'failed_time': time.time(), + 'attempts': 0, + } + # We aren't allowing any retries, we should mark the server as + # dead immediately + elif ( + client.server not in self._failed_clients and + self.retry_attempts <= 0 + ): + self._failed_clients[client.server] = { + 'failed_time': time.time(), + 'attempts': 0, + } + logger.debug("marking server as dead %s", client.server) + self.remove_server(*client.server) + # This client has failed previously, we need to update the metadata + # to reflect that we have attempted it again + else: + failed_metadata = self._failed_clients[client.server] + failed_metadata['attempts'] += 1 + failed_metadata['failed_time'] = time.time() + self._failed_clients[client.server] = failed_metadata + + # if we haven't enabled ignore_exc, don't move on gracefully, just + # raise the exception + if not self.ignore_exc: + raise + + return default_val + except: + # any exceptions that aren't socket.error we need to handle + # gracefully as well + if not self.ignore_exc: + raise + + return default_val + + def _run_cmd(self, cmd, key, default_val, *args, **kwargs): + client = self._get_client(key) + + if client is None: + return False + + func = getattr(client, cmd) + args = list(args) + args.insert(0, key) + return self._safely_run_func( + client, func, default_val, *args, **kwargs + ) + + def set(self, key, *args, **kwargs): + return self._run_cmd('set', key, False, *args, **kwargs) + + def get(self, key, *args, **kwargs): + return self._run_cmd('get', key, None, *args, **kwargs) + + def incr(self, key, *args, **kwargs): + return self._run_cmd('incr', key, False, *args, **kwargs) + + def decr(self, key, *args, **kwargs): + return self._run_cmd('decr', key, False, *args, **kwargs) + + def set_many(self, values, *args, **kwargs): + client_batches = {} + end = [] + + for key, value in values.items(): + client = self._get_client(key) + + if client is None: + end.append(False) + continue + + if client.server not in client_batches: + client_batches[client.server] = {} + + client_batches[client.server][key] = value + + for server, values in client_batches.items(): + client = self.clients['%s:%s' % server] + new_args = list(args) + new_args.insert(0, values) + result = self._safely_run_func( + client, + client.set_many, False, *new_args, **kwargs + ) + end.append(result) + + return all(end) + + set_multi = set_many + + def get_many(self, keys, *args, **kwargs): + client_batches = {} + end = {} + + for key in keys: + client = self._get_client(key) + + if client is None: + end[key] = False + continue + + if client.server not in client_batches: + client_batches[client.server] = [] + + client_batches[client.server].append(key) + + for server, keys in client_batches.items(): + client = self.clients['%s:%s' % server] + new_args = list(args) + new_args.insert(0, keys) + result = self._safely_run_func( + client, + client.get_many, {}, *new_args, **kwargs + ) + end.update(result) + + return end + + get_multi = get_many + + def gets(self, key, *args, **kwargs): + return self._run_cmd('gets', key, None, *args, **kwargs) + + def add(self, key, *args, **kwargs): + return self._run_cmd('add', key, False, *args, **kwargs) + + def prepend(self, key, *args, **kwargs): + return self._run_cmd('prepend', key, False, *args, **kwargs) + + def append(self, key, *args, 
**kwargs): + return self._run_cmd('append', key, False, *args, **kwargs) + + def delete(self, key, *args, **kwargs): + return self._run_cmd('delete', key, False, *args, **kwargs) + + def delete_many(self, keys, *args, **kwargs): + for key in keys: + self._run_cmd('delete', key, False, *args, **kwargs) + return True + + delete_multi = delete_many + + def cas(self, key, *args, **kwargs): + return self._run_cmd('cas', key, False, *args, **kwargs) + + def replace(self, key, *args, **kwargs): + return self._run_cmd('replace', key, False, *args, **kwargs) + + def flush_all(self): + for _, client in self.clients.items(): + self._safely_run_func(client, client.flush_all, False) diff --git a/server/www/packages/packages-common/pymemcache/client/murmur3.py b/server/www/packages/packages-common/pymemcache/client/murmur3.py new file mode 100644 index 0000000..787eeaf --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/client/murmur3.py @@ -0,0 +1,51 @@ +def murmur3_32(data, seed=0): + """MurmurHash3 was written by Austin Appleby, and is placed in the + public domain. The author hereby disclaims copyright to this source + code.""" + + c1 = 0xcc9e2d51 + c2 = 0x1b873593 + + length = len(data) + h1 = seed + roundedEnd = (length & 0xfffffffc) # round down to 4 byte block + for i in range(0, roundedEnd, 4): + # little endian load order + k1 = (ord(data[i]) & 0xff) | ((ord(data[i + 1]) & 0xff) << 8) | \ + ((ord(data[i + 2]) & 0xff) << 16) | (ord(data[i + 3]) << 24) + k1 *= c1 + k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17) # ROTL32(k1,15) + k1 *= c2 + + h1 ^= k1 + h1 = (h1 << 13) | ((h1 & 0xffffffff) >> 19) # ROTL32(h1,13) + h1 = h1 * 5 + 0xe6546b64 + + # tail + k1 = 0 + + val = length & 0x03 + if val == 3: + k1 = (ord(data[roundedEnd + 2]) & 0xff) << 16 + # fallthrough + if val in [2, 3]: + k1 |= (ord(data[roundedEnd + 1]) & 0xff) << 8 + # fallthrough + if val in [1, 2, 3]: + k1 |= ord(data[roundedEnd]) & 0xff + k1 *= c1 + k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17) # ROTL32(k1,15) + k1 *= c2 + h1 ^= k1 + + # finalization + h1 ^= length + + # fmix(h1) + h1 ^= ((h1 & 0xffffffff) >> 16) + h1 *= 0x85ebca6b + h1 ^= ((h1 & 0xffffffff) >> 13) + h1 *= 0xc2b2ae35 + h1 ^= ((h1 & 0xffffffff) >> 16) + + return h1 & 0xffffffff diff --git a/server/www/packages/packages-common/pymemcache/client/rendezvous.py b/server/www/packages/packages-common/pymemcache/client/rendezvous.py new file mode 100644 index 0000000..32ecc2b --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/client/rendezvous.py @@ -0,0 +1,46 @@ +from pymemcache.client.murmur3 import murmur3_32 + + +class RendezvousHash(object): + """ + Implements the Highest Random Weight (HRW) hashing algorithm most + commonly referred to as rendezvous hashing. + + Originally developed as part of python-clandestined. + + Copyright (c) 2014 Ernest W. Durbin III + """ + def __init__(self, nodes=None, seed=0, hash_function=murmur3_32): + """ + Constructor. 
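# A usage sketch for the consistent-hashing client whose failover logic is
# implemented above. The import path and class name (pymemcache.client.hash.
# HashClient) are assumed from upstream pymemcache; addresses and timeouts
# below are placeholders, not values required by this change.
#
# from pymemcache.client.hash import HashClient
#
# client = HashClient(
#     servers=[('127.0.0.1', 11211), ('127.0.0.1', 11212)],
#     retry_attempts=2,    # failed attempts before a node is marked dead
#     retry_timeout=1,     # seconds to wait before retrying a failing node
#     dead_timeout=60,     # seconds before a dead node may rejoin the pool
#     ignore_exc=True,     # degrade hard failures into cache misses
# )
# client.set('greeting', 'hello')
# print(client.get('greeting'))    # keys are routed via the hasher below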
+ """ + self.nodes = [] + self.seed = seed + if nodes is not None: + self.nodes = nodes + self.hash_function = lambda x: hash_function(x, seed) + + def add_node(self, node): + if node not in self.nodes: + self.nodes.append(node) + + def remove_node(self, node): + if node in self.nodes: + self.nodes.remove(node) + else: + raise ValueError("No such node %s to remove" % (node)) + + def get_node(self, key): + high_score = -1 + winner = None + + for node in self.nodes: + score = self.hash_function( + "%s-%s" % (str(node), str(key))) + + if score > high_score: + (high_score, winner) = (score, node) + elif score == high_score: + (high_score, winner) = (score, max(str(node), str(winner))) + + return winner diff --git a/server/www/packages/packages-common/pymemcache/exceptions.py b/server/www/packages/packages-common/pymemcache/exceptions.py new file mode 100644 index 0000000..416fa0a --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/exceptions.py @@ -0,0 +1,40 @@ +class MemcacheError(Exception): + "Base exception class" + pass + + +class MemcacheClientError(MemcacheError): + """Raised when memcached fails to parse the arguments to a request, likely + due to a malformed key and/or value, a bug in this library, or a version + mismatch with memcached.""" + pass + + +class MemcacheUnknownCommandError(MemcacheClientError): + """Raised when memcached fails to parse a request, likely due to a bug in + this library or a version mismatch with memcached.""" + pass + + +class MemcacheIllegalInputError(MemcacheClientError): + """Raised when a key or value is not legal for Memcache (see the class docs + for Client for more details).""" + pass + + +class MemcacheServerError(MemcacheError): + """Raised when memcached reports a failure while processing a request, + likely due to a bug or transient issue in memcached.""" + pass + + +class MemcacheUnknownError(MemcacheError): + """Raised when this library receives a response from memcached that it + cannot parse, likely due to a bug in this library or a version mismatch + with memcached.""" + pass + + +class MemcacheUnexpectedCloseError(MemcacheServerError): + "Raised when the connection with memcached closes unexpectedly." + pass diff --git a/server/www/packages/packages-common/pymemcache/fallback.py b/server/www/packages/packages-common/pymemcache/fallback.py new file mode 100644 index 0000000..d70d83c --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/fallback.py @@ -0,0 +1,123 @@ +# Copyright 2012 Pinterest.com +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +A client for falling back to older memcached servers when performing reads. + +It is sometimes necessary to deploy memcached on new servers, or with a +different configuration. In theses cases, it is undesirable to start up an +empty memcached server and point traffic to it, since the cache will be cold, +and the backing store will have a large increase in traffic. 
+ +This class attempts to solve that problem by providing an interface identical +to the Client interface, but which can fall back to older memcached servers +when reads to the primary server fail. The approach for upgrading memcached +servers or configuration then becomes: + + 1. Deploy a new host (or fleet) with memcached, possibly with a new + configuration. + 2. From your application servers, use FallbackClient to write and read from + the new cluster, and to read from the old cluster when there is a miss in + the new cluster. + 3. Wait until the new cache is warm enough to support the load. + 4. Switch from FallbackClient to a regular Client library for doing all + reads and writes to the new cluster. + 5. Take down the old cluster. + +Best Practices: +--------------- + - Make sure that the old client has "ignore_exc" set to True, so that it + treats failures like cache misses. That will allow you to take down the + old cluster before you switch away from FallbackClient. +""" + + +class FallbackClient(object): + def __init__(self, caches): + assert len(caches) > 0 + self.caches = caches + + def close(self): + "Close each of the memcached clients" + for cache in self.caches: + cache.close() + + def set(self, key, value, expire=0, noreply=True): + self.caches[0].set(key, value, expire, noreply) + + def add(self, key, value, expire=0, noreply=True): + self.caches[0].add(key, value, expire, noreply) + + def replace(self, key, value, expire=0, noreply=True): + self.caches[0].replace(key, value, expire, noreply) + + def append(self, key, value, expire=0, noreply=True): + self.caches[0].append(key, value, expire, noreply) + + def prepend(self, key, value, expire=0, noreply=True): + self.caches[0].prepend(key, value, expire, noreply) + + def cas(self, key, value, cas, expire=0, noreply=True): + self.caches[0].cas(key, value, cas, expire, noreply) + + def get(self, key): + for cache in self.caches: + result = cache.get(key) + if result is not None: + return result + return None + + def get_many(self, keys): + for cache in self.caches: + result = cache.get_many(keys) + if result: + return result + return [] + + def gets(self, key): + for cache in self.caches: + result = cache.gets(key) + if result is not None: + return result + return None + + def gets_many(self, keys): + for cache in self.caches: + result = cache.gets_many(keys) + if result: + return result + return [] + + def delete(self, key, noreply=True): + self.caches[0].delete(key, noreply) + + def incr(self, key, value, noreply=True): + self.caches[0].incr(key, value, noreply) + + def decr(self, key, value, noreply=True): + self.caches[0].decr(key, value, noreply) + + def touch(self, key, expire=0, noreply=True): + self.caches[0].touch(key, expire, noreply) + + def stats(self): + # TODO: ?? + pass + + def flush_all(self, delay=0, noreply=True): + self.caches[0].flush_all(delay, noreply) + + def quit(self): + # TODO: ?? + pass diff --git a/server/www/packages/packages-common/pymemcache/pool.py b/server/www/packages/packages-common/pymemcache/pool.py new file mode 100644 index 0000000..f800f90 --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/pool.py @@ -0,0 +1,114 @@ +# Copyright 2015 Yahoo.com +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
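# A minimal sketch of the migration flow described in the module docstring
# above, with placeholder addresses: writes land on the new cluster, reads
# fall back to the old one, and ignore_exc on the old client turns its
# failures into cache misses (per the Best Practices note).
#
# from pymemcache.client.base import Client
# from pymemcache.fallback import FallbackClient
#
# new_cache = Client(('127.0.0.1', 11211))
# old_cache = Client(('127.0.0.1', 11212), ignore_exc=True)
# cache = FallbackClient((new_cache, old_cache))
#
# cache.set('user:42', 'profile')   # written to the new cluster only
# print(cache.get('user:42'))       # hits the new cluster, else falls back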
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import contextlib +import sys +import threading + +import six + + +class ObjectPool(object): + """A pool of objects that release/creates/destroys as needed.""" + + def __init__(self, obj_creator, + after_remove=None, max_size=None, + lock_generator=None): + self._used_objs = collections.deque() + self._free_objs = collections.deque() + self._obj_creator = obj_creator + if lock_generator is None: + self._lock = threading.Lock() + else: + self._lock = lock_generator() + self._after_remove = after_remove + max_size = max_size or 2 ** 31 + if not isinstance(max_size, six.integer_types) or max_size < 0: + raise ValueError('"max_size" must be a positive integer') + self.max_size = max_size + + @property + def used(self): + return tuple(self._used_objs) + + @property + def free(self): + return tuple(self._free_objs) + + @contextlib.contextmanager + def get_and_release(self, destroy_on_fail=False): + obj = self.get() + try: + yield obj + except Exception: + exc_info = sys.exc_info() + if not destroy_on_fail: + self.release(obj) + else: + self.destroy(obj) + six.reraise(exc_info[0], exc_info[1], exc_info[2]) + self.release(obj) + + def get(self): + with self._lock: + if not self._free_objs: + curr_count = len(self._used_objs) + if curr_count >= self.max_size: + raise RuntimeError("Too many objects," + " %s >= %s" % (curr_count, + self.max_size)) + obj = self._obj_creator() + self._used_objs.append(obj) + return obj + else: + obj = self._free_objs.pop() + self._used_objs.append(obj) + return obj + + def destroy(self, obj, silent=True): + was_dropped = False + with self._lock: + try: + self._used_objs.remove(obj) + was_dropped = True + except ValueError: + if not silent: + raise + if was_dropped and self._after_remove is not None: + self._after_remove(obj) + + def release(self, obj, silent=True): + with self._lock: + try: + self._used_objs.remove(obj) + self._free_objs.append(obj) + except ValueError: + if not silent: + raise + + def clear(self): + if self._after_remove is not None: + needs_destroy = [] + with self._lock: + needs_destroy.extend(self._used_objs) + needs_destroy.extend(self._free_objs) + self._free_objs.clear() + self._used_objs.clear() + for obj in needs_destroy: + self._after_remove(obj) + else: + with self._lock: + self._free_objs.clear() + self._used_objs.clear() diff --git a/server/www/packages/packages-common/pymemcache/serde.py b/server/www/packages/packages-common/pymemcache/serde.py new file mode 100644 index 0000000..c7a00ee --- /dev/null +++ b/server/www/packages/packages-common/pymemcache/serde.py @@ -0,0 +1,69 @@ +# Copyright 2012 Pinterest.com +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
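# A minimal exercise of the ObjectPool class above, using a trivial factory:
# get_and_release() lends an object out and returns it to the free list on
# success, or destroys it when the block raises and destroy_on_fail=True.
#
# from pymemcache.pool import ObjectPool
#
# pool = ObjectPool(obj_creator=dict, max_size=4)
#
# with pool.get_and_release() as obj:
#     obj['hits'] = 1               # 'obj' sits in pool.used inside the block
#
# print(len(pool.free), len(pool.used))   # -> 1 0, the dict was released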
+# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import pickle + +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + + +FLAG_PICKLE = 1 << 0 +FLAG_INTEGER = 1 << 1 +FLAG_LONG = 1 << 2 + + +def python_memcache_serializer(key, value): + flags = 0 + + if isinstance(value, str): + pass + elif isinstance(value, int): + flags |= FLAG_INTEGER + value = "%d" % value + elif isinstance(value, long): + flags |= FLAG_LONG + value = "%d" % value + else: + flags |= FLAG_PICKLE + output = StringIO() + pickler = pickle.Pickler(output, 0) + pickler.dump(value) + value = output.getvalue() + + return value, flags + + +def python_memcache_deserializer(key, value, flags): + if flags == 0: + return value + + if flags & FLAG_INTEGER: + return int(value) + + if flags & FLAG_LONG: + return long(value) + + if flags & FLAG_PICKLE: + try: + buf = StringIO(value) + unpickler = pickle.Unpickler(buf) + return unpickler.load() + except Exception: + logging.info('Pickle error', exc_info=True) + return None + + return value diff --git a/server/www/packages/packages-common/pymysql/__init__.py b/server/www/packages/packages-common/pymysql/__init__.py new file mode 100644 index 0000000..2236ff1 --- /dev/null +++ b/server/www/packages/packages-common/pymysql/__init__.py @@ -0,0 +1,133 @@ +''' +PyMySQL: A pure-Python MySQL client library. + +Copyright (c) 2010, 2013 PyMySQL contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
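# A round-trip through the serializer pair above. One caveat worth noting:
# this serde module is Python 2 code as written (it references the long type
# and cStringIO), so the snippet below is a Python 2 sketch.
#
# from pymemcache.serde import (python_memcache_serializer,
#                               python_memcache_deserializer)
#
# stored, flags = python_memcache_serializer('k', {'a': 1})  # falls through to pickle
# print(flags)                                               # FLAG_PICKLE is set
# print(python_memcache_deserializer('k', stored, flags))    # {'a': 1}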
+ +''' + +VERSION = (0, 6, 7, None) + +from ._compat import text_type, JYTHON, IRONPYTHON +from .constants import FIELD_TYPE +from .converters import escape_dict, escape_sequence, escape_string +from .err import Warning, Error, InterfaceError, DataError, \ + DatabaseError, OperationalError, IntegrityError, InternalError, \ + NotSupportedError, ProgrammingError, MySQLError +from .times import Date, Time, Timestamp, \ + DateFromTicks, TimeFromTicks, TimestampFromTicks + +import sys + + +threadsafety = 1 +apilevel = "2.0" +paramstyle = "format" + +class DBAPISet(frozenset): + + + def __ne__(self, other): + if isinstance(other, set): + return frozenset.__ne__(self, other) + else: + return other not in self + + def __eq__(self, other): + if isinstance(other, frozenset): + return frozenset.__eq__(self, other) + else: + return other in self + + def __hash__(self): + return frozenset.__hash__(self) + + +STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, + FIELD_TYPE.VAR_STRING]) +BINARY = DBAPISet([FIELD_TYPE.BLOB, FIELD_TYPE.LONG_BLOB, + FIELD_TYPE.MEDIUM_BLOB, FIELD_TYPE.TINY_BLOB]) +NUMBER = DBAPISet([FIELD_TYPE.DECIMAL, FIELD_TYPE.DOUBLE, FIELD_TYPE.FLOAT, + FIELD_TYPE.INT24, FIELD_TYPE.LONG, FIELD_TYPE.LONGLONG, + FIELD_TYPE.TINY, FIELD_TYPE.YEAR]) +DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE]) +TIME = DBAPISet([FIELD_TYPE.TIME]) +TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME]) +DATETIME = TIMESTAMP +ROWID = DBAPISet() + +def Binary(x): + """Return x as a binary type.""" + if isinstance(x, text_type) and not (JYTHON or IRONPYTHON): + return x.encode() + return bytes(x) + +def Connect(*args, **kwargs): + """ + Connect to the database; see connections.Connection.__init__() for + more information. + """ + from .connections import Connection + return Connection(*args, **kwargs) + +from pymysql import connections as _orig_conn +if _orig_conn.Connection.__init__.__doc__ is not None: + Connect.__doc__ = _orig_conn.Connection.__init__.__doc__ + (""" +See connections.Connection.__init__() for information about defaults. 
+""") +del _orig_conn + +def get_client_info(): # for MySQLdb compatibility + return '.'.join(map(str, VERSION)) + +connect = Connection = Connect + +# we include a doctored version_info here for MySQLdb compatibility +version_info = (1,2,2,"final",0) + +NULL = "NULL" + +__version__ = get_client_info() + +def thread_safe(): + return True # match MySQLdb.thread_safe() + +def install_as_MySQLdb(): + """ + After this function is called, any application that imports MySQLdb or + _mysql will unwittingly actually use + """ + sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"] + +__all__ = [ + 'BINARY', 'Binary', 'Connect', 'Connection', 'DATE', 'Date', + 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks', + 'DataError', 'DatabaseError', 'Error', 'FIELD_TYPE', 'IntegrityError', + 'InterfaceError', 'InternalError', 'MySQLError', 'NULL', 'NUMBER', + 'NotSupportedError', 'DBAPISet', 'OperationalError', 'ProgrammingError', + 'ROWID', 'STRING', 'TIME', 'TIMESTAMP', 'Warning', 'apilevel', 'connect', + 'connections', 'constants', 'converters', 'cursors', + 'escape_dict', 'escape_sequence', 'escape_string', 'get_client_info', + 'paramstyle', 'threadsafety', 'version_info', + + "install_as_MySQLdb", + + "NULL","__version__", + ] diff --git a/server/www/packages/packages-common/pymysql/_compat.py b/server/www/packages/packages-common/pymysql/_compat.py new file mode 100644 index 0000000..0c55346 --- /dev/null +++ b/server/www/packages/packages-common/pymysql/_compat.py @@ -0,0 +1,18 @@ +import sys + +PY2 = sys.version_info[0] == 2 +PYPY = hasattr(sys, 'pypy_translation_info') +JYTHON = sys.platform.startswith('java') +IRONPYTHON = sys.platform == 'cli' +CPYTHON = not PYPY and not JYTHON and not IRONPYTHON + +if PY2: + range_type = xrange + text_type = unicode + long_type = long + str_type = basestring +else: + range_type = range + text_type = str + long_type = int + str_type = str diff --git a/server/www/packages/packages-common/pymysql/_socketio.py b/server/www/packages/packages-common/pymysql/_socketio.py new file mode 100644 index 0000000..6a11d42 --- /dev/null +++ b/server/www/packages/packages-common/pymysql/_socketio.py @@ -0,0 +1,134 @@ +""" +SocketIO imported from socket module in Python 3. + +Copyright (c) 2001-2013 Python Software Foundation; All Rights Reserved. +""" + +from socket import * +import io +import errno + +__all__ = ['SocketIO'] + +EINTR = errno.EINTR +_blocking_errnos = (errno.EAGAIN, errno.EWOULDBLOCK) + +class SocketIO(io.RawIOBase): + + """Raw I/O implementation for stream sockets. + + This class supports the makefile() method on sockets. It provides + the raw I/O interface on top of a socket object. + """ + + # One might wonder why not let FileIO do the job instead. There are two + # main reasons why FileIO is not adapted: + # - it wouldn't work under Windows (where you can't used read() and + # write() on a socket handle) + # - it wouldn't work with socket timeouts (FileIO would ignore the + # timeout and consider the socket non-blocking) + + # XXX More docs + + def __init__(self, sock, mode): + if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): + raise ValueError("invalid mode: %r" % mode) + io.RawIOBase.__init__(self) + self._sock = sock + if "b" not in mode: + mode += "b" + self._mode = mode + self._reading = "r" in mode + self._writing = "w" in mode + self._timeout_occurred = False + + def readinto(self, b): + """Read up to len(b) bytes into the writable buffer *b* and return + the number of bytes read. 
If the socket is non-blocking and no bytes + are available, None is returned. + + If *b* is non-empty, a 0 return value indicates that the connection + was shutdown at the other end. + """ + self._checkClosed() + self._checkReadable() + if self._timeout_occurred: + raise IOError("cannot read from timed out object") + while True: + try: + return self._sock.recv_into(b) + except timeout: + self._timeout_occurred = True + raise + except error as e: + n = e.args[0] + if n == EINTR: + continue + if n in _blocking_errnos: + return None + raise + + def write(self, b): + """Write the given bytes or bytearray object *b* to the socket + and return the number of bytes written. This can be less than + len(b) if not all data could be written. If the socket is + non-blocking and no bytes could be written None is returned. + """ + self._checkClosed() + self._checkWritable() + try: + return self._sock.send(b) + except error as e: + # XXX what about EINTR? + if e.args[0] in _blocking_errnos: + return None + raise + + def readable(self): + """True if the SocketIO is open for reading. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._reading + + def writable(self): + """True if the SocketIO is open for writing. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._writing + + def seekable(self): + """True if the SocketIO is open for seeking. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return super().seekable() + + def fileno(self): + """Return the file descriptor of the underlying socket. + """ + self._checkClosed() + return self._sock.fileno() + + @property + def name(self): + if not self.closed: + return self.fileno() + else: + return -1 + + @property + def mode(self): + return self._mode + + def close(self): + """Close the SocketIO object. This doesn't close the underlying + socket, except if all references to it have disappeared. 
+ """ + if self.closed: + return + io.RawIOBase.close(self) + self._sock._decref_socketios() + self._sock = None + diff --git a/server/www/packages/packages-common/pymysql/charset.py b/server/www/packages/packages-common/pymysql/charset.py new file mode 100644 index 0000000..1cf7d91 --- /dev/null +++ b/server/www/packages/packages-common/pymysql/charset.py @@ -0,0 +1,262 @@ +MBLENGTH = { + 8:1, + 33:3, + 88:2, + 91:2 + } + + +class Charset(object): + def __init__(self, id, name, collation, is_default): + self.id, self.name, self.collation = id, name, collation + self.is_default = is_default == 'Yes' + + @property + def encoding(self): + name = self.name + if name == 'utf8mb4': + return 'utf8' + return name + + @property + def is_binary(self): + return self.id == 63 + + +class Charsets: + def __init__(self): + self._by_id = {} + + def add(self, c): + self._by_id[c.id] = c + + def by_id(self, id): + return self._by_id[id] + + def by_name(self, name): + name = name.lower() + for c in self._by_id.values(): + if c.name == name and c.is_default: + return c + +_charsets = Charsets() +""" +Generated with: + +mysql -N -s -e "select id, character_set_name, collation_name, is_default +from information_schema.collations order by id;" | python -c "import sys +for l in sys.stdin.readlines(): + id, name, collation, is_default = l.split(chr(9)) + print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \ + % (id, name, collation, is_default.strip()) +" + +""" +_charsets.add(Charset(1, 'big5', 'big5_chinese_ci', 'Yes')) +_charsets.add(Charset(2, 'latin2', 'latin2_czech_cs', '')) +_charsets.add(Charset(3, 'dec8', 'dec8_swedish_ci', 'Yes')) +_charsets.add(Charset(4, 'cp850', 'cp850_general_ci', 'Yes')) +_charsets.add(Charset(5, 'latin1', 'latin1_german1_ci', '')) +_charsets.add(Charset(6, 'hp8', 'hp8_english_ci', 'Yes')) +_charsets.add(Charset(7, 'koi8r', 'koi8r_general_ci', 'Yes')) +_charsets.add(Charset(8, 'latin1', 'latin1_swedish_ci', 'Yes')) +_charsets.add(Charset(9, 'latin2', 'latin2_general_ci', 'Yes')) +_charsets.add(Charset(10, 'swe7', 'swe7_swedish_ci', 'Yes')) +_charsets.add(Charset(11, 'ascii', 'ascii_general_ci', 'Yes')) +_charsets.add(Charset(12, 'ujis', 'ujis_japanese_ci', 'Yes')) +_charsets.add(Charset(13, 'sjis', 'sjis_japanese_ci', 'Yes')) +_charsets.add(Charset(14, 'cp1251', 'cp1251_bulgarian_ci', '')) +_charsets.add(Charset(15, 'latin1', 'latin1_danish_ci', '')) +_charsets.add(Charset(16, 'hebrew', 'hebrew_general_ci', 'Yes')) +_charsets.add(Charset(18, 'tis620', 'tis620_thai_ci', 'Yes')) +_charsets.add(Charset(19, 'euckr', 'euckr_korean_ci', 'Yes')) +_charsets.add(Charset(20, 'latin7', 'latin7_estonian_cs', '')) +_charsets.add(Charset(21, 'latin2', 'latin2_hungarian_ci', '')) +_charsets.add(Charset(22, 'koi8u', 'koi8u_general_ci', 'Yes')) +_charsets.add(Charset(23, 'cp1251', 'cp1251_ukrainian_ci', '')) +_charsets.add(Charset(24, 'gb2312', 'gb2312_chinese_ci', 'Yes')) +_charsets.add(Charset(25, 'greek', 'greek_general_ci', 'Yes')) +_charsets.add(Charset(26, 'cp1250', 'cp1250_general_ci', 'Yes')) +_charsets.add(Charset(27, 'latin2', 'latin2_croatian_ci', '')) +_charsets.add(Charset(28, 'gbk', 'gbk_chinese_ci', 'Yes')) +_charsets.add(Charset(29, 'cp1257', 'cp1257_lithuanian_ci', '')) +_charsets.add(Charset(30, 'latin5', 'latin5_turkish_ci', 'Yes')) +_charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', '')) +_charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes')) +_charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes')) +_charsets.add(Charset(34, 'cp1250', 
'cp1250_czech_cs', '')) +_charsets.add(Charset(35, 'ucs2', 'ucs2_general_ci', 'Yes')) +_charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes')) +_charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes')) +_charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes')) +_charsets.add(Charset(39, 'macroman', 'macroman_general_ci', 'Yes')) +_charsets.add(Charset(40, 'cp852', 'cp852_general_ci', 'Yes')) +_charsets.add(Charset(41, 'latin7', 'latin7_general_ci', 'Yes')) +_charsets.add(Charset(42, 'latin7', 'latin7_general_cs', '')) +_charsets.add(Charset(43, 'macce', 'macce_bin', '')) +_charsets.add(Charset(44, 'cp1250', 'cp1250_croatian_ci', '')) +_charsets.add(Charset(45, 'utf8mb4', 'utf8mb4_general_ci', 'Yes')) +_charsets.add(Charset(46, 'utf8mb4', 'utf8mb4_bin', '')) +_charsets.add(Charset(47, 'latin1', 'latin1_bin', '')) +_charsets.add(Charset(48, 'latin1', 'latin1_general_ci', '')) +_charsets.add(Charset(49, 'latin1', 'latin1_general_cs', '')) +_charsets.add(Charset(50, 'cp1251', 'cp1251_bin', '')) +_charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes')) +_charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', '')) +_charsets.add(Charset(53, 'macroman', 'macroman_bin', '')) +_charsets.add(Charset(54, 'utf16', 'utf16_general_ci', 'Yes')) +_charsets.add(Charset(55, 'utf16', 'utf16_bin', '')) +_charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes')) +_charsets.add(Charset(58, 'cp1257', 'cp1257_bin', '')) +_charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes')) +_charsets.add(Charset(60, 'utf32', 'utf32_general_ci', 'Yes')) +_charsets.add(Charset(61, 'utf32', 'utf32_bin', '')) +_charsets.add(Charset(63, 'binary', 'binary', 'Yes')) +_charsets.add(Charset(64, 'armscii8', 'armscii8_bin', '')) +_charsets.add(Charset(65, 'ascii', 'ascii_bin', '')) +_charsets.add(Charset(66, 'cp1250', 'cp1250_bin', '')) +_charsets.add(Charset(67, 'cp1256', 'cp1256_bin', '')) +_charsets.add(Charset(68, 'cp866', 'cp866_bin', '')) +_charsets.add(Charset(69, 'dec8', 'dec8_bin', '')) +_charsets.add(Charset(70, 'greek', 'greek_bin', '')) +_charsets.add(Charset(71, 'hebrew', 'hebrew_bin', '')) +_charsets.add(Charset(72, 'hp8', 'hp8_bin', '')) +_charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', '')) +_charsets.add(Charset(74, 'koi8r', 'koi8r_bin', '')) +_charsets.add(Charset(75, 'koi8u', 'koi8u_bin', '')) +_charsets.add(Charset(77, 'latin2', 'latin2_bin', '')) +_charsets.add(Charset(78, 'latin5', 'latin5_bin', '')) +_charsets.add(Charset(79, 'latin7', 'latin7_bin', '')) +_charsets.add(Charset(80, 'cp850', 'cp850_bin', '')) +_charsets.add(Charset(81, 'cp852', 'cp852_bin', '')) +_charsets.add(Charset(82, 'swe7', 'swe7_bin', '')) +_charsets.add(Charset(83, 'utf8', 'utf8_bin', '')) +_charsets.add(Charset(84, 'big5', 'big5_bin', '')) +_charsets.add(Charset(85, 'euckr', 'euckr_bin', '')) +_charsets.add(Charset(86, 'gb2312', 'gb2312_bin', '')) +_charsets.add(Charset(87, 'gbk', 'gbk_bin', '')) +_charsets.add(Charset(88, 'sjis', 'sjis_bin', '')) +_charsets.add(Charset(89, 'tis620', 'tis620_bin', '')) +_charsets.add(Charset(90, 'ucs2', 'ucs2_bin', '')) +_charsets.add(Charset(91, 'ujis', 'ujis_bin', '')) +_charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes')) +_charsets.add(Charset(93, 'geostd8', 'geostd8_bin', '')) +_charsets.add(Charset(94, 'latin1', 'latin1_spanish_ci', '')) +_charsets.add(Charset(95, 'cp932', 'cp932_japanese_ci', 'Yes')) +_charsets.add(Charset(96, 'cp932', 'cp932_bin', '')) +_charsets.add(Charset(97, 'eucjpms', 'eucjpms_japanese_ci', 'Yes')) 
+_charsets.add(Charset(98, 'eucjpms', 'eucjpms_bin', '')) +_charsets.add(Charset(99, 'cp1250', 'cp1250_polish_ci', '')) +_charsets.add(Charset(101, 'utf16', 'utf16_unicode_ci', '')) +_charsets.add(Charset(102, 'utf16', 'utf16_icelandic_ci', '')) +_charsets.add(Charset(103, 'utf16', 'utf16_latvian_ci', '')) +_charsets.add(Charset(104, 'utf16', 'utf16_romanian_ci', '')) +_charsets.add(Charset(105, 'utf16', 'utf16_slovenian_ci', '')) +_charsets.add(Charset(106, 'utf16', 'utf16_polish_ci', '')) +_charsets.add(Charset(107, 'utf16', 'utf16_estonian_ci', '')) +_charsets.add(Charset(108, 'utf16', 'utf16_spanish_ci', '')) +_charsets.add(Charset(109, 'utf16', 'utf16_swedish_ci', '')) +_charsets.add(Charset(110, 'utf16', 'utf16_turkish_ci', '')) +_charsets.add(Charset(111, 'utf16', 'utf16_czech_ci', '')) +_charsets.add(Charset(112, 'utf16', 'utf16_danish_ci', '')) +_charsets.add(Charset(113, 'utf16', 'utf16_lithuanian_ci', '')) +_charsets.add(Charset(114, 'utf16', 'utf16_slovak_ci', '')) +_charsets.add(Charset(115, 'utf16', 'utf16_spanish2_ci', '')) +_charsets.add(Charset(116, 'utf16', 'utf16_roman_ci', '')) +_charsets.add(Charset(117, 'utf16', 'utf16_persian_ci', '')) +_charsets.add(Charset(118, 'utf16', 'utf16_esperanto_ci', '')) +_charsets.add(Charset(119, 'utf16', 'utf16_hungarian_ci', '')) +_charsets.add(Charset(120, 'utf16', 'utf16_sinhala_ci', '')) +_charsets.add(Charset(128, 'ucs2', 'ucs2_unicode_ci', '')) +_charsets.add(Charset(129, 'ucs2', 'ucs2_icelandic_ci', '')) +_charsets.add(Charset(130, 'ucs2', 'ucs2_latvian_ci', '')) +_charsets.add(Charset(131, 'ucs2', 'ucs2_romanian_ci', '')) +_charsets.add(Charset(132, 'ucs2', 'ucs2_slovenian_ci', '')) +_charsets.add(Charset(133, 'ucs2', 'ucs2_polish_ci', '')) +_charsets.add(Charset(134, 'ucs2', 'ucs2_estonian_ci', '')) +_charsets.add(Charset(135, 'ucs2', 'ucs2_spanish_ci', '')) +_charsets.add(Charset(136, 'ucs2', 'ucs2_swedish_ci', '')) +_charsets.add(Charset(137, 'ucs2', 'ucs2_turkish_ci', '')) +_charsets.add(Charset(138, 'ucs2', 'ucs2_czech_ci', '')) +_charsets.add(Charset(139, 'ucs2', 'ucs2_danish_ci', '')) +_charsets.add(Charset(140, 'ucs2', 'ucs2_lithuanian_ci', '')) +_charsets.add(Charset(141, 'ucs2', 'ucs2_slovak_ci', '')) +_charsets.add(Charset(142, 'ucs2', 'ucs2_spanish2_ci', '')) +_charsets.add(Charset(143, 'ucs2', 'ucs2_roman_ci', '')) +_charsets.add(Charset(144, 'ucs2', 'ucs2_persian_ci', '')) +_charsets.add(Charset(145, 'ucs2', 'ucs2_esperanto_ci', '')) +_charsets.add(Charset(146, 'ucs2', 'ucs2_hungarian_ci', '')) +_charsets.add(Charset(147, 'ucs2', 'ucs2_sinhala_ci', '')) +_charsets.add(Charset(159, 'ucs2', 'ucs2_general_mysql500_ci', '')) +_charsets.add(Charset(160, 'utf32', 'utf32_unicode_ci', '')) +_charsets.add(Charset(161, 'utf32', 'utf32_icelandic_ci', '')) +_charsets.add(Charset(162, 'utf32', 'utf32_latvian_ci', '')) +_charsets.add(Charset(163, 'utf32', 'utf32_romanian_ci', '')) +_charsets.add(Charset(164, 'utf32', 'utf32_slovenian_ci', '')) +_charsets.add(Charset(165, 'utf32', 'utf32_polish_ci', '')) +_charsets.add(Charset(166, 'utf32', 'utf32_estonian_ci', '')) +_charsets.add(Charset(167, 'utf32', 'utf32_spanish_ci', '')) +_charsets.add(Charset(168, 'utf32', 'utf32_swedish_ci', '')) +_charsets.add(Charset(169, 'utf32', 'utf32_turkish_ci', '')) +_charsets.add(Charset(170, 'utf32', 'utf32_czech_ci', '')) +_charsets.add(Charset(171, 'utf32', 'utf32_danish_ci', '')) +_charsets.add(Charset(172, 'utf32', 'utf32_lithuanian_ci', '')) +_charsets.add(Charset(173, 'utf32', 'utf32_slovak_ci', '')) +_charsets.add(Charset(174, 'utf32', 
'utf32_spanish2_ci', '')) +_charsets.add(Charset(175, 'utf32', 'utf32_roman_ci', '')) +_charsets.add(Charset(176, 'utf32', 'utf32_persian_ci', '')) +_charsets.add(Charset(177, 'utf32', 'utf32_esperanto_ci', '')) +_charsets.add(Charset(178, 'utf32', 'utf32_hungarian_ci', '')) +_charsets.add(Charset(179, 'utf32', 'utf32_sinhala_ci', '')) +_charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', '')) +_charsets.add(Charset(193, 'utf8', 'utf8_icelandic_ci', '')) +_charsets.add(Charset(194, 'utf8', 'utf8_latvian_ci', '')) +_charsets.add(Charset(195, 'utf8', 'utf8_romanian_ci', '')) +_charsets.add(Charset(196, 'utf8', 'utf8_slovenian_ci', '')) +_charsets.add(Charset(197, 'utf8', 'utf8_polish_ci', '')) +_charsets.add(Charset(198, 'utf8', 'utf8_estonian_ci', '')) +_charsets.add(Charset(199, 'utf8', 'utf8_spanish_ci', '')) +_charsets.add(Charset(200, 'utf8', 'utf8_swedish_ci', '')) +_charsets.add(Charset(201, 'utf8', 'utf8_turkish_ci', '')) +_charsets.add(Charset(202, 'utf8', 'utf8_czech_ci', '')) +_charsets.add(Charset(203, 'utf8', 'utf8_danish_ci', '')) +_charsets.add(Charset(204, 'utf8', 'utf8_lithuanian_ci', '')) +_charsets.add(Charset(205, 'utf8', 'utf8_slovak_ci', '')) +_charsets.add(Charset(206, 'utf8', 'utf8_spanish2_ci', '')) +_charsets.add(Charset(207, 'utf8', 'utf8_roman_ci', '')) +_charsets.add(Charset(208, 'utf8', 'utf8_persian_ci', '')) +_charsets.add(Charset(209, 'utf8', 'utf8_esperanto_ci', '')) +_charsets.add(Charset(210, 'utf8', 'utf8_hungarian_ci', '')) +_charsets.add(Charset(211, 'utf8', 'utf8_sinhala_ci', '')) +_charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', '')) +_charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', '')) +_charsets.add(Charset(225, 'utf8mb4', 'utf8mb4_icelandic_ci', '')) +_charsets.add(Charset(226, 'utf8mb4', 'utf8mb4_latvian_ci', '')) +_charsets.add(Charset(227, 'utf8mb4', 'utf8mb4_romanian_ci', '')) +_charsets.add(Charset(228, 'utf8mb4', 'utf8mb4_slovenian_ci', '')) +_charsets.add(Charset(229, 'utf8mb4', 'utf8mb4_polish_ci', '')) +_charsets.add(Charset(230, 'utf8mb4', 'utf8mb4_estonian_ci', '')) +_charsets.add(Charset(231, 'utf8mb4', 'utf8mb4_spanish_ci', '')) +_charsets.add(Charset(232, 'utf8mb4', 'utf8mb4_swedish_ci', '')) +_charsets.add(Charset(233, 'utf8mb4', 'utf8mb4_turkish_ci', '')) +_charsets.add(Charset(234, 'utf8mb4', 'utf8mb4_czech_ci', '')) +_charsets.add(Charset(235, 'utf8mb4', 'utf8mb4_danish_ci', '')) +_charsets.add(Charset(236, 'utf8mb4', 'utf8mb4_lithuanian_ci', '')) +_charsets.add(Charset(237, 'utf8mb4', 'utf8mb4_slovak_ci', '')) +_charsets.add(Charset(238, 'utf8mb4', 'utf8mb4_spanish2_ci', '')) +_charsets.add(Charset(239, 'utf8mb4', 'utf8mb4_roman_ci', '')) +_charsets.add(Charset(240, 'utf8mb4', 'utf8mb4_persian_ci', '')) +_charsets.add(Charset(241, 'utf8mb4', 'utf8mb4_esperanto_ci', '')) +_charsets.add(Charset(242, 'utf8mb4', 'utf8mb4_hungarian_ci', '')) +_charsets.add(Charset(243, 'utf8mb4', 'utf8mb4_sinhala_ci', '')) + + +charset_by_name = _charsets.by_name +charset_by_id = _charsets.by_id + + +def charset_to_encoding(name): + """Convert MySQL's charset name to Python's codec name""" + if name == 'utf8mb4': + return 'utf8' + return name diff --git a/server/www/packages/packages-common/pymysql/connections.py b/server/www/packages/packages-common/pymysql/connections.py new file mode 100644 index 0000000..f6c06ce --- /dev/null +++ b/server/www/packages/packages-common/pymysql/connections.py @@ -0,0 +1,1317 @@ +# Python implementation of the MySQL client-server protocol +# 
http://dev.mysql.com/doc/internals/en/client-server-protocol.html +# Error codes: +# http://dev.mysql.com/doc/refman/5.5/en/error-messages-client.html +from __future__ import print_function +from ._compat import PY2, range_type, text_type, str_type, JYTHON, IRONPYTHON + +import errno +from functools import partial +import hashlib +import io +import os +import socket +import struct +import sys +import traceback +import warnings + +from .charset import MBLENGTH, charset_by_name, charset_by_id +from .constants import CLIENT, COMMAND, FIELD_TYPE, SERVER_STATUS +from .converters import ( + escape_item, encoders, decoders, escape_string, through) +from .cursors import Cursor +from .optionfile import Parser +from .util import byte2int, int2byte +from . import err + +try: + import ssl + SSL_ENABLED = True +except ImportError: + ssl = None + SSL_ENABLED = False + +try: + import getpass + DEFAULT_USER = getpass.getuser() + del getpass +except ImportError: + DEFAULT_USER = None + + +DEBUG = False + +_py_version = sys.version_info[:2] + + +# socket.makefile() in Python 2 is not usable because very inefficient and +# bad behavior about timeout. +# XXX: ._socketio doesn't work under IronPython. +if _py_version == (2, 7) and not IRONPYTHON: + # read method of file-like returned by sock.makefile() is very slow. + # So we copy io-based one from Python 3. + from ._socketio import SocketIO + + def _makefile(sock, mode): + return io.BufferedReader(SocketIO(sock, mode)) +elif _py_version == (2, 6): + # Python 2.6 doesn't have fast io module. + # So we make original one. + class SockFile(object): + def __init__(self, sock): + self._sock = sock + + def read(self, n): + read = self._sock.recv(n) + if len(read) == n: + return read + while True: + data = self._sock.recv(n-len(read)) + if not data: + return read + read += data + if len(read) == n: + return read + + def _makefile(sock, mode): + assert mode == 'rb' + return SockFile(sock) +else: + # socket.makefile in Python 3 is nice. + def _makefile(sock, mode): + return sock.makefile(mode) + + +TEXT_TYPES = set([ + FIELD_TYPE.BIT, + FIELD_TYPE.BLOB, + FIELD_TYPE.LONG_BLOB, + FIELD_TYPE.MEDIUM_BLOB, + FIELD_TYPE.STRING, + FIELD_TYPE.TINY_BLOB, + FIELD_TYPE.VAR_STRING, + FIELD_TYPE.VARCHAR, + FIELD_TYPE.GEOMETRY]) + +sha_new = partial(hashlib.new, 'sha1') + +NULL_COLUMN = 251 +UNSIGNED_CHAR_COLUMN = 251 +UNSIGNED_SHORT_COLUMN = 252 +UNSIGNED_INT24_COLUMN = 253 +UNSIGNED_INT64_COLUMN = 254 + +DEFAULT_CHARSET = 'latin1' + +MAX_PACKET_LEN = 2**24-1 + + +def dump_packet(data): # pragma: no cover + def is_ascii(data): + if 65 <= byte2int(data) <= 122: + if isinstance(data, int): + return chr(data) + return data + return '.' 
+ + try: + print("packet length:", len(data)) + print("method call[1]:", sys._getframe(1).f_code.co_name) + print("method call[2]:", sys._getframe(2).f_code.co_name) + print("method call[3]:", sys._getframe(3).f_code.co_name) + print("method call[4]:", sys._getframe(4).f_code.co_name) + print("method call[5]:", sys._getframe(5).f_code.co_name) + print("-" * 88) + except ValueError: + pass + dump_data = [data[i:i+16] for i in range_type(0, min(len(data), 256), 16)] + for d in dump_data: + print(' '.join(map(lambda x: "{:02X}".format(byte2int(x)), d)) + + ' ' * (16 - len(d)) + ' ' * 2 + + ' '.join(map(lambda x: "{}".format(is_ascii(x)), d))) + print("-" * 88) + print() + + +def _scramble(password, message): + if not password: + return b'\0' + if DEBUG: print('password=' + str(password)) + stage1 = sha_new(password).digest() + stage2 = sha_new(stage1).digest() + s = sha_new() + s.update(message) + s.update(stage2) + result = s.digest() + return _my_crypt(result, stage1) + + +def _my_crypt(message1, message2): + length = len(message1) + result = struct.pack('B', length) + for i in range_type(length): + x = (struct.unpack('B', message1[i:i+1])[0] ^ + struct.unpack('B', message2[i:i+1])[0]) + result += struct.pack('B', x) + return result + +# old_passwords support ported from libmysql/password.c +SCRAMBLE_LENGTH_323 = 8 + + +class RandStruct_323(object): + def __init__(self, seed1, seed2): + self.max_value = 0x3FFFFFFF + self.seed1 = seed1 % self.max_value + self.seed2 = seed2 % self.max_value + + def my_rnd(self): + self.seed1 = (self.seed1 * 3 + self.seed2) % self.max_value + self.seed2 = (self.seed1 + self.seed2 + 33) % self.max_value + return float(self.seed1) / float(self.max_value) + + +def _scramble_323(password, message): + hash_pass = _hash_password_323(password) + hash_message = _hash_password_323(message[:SCRAMBLE_LENGTH_323]) + hash_pass_n = struct.unpack(">LL", hash_pass) + hash_message_n = struct.unpack(">LL", hash_message) + + rand_st = RandStruct_323(hash_pass_n[0] ^ hash_message_n[0], + hash_pass_n[1] ^ hash_message_n[1]) + outbuf = io.BytesIO() + for _ in range_type(min(SCRAMBLE_LENGTH_323, len(message))): + outbuf.write(int2byte(int(rand_st.my_rnd() * 31) + 64)) + extra = int2byte(int(rand_st.my_rnd() * 31)) + out = outbuf.getvalue() + outbuf = io.BytesIO() + for c in out: + outbuf.write(int2byte(byte2int(c) ^ byte2int(extra))) + return outbuf.getvalue() + + +def _hash_password_323(password): + nr = 1345345333 + add = 7 + nr2 = 0x12345671 + + for c in [byte2int(x) for x in password if x not in (' ', '\t')]: + nr ^= (((nr & 63) + add) * c) + (nr << 8) & 0xFFFFFFFF + nr2 = (nr2 + ((nr2 << 8) ^ nr)) & 0xFFFFFFFF + add = (add + c) & 0xFFFFFFFF + + r1 = nr & ((1 << 31) - 1) # kill sign bits + r2 = nr2 & ((1 << 31) - 1) + return struct.pack(">LL", r1, r2) + + +def pack_int24(n): + return struct.pack(' len(self._data): + raise Exception('Invalid advance amount (%s) for cursor. ' + 'Position=%s' % (length, new_position)) + self._position = new_position + + def rewind(self, position=0): + """Set the position of the data buffer cursor to 'position'.""" + if position < 0 or position > len(self._data): + raise Exception("Invalid position to rewind cursor to: %s." % position) + self._position = position + + def get_bytes(self, position, length=1): + """Get 'length' bytes starting at 'position'. + + Position is start of payload (first four packet header bytes are not + included) starting at index '0'. + + No error checking is done. 
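# The authentication helpers above implement the mysql_native_password
# challenge-response: SHA1(password) XOR SHA1(salt + SHA1(SHA1(password))).
# A standalone restatement for reference (unlike _my_crypt, no one-byte
# length prefix is prepended here); input values are placeholders.
#
# import hashlib
#
# def scramble_native(password, salt):
#     stage1 = hashlib.sha1(password).digest()
#     stage2 = hashlib.sha1(stage1).digest()
#     mix = hashlib.sha1(salt + stage2).digest()
#     return bytes(bytearray(a ^ b for a, b in
#                            zip(bytearray(mix), bytearray(stage1))))
#
# print(repr(scramble_native(b'secret', b'A' * 20)))  # salt is 20 bytes in the handshake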
If requesting outside end of buffer + an empty string (or string shorter than 'length') may be returned! + """ + return self._data[position:(position+length)] + + if PY2: + def read_uint8(self): + result = ord(self._data[self._position]) + self._position += 1 + return result + else: + def read_uint8(self): + result = self._data[self._position] + self._position += 1 + return result + + def read_uint16(self): + result = struct.unpack_from(' 2: + use_unicode = True + + if db is not None and database is None: + database = db + if passwd is not None and not password: + password = passwd + + if compress or named_pipe: + raise NotImplementedError("compress and named_pipe arguments are not supported") + + if local_infile: + client_flag |= CLIENT.LOCAL_FILES + + if ssl and ('capath' in ssl or 'cipher' in ssl): + raise NotImplementedError('ssl options capath and cipher are not supported') + + self.ssl = False + if ssl: + if not SSL_ENABLED: + raise NotImplementedError("ssl module not found") + self.ssl = True + client_flag |= CLIENT.SSL + for k in ('key', 'cert', 'ca'): + v = None + if k in ssl: + v = ssl[k] + setattr(self, k, v) + + if read_default_group and not read_default_file: + if sys.platform.startswith("win"): + read_default_file = "c:\\my.ini" + else: + read_default_file = "/etc/my.cnf" + + if read_default_file: + if not read_default_group: + read_default_group = "client" + + cfg = Parser() + cfg.read(os.path.expanduser(read_default_file)) + + def _config(key, arg): + if arg: + return arg + try: + return cfg.get(read_default_group, key) + except Exception: + return arg + + user = _config("user", user) + password = _config("password", password) + host = _config("host", host) + database = _config("database", database) + unix_socket = _config("socket", unix_socket) + port = int(_config("port", port)) + charset = _config("default-character-set", charset) + + self.host = host + self.port = port + self.user = user or DEFAULT_USER + self.password = password or "" + self.db = database + self.no_delay = no_delay + self.unix_socket = unix_socket + if charset: + self.charset = charset + self.use_unicode = True + else: + self.charset = DEFAULT_CHARSET + self.use_unicode = False + + if use_unicode is not None: + self.use_unicode = use_unicode + + self.encoding = charset_by_name(self.charset).encoding + + client_flag |= CLIENT.CAPABILITIES | CLIENT.MULTI_STATEMENTS + if self.db: + client_flag |= CLIENT.CONNECT_WITH_DB + self.client_flag = client_flag + + self.cursorclass = cursorclass + self.connect_timeout = connect_timeout + + self._result = None + self._affected_rows = 0 + self.host_info = "Not connected" + + #: specified autocommit mode. None means use server default. + self.autocommit_mode = autocommit + + self.encoders = encoders # Need for MySQLdb compatibility. 
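# Usage note for the option-file handling above: explicitly passed keyword
# arguments win, and anything left unset falls back to the named group of a
# my.cnf-style file (the path is expanded with os.path.expanduser). The file
# path below is a placeholder.
#
#   import pymysql
#   conn = pymysql.connect(read_default_file='~/.my.cnf',
#                          read_default_group='client')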
+ self.decoders = conv + self.sql_mode = sql_mode + self.init_command = init_command + self.max_allowed_packet = max_allowed_packet + if defer_connect: + self.socket = None + else: + self.connect() + + def close(self): + """Send the quit message and close the socket""" + if self.socket is None: + raise err.Error("Already closed") + send_data = struct.pack('= 5: + self.client_flag |= CLIENT.MULTI_RESULTS + + if self.user is None: + raise ValueError("Did not specify a username") + + charset_id = charset_by_name(self.charset).id + if isinstance(self.user, text_type): + self.user = self.user.encode(self.encoding) + + data_init = struct.pack('= i + 6: + lang, stat, cap_h, salt_len = struct.unpack('= i + salt_len: + # salt_len includes auth_plugin_data_part_1 and filler + self.salt += data[i:i+salt_len] + # TODO: AUTH PLUGIN NAME may appeare here. + + def get_server_info(self): + return self.server_version + + Warning = err.Warning + Error = err.Error + InterfaceError = err.InterfaceError + DatabaseError = err.DatabaseError + DataError = err.DataError + OperationalError = err.OperationalError + IntegrityError = err.IntegrityError + InternalError = err.InternalError + ProgrammingError = err.ProgrammingError + NotSupportedError = err.NotSupportedError + + +class MySQLResult(object): + + def __init__(self, connection): + self.connection = connection + self.affected_rows = None + self.insert_id = None + self.server_status = None + self.warning_count = 0 + self.message = None + self.field_count = 0 + self.description = None + self.rows = None + self.has_next = None + self.unbuffered_active = False + + def __del__(self): + if self.unbuffered_active: + self._finish_unbuffered_query() + + def read(self): + try: + first_packet = self.connection._read_packet() + + if first_packet.is_ok_packet(): + self._read_ok_packet(first_packet) + elif first_packet.is_load_local_packet(): + self._read_load_local_packet(first_packet) + else: + self._read_result_packet(first_packet) + finally: + self.connection = False + + def init_unbuffered_query(self): + self.unbuffered_active = True + first_packet = self.connection._read_packet() + + if first_packet.is_ok_packet(): + self._read_ok_packet(first_packet) + self.unbuffered_active = False + self.connection = None + else: + self.field_count = first_packet.read_length_encoded_integer() + self._get_descriptions() + + # Apparently, MySQLdb picks this number because it's the maximum + # value of a 64bit unsigned integer. Since we're emulating MySQLdb, + # we set it to this instead of None, which would be preferred. 
+ self.affected_rows = 18446744073709551615 + + def _read_ok_packet(self, first_packet): + ok_packet = OKPacketWrapper(first_packet) + self.affected_rows = ok_packet.affected_rows + self.insert_id = ok_packet.insert_id + self.server_status = ok_packet.server_status + self.warning_count = ok_packet.warning_count + self.message = ok_packet.message + self.has_next = ok_packet.has_next + + def _read_load_local_packet(self, first_packet): + load_packet = LoadLocalPacketWrapper(first_packet) + sender = LoadLocalFile(load_packet.filename, self.connection) + sender.send_data() + + ok_packet = self.connection._read_packet() + if not ok_packet.is_ok_packet(): + raise err.OperationalError(2014, "Commands Out of Sync") + self._read_ok_packet(ok_packet) + + def _check_packet_is_eof(self, packet): + if packet.is_eof_packet(): + eof_packet = EOFPacketWrapper(packet) + self.warning_count = eof_packet.warning_count + self.has_next = eof_packet.has_next + return True + return False + + def _read_result_packet(self, first_packet): + self.field_count = first_packet.read_length_encoded_integer() + self._get_descriptions() + self._read_rowdata_packet() + + def _read_rowdata_packet_unbuffered(self): + # Check if in an active query + if not self.unbuffered_active: + return + + # EOF + packet = self.connection._read_packet() + if self._check_packet_is_eof(packet): + self.unbuffered_active = False + self.connection = None + self.rows = None + return + + row = self._read_row_from_packet(packet) + self.affected_rows = 1 + self.rows = (row,) # rows should tuple of row for MySQL-python compatibility. + return row + + def _finish_unbuffered_query(self): + # After much reading on the MySQL protocol, it appears that there is, + # in fact, no way to stop MySQL from sending all the data after + # executing a query, so we just spin, and wait for an EOF packet. + while self.unbuffered_active: + packet = self.connection._read_packet() + if self._check_packet_is_eof(packet): + self.unbuffered_active = False + self.connection = None # release reference to kill cyclic reference. + + def _read_rowdata_packet(self): + """Read a rowdata packet for each data row in the result set.""" + rows = [] + while True: + packet = self.connection._read_packet() + if self._check_packet_is_eof(packet): + self.connection = None # release reference to kill cyclic reference. + break + rows.append(self._read_row_from_packet(packet)) + + self.affected_rows = len(rows) + self.rows = tuple(rows) + + def _read_row_from_packet(self, packet): + row = [] + for encoding, converter in self.converters: + data = packet.read_length_coded_string() + if data is not None: + if encoding is not None: + data = data.decode(encoding) + if DEBUG: print("DEBUG: DATA = ", data) + if converter is not None: + data = converter(data) + row.append(data) + return tuple(row) + + def _get_descriptions(self): + """Read a column descriptor packet for each column in the result.""" + self.fields = [] + self.converters = [] + use_unicode = self.connection.use_unicode + description = [] + for i in range_type(self.field_count): + field = self.connection._read_packet(FieldDescriptorPacket) + self.fields.append(field) + description.append(field.description()) + field_type = field.type_code + if use_unicode: + if field_type in TEXT_TYPES: + charset = charset_by_id(field.charsetnr) + if charset.is_binary: + # TEXTs with charset=binary means BINARY types. 
+ encoding = None + else: + encoding = charset.encoding + else: + encoding = 'ascii' + else: + encoding = None + converter = self.connection.decoders.get(field_type) + if converter is through: + converter = None + if DEBUG: print("DEBUG: field={}, converter={}".format(field, converter)) + self.converters.append((encoding, converter)) + + eof_packet = self.connection._read_packet() + assert eof_packet.is_eof_packet(), 'Protocol error, expecting EOF' + self.description = tuple(description) + + +class LoadLocalFile(object): + def __init__(self, filename, connection): + self.filename = filename + self.connection = connection + + def send_data(self): + """Send data packets from the local file to the server""" + if not self.connection.socket: + raise err.InterfaceError("(0, '')") + + # sequence id is 2 as we already sent a query packet + seq_id = 2 + try: + with open(self.filename, 'rb') as open_file: + chunk_size = self.connection.max_allowed_packet + packet = b"" + + while True: + chunk = open_file.read(chunk_size) + if not chunk: + break + packet = struct.pack('>> datetime_or_None('2007-02-25 23:06:20') + datetime.datetime(2007, 2, 25, 23, 6, 20) + >>> datetime_or_None('2007-02-25T23:06:20') + datetime.datetime(2007, 2, 25, 23, 6, 20) + + Illegal values are returned as None: + + >>> datetime_or_None('2007-02-31T23:06:20') is None + True + >>> datetime_or_None('0000-00-00 00:00:00') is None + True + + """ + if ' ' in obj: + sep = ' ' + elif 'T' in obj: + sep = 'T' + else: + return convert_date(obj) + + try: + ymd, hms = obj.split(sep, 1) + usecs = '0' + if '.' in hms: + hms, usecs = hms.split('.') + usecs = float('0.' + usecs) * 1e6 + return datetime.datetime(*[ int(x) for x in ymd.split('-')+hms.split(':')+[usecs] ]) + except ValueError: + return convert_date(obj) + + +def convert_timedelta(obj): + """Returns a TIME column as a timedelta object: + + >>> timedelta_or_None('25:06:17') + datetime.timedelta(1, 3977) + >>> timedelta_or_None('-25:06:17') + datetime.timedelta(-2, 83177) + + Illegal values are returned as None: + + >>> timedelta_or_None('random crap') is None + True + + Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but + can accept values as (+|-)DD HH:MM:SS. The latter format will not + be parsed correctly by this function. + """ + try: + microseconds = 0 + if "." in obj: + (obj, tail) = obj.split('.') + microseconds = float('0.' + tail) * 1e6 + hours, minutes, seconds = obj.split(':') + negate = 1 + if hours.startswith("-"): + hours = hours[1:] + negate = -1 + tdelta = datetime.timedelta( + hours = int(hours), + minutes = int(minutes), + seconds = int(seconds), + microseconds = int(microseconds) + ) * negate + return tdelta + except ValueError: + return None + +def convert_time(obj): + """Returns a TIME column as a time object: + + >>> time_or_None('15:06:17') + datetime.time(15, 6, 17) + + Illegal values are returned as None: + + >>> time_or_None('-25:06:17') is None + True + >>> time_or_None('random crap') is None + True + + Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but + can accept values as (+|-)DD HH:MM:SS. The latter format will not + be parsed correctly by this function. + + Also note that MySQL's TIME column corresponds more closely to + Python's timedelta and not time. However if you want TIME columns + to be treated as time-of-day and not a time offset, then you can + use set this function as the converter for FIELD_TYPE.TIME. + """ + try: + microseconds = 0 + if "." in obj: + (obj, tail) = obj.split('.') + microseconds = float('0.' 
+ tail) * 1e6 + hours, minutes, seconds = obj.split(':') + return datetime.time(hour=int(hours), minute=int(minutes), + second=int(seconds), microsecond=int(microseconds)) + except ValueError: + return None + +def convert_date(obj): + """Returns a DATE column as a date object: + + >>> date_or_None('2007-02-26') + datetime.date(2007, 2, 26) + + Illegal values are returned as None: + + >>> date_or_None('2007-02-31') is None + True + >>> date_or_None('0000-00-00') is None + True + + """ + try: + return datetime.date(*[ int(x) for x in obj.split('-', 2) ]) + except ValueError: + return None + + +def convert_mysql_timestamp(timestamp): + """Convert a MySQL TIMESTAMP to a Timestamp object. + + MySQL >= 4.1 returns TIMESTAMP in the same format as DATETIME: + + >>> mysql_timestamp_converter('2007-02-25 22:32:17') + datetime.datetime(2007, 2, 25, 22, 32, 17) + + MySQL < 4.1 uses a big string of numbers: + + >>> mysql_timestamp_converter('20070225223217') + datetime.datetime(2007, 2, 25, 22, 32, 17) + + Illegal values are returned as None: + + >>> mysql_timestamp_converter('2007-02-31 22:32:17') is None + True + >>> mysql_timestamp_converter('00000000000000') is None + True + + """ + if timestamp[4] == '-': + return convert_datetime(timestamp) + timestamp += "0"*(14-len(timestamp)) # padding + year, month, day, hour, minute, second = \ + int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]), \ + int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14]) + try: + return datetime.datetime(year, month, day, hour, minute, second) + except ValueError: + return None + +def convert_set(s): + return set(s.split(",")) + + +def through(x): + return x + + +#def convert_bit(b): +# b = "\x00" * (8 - len(b)) + b # pad w/ zeroes +# return struct.unpack(">Q", b)[0] +# +# the snippet above is right, but MySQLdb doesn't process bits, +# so we shouldn't either +convert_bit = through + + +def convert_characters(connection, field, data): + field_charset = charset_by_id(field.charsetnr).name + encoding = charset_to_encoding(field_charset) + if field.flags & FLAG.SET: + return convert_set(data.decode(encoding)) + if field.flags & FLAG.BINARY: + return data + + if connection.use_unicode: + data = data.decode(encoding) + elif connection.charset != field_charset: + data = data.decode(encoding) + data = data.encode(connection.encoding) + return data + +encoders = { + bool: escape_bool, + int: escape_int, + long_type: escape_int, + float: escape_float, + str: escape_str, + text_type: escape_unicode, + tuple: escape_sequence, + list: escape_sequence, + set: escape_sequence, + dict: escape_dict, + type(None): escape_None, + datetime.date: escape_date, + datetime.datetime: escape_datetime, + datetime.timedelta: escape_timedelta, + datetime.time: escape_time, + time.struct_time: escape_struct_time, + Decimal: escape_object, +} + +if not PY2 or JYTHON or IRONPYTHON: + encoders[bytes] = escape_bytes + +decoders = { + FIELD_TYPE.BIT: convert_bit, + FIELD_TYPE.TINY: int, + FIELD_TYPE.SHORT: int, + FIELD_TYPE.LONG: int, + FIELD_TYPE.FLOAT: float, + FIELD_TYPE.DOUBLE: float, + FIELD_TYPE.LONGLONG: int, + FIELD_TYPE.INT24: int, + FIELD_TYPE.YEAR: int, + FIELD_TYPE.TIMESTAMP: convert_mysql_timestamp, + FIELD_TYPE.DATETIME: convert_datetime, + FIELD_TYPE.TIME: convert_timedelta, + FIELD_TYPE.DATE: convert_date, + FIELD_TYPE.SET: convert_set, + FIELD_TYPE.BLOB: through, + FIELD_TYPE.TINY_BLOB: through, + FIELD_TYPE.MEDIUM_BLOB: through, + FIELD_TYPE.LONG_BLOB: through, + FIELD_TYPE.STRING: through, + FIELD_TYPE.VAR_STRING: 
through, + FIELD_TYPE.VARCHAR: through, + FIELD_TYPE.DECIMAL: Decimal, + FIELD_TYPE.NEWDECIMAL: Decimal, +} + + +# for MySQLdb compatibility +conversions = decoders + +def Thing2Literal(obj): + return escape_str(str(obj)) diff --git a/server/www/packages/packages-common/pymysql/cursors.py b/server/www/packages/packages-common/pymysql/cursors.py new file mode 100644 index 0000000..266e137 --- /dev/null +++ b/server/www/packages/packages-common/pymysql/cursors.py @@ -0,0 +1,485 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, absolute_import +from functools import partial +import re +import warnings + +from ._compat import range_type, text_type, PY2 + +from . import err + + +#: Regular expression for :meth:`Cursor.executemany`. +#: executemany only suports simple bulk insert. +#: You can use it to load large dataset. +RE_INSERT_VALUES = re.compile(r"""(INSERT\s.+\sVALUES\s+)(\(\s*%s\s*(?:,\s*%s\s*)*\))(\s*(?:ON DUPLICATE.*)?)\Z""", + re.IGNORECASE | re.DOTALL) + + +class Cursor(object): + ''' + This is the object you use to interact with the database. + ''' + + #: Max stetement size which :meth:`executemany` generates. + #: + #: Max size of allowed statement is max_allowed_packet - packet_header_size. + #: Default value of max_allowed_packet is 1048576. + max_stmt_length = 1024000 + + def __init__(self, connection): + ''' + Do not create an instance of a Cursor yourself. Call + connections.Connection.cursor(). + ''' + self.connection = connection + self.description = None + self.rownumber = 0 + self.rowcount = -1 + self.arraysize = 1 + self._executed = None + self._result = None + self._rows = None + + def close(self): + ''' + Closing a cursor just exhausts all remaining data. + ''' + conn = self.connection + if conn is None: + return + try: + while self.nextset(): + pass + finally: + self.connection = None + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + del exc_info + self.close() + + def _get_db(self): + if not self.connection: + raise err.ProgrammingError("Cursor closed") + return self.connection + + def _check_executed(self): + if not self._executed: + raise err.ProgrammingError("execute() first") + + def _conv_row(self, row): + return row + + def setinputsizes(self, *args): + """Does nothing, required by DB API.""" + + def setoutputsizes(self, *args): + """Does nothing, required by DB API.""" + + def _nextset(self, unbuffered=False): + """Get the next query set""" + conn = self._get_db() + current_result = self._result + if current_result is None or current_result is not conn._result: + return None + if not current_result.has_next: + return None + conn.next_result(unbuffered=unbuffered) + self._do_get_result() + return True + + def nextset(self): + return self._nextset(False) + + def _ensure_bytes(self, x, encoding=None): + if isinstance(x, text_type): + x = x.encode(encoding) + elif isinstance(x, (tuple, list)): + x = type(x)(self._ensure_bytes(v, encoding=encoding) for v in x) + return x + + def _escape_args(self, args, conn): + ensure_bytes = partial(self._ensure_bytes, encoding=conn.encoding) + + if isinstance(args, (tuple, list)): + if PY2: + args = tuple(map(ensure_bytes, args)) + return tuple(conn.escape(arg) for arg in args) + elif isinstance(args, dict): + if PY2: + args = dict((ensure_bytes(key), ensure_bytes(val)) for + (key, val) in args.items()) + return dict((key, conn.escape(val)) for (key, val) in args.items()) + else: + # If it's not a dictionary let's try escaping it anyways. 
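# A minimal usage sketch for the decoders table above (assuming pymysql's
# documented `conv` argument to connect(); connection parameters are
# illustrative). It switches TIME columns from timedelta to datetime.time,
# exactly as the convert_time() docstring suggests:
#
#     import pymysql
#     from pymysql import converters
#     from pymysql.constants import FIELD_TYPE
#
#     conv = converters.conversions.copy()        # `conversions` aliases `decoders` above
#     conv[FIELD_TYPE.TIME] = converters.convert_time
#     conn = pymysql.connect(host='127.0.0.1', user='root', conv=conv)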
+ # Worst case it will throw a Value error + if PY2: + ensure_bytes(args) + return conn.escape(args) + + def mogrify(self, query, args=None): + """ + Returns the exact string that is sent to the database by calling the + execute() method. + + This method follows the extension to the DB API 2.0 followed by Psycopg. + """ + conn = self._get_db() + if PY2: # Use bytes on Python 2 always + query = self._ensure_bytes(query, encoding=conn.encoding) + + if args is not None: + query = query % self._escape_args(args, conn) + + return query + + def execute(self, query, args=None): + '''Execute a query''' + while self.nextset(): + pass + + query = self.mogrify(query, args) + + result = self._query(query) + self._executed = query + return result + + def executemany(self, query, args): + """Run several data against one query + + PyMySQL can execute bulkinsert for query like 'INSERT ... VALUES (%s)'. + In other form of queries, just run :meth:`execute` many times. + """ + if not args: + return + + m = RE_INSERT_VALUES.match(query) + if m: + q_prefix = m.group(1) + q_values = m.group(2).rstrip() + q_postfix = m.group(3) or '' + assert q_values[0] == '(' and q_values[-1] == ')' + return self._do_execute_many(q_prefix, q_values, q_postfix, args, + self.max_stmt_length, + self._get_db().encoding) + + self.rowcount = sum(self.execute(query, arg) for arg in args) + return self.rowcount + + def _do_execute_many(self, prefix, values, postfix, args, max_stmt_length, encoding): + conn = self._get_db() + escape = self._escape_args + if isinstance(prefix, text_type): + prefix = prefix.encode(encoding) + if PY2 and isinstance(values, text_type): + values = values.encode(encoding) + if isinstance(postfix, text_type): + postfix = postfix.encode(encoding) + sql = bytearray(prefix) + args = iter(args) + v = values % escape(next(args), conn) + if isinstance(v, text_type): + if PY2: + v = v.encode(encoding) + else: + v = v.encode(encoding, 'surrogateescape') + sql += v + rows = 0 + for arg in args: + v = values % escape(arg, conn) + if isinstance(v, text_type): + if PY2: + v = v.encode(encoding) + else: + v = v.encode(encoding, 'surrogateescape') + if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length: + rows += self.execute(sql + postfix) + sql = bytearray(prefix) + else: + sql += b',' + sql += v + rows += self.execute(sql + postfix) + self.rowcount = rows + return rows + + def callproc(self, procname, args=()): + """Execute stored procedure procname with args + + procname -- string, name of procedure to execute on server + + args -- Sequence of parameters to use with procedure + + Returns the original args. + + Compatibility warning: PEP-249 specifies that any modified + parameters must be returned. This is currently impossible + as they are only available by storing them in a server + variable and then retrieved by a query. Since stored + procedures return zero or more result sets, there is no + reliable way to get at OUT or INOUT parameters via callproc. + The server variables are named @_procname_n, where procname + is the parameter above and n is the position of the parameter + (from zero). Once all result sets generated by the procedure + have been fetched, you can issue a SELECT @_procname_0, ... + query using .execute() to get any OUT or INOUT values. + + Compatibility warning: The act of calling a stored procedure + itself creates an empty result set. This appears after any + result sets generated by the procedure. This is non-standard + behavior with respect to the DB-API. 
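# Usage sketch for the bulk-insert fast path above (table and column names
# are illustrative): because the statement matches RE_INSERT_VALUES,
# executemany() expands the VALUES clause client-side into one multi-row
# INSERT instead of issuing one round-trip per row.
#
#     cur.executemany(
#         "INSERT INTO users (name, age) VALUES (%s, %s)",
#         [("alice", 30), ("bob", 25)],
#     )
#     # -> INSERT INTO users (name, age) VALUES ('alice', 30), ('bob', 25)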
Be sure to use nextset() + to advance through all result sets; otherwise you may get + disconnected. + """ + conn = self._get_db() + for index, arg in enumerate(args): + q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg)) + self._query(q) + self.nextset() + + q = "CALL %s(%s)" % (procname, + ','.join(['@_%s_%d' % (procname, i) + for i in range_type(len(args))])) + self._query(q) + self._executed = q + return args + + def fetchone(self): + ''' Fetch the next row ''' + self._check_executed() + if self._rows is None or self.rownumber >= len(self._rows): + return None + result = self._rows[self.rownumber] + self.rownumber += 1 + return result + + def fetchmany(self, size=None): + ''' Fetch several rows ''' + self._check_executed() + if self._rows is None: + return () + end = self.rownumber + (size or self.arraysize) + result = self._rows[self.rownumber:end] + self.rownumber = min(end, len(self._rows)) + return result + + def fetchall(self): + ''' Fetch all the rows ''' + self._check_executed() + if self._rows is None: + return () + if self.rownumber: + result = self._rows[self.rownumber:] + else: + result = self._rows + self.rownumber = len(self._rows) + return result + + def scroll(self, value, mode='relative'): + self._check_executed() + if mode == 'relative': + r = self.rownumber + value + elif mode == 'absolute': + r = value + else: + raise err.ProgrammingError("unknown scroll mode %s" % mode) + + if not (0 <= r < len(self._rows)): + raise IndexError("out of range") + self.rownumber = r + + def _query(self, q): + conn = self._get_db() + self._last_executed = q + conn.query(q) + self._do_get_result() + return self.rowcount + + def _do_get_result(self): + conn = self._get_db() + + self.rownumber = 0 + self._result = result = conn._result + + self.rowcount = result.affected_rows + self.description = result.description + self.lastrowid = result.insert_id + self._rows = result.rows + + if result.warning_count > 0: + self._show_warnings(conn) + + def _show_warnings(self, conn): + if self._result and self._result.has_next: + return + ws = conn.show_warnings() + if ws is None: + return + for w in ws: + msg = w[-1] + if PY2: + if isinstance(msg, unicode): + msg = msg.encode('utf-8', 'replace') + warnings.warn(str(msg), err.Warning, 4) + + def __iter__(self): + return iter(self.fetchone, None) + + Warning = err.Warning + Error = err.Error + InterfaceError = err.InterfaceError + DatabaseError = err.DatabaseError + DataError = err.DataError + OperationalError = err.OperationalError + IntegrityError = err.IntegrityError + InternalError = err.InternalError + ProgrammingError = err.ProgrammingError + NotSupportedError = err.NotSupportedError + + +class DictCursorMixin(object): + # You can override this to use OrderedDict or other dict-like types. + dict_type = dict + + def _do_get_result(self): + super(DictCursorMixin, self)._do_get_result() + fields = [] + if self.description: + for f in self._result.fields: + name = f.name + if name in fields: + name = f.table_name + '.' + name + fields.append(name) + self._fields = fields + + if fields and self._rows: + self._rows = [self._conv_row(r) for r in self._rows] + + def _conv_row(self, row): + if row is None: + return None + return self.dict_type(zip(self._fields, row)) + + +class DictCursor(DictCursorMixin, Cursor): + """A cursor which returns results as a dictionary""" + + +class SSCursor(Cursor): + """ + Unbuffered Cursor, mainly useful for queries that return a lot of data, + or for connections to remote servers over a slow network. 
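# Sketch of the OUT/INOUT retrieval protocol documented in callproc()
# above (procedure name and arguments are hypothetical):
#
#     cur.callproc('get_totals', ('alice', 0))   # sets @_get_totals_0, @_get_totals_1
#     while cur.nextset():                       # drain the procedure's result sets
#         pass
#     cur.execute('SELECT @_get_totals_0, @_get_totals_1')
#     out_values = cur.fetchone()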
+ + Instead of copying every row of data into a buffer, this will fetch + rows as needed. The upside of this, is the client uses much less memory, + and rows are returned much faster when traveling over a slow network, + or if the result set is very big. + + There are limitations, though. The MySQL protocol doesn't support + returning the total number of rows, so the only way to tell how many rows + there are is to iterate over every row returned. Also, it currently isn't + possible to scroll backwards, as only the current row is held in memory. + """ + + def _conv_row(self, row): + return row + + def close(self): + conn = self.connection + if conn is None: + return + + if self._result is not None and self._result is conn._result: + self._result._finish_unbuffered_query() + + try: + while self.nextset(): + pass + finally: + self.connection = None + + def _query(self, q): + conn = self._get_db() + self._last_executed = q + conn.query(q, unbuffered=True) + self._do_get_result() + return self.rowcount + + def nextset(self): + return self._nextset(unbuffered=True) + + def read_next(self): + """ Read next row """ + return self._conv_row(self._result._read_rowdata_packet_unbuffered()) + + def fetchone(self): + """ Fetch next row """ + self._check_executed() + row = self.read_next() + if row is None: + return None + self.rownumber += 1 + return row + + def fetchall(self): + """ + Fetch all, as per MySQLdb. Pretty useless for large queries, as + it is buffered. See fetchall_unbuffered(), if you want an unbuffered + generator version of this method. + """ + return list(self.fetchall_unbuffered()) + + def fetchall_unbuffered(self): + """ + Fetch all, implemented as a generator, which isn't to standard, + however, it doesn't make sense to return everything in a list, as that + would use ridiculous memory for large result sets. 
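# Streaming sketch for the unbuffered cursor above (connection parameters
# are illustrative): rows are pulled off the wire as they are consumed, so
# a huge result set can be walked in roughly constant memory.
#
#     import pymysql
#     from pymysql.cursors import SSCursor
#
#     conn = pymysql.connect(host='127.0.0.1', user='root', cursorclass=SSCursor)
#     with conn.cursor() as cur:
#         cur.execute("SELECT * FROM big_table")
#         for row in cur.fetchall_unbuffered():
#             handle(row)    # hypothetical per-row callback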
+ """ + return iter(self.fetchone, None) + + def __iter__(self): + return self.fetchall_unbuffered() + + def fetchmany(self, size=None): + """ Fetch many """ + self._check_executed() + if size is None: + size = self.arraysize + + rows = [] + for i in range_type(size): + row = self.read_next() + if row is None: + break + rows.append(row) + self.rownumber += 1 + return rows + + def scroll(self, value, mode='relative'): + self._check_executed() + + if mode == 'relative': + if value < 0: + raise err.NotSupportedError( + "Backwards scrolling not supported by this cursor") + + for _ in range_type(value): + self.read_next() + self.rownumber += value + elif mode == 'absolute': + if value < self.rownumber: + raise err.NotSupportedError( + "Backwards scrolling not supported by this cursor") + + end = value - self.rownumber + for _ in range_type(end): + self.read_next() + self.rownumber = value + else: + raise err.ProgrammingError("unknown scroll mode %s" % mode) + + +class SSDictCursor(DictCursorMixin, SSCursor): + """ An unbuffered cursor, which returns results as a dictionary """ diff --git a/server/www/packages/packages-common/pymysql/err.py b/server/www/packages/packages-common/pymysql/err.py new file mode 100644 index 0000000..9b6f24e --- /dev/null +++ b/server/www/packages/packages-common/pymysql/err.py @@ -0,0 +1,120 @@ +import struct + +from .constants import ER + + +class MySQLError(Exception): + """Exception related to operation with MySQL.""" + + +class Warning(Warning, MySQLError): + """Exception raised for important warnings like data truncations + while inserting, etc.""" + + +class Error(MySQLError): + """Exception that is the base class of all other error exceptions + (not Warning).""" + + +class InterfaceError(Error): + """Exception raised for errors that are related to the database + interface rather than the database itself.""" + + +class DatabaseError(Error): + """Exception raised for errors that are related to the + database.""" + + +class DataError(DatabaseError): + """Exception raised for errors that are due to problems with the + processed data like division by zero, numeric value out of range, + etc.""" + + +class OperationalError(DatabaseError): + """Exception raised for errors that are related to the database's + operation and not necessarily under the control of the programmer, + e.g. an unexpected disconnect occurs, the data source name is not + found, a transaction could not be processed, a memory allocation + error occurred during processing, etc.""" + + +class IntegrityError(DatabaseError): + """Exception raised when the relational integrity of the database + is affected, e.g. a foreign key check fails, duplicate key, + etc.""" + + +class InternalError(DatabaseError): + """Exception raised when the database encounters an internal + error, e.g. the cursor is not valid anymore, the transaction is + out of sync, etc.""" + + +class ProgrammingError(DatabaseError): + """Exception raised for programming errors, e.g. table not found + or already exists, syntax error in the SQL statement, wrong number + of parameters specified, etc.""" + + +class NotSupportedError(DatabaseError): + """Exception raised in case a method or database API was used + which is not supported by the database, e.g. 
requesting a + .rollback() on a connection that does not support transaction or + has transactions turned off.""" + + +error_map = {} + +def _map_error(exc, *errors): + for error in errors: + error_map[error] = exc + +_map_error(ProgrammingError, ER.DB_CREATE_EXISTS, ER.SYNTAX_ERROR, + ER.PARSE_ERROR, ER.NO_SUCH_TABLE, ER.WRONG_DB_NAME, + ER.WRONG_TABLE_NAME, ER.FIELD_SPECIFIED_TWICE, + ER.INVALID_GROUP_FUNC_USE, ER.UNSUPPORTED_EXTENSION, + ER.TABLE_MUST_HAVE_COLUMNS, ER.CANT_DO_THIS_DURING_AN_TRANSACTION) +_map_error(DataError, ER.WARN_DATA_TRUNCATED, ER.WARN_NULL_TO_NOTNULL, + ER.WARN_DATA_OUT_OF_RANGE, ER.NO_DEFAULT, ER.PRIMARY_CANT_HAVE_NULL, + ER.DATA_TOO_LONG, ER.DATETIME_FUNCTION_OVERFLOW) +_map_error(IntegrityError, ER.DUP_ENTRY, ER.NO_REFERENCED_ROW, + ER.NO_REFERENCED_ROW_2, ER.ROW_IS_REFERENCED, ER.ROW_IS_REFERENCED_2, + ER.CANNOT_ADD_FOREIGN, ER.BAD_NULL_ERROR) +_map_error(NotSupportedError, ER.WARNING_NOT_COMPLETE_ROLLBACK, + ER.NOT_SUPPORTED_YET, ER.FEATURE_DISABLED, ER.UNKNOWN_STORAGE_ENGINE) +_map_error(OperationalError, ER.DBACCESS_DENIED_ERROR, ER.ACCESS_DENIED_ERROR, + ER.CON_COUNT_ERROR, ER.TABLEACCESS_DENIED_ERROR, + ER.COLUMNACCESS_DENIED_ERROR) + +del _map_error, ER + + +def _get_error_info(data): + errno = struct.unpack('= 2 and value[0] == value[-1] == quote: + return value[1:-1] + return value + + def get(self, section, option): + value = configparser.RawConfigParser.get(self, section, option) + return self.__remove_quotes(value) diff --git a/server/www/packages/packages-common/pymysql/times.py b/server/www/packages/packages-common/pymysql/times.py new file mode 100644 index 0000000..c47db09 --- /dev/null +++ b/server/www/packages/packages-common/pymysql/times.py @@ -0,0 +1,16 @@ +from time import localtime +from datetime import date, datetime, time, timedelta + +Date = date +Time = time +TimeDelta = timedelta +Timestamp = datetime + +def DateFromTicks(ticks): + return date(*localtime(ticks)[:3]) + +def TimeFromTicks(ticks): + return time(*localtime(ticks)[3:6]) + +def TimestampFromTicks(ticks): + return datetime(*localtime(ticks)[:6]) diff --git a/server/www/packages/packages-common/pymysql/util.py b/server/www/packages/packages-common/pymysql/util.py new file mode 100644 index 0000000..cc622e5 --- /dev/null +++ b/server/www/packages/packages-common/pymysql/util.py @@ -0,0 +1,19 @@ +import struct + +def byte2int(b): + if isinstance(b, int): + return b + else: + return struct.unpack("!B", b)[0] + +def int2byte(i): + return struct.pack("!B", i) + +def join_bytes(bs): + if len(bs) == 0: + return "" + else: + rv = bs[0] + for b in bs[1:]: + rv += b + return rv diff --git a/server/www/packages/packages-common/rsa/__init__.py b/server/www/packages/packages-common/rsa/__init__.py new file mode 100644 index 0000000..21433a7 --- /dev/null +++ b/server/www/packages/packages-common/rsa/__init__.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
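# Usage sketch for the errno -> exception map in err.py above (the
# fallback class here is an assumption, since the tail of
# _get_error_info is truncated in this hunk):
#
#     from pymysql import err
#     errno = 1062                                   # ER.DUP_ENTRY
#     exc_class = err.error_map.get(errno, err.InternalError)
#     raise exc_class(errno, "Duplicate entry 'alice' for key 'PRIMARY'")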
+"""RSA module + +Module for calculating large primes, and RSA encryption, decryption, signing +and verification. Includes generating public and private keys. + +WARNING: this implementation does not use random padding, compression of the +cleartext input to prevent repetitions, or other common security improvements. +Use with care. + +If you want to have a more secure implementation, use the functions from the +``rsa.pkcs1`` module. + +""" + +__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly" +__date__ = "2015-07-29" +__version__ = '3.2' + +from rsa.key import newkeys, PrivateKey, PublicKey +from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \ + VerificationError + +# Do doctest if we're run directly +if __name__ == "__main__": + import doctest + doctest.testmod() + +__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey', + 'PrivateKey', 'DecryptionError', 'VerificationError'] + diff --git a/server/www/packages/packages-common/rsa/_compat.py b/server/www/packages/packages-common/rsa/_compat.py new file mode 100644 index 0000000..3c4eb81 --- /dev/null +++ b/server/www/packages/packages-common/rsa/_compat.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Python compatibility wrappers.""" + + +from __future__ import absolute_import + +import sys +from struct import pack + +try: + MAX_INT = sys.maxsize +except AttributeError: + MAX_INT = sys.maxint + +MAX_INT64 = (1 << 63) - 1 +MAX_INT32 = (1 << 31) - 1 +MAX_INT16 = (1 << 15) - 1 + +# Determine the word size of the processor. +if MAX_INT == MAX_INT64: + # 64-bit processor. + MACHINE_WORD_SIZE = 64 +elif MAX_INT == MAX_INT32: + # 32-bit processor. + MACHINE_WORD_SIZE = 32 +else: + # Else we just assume 64-bit processor keeping up with modern times. + MACHINE_WORD_SIZE = 64 + + +try: + # < Python3 + unicode_type = unicode + have_python3 = False +except NameError: + # Python3. + unicode_type = str + have_python3 = True + +# Fake byte literals. +if str is unicode_type: + def byte_literal(s): + return s.encode('latin1') +else: + def byte_literal(s): + return s + +# ``long`` is no more. Do type detection using this instead. +try: + integer_types = (int, long) +except NameError: + integer_types = (int,) + +b = byte_literal + +try: + # Python 2.6 or higher. + bytes_type = bytes +except NameError: + # Python 2.5 + bytes_type = str + + +# To avoid calling b() multiple times in tight loops. +ZERO_BYTE = b('\x00') +EMPTY_BYTE = b('') + + +def is_bytes(obj): + """ + Determines whether the given value is a byte string. + + :param obj: + The value to test. + :returns: + ``True`` if ``value`` is a byte string; ``False`` otherwise. + """ + return isinstance(obj, bytes_type) + + +def is_integer(obj): + """ + Determines whether the given value is an integer. + + :param obj: + The value to test. + :returns: + ``True`` if ``value`` is an integer; ``False`` otherwise. 
+ """ + return isinstance(obj, integer_types) + + +def byte(num): + """ + Converts a number between 0 and 255 (both inclusive) to a base-256 (byte) + representation. + + Use it as a replacement for ``chr`` where you are expecting a byte + because this will work on all current versions of Python:: + + :param num: + An unsigned integer between 0 and 255 (both inclusive). + :returns: + A single byte. + """ + return pack("B", num) + + +def get_word_alignment(num, force_arch=64, + _machine_word_size=MACHINE_WORD_SIZE): + """ + Returns alignment details for the given number based on the platform + Python is running on. + + :param num: + Unsigned integral number. + :param force_arch: + If you don't want to use 64-bit unsigned chunks, set this to + anything other than 64. 32-bit chunks will be preferred then. + Default 64 will be used when on a 64-bit machine. + :param _machine_word_size: + (Internal) The machine word size used for alignment. + :returns: + 4-tuple:: + + (word_bits, word_bytes, + max_uint, packing_format_type) + """ + max_uint64 = 0xffffffffffffffff + max_uint32 = 0xffffffff + max_uint16 = 0xffff + max_uint8 = 0xff + + if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32: + # 64-bit unsigned integer. + return 64, 8, max_uint64, "Q" + elif num > max_uint16: + # 32-bit unsigned integer + return 32, 4, max_uint32, "L" + elif num > max_uint8: + # 16-bit unsigned integer. + return 16, 2, max_uint16, "H" + else: + # 8-bit unsigned integer. + return 8, 1, max_uint8, "B" diff --git a/server/www/packages/packages-common/rsa/_version133.py b/server/www/packages/packages-common/rsa/_version133.py new file mode 100644 index 0000000..230a03c --- /dev/null +++ b/server/www/packages/packages-common/rsa/_version133.py @@ -0,0 +1,442 @@ +"""RSA module +pri = k[1] //Private part of keys d,p,q + +Module for calculating large primes, and RSA encryption, decryption, +signing and verification. Includes generating public and private keys. + +WARNING: this code implements the mathematics of RSA. It is not suitable for +real-world secure cryptography purposes. It has not been reviewed by a security +expert. It does not include padding of data. There are many ways in which the +output of this module, when used without any modification, can be sucessfully +attacked. +""" + +__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer" +__date__ = "2010-02-05" +__version__ = '1.3.3' + +# NOTE: Python's modulo can return negative numbers. We compensate for +# this behaviour using the abs() function + +from cPickle import dumps, loads +import base64 +import math +import os +import random +import sys +import types +import zlib + +from rsa._compat import byte + +# Display a warning that this insecure version is imported. 
+import warnings +warnings.warn('Insecure version of the RSA module is imported as %s, be careful' + % __name__) + +def gcd(p, q): + """Returns the greatest common divisor of p and q + + + >>> gcd(42, 6) + 6 + """ + if p>> (128*256 + 64)*256 + + 15 + 8405007 + >>> l = [128, 64, 15] + >>> bytes2int(l) + 8405007 + """ + + if not (type(bytes) is types.ListType or type(bytes) is types.StringType): + raise TypeError("You must pass a string or a list") + + # Convert byte stream to integer + integer = 0 + for byte in bytes: + integer *= 256 + if type(byte) is types.StringType: byte = ord(byte) + integer += byte + + return integer + +def int2bytes(number): + """Converts a number to a string of bytes + + >>> bytes2int(int2bytes(123456789)) + 123456789 + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (byte(number & 0xFF), string) + number /= 256 + + return string + +def fast_exponentiation(a, p, n): + """Calculates r = a^p mod n + """ + result = a % n + remainders = [] + while p != 1: + remainders.append(p & 1) + p = p >> 1 + while remainders: + rem = remainders.pop() + result = ((a ** rem) * result ** 2) % n + return result + +def read_random_int(nbits): + """Reads a random integer of approximately nbits bits rounded up + to whole bytes""" + + nbytes = ceil(nbits/8.) + randomdata = os.urandom(nbytes) + return bytes2int(randomdata) + +def ceil(x): + """ceil(x) -> int(math.ceil(x))""" + + return int(math.ceil(x)) + +def randint(minvalue, maxvalue): + """Returns a random integer x with minvalue <= x <= maxvalue""" + + # Safety - get a lot of random data even if the range is fairly + # small + min_nbits = 32 + + # The range of the random numbers we need to generate + range = maxvalue - minvalue + + # Which is this number of bytes + rangebytes = ceil(math.log(range, 2) / 8.) + + # Convert to bits, but make sure it's always at least min_nbits*2 + rangebits = max(rangebytes * 8, min_nbits * 2) + + # Take a random number of bits between min_nbits and rangebits + nbits = random.randint(min_nbits, rangebits) + + return (read_random_int(nbits) % range) + minvalue + +def fermat_little_theorem(p): + """Returns 1 if p may be prime, and something else if p definitely + is not prime""" + + a = randint(1, p-1) + return fast_exponentiation(a, p-1, p) + +def jacobi(a, b): + """Calculates the value of the Jacobi symbol (a/b) + """ + + if a % b == 0: + return 0 + result = 1 + while a > 1: + if a & 1: + if ((a-1)*(b-1) >> 2) & 1: + result = -result + b, a = a, b % a + else: + if ((b ** 2 - 1) >> 3) & 1: + result = -result + a = a >> 1 + return result + +def jacobi_witness(x, n): + """Returns False if n is an Euler pseudo-prime with base x, and + True otherwise. + """ + + j = jacobi(x, n) % n + f = fast_exponentiation(x, (n-1)/2, n) + + if j == f: return False + return True + +def randomized_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or + prime (which is incorrect with error probability 2**-k) + + Returns False if the number if composite, and True if it's + probably prime. + """ + + q = 0.5 # Property of the jacobi_witness function + + # t = int(math.ceil(k / math.log(1/q, 2))) + t = ceil(k / math.log(1/q, 2)) + for i in range(t+1): + x = randint(1, n-1) + if jacobi_witness(x, n): return False + + return True + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. 
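# Sanity-check sketch: fast_exponentiation() above is square-and-multiply
# modular exponentiation, equivalent to Python's three-argument pow().
# The classic worked values below can be verified by hand:
#
#     assert fast_exponentiation(4, 13, 497) == pow(4, 13, 497) == 445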
+ + >>> is_prime(42) + 0 + >>> is_prime(41) + 1 + """ + + """ + if not fermat_little_theorem(number) == 1: + # Not prime, according to Fermat's little theorem + return False + """ + + if randomized_primality_testing(number, 5): + # Prime, according to Jacobi + return True + + # Not prime + return False + + +def getprime(nbits): + """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In + other words: nbits is rounded up to whole bytes. + + >>> p = getprime(8) + >>> is_prime(p-1) + 0 + >>> is_prime(p) + 1 + >>> is_prime(p+1) + 0 + """ + + nbytes = int(math.ceil(nbits/8.)) + + while True: + integer = read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if is_prime(integer): break + + # Retry if not prime + + return integer + +def are_relatively_prime(a, b): + """Returns True if a and b are relatively prime, and False if they + are not. + + >>> are_relatively_prime(2, 3) + 1 + >>> are_relatively_prime(2, 4) + 0 + """ + + d = gcd(a, b) + return (d == 1) + +def find_p_q(nbits): + """Returns a tuple of two different primes of nbits bits""" + + p = getprime(nbits) + while True: + q = getprime(nbits) + if not q == p: break + + return (p, q) + +def extended_euclid_gcd(a, b): + """Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb + """ + + if b == 0: + return (a, 1, 0) + + q = abs(a % b) + r = long(a / b) + (d, k, l) = extended_euclid_gcd(b, q) + + return (d, l, k - l*r) + +# Main function: calculate encryption and decryption keys +def calculate_keys(p, q, nbits): + """Calculates an encryption and a decryption key for p and q, and + returns them as a tuple (e, d)""" + + n = p * q + phi_n = (p-1) * (q-1) + + while True: + # Make sure e has enough bits so we ensure "wrapping" through + # modulo n + e = getprime(max(8, nbits/2)) + if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break + + (d, i, j) = extended_euclid_gcd(e, phi_n) + + if not d == 1: + raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) + + if not (e * i) % phi_n == 1: + raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) + + return (e, i) + + +def gen_keys(nbits): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + """ + + while True: + (p, q) = find_p_q(nbits) + (e, d) = calculate_keys(p, q, nbits) + + # For some reason, d is sometimes negative. We don't know how + # to fix it (yet), so we keep trying until everything is shiny + if d > 0: break + + return (p, q, e, d) + +def gen_pubpriv_keys(nbits): + """Generates public and private keys, and returns them as (pub, + priv). + + The public key consists of a dict {e: ..., , n: ....). The private + key consists of a dict {d: ...., p: ...., q: ....). 
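# Worked example of the invariant calculate_keys() above enforces,
# e * d == 1 (mod phi_n), using the textbook toy values p=61, q=53:

p, q, e, d = 61, 53, 17, 2753
n, phi_n = p * q, (p - 1) * (q - 1)     # n = 3233, phi_n = 3120
assert (e * d) % phi_n == 1
assert pow(pow(65, e, n), d, n) == 65   # encrypt then decrypt recovers m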
+ """ + + (p, q, e, d) = gen_keys(nbits) + + return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo + n""" + + if type(message) is types.IntType: + return encrypt_int(long(message), ekey, n) + + if not type(message) is types.LongType: + raise TypeError("You must pass a long or an int") + + if message > 0 and \ + math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)): + raise OverflowError("The message is too long") + + return fast_exponentiation(message, ekey, n) + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working + modulo n""" + + return encrypt_int(cyphertext, dkey, n) + +def sign_int(message, dkey, n): + """Signs 'message' using key 'dkey', working modulo n""" + + return decrypt_int(message, dkey, n) + +def verify_int(signed, ekey, n): + """verifies 'signed' using key 'ekey', working modulo n""" + + return encrypt_int(signed, ekey, n) + +def picklechops(chops): + """Pickles and base64encodes it's argument chops""" + + value = zlib.compress(dumps(chops)) + encoded = base64.encodestring(value) + return encoded.strip() + +def unpicklechops(string): + """base64decodes and unpickes it's argument string into chops""" + + return loads(zlib.decompress(base64.decodestring(string))) + +def chopstring(message, key, n, funcref): + """Splits 'message' into chops that are at most as long as n, + converts these into integers, and calls funcref(integer, key, n) + for each chop. + + Used by 'encrypt' and 'sign'. + """ + + msglen = len(message) + mbits = msglen * 8 + nbits = int(math.floor(math.log(n, 2))) + nbytes = nbits / 8 + blocks = msglen / nbytes + + if msglen % nbytes > 0: + blocks += 1 + + cypher = [] + + for bindex in range(blocks): + offset = bindex * nbytes + block = message[offset:offset+nbytes] + value = bytes2int(block) + cypher.append(funcref(value, key, n)) + + return picklechops(cypher) + +def gluechops(chops, key, n, funcref): + """Glues chops back together into a string. calls + funcref(integer, key, n) for each chop. + + Used by 'decrypt' and 'verify'. + """ + message = "" + + chops = unpicklechops(chops) + + for cpart in chops: + mpart = funcref(cpart, key, n) + message += int2bytes(mpart) + + return message + +def encrypt(message, key): + """Encrypts a string 'message' with the public key 'key'""" + + return chopstring(message, key['e'], key['n'], encrypt_int) + +def sign(message, key): + """Signs a string 'message' with the private key 'key'""" + + return chopstring(message, key['d'], key['p']*key['q'], decrypt_int) + +def decrypt(cypher, key): + """Decrypts a cypher with the private key 'key'""" + + return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) + +def verify(cypher, key): + """Verifies a cypher with the public key 'key'""" + + return gluechops(cypher, key['e'], key['n'], encrypt_int) + +# Do doctest if we're not imported +if __name__ == "__main__": + import doctest + doctest.testmod() + +__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"] + diff --git a/server/www/packages/packages-common/rsa/_version200.py b/server/www/packages/packages-common/rsa/_version200.py new file mode 100644 index 0000000..f915653 --- /dev/null +++ b/server/www/packages/packages-common/rsa/_version200.py @@ -0,0 +1,529 @@ +"""RSA module + +Module for calculating large primes, and RSA encryption, decryption, +signing and verification. Includes generating public and private keys. 
+ +WARNING: this implementation does not use random padding, compression of the +cleartext input to prevent repetitions, or other common security improvements. +Use with care. + +""" + +__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead" +__date__ = "2010-02-08" +__version__ = '2.0' + +import math +import os +import random +import sys +import types +from rsa._compat import byte + +# Display a warning that this insecure version is imported. +import warnings +warnings.warn('Insecure version of the RSA module is imported as %s' % __name__) + + +def bit_size(number): + """Returns the number of bits required to hold a specific long number""" + + return int(math.ceil(math.log(number,2))) + +def gcd(p, q): + """Returns the greatest common divisor of p and q + >>> gcd(48, 180) + 12 + """ + # Iterateive Version is faster and uses much less stack space + while q != 0: + if p < q: (p,q) = (q,p) + (p,q) = (q, p % q) + return p + + +def bytes2int(bytes): + """Converts a list of bytes or a string to an integer + + >>> (((128 * 256) + 64) * 256) + 15 + 8405007 + >>> l = [128, 64, 15] + >>> bytes2int(l) #same as bytes2int('\x80@\x0f') + 8405007 + """ + + if not (type(bytes) is types.ListType or type(bytes) is types.StringType): + raise TypeError("You must pass a string or a list") + + # Convert byte stream to integer + integer = 0 + for byte in bytes: + integer *= 256 + if type(byte) is types.StringType: byte = ord(byte) + integer += byte + + return integer + +def int2bytes(number): + """ + Converts a number to a string of bytes + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (byte(number & 0xFF), string) + number /= 256 + + return string + +def to64(number): + """Converts a number in the range of 0 to 63 into base 64 digit + character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'. + + >>> to64(10) + 'A' + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + if 0 <= number <= 9: #00-09 translates to '0' - '9' + return byte(number + 48) + + if 10 <= number <= 35: + return byte(number + 55) #10-35 translates to 'A' - 'Z' + + if 36 <= number <= 61: + return byte(number + 61) #36-61 translates to 'a' - 'z' + + if number == 62: # 62 translates to '-' (minus) + return byte(45) + + if number == 63: # 63 translates to '_' (underscore) + return byte(95) + + raise ValueError('Invalid Base64 value: %i' % number) + + +def from64(number): + """Converts an ordinal character value in the range of + 0-9,A-Z,a-z,-,_ to a number in the range of 0-63. + + >>> from64(49) + 1 + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + if 48 <= number <= 57: #ord('0') - ord('9') translates to 0-9 + return(number - 48) + + if 65 <= number <= 90: #ord('A') - ord('Z') translates to 10-35 + return(number - 55) + + if 97 <= number <= 122: #ord('a') - ord('z') translates to 36-61 + return(number - 61) + + if number == 45: #ord('-') translates to 62 + return(62) + + if number == 95: #ord('_') translates to 63 + return(63) + + raise ValueError('Invalid Base64 value: %i' % number) + + +def int2str64(number): + """Converts a number to a string of base64 encoded characters in + the range of '0'-'9','A'-'Z,'a'-'z','-','_'. 
+ + >>> int2str64(123456789) + '7MyqL' + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (to64(number & 0x3F), string) + number /= 64 + + return string + + +def str642int(string): + """Converts a base64 encoded string into an integer. + The chars of this string in in the range '0'-'9','A'-'Z','a'-'z','-','_' + + >>> str642int('7MyqL') + 123456789 + """ + + if not (type(string) is types.ListType or type(string) is types.StringType): + raise TypeError("You must pass a string or a list") + + integer = 0 + for byte in string: + integer *= 64 + if type(byte) is types.StringType: byte = ord(byte) + integer += from64(byte) + + return integer + +def read_random_int(nbits): + """Reads a random integer of approximately nbits bits rounded up + to whole bytes""" + + nbytes = int(math.ceil(nbits/8.)) + randomdata = os.urandom(nbytes) + return bytes2int(randomdata) + +def randint(minvalue, maxvalue): + """Returns a random integer x with minvalue <= x <= maxvalue""" + + # Safety - get a lot of random data even if the range is fairly + # small + min_nbits = 32 + + # The range of the random numbers we need to generate + range = (maxvalue - minvalue) + 1 + + # Which is this number of bytes + rangebytes = ((bit_size(range) + 7) / 8) + + # Convert to bits, but make sure it's always at least min_nbits*2 + rangebits = max(rangebytes * 8, min_nbits * 2) + + # Take a random number of bits between min_nbits and rangebits + nbits = random.randint(min_nbits, rangebits) + + return (read_random_int(nbits) % range) + minvalue + +def jacobi(a, b): + """Calculates the value of the Jacobi symbol (a/b) + where both a and b are positive integers, and b is odd + """ + + if a == 0: return 0 + result = 1 + while a > 1: + if a & 1: + if ((a-1)*(b-1) >> 2) & 1: + result = -result + a, b = b % a, a + else: + if (((b * b) - 1) >> 3) & 1: + result = -result + a >>= 1 + if a == 0: return 0 + return result + +def jacobi_witness(x, n): + """Returns False if n is an Euler pseudo-prime with base x, and + True otherwise. + """ + + j = jacobi(x, n) % n + f = pow(x, (n-1)/2, n) + + if j == f: return False + return True + +def randomized_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or + prime (which is incorrect with error probability 2**-k) + + Returns False if the number is composite, and True if it's + probably prime. + """ + + # 50% of Jacobi-witnesses can report compositness of non-prime numbers + + for i in range(k): + x = randint(1, n-1) + if jacobi_witness(x, n): return False + + return True + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. + + >>> is_prime(42) + 0 + >>> is_prime(41) + 1 + """ + + if randomized_primality_testing(number, 6): + # Prime, according to Jacobi + return True + + # Not prime + return False + + +def getprime(nbits): + """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In + other words: nbits is rounded up to whole bytes. + + >>> p = getprime(8) + >>> is_prime(p-1) + 0 + >>> is_prime(p) + 1 + >>> is_prime(p+1) + 0 + """ + + while True: + integer = read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if is_prime(integer): break + + # Retry if not prime + + return integer + +def are_relatively_prime(a, b): + """Returns True if a and b are relatively prime, and False if they + are not. 
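# The custom 64-character digit set above ('0'-'9', 'A'-'Z', 'a'-'z',
# '-', '_') makes int2str64() and str642int() exact inverses; the values
# below come straight from the doctests:
#
#     assert int2str64(123456789) == '7MyqL'
#     assert str642int('7MyqL') == 123456789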
+ + >>> are_relatively_prime(2, 3) + 1 + >>> are_relatively_prime(2, 4) + 0 + """ + + d = gcd(a, b) + return (d == 1) + +def find_p_q(nbits): + """Returns a tuple of two different primes of nbits bits""" + pbits = nbits + (nbits/16) #Make sure that p and q aren't too close + qbits = nbits - (nbits/16) #or the factoring programs can factor n + p = getprime(pbits) + while True: + q = getprime(qbits) + #Make sure p and q are different. + if not q == p: break + return (p, q) + +def extended_gcd(a, b): + """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb + """ + # r = gcd(a,b) i = multiplicitive inverse of a mod b + # or j = multiplicitive inverse of b mod a + # Neg return values for i or j are made positive mod b or a respectively + # Iterateive Version is faster and uses much less stack space + x = 0 + y = 1 + lx = 1 + ly = 0 + oa = a #Remember original a/b to remove + ob = b #negative values from return results + while b != 0: + q = long(a/b) + (a, b) = (b, a % b) + (x, lx) = ((lx - (q * x)),x) + (y, ly) = ((ly - (q * y)),y) + if (lx < 0): lx += ob #If neg wrap modulo orignal b + if (ly < 0): ly += oa #If neg wrap modulo orignal a + return (a, lx, ly) #Return only positive values + +# Main function: calculate encryption and decryption keys +def calculate_keys(p, q, nbits): + """Calculates an encryption and a decryption key for p and q, and + returns them as a tuple (e, d)""" + + n = p * q + phi_n = (p-1) * (q-1) + + while True: + # Make sure e has enough bits so we ensure "wrapping" through + # modulo n + e = max(65537,getprime(nbits/4)) + if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break + + (d, i, j) = extended_gcd(e, phi_n) + + if not d == 1: + raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) + if (i < 0): + raise Exception("New extended_gcd shouldn't return negative values") + if not (e * i) % phi_n == 1: + raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) + + return (e, i) + + +def gen_keys(nbits): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + """ + + (p, q) = find_p_q(nbits) + (e, d) = calculate_keys(p, q, nbits) + + return (p, q, e, d) + +def newkeys(nbits): + """Generates public and private keys, and returns them as (pub, + priv). + + The public key consists of a dict {e: ..., , n: ....). The private + key consists of a dict {d: ...., p: ...., q: ....). 
+ """ + nbits = max(9,nbits) # Don't let nbits go below 9 bits + (p, q, e, d) = gen_keys(nbits) + + return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo n""" + + if type(message) is types.IntType: + message = long(message) + + if not type(message) is types.LongType: + raise TypeError("You must pass a long or int") + + if message < 0 or message > n: + raise OverflowError("The message is too long") + + #Note: Bit exponents start at zero (bit counts start at 1) this is correct + safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) + message += (1 << safebit) #add safebit to ensure folding + + return pow(message, ekey, n) + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working + modulo n""" + + message = pow(cyphertext, dkey, n) + + safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) + message -= (1 << safebit) #remove safebit before decode + + return message + +def encode64chops(chops): + """base64encodes chops and combines them into a ',' delimited string""" + + chips = [] #chips are character chops + + for value in chops: + chips.append(int2str64(value)) + + #delimit chops with comma + encoded = ','.join(chips) + + return encoded + +def decode64chops(string): + """base64decodes and makes a ',' delimited string into chops""" + + chips = string.split(',') #split chops at commas + + chops = [] + + for string in chips: #make char chops (chips) into chops + chops.append(str642int(string)) + + return chops + +def chopstring(message, key, n, funcref): + """Chops the 'message' into integers that fit into n, + leaving room for a safebit to be added to ensure that all + messages fold during exponentiation. The MSB of the number n + is not independant modulo n (setting it could cause overflow), so + use the next lower bit for the safebit. Therefore reserve 2-bits + in the number n for non-data bits. Calls specified encryption + function for each chop. + + Used by 'encrypt' and 'sign'. + """ + + msglen = len(message) + mbits = msglen * 8 + #Set aside 2-bits so setting of safebit won't overflow modulo n. + nbits = bit_size(n) - 2 # leave room for safebit + nbytes = nbits / 8 + blocks = msglen / nbytes + + if msglen % nbytes > 0: + blocks += 1 + + cypher = [] + + for bindex in range(blocks): + offset = bindex * nbytes + block = message[offset:offset+nbytes] + value = bytes2int(block) + cypher.append(funcref(value, key, n)) + + return encode64chops(cypher) #Encode encrypted ints to base64 strings + +def gluechops(string, key, n, funcref): + """Glues chops back together into a string. calls + funcref(integer, key, n) for each chop. + + Used by 'decrypt' and 'verify'. 
+ """ + message = "" + + chops = decode64chops(string) #Decode base64 strings into integer chops + + for cpart in chops: + mpart = funcref(cpart, key, n) #Decrypt each chop + message += int2bytes(mpart) #Combine decrypted strings into a msg + + return message + +def encrypt(message, key): + """Encrypts a string 'message' with the public key 'key'""" + if 'n' not in key: + raise Exception("You must use the public key with encrypt") + + return chopstring(message, key['e'], key['n'], encrypt_int) + +def sign(message, key): + """Signs a string 'message' with the private key 'key'""" + if 'p' not in key: + raise Exception("You must use the private key with sign") + + return chopstring(message, key['d'], key['p']*key['q'], encrypt_int) + +def decrypt(cypher, key): + """Decrypts a string 'cypher' with the private key 'key'""" + if 'p' not in key: + raise Exception("You must use the private key with decrypt") + + return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) + +def verify(cypher, key): + """Verifies a string 'cypher' with the public key 'key'""" + if 'n' not in key: + raise Exception("You must use the public key with verify") + + return gluechops(cypher, key['e'], key['n'], decrypt_int) + +# Do doctest if we're not imported +if __name__ == "__main__": + import doctest + doctest.testmod() + +__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"] + diff --git a/server/www/packages/packages-common/rsa/asn1.py b/server/www/packages/packages-common/rsa/asn1.py new file mode 100644 index 0000000..706e6cf --- /dev/null +++ b/server/www/packages/packages-common/rsa/asn1.py @@ -0,0 +1,35 @@ +'''ASN.1 definitions. + +Not all ASN.1-handling code use these definitions, but when it does, they should be here. +''' + +from pyasn1.type import univ, namedtype, tag + +class PubKeyHeader(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('oid', univ.ObjectIdentifier()), + namedtype.NamedType('parameters', univ.Null()), + ) + +class OpenSSLPubKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('header', PubKeyHeader()), + + # This little hack (the implicit tag) allows us to get a Bit String as Octet String + namedtype.NamedType('key', univ.OctetString().subtype( + implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))), + ) + + +class AsnPubKey(univ.Sequence): + '''ASN.1 contents of DER encoded public key: + + RSAPublicKey ::= SEQUENCE { + modulus INTEGER, -- n + publicExponent INTEGER, -- e + ''' + + componentType = namedtype.NamedTypes( + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + ) diff --git a/server/www/packages/packages-common/rsa/bigfile.py b/server/www/packages/packages-common/rsa/bigfile.py new file mode 100644 index 0000000..516cf56 --- /dev/null +++ b/server/www/packages/packages-common/rsa/bigfile.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +'''Large file support + + - break a file into smaller blocks, and encrypt them, and store the + encrypted blocks in another file. + + - take such an encrypted files, decrypt its blocks, and reconstruct the + original file. + +The encrypted file format is as follows, where || denotes byte concatenation: + + FILE := VERSION || BLOCK || BLOCK ... + + BLOCK := LENGTH || DATA + + LENGTH := varint-encoded length of the subsequent data. Varint comes from + Google Protobuf, and encodes an integer into a variable number of bytes. + Each byte uses the 7 lowest bits to encode the value. The highest bit set + to 1 indicates the next byte is also part of the varint. The last byte will + have this bit set to 0. + +This file format is called the VARBLOCK format, in line with the varint format +used to denote the block sizes. + +''' + +from rsa import key, common, pkcs1, varblock +from rsa._compat import byte + +def encrypt_bigfile(infile, outfile, pub_key): + '''Encrypts a file, writing it to 'outfile' in VARBLOCK format. + + :param infile: file-like object to read the cleartext from + :param outfile: file-like object to write the crypto in VARBLOCK format to + :param pub_key: :py:class:`rsa.PublicKey` to encrypt with + + ''' + + if not isinstance(pub_key, key.PublicKey): + raise TypeError('Public key required, but got %r' % pub_key) + + key_bytes = common.bit_size(pub_key.n) // 8 + blocksize = key_bytes - 11 # keep space for PKCS#1 padding + + # Write the version number to the VARBLOCK file + outfile.write(byte(varblock.VARBLOCK_VERSION)) + + # Encrypt and write each block + for block in varblock.yield_fixedblocks(infile, blocksize): + crypto = pkcs1.encrypt(block, pub_key) + + varblock.write_varint(outfile, len(crypto)) + outfile.write(crypto) + +def decrypt_bigfile(infile, outfile, priv_key): + '''Decrypts an encrypted VARBLOCK file, writing it to 'outfile' + + :param infile: file-like object to read the crypto in VARBLOCK format from + :param outfile: file-like object to write the cleartext to + :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with + + ''' + + if not isinstance(priv_key, key.PrivateKey): + raise TypeError('Private key required, but got %r' % priv_key) + + for block in varblock.yield_varblocks(infile): + cleartext = pkcs1.decrypt(block, priv_key) + outfile.write(cleartext) + +__all__ = ['encrypt_bigfile', 'decrypt_bigfile'] + diff --git a/server/www/packages/packages-common/rsa/cli.py b/server/www/packages/packages-common/rsa/cli.py new file mode 100644 index 0000000..527cc49 --- /dev/null +++ b/server/www/packages/packages-common/rsa/cli.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Commandline scripts. + +These scripts are called by the executables defined in setup.py. 
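# Minimal varint encoder matching the VARBLOCK length encoding described
# above (7 value bits per byte; the high bit stays set while more bytes
# follow). This should mirror what rsa.varblock's write_varint emits:

def _encode_varint_sketch(value):
    out = bytearray()
    while True:
        to_write = value & 0x7F
        value >>= 7
        if value:
            out.append(to_write | 0x80)    # continuation bit set
        else:
            out.append(to_write)           # last byte: high bit clear
            return bytes(out)

assert _encode_varint_sketch(300) == b'\xac\x02'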
+''' + +from __future__ import with_statement, print_function + +import abc +import sys +from optparse import OptionParser + +import rsa +import rsa.bigfile +import rsa.pkcs1 + +HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys()) + +def keygen(): + '''Key generator.''' + + # Parse the CLI options + parser = OptionParser(usage='usage: %prog [options] keysize', + description='Generates a new RSA keypair of "keysize" bits.') + + parser.add_option('--pubout', type='string', + help='Output filename for the public key. The public key is ' + 'not saved if this option is not present. You can use ' + 'pyrsa-priv2pub to create the public key file later.') + + parser.add_option('-o', '--out', type='string', + help='Output filename for the private key. The key is ' + 'written to stdout if this option is not present.') + + parser.add_option('--form', + help='key format of the private and public keys - default PEM', + choices=('PEM', 'DER'), default='PEM') + + (cli, cli_args) = parser.parse_args(sys.argv[1:]) + + if len(cli_args) != 1: + parser.print_help() + raise SystemExit(1) + + try: + keysize = int(cli_args[0]) + except ValueError: + parser.print_help() + print('Not a valid number: %s' % cli_args[0], file=sys.stderr) + raise SystemExit(1) + + print('Generating %i-bit key' % keysize, file=sys.stderr) + (pub_key, priv_key) = rsa.newkeys(keysize) + + + # Save public key + if cli.pubout: + print('Writing public key to %s' % cli.pubout, file=sys.stderr) + data = pub_key.save_pkcs1(format=cli.form) + with open(cli.pubout, 'wb') as outfile: + outfile.write(data) + + # Save private key + data = priv_key.save_pkcs1(format=cli.form) + + if cli.out: + print('Writing private key to %s' % cli.out, file=sys.stderr) + with open(cli.out, 'wb') as outfile: + outfile.write(data) + else: + print('Writing private key to stdout', file=sys.stderr) + sys.stdout.write(data) + + +class CryptoOperation(object): + '''CLI callable that operates with input, output, and a key.''' + + __metaclass__ = abc.ABCMeta + + keyname = 'public' # or 'private' + usage = 'usage: %%prog [options] %(keyname)s_key' + description = None + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \ + 'not specified.' + output_help = 'Name of the file to write the %(operation_past)s file ' \ + 'to. Written to stdout if this option is not present.' + expected_cli_args = 1 + has_output = True + + key_class = rsa.PublicKey + + def __init__(self): + self.usage = self.usage % self.__class__.__dict__ + self.input_help = self.input_help % self.__class__.__dict__ + self.output_help = self.output_help % self.__class__.__dict__ + + @abc.abstractmethod + def perform_operation(self, indata, key, cli_args=None): + '''Performs the program's operation. + + Implement in a subclass. + + :returns: the data to write to the output. 
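# Example invocation for the keygen entry point above (the pyrsa-keygen
# script name follows the pyrsa-* naming used in this module's own help
# strings, e.g. pyrsa-priv2pub; the exact console-script wiring lives in
# setup.py and is an assumption here):
#
#     pyrsa-keygen --out private.pem --pubout public.pem 512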
+ ''' + + def __call__(self): + '''Runs the program.''' + + (cli, cli_args) = self.parse_cli() + + key = self.read_key(cli_args[0], cli.keyform) + + indata = self.read_infile(cli.input) + + print(self.operation_progressive.title(), file=sys.stderr) + outdata = self.perform_operation(indata, key, cli_args) + + if self.has_output: + self.write_outfile(outdata, cli.output) + + def parse_cli(self): + '''Parse the CLI options + + :returns: (cli_opts, cli_args) + ''' + + parser = OptionParser(usage=self.usage, description=self.description) + + parser.add_option('-i', '--input', type='string', help=self.input_help) + + if self.has_output: + parser.add_option('-o', '--output', type='string', help=self.output_help) + + parser.add_option('--keyform', + help='Key format of the %s key - default PEM' % self.keyname, + choices=('PEM', 'DER'), default='PEM') + + (cli, cli_args) = parser.parse_args(sys.argv[1:]) + + if len(cli_args) != self.expected_cli_args: + parser.print_help() + raise SystemExit(1) + + return (cli, cli_args) + + def read_key(self, filename, keyform): + '''Reads a public or private key.''' + + print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr) + with open(filename, 'rb') as keyfile: + keydata = keyfile.read() + + return self.key_class.load_pkcs1(keydata, keyform) + + def read_infile(self, inname): + '''Read the input file''' + + if inname: + print('Reading input from %s' % inname, file=sys.stderr) + with open(inname, 'rb') as infile: + return infile.read() + + print('Reading input from stdin', file=sys.stderr) + return sys.stdin.read() + + def write_outfile(self, outdata, outname): + '''Write the output file''' + + if outname: + print('Writing output to %s' % outname, file=sys.stderr) + with open(outname, 'wb') as outfile: + outfile.write(outdata) + else: + print('Writing output to stdout', file=sys.stderr) + sys.stdout.write(outdata) + +class EncryptOperation(CryptoOperation): + '''Encrypts a file.''' + + keyname = 'public' + description = ('Encrypts a file. The file must be shorter than the key ' + 'length in order to be encrypted. For larger files, use the ' + 'pyrsa-encrypt-bigfile command.') + operation = 'encrypt' + operation_past = 'encrypted' + operation_progressive = 'encrypting' + + + def perform_operation(self, indata, pub_key, cli_args=None): + '''Encrypts files.''' + + return rsa.encrypt(indata, pub_key) + +class DecryptOperation(CryptoOperation): + '''Decrypts a file.''' + + keyname = 'private' + description = ('Decrypts a file. The original file must be shorter than ' + 'the key length in order to have been encrypted. For larger ' + 'files, use the pyrsa-decrypt-bigfile command.') + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + key_class = rsa.PrivateKey + + def perform_operation(self, indata, priv_key, cli_args=None): + '''Decrypts files.''' + + return rsa.decrypt(indata, priv_key) + +class SignOperation(CryptoOperation): + '''Signs a file.''' + + keyname = 'private' + usage = 'usage: %%prog [options] private_key hash_method' + description = ('Signs a file, outputs the signature. Choose the hash ' + 'method from %s' % ', '.join(HASH_METHODS)) + operation = 'sign' + operation_past = 'signature' + operation_progressive = 'Signing' + key_class = rsa.PrivateKey + expected_cli_args = 2 + + output_help = ('Name of the file to write the signature to. 
Written ' + 'to stdout if this option is not present.') + + def perform_operation(self, indata, priv_key, cli_args): + '''Decrypts files.''' + + hash_method = cli_args[1] + if hash_method not in HASH_METHODS: + raise SystemExit('Invalid hash method, choose one of %s' % + ', '.join(HASH_METHODS)) + + return rsa.sign(indata, priv_key, hash_method) + +class VerifyOperation(CryptoOperation): + '''Verify a signature.''' + + keyname = 'public' + usage = 'usage: %%prog [options] public_key signature_file' + description = ('Verifies a signature, exits with status 0 upon success, ' + 'prints an error message and exits with status 1 upon error.') + operation = 'verify' + operation_past = 'verified' + operation_progressive = 'Verifying' + key_class = rsa.PublicKey + expected_cli_args = 2 + has_output = False + + def perform_operation(self, indata, pub_key, cli_args): + '''Decrypts files.''' + + signature_file = cli_args[1] + + with open(signature_file, 'rb') as sigfile: + signature = sigfile.read() + + try: + rsa.verify(indata, signature, pub_key) + except rsa.VerificationError: + raise SystemExit('Verification failed.') + + print('Verification OK', file=sys.stderr) + + +class BigfileOperation(CryptoOperation): + '''CryptoOperation that doesn't read the entire file into memory.''' + + def __init__(self): + CryptoOperation.__init__(self) + + self.file_objects = [] + + def __del__(self): + '''Closes any open file handles.''' + + for fobj in self.file_objects: + fobj.close() + + def __call__(self): + '''Runs the program.''' + + (cli, cli_args) = self.parse_cli() + + key = self.read_key(cli_args[0], cli.keyform) + + # Get the file handles + infile = self.get_infile(cli.input) + outfile = self.get_outfile(cli.output) + + # Call the operation + print(self.operation_progressive.title(), file=sys.stderr) + self.perform_operation(infile, outfile, key, cli_args) + + def get_infile(self, inname): + '''Returns the input file object''' + + if inname: + print('Reading input from %s' % inname, file=sys.stderr) + fobj = open(inname, 'rb') + self.file_objects.append(fobj) + else: + print('Reading input from stdin', file=sys.stderr) + fobj = sys.stdin + + return fobj + + def get_outfile(self, outname): + '''Returns the output file object''' + + if outname: + print('Will write output to %s' % outname, file=sys.stderr) + fobj = open(outname, 'wb') + self.file_objects.append(fobj) + else: + print('Will write output to stdout', file=sys.stderr) + fobj = sys.stdout + + return fobj + +class EncryptBigfileOperation(BigfileOperation): + '''Encrypts a file to VARBLOCK format.''' + + keyname = 'public' + description = ('Encrypts a file to an encrypted VARBLOCK file. 
The file ' + 'can be larger than the key length, but the output file is only ' + 'compatible with Python-RSA.') + operation = 'encrypt' + operation_past = 'encrypted' + operation_progressive = 'encrypting' + + def perform_operation(self, infile, outfile, pub_key, cli_args=None): + '''Encrypts files to VARBLOCK.''' + + return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key) + +class DecryptBigfileOperation(BigfileOperation): + '''Decrypts a file in VARBLOCK format.''' + + keyname = 'private' + description = ('Decrypts an encrypted VARBLOCK file that was encrypted ' + 'with pyrsa-encrypt-bigfile') + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + key_class = rsa.PrivateKey + + def perform_operation(self, infile, outfile, priv_key, cli_args=None): + '''Decrypts a VARBLOCK file.''' + + return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key) + + +encrypt = EncryptOperation() +decrypt = DecryptOperation() +sign = SignOperation() +verify = VerifyOperation() +encrypt_bigfile = EncryptBigfileOperation() +decrypt_bigfile = DecryptBigfileOperation() + diff --git a/server/www/packages/packages-common/rsa/common.py b/server/www/packages/packages-common/rsa/common.py new file mode 100644 index 0000000..39feb8c --- /dev/null +++ b/server/www/packages/packages-common/rsa/common.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Common functionality shared by several modules.''' + + +def bit_size(num): + ''' + Number of bits needed to represent a integer excluding any prefix + 0 bits. + + As per definition from http://wiki.python.org/moin/BitManipulation and + to match the behavior of the Python 3 API. + + Usage:: + + >>> bit_size(1023) + 10 + >>> bit_size(1024) + 11 + >>> bit_size(1025) + 11 + + :param num: + Integer value. If num is 0, returns 0. Only the absolute value of the + number is considered. Therefore, signed integers will be abs(num) + before the number's bit length is determined. + :returns: + Returns the number of bits in the integer. + ''' + if num == 0: + return 0 + if num < 0: + num = -num + + # Make sure this is an int and not a float. + num & 1 + + hex_num = "%x" % num + return ((len(hex_num) - 1) * 4) + { + '0':0, '1':1, '2':2, '3':2, + '4':3, '5':3, '6':3, '7':3, + '8':4, '9':4, 'a':4, 'b':4, + 'c':4, 'd':4, 'e':4, 'f':4, + }[hex_num[0]] + + +def _bit_size(number): + ''' + Returns the number of bits required to hold a specific long number. + ''' + if number < 0: + raise ValueError('Only nonnegative numbers possible: %s' % number) + + if number == 0: + return 0 + + # This works, even with very large numbers. When using math.log(number, 2), + # you'll get rounding errors and it'll fail. + bits = 0 + while number: + bits += 1 + number >>= 1 + + return bits + + +def byte_size(number): + ''' + Returns the number of bytes required to hold a specific long number. + + The number of bytes is rounded up. 
+ + Usage:: + + >>> byte_size(1 << 1023) + 128 + >>> byte_size((1 << 1024) - 1) + 128 + >>> byte_size(1 << 1024) + 129 + + :param number: + An unsigned integer + :returns: + The number of bytes required to hold a specific long number. + ''' + quanta, mod = divmod(bit_size(number), 8) + if mod or number == 0: + quanta += 1 + return quanta + #return int(math.ceil(bit_size(number) / 8.0)) + + +def extended_gcd(a, b): + '''Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb + ''' + # r = gcd(a,b) i = multiplicative inverse of a mod b + # or j = multiplicative inverse of b mod a + # Neg return values for i or j are made positive mod b or a respectively + # Iterative Version is faster and uses much less stack space + x = 0 + y = 1 + lx = 1 + ly = 0 + oa = a #Remember original a/b to remove + ob = b #negative values from return results + while b != 0: + q = a // b + (a, b) = (b, a % b) + (x, lx) = ((lx - (q * x)),x) + (y, ly) = ((ly - (q * y)),y) + if (lx < 0): lx += ob #If neg wrap modulo original b + if (ly < 0): ly += oa #If neg wrap modulo original a + return (a, lx, ly) #Return only positive values + + +def inverse(x, n): + '''Returns x^-1 (mod n) + + >>> inverse(7, 4) + 3 + >>> (inverse(143, 4) * 143) % 4 + 1 + ''' + + (divider, inv, _) = extended_gcd(x, n) + + if divider != 1: + raise ValueError("x (%d) and n (%d) are not relatively prime" % (x, n)) + + return inv + + +def crt(a_values, modulo_values): + '''Chinese Remainder Theorem. + + Calculates x such that x = a[i] (mod m[i]) for each i. + + :param a_values: the a-values of the above equation + :param modulo_values: the m-values of the above equation + :returns: x such that x = a[i] (mod m[i]) for each i + + + >>> crt([2, 3], [3, 5]) + 8 + + >>> crt([2, 3, 2], [3, 5, 7]) + 23 + + >>> crt([2, 3, 0], [7, 11, 15]) + 135 + ''' + + m = 1 + x = 0 + + for modulo in modulo_values: + m *= modulo + + for (m_i, a_i) in zip(modulo_values, a_values): + M_i = m // m_i + inv = inverse(M_i, m_i) + + x = (x + a_i * M_i * inv) % m + + return x + +if __name__ == '__main__': + import doctest + doctest.testmod() + diff --git a/server/www/packages/packages-common/rsa/core.py b/server/www/packages/packages-common/rsa/core.py new file mode 100644 index 0000000..90dfee8 --- /dev/null +++ b/server/www/packages/packages-common/rsa/core.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Core mathematical operations. + +This is the actual core RSA implementation, which is only defined +mathematically on integers.
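That claim is easy to verify by hand: the encrypt_int/decrypt_int functions below reduce to Python's built-in pow(). A toy check with the classic textbook primes (these numbers are not from this codebase, and are far too small for real keys):

# Toy check of the encrypt_int/decrypt_int identity with textbook numbers.
p, q = 61, 53
n = p * q                     # 3233
phi_n = (p - 1) * (q - 1)     # 3120
e = 17                        # public exponent, coprime with phi_n
d = 2753                      # private exponent: (e * d) % phi_n == 1

message = 65
cyphertext = pow(message, e, n)          # what encrypt_int(message, e, n) computes: 2790
assert pow(cyphertext, d, n) == message  # decrypt_int round-trips back to 65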
+''' + + +from rsa._compat import is_integer + +def assert_int(var, name): + + if is_integer(var): + return + + raise TypeError('%s should be an integer, not %s' % (name, var.__class__)) + +def encrypt_int(message, ekey, n): + '''Encrypts a message using encryption key 'ekey', working modulo n''' + + assert_int(message, 'message') + assert_int(ekey, 'ekey') + assert_int(n, 'n') + + if message < 0: + raise ValueError('Only non-negative numbers are supported') + + if message > n: + raise OverflowError("The message %i is too long for n=%i" % (message, n)) + + return pow(message, ekey, n) + +def decrypt_int(cyphertext, dkey, n): + '''Decrypts a cypher text using the decryption key 'dkey', working + modulo n''' + + assert_int(cyphertext, 'cyphertext') + assert_int(dkey, 'dkey') + assert_int(n, 'n') + + message = pow(cyphertext, dkey, n) + return message + diff --git a/server/www/packages/packages-common/rsa/key.py b/server/www/packages/packages-common/rsa/key.py new file mode 100644 index 0000000..b6de7b3 --- /dev/null +++ b/server/www/packages/packages-common/rsa/key.py @@ -0,0 +1,612 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''RSA key generation code. + +Create new keys with the newkeys() function. It will give you a PublicKey and a +PrivateKey object. + +Loading and saving keys requires the pyasn1 module. This module is imported as +late as possible, such that other functionality will remain working in absence +of pyasn1. + +''' + +import logging +from rsa._compat import b, bytes_type + +import rsa.prime +import rsa.pem +import rsa.common + +log = logging.getLogger(__name__) + + + +class AbstractKey(object): + '''Abstract superclass for private and public keys.''' + + @classmethod + def load_pkcs1(cls, keyfile, format='PEM'): + r'''Loads a key in PKCS#1 DER or PEM format. + + :param keyfile: contents of a DER- or PEM-encoded file that contains + the public key. + :param format: the format of the file to load; 'PEM' or 'DER' + + :return: a PublicKey object + + ''' + + methods = { + 'PEM': cls._load_pkcs1_pem, + 'DER': cls._load_pkcs1_der, + } + + if format not in methods: + formats = ', '.join(sorted(methods.keys())) + raise ValueError('Unsupported format: %r, try one of %s' % (format, + formats)) + + method = methods[format] + return method(keyfile) + + def save_pkcs1(self, format='PEM'): + '''Saves the public key in PKCS#1 DER or PEM format. + + :param format: the format to save; 'PEM' or 'DER' + :returns: the DER- or PEM-encoded public key. + + ''' + + methods = { + 'PEM': self._save_pkcs1_pem, + 'DER': self._save_pkcs1_der, + } + + if format not in methods: + formats = ', '.join(sorted(methods.keys())) + raise ValueError('Unsupported format: %r, try one of %s' % (format, + formats)) + + method = methods[format] + return method() + +class PublicKey(AbstractKey): + '''Represents a public RSA key. + + This key is also known as the 'encryption key'. It contains the 'n' and 'e' + values. 
+ + Supports attributes as well as dictionary-like access. Attribute accesss is + faster, though. + + >>> PublicKey(5, 3) + PublicKey(5, 3) + + >>> key = PublicKey(5, 3) + >>> key.n + 5 + >>> key['n'] + 5 + >>> key.e + 3 + >>> key['e'] + 3 + + ''' + + __slots__ = ('n', 'e') + + def __init__(self, n, e): + self.n = n + self.e = e + + def __getitem__(self, key): + return getattr(self, key) + + def __repr__(self): + return 'PublicKey(%i, %i)' % (self.n, self.e) + + def __eq__(self, other): + if other is None: + return False + + if not isinstance(other, PublicKey): + return False + + return self.n == other.n and self.e == other.e + + def __ne__(self, other): + return not (self == other) + + @classmethod + def _load_pkcs1_der(cls, keyfile): + r'''Loads a key in PKCS#1 DER format. + + @param keyfile: contents of a DER-encoded file that contains the public + key. + @return: a PublicKey object + + First let's construct a DER encoded key: + + >>> import base64 + >>> b64der = 'MAwCBQCNGmYtAgMBAAE=' + >>> der = base64.decodestring(b64der) + + This loads the file: + + >>> PublicKey._load_pkcs1_der(der) + PublicKey(2367317549, 65537) + + ''' + + from pyasn1.codec.der import decoder + from rsa.asn1 import AsnPubKey + + (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey()) + return cls(n=int(priv['modulus']), e=int(priv['publicExponent'])) + + def _save_pkcs1_der(self): + '''Saves the public key in PKCS#1 DER format. + + @returns: the DER-encoded public key. + ''' + + from pyasn1.codec.der import encoder + from rsa.asn1 import AsnPubKey + + # Create the ASN object + asn_key = AsnPubKey() + asn_key.setComponentByName('modulus', self.n) + asn_key.setComponentByName('publicExponent', self.e) + + return encoder.encode(asn_key) + + @classmethod + def _load_pkcs1_pem(cls, keyfile): + '''Loads a PKCS#1 PEM-encoded public key file. + + The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and + after the "-----END RSA PUBLIC KEY-----" lines is ignored. + + @param keyfile: contents of a PEM-encoded file that contains the public + key. + @return: a PublicKey object + ''' + + der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY') + return cls._load_pkcs1_der(der) + + def _save_pkcs1_pem(self): + '''Saves a PKCS#1 PEM-encoded public key file. + + @return: contents of a PEM-encoded file that contains the public key. + ''' + + der = self._save_pkcs1_der() + return rsa.pem.save_pem(der, 'RSA PUBLIC KEY') + + @classmethod + def load_pkcs1_openssl_pem(cls, keyfile): + '''Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL. + + These files can be recognised in that they start with BEGIN PUBLIC KEY + rather than BEGIN RSA PUBLIC KEY. + + The contents of the file before the "-----BEGIN PUBLIC KEY-----" and + after the "-----END PUBLIC KEY-----" lines is ignored. + + @param keyfile: contents of a PEM-encoded file that contains the public + key, from OpenSSL. + @return: a PublicKey object + ''' + + der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY') + return cls.load_pkcs1_openssl_der(der) + + @classmethod + def load_pkcs1_openssl_der(cls, keyfile): + '''Loads a PKCS#1 DER-encoded public key file from OpenSSL. + + @param keyfile: contents of a DER-encoded file that contains the public + key, from OpenSSL. 
+ @return: a PublicKey object + ''' + + from rsa.asn1 import OpenSSLPubKey + from pyasn1.codec.der import decoder + from pyasn1.type import univ + + (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey()) + + if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'): + raise TypeError("This is not a DER-encoded OpenSSL-compatible public key") + + return cls._load_pkcs1_der(keyinfo['key'][1:]) + + + + +class PrivateKey(AbstractKey): + '''Represents a private RSA key. + + This key is also known as the 'decryption key'. It contains the 'n', 'e', + 'd', 'p', 'q' and other values. + + Supports attributes as well as dictionary-like access. Attribute access is + faster, though. + + >>> PrivateKey(3247, 65537, 833, 191, 17) + PrivateKey(3247, 65537, 833, 191, 17) + + exp1, exp2 and coef don't have to be given, they will be calculated: + + >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + >>> pk.exp1 + 55063 + >>> pk.exp2 + 10095 + >>> pk.coef + 50797 + + If you give exp1, exp2 or coef, they will be used as-is: + + >>> pk = PrivateKey(1, 2, 3, 4, 5, 6, 7, 8) + >>> pk.exp1 + 6 + >>> pk.exp2 + 7 + >>> pk.coef + 8 + + ''' + + __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef') + + def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None): + self.n = n + self.e = e + self.d = d + self.p = p + self.q = q + + # Calculate the other values if they aren't supplied + if exp1 is None: + self.exp1 = int(d % (p - 1)) + else: + self.exp1 = exp1 + + if exp2 is None: + self.exp2 = int(d % (q - 1)) + else: + self.exp2 = exp2 + + if coef is None: + self.coef = rsa.common.inverse(q, p) + else: + self.coef = coef + + def __getitem__(self, key): + return getattr(self, key) + + def __repr__(self): + return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self + + def __eq__(self, other): + if other is None: + return False + + if not isinstance(other, PrivateKey): + return False + + return (self.n == other.n and + self.e == other.e and + self.d == other.d and + self.p == other.p and + self.q == other.q and + self.exp1 == other.exp1 and + self.exp2 == other.exp2 and + self.coef == other.coef) + + def __ne__(self, other): + return not (self == other) + + @classmethod + def _load_pkcs1_der(cls, keyfile): + r'''Loads a key in PKCS#1 DER format. + + @param keyfile: contents of a DER-encoded file that contains the private + key. + @return: a PrivateKey object + + First let's construct a DER encoded key: + + >>> import base64 + >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt' + >>> der = base64.decodestring(b64der) + + This loads the file: + + >>> PrivateKey._load_pkcs1_der(der) + PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + + ''' + + from pyasn1.codec.der import decoder + (priv, _) = decoder.decode(keyfile) + + # ASN.1 contents of DER encoded private key: + # + # RSAPrivateKey ::= SEQUENCE { + # version Version, + # modulus INTEGER, -- n + # publicExponent INTEGER, -- e + # privateExponent INTEGER, -- d + # prime1 INTEGER, -- p + # prime2 INTEGER, -- q + # exponent1 INTEGER, -- d mod (p-1) + # exponent2 INTEGER, -- d mod (q-1) + # coefficient INTEGER, -- (inverse of q) mod p + # otherPrimeInfos OtherPrimeInfos OPTIONAL + # } + + if priv[0] != 0: + raise ValueError('Unable to read this file, version %s != 0' % priv[0]) + + as_ints = tuple(int(x) for x in priv[1:9]) + return cls(*as_ints) + + def _save_pkcs1_der(self): + '''Saves the private key in PKCS#1 DER format. + + @returns: the DER-encoded private key.
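The exp1, exp2 and coef values precomputed in PrivateKey.__init__ above are the standard Chinese Remainder Theorem shortcuts (d mod p-1, d mod q-1, and q^-1 mod p). In this version of the library, pkcs1.decrypt() still calls core.decrypt_int(c, d, n) directly, so the following is only a sketch of what these fields are for, not code from the package:

# Sketch: CRT-based private-key operation using the precomputed values.
# Mathematically equivalent to pow(c, d, n), but done with two
# half-size exponentiations, which is roughly 3-4x faster.
def crt_decrypt_int(c, priv_key):
    m1 = pow(c, priv_key.exp1, priv_key.p)        # c^(d mod p-1) mod p
    m2 = pow(c, priv_key.exp2, priv_key.q)        # c^(d mod q-1) mod q
    h = (priv_key.coef * (m1 - m2)) % priv_key.p  # Garner's recombination step
    return m2 + h * priv_key.q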
+ ''' + + from pyasn1.type import univ, namedtype + from pyasn1.codec.der import encoder + + class AsnPrivKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer()), + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + namedtype.NamedType('privateExponent', univ.Integer()), + namedtype.NamedType('prime1', univ.Integer()), + namedtype.NamedType('prime2', univ.Integer()), + namedtype.NamedType('exponent1', univ.Integer()), + namedtype.NamedType('exponent2', univ.Integer()), + namedtype.NamedType('coefficient', univ.Integer()), + ) + + # Create the ASN object + asn_key = AsnPrivKey() + asn_key.setComponentByName('version', 0) + asn_key.setComponentByName('modulus', self.n) + asn_key.setComponentByName('publicExponent', self.e) + asn_key.setComponentByName('privateExponent', self.d) + asn_key.setComponentByName('prime1', self.p) + asn_key.setComponentByName('prime2', self.q) + asn_key.setComponentByName('exponent1', self.exp1) + asn_key.setComponentByName('exponent2', self.exp2) + asn_key.setComponentByName('coefficient', self.coef) + + return encoder.encode(asn_key) + + @classmethod + def _load_pkcs1_pem(cls, keyfile): + '''Loads a PKCS#1 PEM-encoded private key file. + + The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and + after the "-----END RSA PRIVATE KEY-----" lines is ignored. + + @param keyfile: contents of a PEM-encoded file that contains the private + key. + @return: a PrivateKey object + ''' + + der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY')) + return cls._load_pkcs1_der(der) + + def _save_pkcs1_pem(self): + '''Saves a PKCS#1 PEM-encoded private key file. + + @return: contents of a PEM-encoded file that contains the private key. + ''' + + der = self._save_pkcs1_der() + return rsa.pem.save_pem(der, b('RSA PRIVATE KEY')) + +def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): + ''''Returns a tuple of two different primes of nbits bits each. + + The resulting p * q has exacty 2 * nbits bits, and the returned p and q + will not be equal. + + :param nbits: the number of bits in each of p and q. + :param getprime_func: the getprime function, defaults to + :py:func:`rsa.prime.getprime`. + + *Introduced in Python-RSA 3.1* + + :param accurate: whether to enable accurate mode or not. + :returns: (p, q), where p > q + + >>> (p, q) = find_p_q(128) + >>> from rsa import common + >>> common.bit_size(p * q) + 256 + + When not in accurate mode, the number of bits can be slightly less + + >>> (p, q) = find_p_q(128, accurate=False) + >>> from rsa import common + >>> common.bit_size(p * q) <= 256 + True + >>> common.bit_size(p * q) > 240 + True + + ''' + + total_bits = nbits * 2 + + # Make sure that p and q aren't too close or the factoring programs can + # factor n. + shift = nbits // 16 + pbits = nbits + shift + qbits = nbits - shift + + # Choose the two initial primes + log.debug('find_p_q(%i): Finding p', nbits) + p = getprime_func(pbits) + log.debug('find_p_q(%i): Finding q', nbits) + q = getprime_func(qbits) + + def is_acceptable(p, q): + '''Returns True iff p and q are acceptable: + + - p and q differ + - (p * q) has the right nr of bits (when accurate=True) + ''' + + if p == q: + return False + + if not accurate: + return True + + # Make sure we have just the right amount of bits + found_size = rsa.common.bit_size(p * q) + return total_bits == found_size + + # Keep choosing other primes until they match our requirements. 
+ change_p = False + while not is_acceptable(p, q): + # Change p on one iteration and q on the other + if change_p: + p = getprime_func(pbits) + else: + q = getprime_func(qbits) + + change_p = not change_p + + # We want p > q as described on + # http://www.di-mgt.com.au/rsa_alg.html#crt + return (max(p, q), min(p, q)) + +def calculate_keys(p, q, nbits): + '''Calculates an encryption and a decryption key given p and q, and + returns them as a tuple (e, d) + + ''' + + phi_n = (p - 1) * (q - 1) + + # A very common choice for e is 65537 + e = 65537 + + try: + d = rsa.common.inverse(e, phi_n) + except ValueError: + raise ValueError("e (%d) and phi_n (%d) are not relatively prime" % + (e, phi_n)) + + if (e * d) % phi_n != 1: + raise ValueError("e (%d) and d (%d) are not mult. inv. modulo " + "phi_n (%d)" % (e, d, phi_n)) + + return (e, d) + +def gen_keys(nbits, getprime_func, accurate=True): + '''Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + + :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and + ``q`` will use ``nbits/2`` bits. + :param getprime_func: either :py:func:`rsa.prime.getprime` or a function + with similar signature. + ''' + + (p, q) = find_p_q(nbits // 2, getprime_func, accurate) + (e, d) = calculate_keys(p, q, nbits // 2) + + return (p, q, e, d) + +def newkeys(nbits, accurate=True, poolsize=1): + '''Generates public and private keys, and returns them as (pub, priv). + + The public key is also known as the 'encryption key', and is a + :py:class:`rsa.PublicKey` object. The private key is also known as the + 'decryption key' and is a :py:class:`rsa.PrivateKey` object. + + :param nbits: the number of bits required to store ``n = p*q``. + :param accurate: when True, ``n`` will have exactly the number of bits you + asked for. However, this makes key generation much slower. When False, + `n`` may have slightly less bits. + :param poolsize: the number of processes to use to generate the prime + numbers. If set to a number > 1, a parallel algorithm will be used. + This requires Python 2.6 or newer. + + :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`) + + The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires + Python 2.6 or newer. + + ''' + + if nbits < 16: + raise ValueError('Key too small') + + if poolsize < 1: + raise ValueError('Pool size (%i) should be >= 1' % poolsize) + + # Determine which getprime function to use + if poolsize > 1: + from rsa import parallel + import functools + + getprime_func = functools.partial(parallel.getprime, poolsize=poolsize) + else: getprime_func = rsa.prime.getprime + + # Generate the key components + (p, q, e, d) = gen_keys(nbits, getprime_func) + + # Create the key objects + n = p * q + + return ( + PublicKey(n, e), + PrivateKey(n, e, d, p, q) + ) + +__all__ = ['PublicKey', 'PrivateKey', 'newkeys'] + +if __name__ == '__main__': + import doctest + + try: + for count in range(100): + (failures, tests) = doctest.testmod() + if failures: + break + + if (count and count % 10 == 0) or count == 1: + print('%i times' % count) + except KeyboardInterrupt: + print('Aborted') + else: + print('Doctests done') diff --git a/server/www/packages/packages-common/rsa/parallel.py b/server/www/packages/packages-common/rsa/parallel.py new file mode 100644 index 0000000..e5034ac --- /dev/null +++ b/server/www/packages/packages-common/rsa/parallel.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. 
St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Functions for parallel computation on multiple cores. + +Introduced in Python-RSA 3.1. + +.. note:: + + Requires Python 2.6 or newer. + +''' + +from __future__ import print_function + +import multiprocessing as mp + +import rsa.prime +import rsa.randnum + +def _find_prime(nbits, pipe): + while True: + integer = rsa.randnum.read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if rsa.prime.is_prime(integer): + pipe.send(integer) + return + +def getprime(nbits, poolsize): + '''Returns a prime number that can be stored in 'nbits' bits. + + Works in multiple threads at the same time. + + >>> p = getprime(128, 3) + >>> rsa.prime.is_prime(p-1) + False + >>> rsa.prime.is_prime(p) + True + >>> rsa.prime.is_prime(p+1) + False + + >>> from rsa import common + >>> common.bit_size(p) == 128 + True + + ''' + + (pipe_recv, pipe_send) = mp.Pipe(duplex=False) + + # Create processes + procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send)) + for _ in range(poolsize)] + [p.start() for p in procs] + + result = pipe_recv.recv() + + [p.terminate() for p in procs] + + return result + +__all__ = ['getprime'] + + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(100): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 10 == 0: + print('%i times' % count) + + print('Doctests done') + diff --git a/server/www/packages/packages-common/rsa/pem.py b/server/www/packages/packages-common/rsa/pem.py new file mode 100644 index 0000000..b1c3a0e --- /dev/null +++ b/server/www/packages/packages-common/rsa/pem.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Functions that load and write PEM-encoded files.''' + +import base64 +from rsa._compat import b, is_bytes + +def _markers(pem_marker): + ''' + Returns the start and end PEM markers + ''' + + if is_bytes(pem_marker): + pem_marker = pem_marker.decode('utf-8') + + return (b('-----BEGIN %s-----' % pem_marker), + b('-----END %s-----' % pem_marker)) + +def load_pem(contents, pem_marker): + '''Loads a PEM file. + + @param contents: the contents of the file to interpret + @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + when your file has '-----BEGIN RSA PRIVATE KEY-----' and + '-----END RSA PRIVATE KEY-----' markers. 
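The marker convention is symmetric with save_pem() below, so the two functions round-trip; a quick sketch (the 'DEMO DATA' marker and the byte string are arbitrary values chosen only for illustration, under the Python 2-era base64 API this module targets):

from rsa._compat import b
import rsa.pem

payload = b('\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01')  # any binary content
pem_text = rsa.pem.save_pem(payload, 'DEMO DATA')
# pem_text is now:
#   -----BEGIN DEMO DATA-----
#   MA0GCSqGSIb3DQEBAQ==
#   -----END DEMO DATA-----
assert rsa.pem.load_pem(pem_text, 'DEMO DATA') == payload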
+ + @return the base64-decoded content between the start and end markers. + + @raise ValueError: when the content is invalid, for example when the start + marker cannot be found. + + ''' + + (pem_start, pem_end) = _markers(pem_marker) + + pem_lines = [] + in_pem_part = False + + for line in contents.splitlines(): + line = line.strip() + + # Skip empty lines + if not line: + continue + + # Handle start marker + if line == pem_start: + if in_pem_part: + raise ValueError('Seen start marker "%s" twice' % pem_start) + + in_pem_part = True + continue + + # Skip stuff before first marker + if not in_pem_part: + continue + + # Handle end marker + if in_pem_part and line == pem_end: + in_pem_part = False + break + + # Load fields + if b(':') in line: + continue + + pem_lines.append(line) + + # Do some sanity checks + if not pem_lines: + raise ValueError('No PEM start marker "%s" found' % pem_start) + + if in_pem_part: + raise ValueError('No PEM end marker "%s" found' % pem_end) + + # Base64-decode the contents + pem = b('').join(pem_lines) + return base64.decodestring(pem) + + +def save_pem(contents, pem_marker): + '''Saves a PEM file. + + @param contents: the contents to encode in PEM format + @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + when your file has '-----BEGIN RSA PRIVATE KEY-----' and + '-----END RSA PRIVATE KEY-----' markers. + + @return the base64-encoded content between the start and end markers. + + ''' + + (pem_start, pem_end) = _markers(pem_marker) + + b64 = base64.encodestring(contents).replace(b('\n'), b('')) + pem_lines = [pem_start] + + for block_start in range(0, len(b64), 64): + block = b64[block_start:block_start + 64] + pem_lines.append(block) + + pem_lines.append(pem_end) + pem_lines.append(b('')) + + return b('\n').join(pem_lines) + diff --git a/server/www/packages/packages-common/rsa/pkcs1.py b/server/www/packages/packages-common/rsa/pkcs1.py new file mode 100644 index 0000000..15e4cf6 --- /dev/null +++ b/server/www/packages/packages-common/rsa/pkcs1.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Functions for PKCS#1 version 1.5 encryption and signing + +This module implements certain functionality from PKCS#1 version 1.5. For a +very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes + +At least 8 bytes of random padding is used when encrypting a message. This makes +these methods much more secure than the ones in the ``rsa`` module. + +WARNING: this module leaks information when decryption or verification fails. +The exceptions that are raised contain the Python traceback information, which +can be used to deduce where in the process the failure occurred. DO NOT PASS +SUCH INFORMATION to your users. +''' + +import hashlib +import os + +from rsa._compat import b +from rsa import common, transform, core, varblock + +# ASN.1 codes that describe the hash algorithm used. 
+HASH_ASN1 = { + 'MD5': b('\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'), + 'SHA-1': b('\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'), + 'SHA-256': b('\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'), + 'SHA-384': b('\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'), + 'SHA-512': b('\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'), +} + +HASH_METHODS = { + 'MD5': hashlib.md5, + 'SHA-1': hashlib.sha1, + 'SHA-256': hashlib.sha256, + 'SHA-384': hashlib.sha384, + 'SHA-512': hashlib.sha512, +} + +class CryptoError(Exception): + '''Base class for all exceptions in this module.''' + +class DecryptionError(CryptoError): + '''Raised when decryption fails.''' + +class VerificationError(CryptoError): + '''Raised when verification fails.''' + +def _pad_for_encryption(message, target_length): + r'''Pads the message for encryption, returning the padded message. + + :return: 00 02 RANDOM_DATA 00 MESSAGE + + >>> block = _pad_for_encryption('hello', 16) + >>> len(block) + 16 + >>> block[0:2] + '\x00\x02' + >>> block[-6:] + '\x00hello' + + ''' + + max_msglength = target_length - 11 + msglength = len(message) + + if msglength > max_msglength: + raise OverflowError('%i bytes needed for message, but there is only' + ' space for %i' % (msglength, max_msglength)) + + # Get random padding + padding = b('') + padding_length = target_length - msglength - 3 + + # We remove 0-bytes, so we'll end up with less padding than we've asked for, + # so keep adding data until we're at the correct length. + while len(padding) < padding_length: + needed_bytes = padding_length - len(padding) + + # Always read at least 8 bytes more than we need, and trim off the rest + # after removing the 0-bytes. This increases the chance of getting + # enough bytes, especially when needed_bytes is small + new_padding = os.urandom(needed_bytes + 5) + new_padding = new_padding.replace(b('\x00'), b('')) + padding = padding + new_padding[:needed_bytes] + + assert len(padding) == padding_length + + return b('').join([b('\x00\x02'), + padding, + b('\x00'), + message]) + + +def _pad_for_signing(message, target_length): + r'''Pads the message for signing, returning the padded message. + + The padding is always a repetition of FF bytes. + + :return: 00 01 PADDING 00 MESSAGE + + >>> block = _pad_for_signing('hello', 16) + >>> len(block) + 16 + >>> block[0:2] + '\x00\x01' + >>> block[-6:] + '\x00hello' + >>> block[2:-6] + '\xff\xff\xff\xff\xff\xff\xff\xff' + + ''' + + max_msglength = target_length - 11 + msglength = len(message) + + if msglength > max_msglength: + raise OverflowError('%i bytes needed for message, but there is only' + ' space for %i' % (msglength, max_msglength)) + + padding_length = target_length - msglength - 3 + + return b('').join([b('\x00\x01'), + padding_length * b('\xff'), + b('\x00'), + message]) + + +def encrypt(message, pub_key): + '''Encrypts the given message using PKCS#1 v1.5 + + :param message: the message to encrypt. Must be a byte string no longer than + ``k-11`` bytes, where ``k`` is the number of bytes needed to encode + the ``n`` component of the public key. + :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with. + :raise OverflowError: when the message is too large to fit in the padded + block. 
+ + >>> from rsa import key, common + >>> (pub_key, priv_key) = key.newkeys(256) + >>> message = 'hello' + >>> crypto = encrypt(message, pub_key) + + The crypto text should be just as long as the public key 'n' component: + + >>> len(crypto) == common.byte_size(pub_key.n) + True + + ''' + + keylength = common.byte_size(pub_key.n) + padded = _pad_for_encryption(message, keylength) + + payload = transform.bytes2int(padded) + encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n) + block = transform.int2bytes(encrypted, keylength) + + return block + +def decrypt(crypto, priv_key): + r'''Decrypts the given message using PKCS#1 v1.5 + + The decryption is considered 'failed' when the resulting cleartext doesn't + start with the bytes 00 02, or when the 00 byte between the padding and + the message cannot be found. + + :param crypto: the crypto text as returned by :py:func:`rsa.encrypt` + :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with. + :raise DecryptionError: when the decryption fails. No details are given as + to why the code thinks the decryption fails, as this would leak + information about the private key. + + + >>> import rsa + >>> (pub_key, priv_key) = rsa.newkeys(256) + + It works with strings: + + >>> crypto = encrypt('hello', pub_key) + >>> decrypt(crypto, priv_key) + 'hello' + + And with binary data: + + >>> crypto = encrypt('\x00\x00\x00\x00\x01', pub_key) + >>> decrypt(crypto, priv_key) + '\x00\x00\x00\x00\x01' + + Altering the encrypted information will *likely* cause a + :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use + :py:func:`rsa.sign`. + + + .. warning:: + + Never display the stack trace of a + :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the + code the exception occurred, and thus leaks information about the key. + It's only a tiny bit of information, but every bit makes cracking the + keys easier. + + >>> crypto = encrypt('hello', pub_key) + >>> crypto = crypto[0:5] + 'X' + crypto[6:] # change a byte + >>> decrypt(crypto, priv_key) + Traceback (most recent call last): + ... + DecryptionError: Decryption failed + + ''' + + blocksize = common.byte_size(priv_key.n) + encrypted = transform.bytes2int(crypto) + decrypted = core.decrypt_int(encrypted, priv_key.d, priv_key.n) + cleartext = transform.int2bytes(decrypted, blocksize) + + # If we can't find the cleartext marker, decryption failed. + if cleartext[0:2] != b('\x00\x02'): + raise DecryptionError('Decryption failed') + + # Find the 00 separator between the padding and the message + try: + sep_idx = cleartext.index(b('\x00'), 2) + except ValueError: + raise DecryptionError('Decryption failed') + + return cleartext[sep_idx+1:] + +def sign(message, priv_key, hash): + '''Signs the message with the private key. + + Hashes the message, then signs the hash with the given key. This is known + as a "detached signature", because the message itself isn't altered. + + :param message: the message to sign. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param priv_key: the :py:class:`rsa.PrivateKey` to sign with + :param hash: the hash method used on the message. Use 'MD5', 'SHA-1', + 'SHA-256', 'SHA-384' or 'SHA-512'. + :return: a message signature block. + :raise OverflowError: if the private key is too small to contain the + requested hash. 
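Unlike encrypt() and decrypt(), sign() and verify() carry no doctests in this file, so here is a short usage sketch of the API as defined in this module (the 512-bit key only keeps the demo fast; real deployments should use 2048 bits or more):

import rsa

(pub_key, priv_key) = rsa.newkeys(512)  # demo size; use >= 2048 in practice
message = b'Operation successful'

signature = rsa.sign(message, priv_key, 'SHA-256')  # detached signature block
rsa.verify(message, signature, pub_key)             # returns True

# Any change to the message invalidates the signature:
try:
    rsa.verify(b'Operation failed', signature, pub_key)
except rsa.VerificationError:
    print('tampering detected')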
+ + ''' + + # Get the ASN1 code for this hash method + if hash not in HASH_ASN1: + raise ValueError('Invalid hash method: %s' % hash) + asn1code = HASH_ASN1[hash] + + # Calculate the hash + hash = _hash(message, hash) + + # Encrypt the hash with the private key + cleartext = asn1code + hash + keylength = common.byte_size(priv_key.n) + padded = _pad_for_signing(cleartext, keylength) + + payload = transform.bytes2int(padded) + encrypted = core.encrypt_int(payload, priv_key.d, priv_key.n) + block = transform.int2bytes(encrypted, keylength) + + return block + +def verify(message, signature, pub_key): + '''Verifies that the signature matches the message. + + The hash method is detected automatically from the signature. + + :param message: the signed message. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param signature: the signature block, as created with :py:func:`rsa.sign`. + :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. + :raise VerificationError: when the signature doesn't match the message. + + .. warning:: + + Never display the stack trace of a + :py:class:`rsa.pkcs1.VerificationError` exception. It shows where in + the code the exception occurred, and thus leaks information about the + key. It's only a tiny bit of information, but every bit makes cracking + the keys easier. + + ''' + + blocksize = common.byte_size(pub_key.n) + encrypted = transform.bytes2int(signature) + decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n) + clearsig = transform.int2bytes(decrypted, blocksize) + + # If we can't find the signature marker, verification failed. + if clearsig[0:2] != b('\x00\x01'): + raise VerificationError('Verification failed') + + # Find the 00 separator between the padding and the payload + try: + sep_idx = clearsig.index(b('\x00'), 2) + except ValueError: + raise VerificationError('Verification failed') + + # Get the hash and the hash method + (method_name, signature_hash) = _find_method_hash(clearsig[sep_idx+1:]) + message_hash = _hash(message, method_name) + + # Compare the real hash to the hash in the signature + if message_hash != signature_hash: + raise VerificationError('Verification failed') + + return True + +def _hash(message, method_name): + '''Returns the message digest. + + :param message: the signed message. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param method_name: the hash method, must be a key of + :py:const:`HASH_METHODS`. + + ''' + + if method_name not in HASH_METHODS: + raise ValueError('Invalid hash method: %s' % method_name) + + method = HASH_METHODS[method_name] + hasher = method() + + if hasattr(message, 'read') and hasattr(message.read, '__call__'): + # read as 1K blocks + for block in varblock.yield_fixedblocks(message, 1024): + hasher.update(block) + else: + # hash the message object itself. + hasher.update(message) + + return hasher.digest() + + +def _find_method_hash(method_hash): + '''Finds the hash method and the hash itself. + + :param method_hash: ASN1 code for the hash method concatenated with the + hash itself. + + :return: tuple (method, hash) where ``method`` is the used hash method, and + ``hash`` is the hash itself. 
+ + :raise VerificationFailed: when the hash method cannot be found + + ''' + + for (hashname, asn1code) in HASH_ASN1.items(): + if not method_hash.startswith(asn1code): + continue + + return (hashname, method_hash[len(asn1code):]) + + raise VerificationError('Verification failed') + + +__all__ = ['encrypt', 'decrypt', 'sign', 'verify', + 'DecryptionError', 'VerificationError', 'CryptoError'] + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(1000): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 100 == 0: + print('%i times' % count) + + print('Doctests done') diff --git a/server/www/packages/packages-common/rsa/prime.py b/server/www/packages/packages-common/rsa/prime.py new file mode 100644 index 0000000..7422eb1 --- /dev/null +++ b/server/www/packages/packages-common/rsa/prime.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Numerical functions related to primes. + +Implementation based on the book Algorithm Design by Michael T. Goodrich and +Roberto Tamassia, 2002. +''' + +__all__ = [ 'getprime', 'are_relatively_prime'] + +import rsa.randnum + +def gcd(p, q): + '''Returns the greatest common divisor of p and q + + >>> gcd(48, 180) + 12 + ''' + + while q != 0: + if p < q: (p,q) = (q,p) + (p,q) = (q, p % q) + return p + + +def jacobi(a, b): + '''Calculates the value of the Jacobi symbol (a/b) where both a and b are + positive integers, and b is odd + + :returns: -1, 0 or 1 + ''' + + assert a > 0 + assert b > 0 + + if a == 0: return 0 + result = 1 + while a > 1: + if a & 1: + if ((a-1)*(b-1) >> 2) & 1: + result = -result + a, b = b % a, a + else: + if (((b * b) - 1) >> 3) & 1: + result = -result + a >>= 1 + if a == 0: return 0 + return result + +def jacobi_witness(x, n): + '''Returns False if n is an Euler pseudo-prime with base x, and + True otherwise. + ''' + + j = jacobi(x, n) % n + + f = pow(x, n >> 1, n) + + if j == f: return False + return True + +def randomized_primality_testing(n, k): + '''Calculates whether n is composite (which is always correct) or + prime (which is incorrect with error probability 2**-k) + + Returns False if the number is composite, and True if it's + probably prime. + ''' + + # 50% of Jacobi-witnesses can report compositness of non-prime numbers + + # The implemented algorithm using the Jacobi witness function has error + # probability q <= 0.5, according to Goodrich et. al + # + # q = 0.5 + # t = int(math.ceil(k / log(1 / q, 2))) + # So t = k / log(2, 2) = k / 1 = k + # this means we can use range(k) rather than range(t) + + for _ in range(k): + x = rsa.randnum.randint(n-1) + if jacobi_witness(x, n): return False + + return True + +def is_prime(number): + '''Returns True if the number is prime, and False otherwise. 
+ + >>> is_prime(42) + False + >>> is_prime(41) + True + ''' + + return randomized_primality_testing(number, 6) + +def getprime(nbits): + '''Returns a prime number that can be stored in 'nbits' bits. + + >>> p = getprime(128) + >>> is_prime(p-1) + False + >>> is_prime(p) + True + >>> is_prime(p+1) + False + + >>> from rsa import common + >>> common.bit_size(p) == 128 + True + + ''' + + while True: + integer = rsa.randnum.read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if is_prime(integer): + return integer + + # Retry if not prime + + +def are_relatively_prime(a, b): + '''Returns True if a and b are relatively prime, and False if they + are not. + + >>> are_relatively_prime(2, 3) + 1 + >>> are_relatively_prime(2, 4) + 0 + ''' + + d = gcd(a, b) + return (d == 1) + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(1000): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 100 == 0: + print('%i times' % count) + + print('Doctests done') diff --git a/server/www/packages/packages-common/rsa/randnum.py b/server/www/packages/packages-common/rsa/randnum.py new file mode 100644 index 0000000..0e78274 --- /dev/null +++ b/server/www/packages/packages-common/rsa/randnum.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Functions for generating random numbers.''' + +# Source inspired by code by Yesudeep Mangalapilly + +import os + +from rsa import common, transform +from rsa._compat import byte + +def read_random_bits(nbits): + '''Reads 'nbits' random bits. + + If nbits isn't a whole number of bytes, an extra byte will be appended with + only the lower bits set. + ''' + + nbytes, rbits = divmod(nbits, 8) + + # Get the random bytes + randomdata = os.urandom(nbytes) + + # Add the remaining random bits + if rbits > 0: + randomvalue = ord(os.urandom(1)) + randomvalue >>= (8 - rbits) + randomdata = byte(randomvalue) + randomdata + + return randomdata + + +def read_random_int(nbits): + '''Reads a random integer of approximately nbits bits. + ''' + + randomdata = read_random_bits(nbits) + value = transform.bytes2int(randomdata) + + # Ensure that the number is large enough to just fill out the required + # number of bits. + value |= 1 << (nbits - 1) + + return value + +def randint(maxvalue): + '''Returns a random integer x with 1 <= x <= maxvalue + + May take a very long time in specific situations. If maxvalue needs N bits + to store, the closer maxvalue is to (2 ** N) - 1, the faster this function + is. + ''' + + bit_size = common.bit_size(maxvalue) + + tries = 0 + while True: + value = read_random_int(bit_size) + if value <= maxvalue: + break + + if tries and tries % 10 == 0: + # After a lot of tries to get the right number of bits but still + # smaller than maxvalue, decrease the number of bits by 1. That'll + # dramatically increase the chances to get a large enough number. 
+ bit_size -= 1 + tries += 1 + + return value + + diff --git a/server/www/packages/packages-common/rsa/transform.py b/server/www/packages/packages-common/rsa/transform.py new file mode 100644 index 0000000..c740b2d --- /dev/null +++ b/server/www/packages/packages-common/rsa/transform.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. St眉vel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Data transformation functions. + +From bytes to a number, number to bytes, etc. +''' + +from __future__ import absolute_import + +try: + # We'll use psyco if available on 32-bit architectures to speed up code. + # Using psyco (if available) cuts down the execution time on Python 2.5 + # at least by half. + import psyco + psyco.full() +except ImportError: + pass + +import binascii +from struct import pack +from rsa import common +from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE + + +def bytes2int(raw_bytes): + r'''Converts a list of bytes or an 8-bit string to an integer. + + When using unicode strings, encode it to some encoding like UTF8 first. + + >>> (((128 * 256) + 64) * 256) + 15 + 8405007 + >>> bytes2int('\x80@\x0f') + 8405007 + + ''' + + return int(binascii.hexlify(raw_bytes), 16) + + +def _int2bytes(number, block_size=None): + r'''Converts a number to a string of bytes. + + Usage:: + + >>> _int2bytes(123456789) + '\x07[\xcd\x15' + >>> bytes2int(_int2bytes(123456789)) + 123456789 + + >>> _int2bytes(123456789, 6) + '\x00\x00\x07[\xcd\x15' + >>> bytes2int(_int2bytes(123456789, 128)) + 123456789 + + >>> _int2bytes(123456789, 3) + Traceback (most recent call last): + ... + OverflowError: Needed 4 bytes for number, but block size is 3 + + @param number: the number to convert + @param block_size: the number of bytes to output. If the number encoded to + bytes is less than this, the block will be zero-padded. When not given, + the returned block is not padded. + + @throws OverflowError when block_size is given and the number takes up more + bytes than fit into the block. + ''' + # Type checking + if not is_integer(number): + raise TypeError("You must pass an integer for 'number', not %s" % + number.__class__) + + if number < 0: + raise ValueError('Negative numbers cannot be used: %i' % number) + + # Do some bounds checking + if number == 0: + needed_bytes = 1 + raw_bytes = [ZERO_BYTE] + else: + needed_bytes = common.byte_size(number) + raw_bytes = [] + + # You cannot compare None > 0 in Python 3x. It will fail with a TypeError. + if block_size and block_size > 0: + if needed_bytes > block_size: + raise OverflowError('Needed %i bytes for number, but block size ' + 'is %i' % (needed_bytes, block_size)) + + # Convert the number to bytes. 
+    while number > 0:
+        raw_bytes.insert(0, byte(number & 0xFF))
+        number >>= 8
+
+    # Pad with zeroes to fill the block
+    if block_size and block_size > 0:
+        padding = (block_size - needed_bytes) * ZERO_BYTE
+    else:
+        padding = EMPTY_BYTE
+
+    return padding + EMPTY_BYTE.join(raw_bytes)
+
+
+def bytes_leading(raw_bytes, needle=ZERO_BYTE):
+    '''
+    Finds the number of prefixed byte occurrences in the haystack.
+
+    Useful when you want to deal with padding.
+
+    :param raw_bytes:
+        Raw bytes.
+    :param needle:
+        The byte to count. Default \000.
+    :returns:
+        The number of leading needle bytes.
+    '''
+    leading = 0
+    # Indexing keeps compatibility between Python 2.x and Python 3.x
+    _byte = needle[0]
+    for x in raw_bytes:
+        if x == _byte:
+            leading += 1
+        else:
+            break
+    return leading
+
+
+def int2bytes(number, fill_size=None, chunk_size=None, overflow=False):
+    '''
+    Convert an unsigned integer to bytes (base-256 representation).
+
+    Does not preserve leading zeros if you don't specify a chunk size or
+    fill size.
+
+    .. NOTE::
+        You must not specify both fill_size and chunk_size. Only one
+        of them is allowed.
+
+    :param number:
+        Integer value
+    :param fill_size:
+        If the optional fill size is given the length of the resulting
+        byte string is expected to be the fill size and will be padded
+        with prefix zero bytes to satisfy that length.
+    :param chunk_size:
+        If optional chunk size is given and greater than zero, pad the front of
+        the byte string with binary zeros so that the length is a multiple of
+        ``chunk_size``.
+    :param overflow:
+        ``False`` (default). If this is ``True``, no ``OverflowError``
+        will be raised when the fill_size is shorter than the length
+        of the generated byte sequence. Instead the byte sequence will
+        be returned as is.
+    :returns:
+        Raw bytes (base-256 representation).
+    :raises:
+        ``OverflowError`` when fill_size is given and the number takes up more
+        bytes than fit into the block. This requires the ``overflow``
+        argument to be set to ``False``; otherwise, no error will be raised.
+    '''
+    if number < 0:
+        raise ValueError("Number must be an unsigned integer: %d" % number)
+
+    if fill_size and chunk_size:
+        raise ValueError("You can either fill or pad chunks, but not both")
+
+    # Ensure this is an integer (a bitwise AND raises TypeError otherwise).
+    number & 1
+
+    raw_bytes = b('')
+
+    # Pack the integer one machine word at a time into bytes.
+    num = number
+    word_bits, _, max_uint, pack_type = get_word_alignment(num)
+    pack_format = ">%s" % pack_type
+    while num > 0:
+        raw_bytes = pack(pack_format, num & max_uint) + raw_bytes
+        num >>= word_bits
+    # Obtain the index of the first non-zero byte.
+    zero_leading = bytes_leading(raw_bytes)
+    if number == 0:
+        raw_bytes = ZERO_BYTE
+    # De-padding.
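+    # For illustration: packing number == 1 on a 64-bit machine yields
+    # b'\x00' * 7 + b'\x01', so zero_leading is 7 and the slice below strips
+    # the word-alignment padding down to the minimal b'\x01'.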
+    raw_bytes = raw_bytes[zero_leading:]
+
+    length = len(raw_bytes)
+    if fill_size and fill_size > 0:
+        if not overflow and length > fill_size:
+            raise OverflowError(
+                "Need %d bytes for number, but fill size is %d" %
+                (length, fill_size)
+            )
+        raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE)
+    elif chunk_size and chunk_size > 0:
+        remainder = length % chunk_size
+        if remainder:
+            padding_size = chunk_size - remainder
+            raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE)
+    return raw_bytes
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod()
+
diff --git a/server/www/packages/packages-common/rsa/util.py b/server/www/packages/packages-common/rsa/util.py
new file mode 100644
index 0000000..5bbb70b
--- /dev/null
+++ b/server/www/packages/packages-common/rsa/util.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''Utility functions.'''
+
+from __future__ import with_statement, print_function
+
+import sys
+from optparse import OptionParser
+
+import rsa.key
+
+def private_to_public():
+    '''Reads a private key and outputs the corresponding public key.'''
+
+    # Parse the CLI options
+    parser = OptionParser(usage='usage: %prog [options]',
+        description='Reads a private key and outputs the '
+        'corresponding public key. Both private and public keys use '
+        'the format described in PKCS#1 v1.5')
+
+    parser.add_option('-i', '--input', dest='infilename', type='string',
+        help='Input filename. Reads from stdin if not specified')
+    parser.add_option('-o', '--output', dest='outfilename', type='string',
+        help='Output filename. Writes to stdout if not specified')
+
+    parser.add_option('--inform', dest='inform',
+        help='key format of input - default PEM',
+        choices=('PEM', 'DER'), default='PEM')
+
+    parser.add_option('--outform', dest='outform',
+        help='key format of output - default PEM',
+        choices=('PEM', 'DER'), default='PEM')
+
+    (cli, cli_args) = parser.parse_args(sys.argv)
+
+    # Read the input data
+    if cli.infilename:
+        print('Reading private key from %s in %s format' % \
+            (cli.infilename, cli.inform), file=sys.stderr)
+        with open(cli.infilename, 'rb') as infile:
+            in_data = infile.read()
+    else:
+        print('Reading private key from stdin in %s format' % cli.inform,
+              file=sys.stderr)
+        in_data = sys.stdin.read().encode('ascii')
+
+    assert type(in_data) == bytes, type(in_data)
+
+
+    # Take the public fields and create a public key
+    priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
+    pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)
+
+    # Save to the output file
+    out_data = pub_key.save_pkcs1(cli.outform)
+
+    if cli.outfilename:
+        print('Writing public key to %s in %s format' % \
+            (cli.outfilename, cli.outform), file=sys.stderr)
+        with open(cli.outfilename, 'wb') as outfile:
+            outfile.write(out_data)
+    else:
+        print('Writing public key to stdout in %s format' % cli.outform,
+              file=sys.stderr)
+        sys.stdout.write(out_data.decode('ascii'))
+
+
diff --git a/server/www/packages/packages-common/rsa/varblock.py b/server/www/packages/packages-common/rsa/varblock.py
new file mode 100644
index 0000000..c7d96ae
--- /dev/null
+++ b/server/www/packages/packages-common/rsa/varblock.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''VARBLOCK file support
+
+The VARBLOCK file format is as follows, where || denotes byte concatenation:
+
+    FILE := VERSION || BLOCK || BLOCK ...
+
+    BLOCK := LENGTH || DATA
+
+    LENGTH := varint-encoded length of the subsequent data. Varint comes from
+    Google Protobuf, and encodes an integer into a variable number of bytes.
+    Each byte uses the 7 lowest bits to encode the value. The highest bit set
+    to 1 indicates the next byte is also part of the varint. The last byte will
+    have this bit set to 0.
+
+This file format is called the VARBLOCK format, in line with the varint format
+used to denote the block sizes.
+
+'''
+
+from rsa._compat import byte, b
+
+
+ZERO_BYTE = b('\x00')
+VARBLOCK_VERSION = 1
+
+def read_varint(infile):
+    '''Reads a varint from the file.
+
+    When the first byte to be read indicates EOF, (0, 0) is returned. When an
+    EOF occurs after at least one byte has been read, an EOFError exception is
+    raised.
+
+    @param infile: the file-like object to read from. It should have a read()
+        method.
+    @returns (varint, length), the read varint and the number of read bytes.
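+
+    For example, the two bytes 0xAC 0x02 decode as
+    (0xAC & 0x7F) + (0x02 << 7) == 300, giving the result (300, 2).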
+ ''' + + varint = 0 + read_bytes = 0 + + while True: + char = infile.read(1) + if len(char) == 0: + if read_bytes == 0: + return (0, 0) + raise EOFError('EOF while reading varint, value is %i so far' % + varint) + + byte = ord(char) + varint += (byte & 0x7F) << (7 * read_bytes) + + read_bytes += 1 + + if not byte & 0x80: + return (varint, read_bytes) + + +def write_varint(outfile, value): + '''Writes a varint to a file. + + @param outfile: the file-like object to write to. It should have a write() + method. + @returns the number of written bytes. + ''' + + # there is a big difference between 'write the value 0' (this case) and + # 'there is nothing left to write' (the false-case of the while loop) + + if value == 0: + outfile.write(ZERO_BYTE) + return 1 + + written_bytes = 0 + while value > 0: + to_write = value & 0x7f + value = value >> 7 + + if value > 0: + to_write |= 0x80 + + outfile.write(byte(to_write)) + written_bytes += 1 + + return written_bytes + + +def yield_varblocks(infile): + '''Generator, yields each block in the input file. + + @param infile: file to read, is expected to have the VARBLOCK format as + described in the module's docstring. + @yields the contents of each block. + ''' + + # Check the version number + first_char = infile.read(1) + if len(first_char) == 0: + raise EOFError('Unable to read VARBLOCK version number') + + version = ord(first_char) + if version != VARBLOCK_VERSION: + raise ValueError('VARBLOCK version %i not supported' % version) + + while True: + (block_size, read_bytes) = read_varint(infile) + + # EOF at block boundary, that's fine. + if read_bytes == 0 and block_size == 0: + break + + block = infile.read(block_size) + + read_size = len(block) + if read_size != block_size: + raise EOFError('Block size is %i, but could read only %i bytes' % + (block_size, read_size)) + + yield block + + +def yield_fixedblocks(infile, blocksize): + '''Generator, yields each block of ``blocksize`` bytes in the input file. + + :param infile: file to read and separate in blocks. + :returns: a generator that yields the contents of each block + ''' + + while True: + block = infile.read(blocksize) + + read_bytes = len(block) + if read_bytes == 0: + break + + yield block + + if read_bytes < blocksize: + break + diff --git a/server/www/packages/packages-common/six.py b/server/www/packages/packages-common/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/server/www/packages/packages-common/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. 
It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + 
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = 
_importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack 
+ del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
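+            # (Python 2 only: this fallback print_ is defined when builtins
+            # lacks a print() function, i.e. Python 2.4/2.5, where 'file' and
+            # 'unicode' are builtin types.)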
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
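+# (six ships as a single module, but imports like "from six.moves.urllib
+# import parse" require it to behave as a package, hence the empty __path__.)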
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/server/www/packages/packages-common/tornado/__init__.py b/server/www/packages/packages-common/tornado/__init__.py new file mode 100644 index 0000000..85bacc7 --- /dev/null +++ b/server/www/packages/packages-common/tornado/__init__.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The Tornado web server and tools.""" + +from __future__ import absolute_import, division, print_function, with_statement + +# version is a human-readable version number. + +# version_info is a four-tuple for programmatic comparison. The first +# three numbers are the components of the version number. The fourth +# is zero for an official release, positive for a development branch, +# or negative for a release candidate or beta (after the base version +# number has been incremented) +version = "4.3" +version_info = (4, 3, 0, 0) diff --git a/server/www/packages/packages-common/tornado/_locale_data.py b/server/www/packages/packages-common/tornado/_locale_data.py new file mode 100644 index 0000000..47c1df6 --- /dev/null +++ b/server/www/packages/packages-common/tornado/_locale_data.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# coding: utf-8 +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Data used by the tornado.locale module.""" + +from __future__ import absolute_import, division, print_function, with_statement + +# NOTE: This file is supposed to contain unicode strings, which is +# exactly what you'd get with e.g. 
u"Espa帽ol" in most python versions. +# However, Python 3.2 doesn't support the u"" syntax, so we use a u() +# function instead. tornado.util.u cannot be used because it doesn't +# support non-ascii characters on python 2. +# When we drop support for Python 3.2, we can remove the parens +# and make these plain unicode strings. +from tornado.escape import to_unicode as u + +LOCALE_NAMES = { + "af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")}, + "am_ET": {"name_en": u("Amharic"), "name": u("釆犪垱釄姏")}, + "ar_AR": {"name_en": u("Arabic"), "name": u("丕賱毓乇亘賷丞")}, + "bg_BG": {"name_en": u("Bulgarian"), "name": u("袘褗谢谐邪褉褋泻懈")}, + "bn_IN": {"name_en": u("Bengali"), "name": u("唳唳傕Σ唳")}, + "bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")}, + "ca_ES": {"name_en": u("Catalan"), "name": u("Catal脿")}, + "cs_CZ": {"name_en": u("Czech"), "name": u("膶e拧tina")}, + "cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")}, + "da_DK": {"name_en": u("Danish"), "name": u("Dansk")}, + "de_DE": {"name_en": u("German"), "name": u("Deutsch")}, + "el_GR": {"name_en": u("Greek"), "name": u("螘位位畏谓喂魏维")}, + "en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")}, + "en_US": {"name_en": u("English (US)"), "name": u("English (US)")}, + "es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Espa帽ol (Espa帽a)")}, + "es_LA": {"name_en": u("Spanish"), "name": u("Espa帽ol")}, + "et_EE": {"name_en": u("Estonian"), "name": u("Eesti")}, + "eu_ES": {"name_en": u("Basque"), "name": u("Euskara")}, + "fa_IR": {"name_en": u("Persian"), "name": u("賮丕乇爻蹖")}, + "fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")}, + "fr_CA": {"name_en": u("French (Canada)"), "name": u("Fran莽ais (Canada)")}, + "fr_FR": {"name_en": u("French"), "name": u("Fran莽ais")}, + "ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")}, + "gl_ES": {"name_en": u("Galician"), "name": u("Galego")}, + "he_IL": {"name_en": u("Hebrew"), "name": u("注讘专讬转")}, + "hi_IN": {"name_en": u("Hindi"), "name": u("啶灌た啶ㄠ啶︵")}, + "hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")}, + "hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")}, + "id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")}, + "is_IS": {"name_en": u("Icelandic"), "name": u("脥slenska")}, + "it_IT": {"name_en": u("Italian"), "name": u("Italiano")}, + "ja_JP": {"name_en": u("Japanese"), "name": u("鏃ユ湰瑾")}, + "ko_KR": {"name_en": u("Korean"), "name": u("頃滉淡鞏")}, + "lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvi懦")}, + "lv_LV": {"name_en": u("Latvian"), "name": u("Latvie拧u")}, + "mk_MK": {"name_en": u("Macedonian"), "name": u("袦邪泻械写芯薪褋泻懈")}, + "ml_IN": {"name_en": u("Malayalam"), "name": u("啻床啻淳啻赤磦")}, + "ms_MY": {"name_en": u("Malay"), "name": u("Bahasa Melayu")}, + "nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokm氓l)")}, + "nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")}, + "nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")}, + "pa_IN": {"name_en": u("Punjabi"), "name": u("啜┌啜溹ň啜﹢")}, + "pl_PL": {"name_en": u("Polish"), "name": u("Polski")}, + "pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Portugu锚s (Brasil)")}, + "pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Portugu锚s (Portugal)")}, + "ro_RO": {"name_en": u("Romanian"), "name": u("Rom芒n膬")}, + "ru_RU": {"name_en": u("Russian"), "name": u("袪褍褋褋泻懈泄")}, + "sk_SK": {"name_en": u("Slovak"), "name": u("Sloven膷ina")}, + "sl_SI": {"name_en": u("Slovenian"), "name": u("Sloven拧膷ina")}, + "sq_AL": {"name_en": u("Albanian"), "name": 
u("Shqip")}, + "sr_RS": {"name_en": u("Serbian"), "name": u("小褉锌褋泻懈")}, + "sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")}, + "sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")}, + "ta_IN": {"name_en": u("Tamil"), "name": u("喈む喈苦喁")}, + "te_IN": {"name_en": u("Telugu"), "name": u("喟む眴喟侧眮喟椸眮")}, + "th_TH": {"name_en": u("Thai"), "name": u("喔犩覆喔┼覆喙勦笚喔")}, + "tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")}, + "tr_TR": {"name_en": u("Turkish"), "name": u("T眉rk莽e")}, + "uk_UA": {"name_en": u("Ukraini "), "name": u("校泻褉邪褩薪褋褜泻邪")}, + "vi_VN": {"name_en": u("Vietnamese"), "name": u("Ti岷縩g Vi峄噒")}, + "zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("涓枃(绠浣)")}, + "zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("涓枃(绻侀珨)")}, +} diff --git a/server/www/packages/packages-common/tornado/auth.py b/server/www/packages/packages-common/tornado/auth.py new file mode 100644 index 0000000..ff7172a --- /dev/null +++ b/server/www/packages/packages-common/tornado/auth.py @@ -0,0 +1,1139 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""This module contains implementations of various third-party +authentication schemes. + +All the classes in this file are class mixins designed to be used with +the `tornado.web.RequestHandler` class. They are used in two ways: + +* On a login handler, use methods such as ``authenticate_redirect()``, + ``authorize_redirect()``, and ``get_authenticated_user()`` to + establish the user's identity and store authentication tokens to your + database and/or cookies. +* In non-login handlers, use methods such as ``facebook_request()`` + or ``twitter_request()`` to use the authentication tokens to make + requests to the respective services. + +They all take slightly different arguments due to the fact all these +services implement authentication and authorization slightly differently. +See the individual service classes below for complete documentation. + +Example usage for Google OAuth: + +.. testcode:: + + class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, + tornado.auth.GoogleOAuth2Mixin): + @tornado.gen.coroutine + def get(self): + if self.get_argument('code', False): + user = yield self.get_authenticated_user( + redirect_uri='http://your.site.com/auth/google', + code=self.get_argument('code')) + # Save the user with e.g. set_secure_cookie + else: + yield self.authorize_redirect( + redirect_uri='http://your.site.com/auth/google', + client_id=self.settings['google_oauth']['key'], + scope=['profile', 'email'], + response_type='code', + extra_params={'approval_prompt': 'auto'}) + +.. testoutput:: + :hide: + + +.. versionchanged:: 4.0 + All of the callback interfaces in this module are now guaranteed + to run their callback with an argument of ``None`` on error. + Previously some functions would do this while others would simply + terminate the request on their own. This change also ensures that + errors are more consistently reported through the ``Future`` interfaces. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import base64 +import binascii +import functools +import hashlib +import hmac +import time +import uuid + +from tornado.concurrent import TracebackFuture, return_future, chain_future +from tornado import gen +from tornado import httpclient +from tornado import escape +from tornado.httputil import url_concat +from tornado.log import gen_log +from tornado.stack_context import ExceptionStackContext +from tornado.util import u, unicode_type, ArgReplacer + +try: + import urlparse # py2 +except ImportError: + import urllib.parse as urlparse # py3 + +try: + import urllib.parse as urllib_parse # py3 +except ImportError: + import urllib as urllib_parse # py2 + +try: + long # py2 +except NameError: + long = int # py3 + + +class AuthError(Exception): + pass + + +def _auth_future_to_callback(callback, future): + try: + result = future.result() + except AuthError as e: + gen_log.warning(str(e)) + result = None + callback(result) + + +def _auth_return_future(f): + """Similar to tornado.concurrent.return_future, but uses the auth + module's legacy callback interface. + + Note that when using this decorator the ``callback`` parameter + inside the function will actually be a future. + """ + replacer = ArgReplacer(f, 'callback') + + @functools.wraps(f) + def wrapper(*args, **kwargs): + future = TracebackFuture() + callback, args, kwargs = replacer.replace(future, args, kwargs) + if callback is not None: + future.add_done_callback( + functools.partial(_auth_future_to_callback, callback)) + + def handle_exception(typ, value, tb): + if future.done(): + return False + else: + future.set_exc_info((typ, value, tb)) + return True + with ExceptionStackContext(handle_exception): + f(*args, **kwargs) + return future + return wrapper + + +class OpenIdMixin(object): + """Abstract implementation of OpenID and Attribute Exchange. + + Class attributes: + + * ``_OPENID_ENDPOINT``: the identity provider's URI. + """ + @return_future + def authenticate_redirect(self, callback_uri=None, + ax_attrs=["name", "email", "language", "username"], + callback=None): + """Redirects to the authentication URL for this service. + + After authentication, the service will redirect back to the given + callback URI with additional parameters including ``openid.mode``. + + We request the given attributes for the authenticated user by + default (name, email, language, and username). If you don't need + all those attributes for your app, you can request fewer with + the ax_attrs keyword argument. + + .. versionchanged:: 3.1 + Returns a `.Future` and takes an optional callback. These are + not strictly necessary as this method is synchronous, + but they are supplied for consistency with + `OAuthMixin.authorize_redirect`. + """ + callback_uri = callback_uri or self.request.uri + args = self._openid_args(callback_uri, ax_attrs=ax_attrs) + self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args)) + callback() + + @_auth_return_future + def get_authenticated_user(self, callback, http_client=None): + """Fetches the authenticated user data upon redirect. + + This method should be called by the handler that receives the + redirect from the `authenticate_redirect()` method (which is + often the same as the one that calls it; in that case you would + call `get_authenticated_user` if the ``openid.mode`` parameter + is present and `authenticate_redirect` if it is not). + + The result of this method will generally be used to set a cookie. 
+ """ + # Verify the OpenID response via direct request to the OP + args = dict((k, v[-1]) for k, v in self.request.arguments.items()) + args["openid.mode"] = u("check_authentication") + url = self._OPENID_ENDPOINT + if http_client is None: + http_client = self.get_auth_http_client() + http_client.fetch(url, functools.partial( + self._on_authentication_verified, callback), + method="POST", body=urllib_parse.urlencode(args)) + + def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None): + url = urlparse.urljoin(self.request.full_url(), callback_uri) + args = { + "openid.ns": "http://specs.openid.net/auth/2.0", + "openid.claimed_id": + "http://specs.openid.net/auth/2.0/identifier_select", + "openid.identity": + "http://specs.openid.net/auth/2.0/identifier_select", + "openid.return_to": url, + "openid.realm": urlparse.urljoin(url, '/'), + "openid.mode": "checkid_setup", + } + if ax_attrs: + args.update({ + "openid.ns.ax": "http://openid.net/srv/ax/1.0", + "openid.ax.mode": "fetch_request", + }) + ax_attrs = set(ax_attrs) + required = [] + if "name" in ax_attrs: + ax_attrs -= set(["name", "firstname", "fullname", "lastname"]) + required += ["firstname", "fullname", "lastname"] + args.update({ + "openid.ax.type.firstname": + "http://axschema.org/namePerson/first", + "openid.ax.type.fullname": + "http://axschema.org/namePerson", + "openid.ax.type.lastname": + "http://axschema.org/namePerson/last", + }) + known_attrs = { + "email": "http://axschema.org/contact/email", + "language": "http://axschema.org/pref/language", + "username": "http://axschema.org/namePerson/friendly", + } + for name in ax_attrs: + args["openid.ax.type." + name] = known_attrs[name] + required.append(name) + args["openid.ax.required"] = ",".join(required) + if oauth_scope: + args.update({ + "openid.ns.oauth": + "http://specs.openid.net/extensions/oauth/1.0", + "openid.oauth.consumer": self.request.host.split(":")[0], + "openid.oauth.scope": oauth_scope, + }) + return args + + def _on_authentication_verified(self, future, response): + if response.error or b"is_valid:true" not in response.body: + future.set_exception(AuthError( + "Invalid OpenID response: %s" % (response.error or + response.body))) + return + + # Make sure we got back at least an email from attribute exchange + ax_ns = None + for name in self.request.arguments: + if name.startswith("openid.ns.") and \ + self.get_argument(name) == u("http://openid.net/srv/ax/1.0"): + ax_ns = name[10:] + break + + def get_ax_arg(uri): + if not ax_ns: + return u("") + prefix = "openid." + ax_ns + ".type." + ax_name = None + for name in self.request.arguments.keys(): + if self.get_argument(name) == uri and name.startswith(prefix): + part = name[len(prefix):] + ax_name = "openid." + ax_ns + ".value." 
+ part + break + if not ax_name: + return u("") + return self.get_argument(ax_name, u("")) + + email = get_ax_arg("http://axschema.org/contact/email") + name = get_ax_arg("http://axschema.org/namePerson") + first_name = get_ax_arg("http://axschema.org/namePerson/first") + last_name = get_ax_arg("http://axschema.org/namePerson/last") + username = get_ax_arg("http://axschema.org/namePerson/friendly") + locale = get_ax_arg("http://axschema.org/pref/language").lower() + user = dict() + name_parts = [] + if first_name: + user["first_name"] = first_name + name_parts.append(first_name) + if last_name: + user["last_name"] = last_name + name_parts.append(last_name) + if name: + user["name"] = name + elif name_parts: + user["name"] = u(" ").join(name_parts) + elif email: + user["name"] = email.split("@")[0] + if email: + user["email"] = email + if locale: + user["locale"] = locale + if username: + user["username"] = username + claimed_id = self.get_argument("openid.claimed_id", None) + if claimed_id: + user["claimed_id"] = claimed_id + future.set_result(user) + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + """ + return httpclient.AsyncHTTPClient() + + +class OAuthMixin(object): + """Abstract implementation of OAuth 1.0 and 1.0a. + + See `TwitterMixin` below for an example implementation. + + Class attributes: + + * ``_OAUTH_AUTHORIZE_URL``: The service's OAuth authorization url. + * ``_OAUTH_ACCESS_TOKEN_URL``: The service's OAuth access token url. + * ``_OAUTH_VERSION``: May be either "1.0" or "1.0a". + * ``_OAUTH_NO_CALLBACKS``: Set this to True if the service requires + advance registration of callbacks. + + Subclasses must also override the `_oauth_get_user_future` and + `_oauth_consumer_token` methods. + """ + @return_future + def authorize_redirect(self, callback_uri=None, extra_params=None, + http_client=None, callback=None): + """Redirects the user to obtain OAuth authorization for this service. + + The ``callback_uri`` may be omitted if you have previously + registered a callback URI with the third-party service. For + some services (including Friendfeed), you must use a + previously-registered callback URI and cannot specify a + callback via this method. + + This method sets a cookie called ``_oauth_request_token`` which is + subsequently used (and cleared) in `get_authenticated_user` for + security purposes. + + Note that this method is asynchronous, although it calls + `.RequestHandler.finish` for you so it may not be necessary + to pass a callback or use the `.Future` it returns. However, + if this method is called from a function decorated with + `.gen.coroutine`, you must call it with ``yield`` to keep the + response from being closed prematurely. + + .. versionchanged:: 3.1 + Now returns a `.Future` and takes an optional callback, for + compatibility with `.gen.coroutine`. 
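+
+        Illustrative call (not from the original docstring): a login
+        handler's ``get()`` decorated with `.gen.coroutine` can simply
+        ``yield self.authorize_redirect()``; the user then returns to the
+        registered callback URL, where `get_authenticated_user` completes
+        the handshake.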
+ """ + if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False): + raise Exception("This service does not support oauth_callback") + if http_client is None: + http_client = self.get_auth_http_client() + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + http_client.fetch( + self._oauth_request_token_url(callback_uri=callback_uri, + extra_params=extra_params), + functools.partial( + self._on_request_token, + self._OAUTH_AUTHORIZE_URL, + callback_uri, + callback)) + else: + http_client.fetch( + self._oauth_request_token_url(), + functools.partial( + self._on_request_token, self._OAUTH_AUTHORIZE_URL, + callback_uri, + callback)) + + @_auth_return_future + def get_authenticated_user(self, callback, http_client=None): + """Gets the OAuth authorized user and access token. + + This method should be called from the handler for your + OAuth callback URL to complete the registration process. We run the + callback with the authenticated user dictionary. This dictionary + will contain an ``access_key`` which can be used to make authorized + requests to this service on behalf of the user. The dictionary will + also contain other fields such as ``name``, depending on the service + used. + """ + future = callback + request_key = escape.utf8(self.get_argument("oauth_token")) + oauth_verifier = self.get_argument("oauth_verifier", None) + request_cookie = self.get_cookie("_oauth_request_token") + if not request_cookie: + future.set_exception(AuthError( + "Missing OAuth request token cookie")) + return + self.clear_cookie("_oauth_request_token") + cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")] + if cookie_key != request_key: + future.set_exception(AuthError( + "Request token does not match cookie")) + return + token = dict(key=cookie_key, secret=cookie_secret) + if oauth_verifier: + token["verifier"] = oauth_verifier + if http_client is None: + http_client = self.get_auth_http_client() + http_client.fetch(self._oauth_access_token_url(token), + functools.partial(self._on_access_token, callback)) + + def _oauth_request_token_url(self, callback_uri=None, extra_params=None): + consumer_token = self._oauth_consumer_token() + url = self._OAUTH_REQUEST_TOKEN_URL + args = dict( + oauth_consumer_key=escape.to_basestring(consumer_token["key"]), + oauth_signature_method="HMAC-SHA1", + oauth_timestamp=str(int(time.time())), + oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), + oauth_version="1.0", + ) + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + if callback_uri == "oob": + args["oauth_callback"] = "oob" + elif callback_uri: + args["oauth_callback"] = urlparse.urljoin( + self.request.full_url(), callback_uri) + if extra_params: + args.update(extra_params) + signature = _oauth10a_signature(consumer_token, "GET", url, args) + else: + signature = _oauth_signature(consumer_token, "GET", url, args) + + args["oauth_signature"] = signature + return url + "?" + urllib_parse.urlencode(args) + + def _on_request_token(self, authorize_url, callback_uri, callback, + response): + if response.error: + raise Exception("Could not get request token: %s" % response.error) + request_token = _oauth_parse_response(response.body) + data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" + + base64.b64encode(escape.utf8(request_token["secret"]))) + self.set_cookie("_oauth_request_token", data) + args = dict(oauth_token=request_token["key"]) + if callback_uri == "oob": + self.finish(authorize_url + "?" 
+ urllib_parse.urlencode(args)) + callback() + return + elif callback_uri: + args["oauth_callback"] = urlparse.urljoin( + self.request.full_url(), callback_uri) + self.redirect(authorize_url + "?" + urllib_parse.urlencode(args)) + callback() + + def _oauth_access_token_url(self, request_token): + consumer_token = self._oauth_consumer_token() + url = self._OAUTH_ACCESS_TOKEN_URL + args = dict( + oauth_consumer_key=escape.to_basestring(consumer_token["key"]), + oauth_token=escape.to_basestring(request_token["key"]), + oauth_signature_method="HMAC-SHA1", + oauth_timestamp=str(int(time.time())), + oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), + oauth_version="1.0", + ) + if "verifier" in request_token: + args["oauth_verifier"] = request_token["verifier"] + + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + signature = _oauth10a_signature(consumer_token, "GET", url, args, + request_token) + else: + signature = _oauth_signature(consumer_token, "GET", url, args, + request_token) + + args["oauth_signature"] = signature + return url + "?" + urllib_parse.urlencode(args) + + def _on_access_token(self, future, response): + if response.error: + future.set_exception(AuthError("Could not fetch access token")) + return + + access_token = _oauth_parse_response(response.body) + self._oauth_get_user_future(access_token).add_done_callback( + functools.partial(self._on_oauth_get_user, access_token, future)) + + def _oauth_consumer_token(self): + """Subclasses must override this to return their OAuth consumer keys. + + The return value should be a `dict` with keys ``key`` and ``secret``. + """ + raise NotImplementedError() + + @return_future + def _oauth_get_user_future(self, access_token, callback): + """Subclasses must override this to get basic information about the + user. + + Should return a `.Future` whose result is a dictionary + containing information about the user, which may have been + retrieved by using ``access_token`` to make a request to the + service. + + The access token will be added to the returned dictionary to make + the result of `get_authenticated_user`. + + For backwards compatibility, the callback-based ``_oauth_get_user`` + method is also supported. + """ + # By default, call the old-style _oauth_get_user, but new code + # should override this method instead. + self._oauth_get_user(access_token, callback) + + def _oauth_get_user(self, access_token, callback): + raise NotImplementedError() + + def _on_oauth_get_user(self, access_token, future, user_future): + if user_future.exception() is not None: + future.set_exception(user_future.exception()) + return + user = user_future.result() + if not user: + future.set_exception(AuthError("Error getting user")) + return + user["access_token"] = access_token + future.set_result(user) + + def _oauth_request_parameters(self, url, access_token, parameters={}, + method="GET"): + """Returns the OAuth parameters as a dict for the given request. + + parameters should include all POST arguments and query string arguments + that will be sent with the request. 
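+
+ An editorial sketch (not upstream text) of how the result is meant
+ to be used, mirroring what `twitter_request` below does::
+
+ args = {"count": "10"} # request parameters to be signed
+ args.update(self._oauth_request_parameters(url, access_token, args))
+ url += "?" + urllib_parse.urlencode(args) # signed request URL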
+ """ + consumer_token = self._oauth_consumer_token() + base_args = dict( + oauth_consumer_key=escape.to_basestring(consumer_token["key"]), + oauth_token=escape.to_basestring(access_token["key"]), + oauth_signature_method="HMAC-SHA1", + oauth_timestamp=str(int(time.time())), + oauth_nonce=escape.to_basestring(binascii.b2a_hex(uuid.uuid4().bytes)), + oauth_version="1.0", + ) + args = {} + args.update(base_args) + args.update(parameters) + if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": + signature = _oauth10a_signature(consumer_token, method, url, args, + access_token) + else: + signature = _oauth_signature(consumer_token, method, url, args, + access_token) + base_args["oauth_signature"] = escape.to_basestring(signature) + return base_args + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + """ + return httpclient.AsyncHTTPClient() + + +class OAuth2Mixin(object): + """Abstract implementation of OAuth 2.0. + + See `FacebookGraphMixin` or `GoogleOAuth2Mixin` below for example + implementations. + + Class attributes: + + * ``_OAUTH_AUTHORIZE_URL``: The service's authorization url. + * ``_OAUTH_ACCESS_TOKEN_URL``: The service's access token url. + """ + @return_future + def authorize_redirect(self, redirect_uri=None, client_id=None, + client_secret=None, extra_params=None, + callback=None, scope=None, response_type="code"): + """Redirects the user to obtain OAuth authorization for this service. + + Some providers require that you register a redirect URL with + your application instead of passing one via this method. You + should call this method to log the user in, and then call + ``get_authenticated_user`` in the handler for your + redirect URL to complete the authorization process. + + .. versionchanged:: 3.1 + Returns a `.Future` and takes an optional callback. These are + not strictly necessary as this method is synchronous, + but they are supplied for consistency with + `OAuthMixin.authorize_redirect`. + """ + args = { + "redirect_uri": redirect_uri, + "client_id": client_id, + "response_type": response_type + } + if extra_params: + args.update(extra_params) + if scope: + args['scope'] = ' '.join(scope) + self.redirect( + url_concat(self._OAUTH_AUTHORIZE_URL, args)) + callback() + + def _oauth_request_token_url(self, redirect_uri=None, client_id=None, + client_secret=None, code=None, + extra_params=None): + url = self._OAUTH_ACCESS_TOKEN_URL + args = dict( + redirect_uri=redirect_uri, + code=code, + client_id=client_id, + client_secret=client_secret, + ) + if extra_params: + args.update(extra_params) + return url_concat(url, args) + + @_auth_return_future + def oauth2_request(self, url, callback, access_token=None, + post_args=None, **args): + """Fetches the given URL auth an OAuth2 access token. + + If the request is a POST, ``post_args`` should be provided. Query + string arguments should be given as keyword arguments. + + Example usage: + + ..testcode:: + + class MainHandler(tornado.web.RequestHandler, + tornado.auth.FacebookGraphMixin): + @tornado.web.authenticated + @tornado.gen.coroutine + def get(self): + new_entry = yield self.oauth2_request( + "https://graph.facebook.com/me/feed", + post_args={"message": "I am posting from my Tornado application!"}, + access_token=self.current_user["access_token"]) + + if not new_entry: + # Call failed; perhaps missing permission? 
+ yield self.authorize_redirect() + return + self.finish("Posted a message!") + + .. testoutput:: + :hide: + + .. versionadded:: 4.3 + """ + all_args = {} + if access_token: + all_args["access_token"] = access_token + all_args.update(args) + + if all_args: + url += "?" + urllib_parse.urlencode(all_args) + callback = functools.partial(self._on_oauth2_request, callback) + http = self.get_auth_http_client() + if post_args is not None: + http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), + callback=callback) + else: + http.fetch(url, callback=callback) + + def _on_oauth2_request(self, future, response): + if response.error: + future.set_exception(AuthError("Error response %s fetching %s" % + (response.error, response.request.url))) + return + + future.set_result(escape.json_decode(response.body)) + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + + .. versionadded:: 4.3 + """ + return httpclient.AsyncHTTPClient() + + +class TwitterMixin(OAuthMixin): + """Twitter OAuth authentication. + + To authenticate with Twitter, register your application with + Twitter at http://twitter.com/apps. Then copy your Consumer Key + and Consumer Secret to the application + `~tornado.web.Application.settings` ``twitter_consumer_key`` and + ``twitter_consumer_secret``. Use this mixin on the handler for the + URL you registered as your application's callback URL. + + When your application is set up, you can use this mixin like this + to authenticate the user with Twitter and get access to their stream: + + .. testcode:: + + class TwitterLoginHandler(tornado.web.RequestHandler, + tornado.auth.TwitterMixin): + @tornado.gen.coroutine + def get(self): + if self.get_argument("oauth_token", None): + user = yield self.get_authenticated_user() + # Save the user using e.g. set_secure_cookie() + else: + yield self.authorize_redirect() + + .. testoutput:: + :hide: + + The user object returned by `~OAuthMixin.get_authenticated_user` + includes the attributes ``username``, ``name``, ``access_token``, + and all of the custom Twitter user attributes described at + https://dev.twitter.com/docs/api/1.1/get/users/show + """ + _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token" + _OAUTH_ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token" + _OAUTH_AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize" + _OAUTH_AUTHENTICATE_URL = "https://api.twitter.com/oauth/authenticate" + _OAUTH_NO_CALLBACKS = False + _TWITTER_BASE_URL = "https://api.twitter.com/1.1" + + @return_future + def authenticate_redirect(self, callback_uri=None, callback=None): + """Just like `~OAuthMixin.authorize_redirect`, but + auto-redirects if authorized. + + This is generally the right interface to use if you are using + Twitter for single-sign on. + + .. versionchanged:: 3.1 + Now returns a `.Future` and takes an optional callback, for + compatibility with `.gen.coroutine`. + """ + http = self.get_auth_http_client() + http.fetch(self._oauth_request_token_url(callback_uri=callback_uri), + functools.partial( + self._on_request_token, self._OAUTH_AUTHENTICATE_URL, + None, callback)) + + @_auth_return_future + def twitter_request(self, path, callback=None, access_token=None, + post_args=None, **args): + """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor`` + + The path should not include the format or API version number. 
+ (we automatically use JSON format and API version 1). + + If the request is a POST, ``post_args`` should be provided. Query + string arguments should be given as keyword arguments. + + All the Twitter methods are documented at http://dev.twitter.com/ + + Many methods require an OAuth access token which you can + obtain through `~OAuthMixin.authorize_redirect` and + `~OAuthMixin.get_authenticated_user`. The user returned through that + process includes an 'access_token' attribute that can be used + to make authenticated requests via this method. Example + usage: + + .. testcode:: + + class MainHandler(tornado.web.RequestHandler, + tornado.auth.TwitterMixin): + @tornado.web.authenticated + @tornado.gen.coroutine + def get(self): + new_entry = yield self.twitter_request( + "/statuses/update", + post_args={"status": "Testing Tornado Web Server"}, + access_token=self.current_user["access_token"]) + if not new_entry: + # Call failed; perhaps missing permission? + yield self.authorize_redirect() + return + self.finish("Posted a message!") + + .. testoutput:: + :hide: + + """ + if path.startswith('http:') or path.startswith('https:'): + # Raw urls are useful for e.g. search which doesn't follow the + # usual pattern: http://search.twitter.com/search.json + url = path + else: + url = self._TWITTER_BASE_URL + path + ".json" + # Add the OAuth resource request signature if we have credentials + if access_token: + all_args = {} + all_args.update(args) + all_args.update(post_args or {}) + method = "POST" if post_args is not None else "GET" + oauth = self._oauth_request_parameters( + url, access_token, all_args, method=method) + args.update(oauth) + if args: + url += "?" + urllib_parse.urlencode(args) + http = self.get_auth_http_client() + http_callback = functools.partial(self._on_twitter_request, callback) + if post_args is not None: + http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), + callback=http_callback) + else: + http.fetch(url, callback=http_callback) + + def _on_twitter_request(self, future, response): + if response.error: + future.set_exception(AuthError( + "Error response %s fetching %s" % (response.error, + response.request.url))) + return + future.set_result(escape.json_decode(response.body)) + + def _oauth_consumer_token(self): + self.require_setting("twitter_consumer_key", "Twitter OAuth") + self.require_setting("twitter_consumer_secret", "Twitter OAuth") + return dict( + key=self.settings["twitter_consumer_key"], + secret=self.settings["twitter_consumer_secret"]) + + @gen.coroutine + def _oauth_get_user_future(self, access_token): + user = yield self.twitter_request( + "/account/verify_credentials", + access_token=access_token) + if user: + user["username"] = user["screen_name"] + raise gen.Return(user) + + +class GoogleOAuth2Mixin(OAuth2Mixin): + """Google authentication using OAuth2. + + In order to use, register your application with Google and copy the + relevant parameters to your application settings. + + * Go to the Google Dev Console at http://console.developers.google.com + * Select a project, or create a new one. + * In the sidebar on the left, select APIs & Auth. + * In the list of APIs, find the Google+ API service and set it to ON. + * In the sidebar on the left, select Credentials. + * In the OAuth section of the page, select Create New Client ID. + * Set the Redirect URI to point to your auth handler + * Copy the "Client secret" and "Client ID" to the application settings as + {"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}} + + .. 
versionadded:: 3.2 + """ + _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/auth" + _OAUTH_ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token" + _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" + _OAUTH_NO_CALLBACKS = False + _OAUTH_SETTINGS_KEY = 'google_oauth' + + @_auth_return_future + def get_authenticated_user(self, redirect_uri, code, callback): + """Handles the login for the Google user, returning an access token. + + The result is a dictionary containing an ``access_token`` field + ([among others](https://developers.google.com/identity/protocols/OAuth2WebServer#handlingtheresponse)). + Unlike other ``get_authenticated_user`` methods in this package, + this method does not return any additional information about the user. + The returned access token can be used with `OAuth2Mixin.oauth2_request` + to request additional information (perhaps from + ``https://www.googleapis.com/oauth2/v2/userinfo``) + + Example usage: + + .. testcode:: + + class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, + tornado.auth.GoogleOAuth2Mixin): + @tornado.gen.coroutine + def get(self): + if self.get_argument('code', False): + access = yield self.get_authenticated_user( + redirect_uri='http://your.site.com/auth/google', + code=self.get_argument('code')) + user = yield self.oauth2_request( + "https://www.googleapis.com/oauth2/v1/userinfo", + access_token=access["access_token"]) + # Save the user and access token with + # e.g. set_secure_cookie. + else: + yield self.authorize_redirect( + redirect_uri='http://your.site.com/auth/google', + client_id=self.settings['google_oauth']['key'], + scope=['profile', 'email'], + response_type='code', + extra_params={'approval_prompt': 'auto'}) + + .. testoutput:: + :hide: + + """ + http = self.get_auth_http_client() + body = urllib_parse.urlencode({ + "redirect_uri": redirect_uri, + "code": code, + "client_id": self.settings[self._OAUTH_SETTINGS_KEY]['key'], + "client_secret": self.settings[self._OAUTH_SETTINGS_KEY]['secret'], + "grant_type": "authorization_code", + }) + + http.fetch(self._OAUTH_ACCESS_TOKEN_URL, + functools.partial(self._on_access_token, callback), + method="POST", headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=body) + + def _on_access_token(self, future, response): + """Callback function for the exchange to the access token.""" + if response.error: + future.set_exception(AuthError('Google auth error: %s' % str(response))) + return + + args = escape.json_decode(response.body) + future.set_result(args) + + +class FacebookGraphMixin(OAuth2Mixin): + """Facebook authentication using the new Graph API and OAuth2.""" + _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?" + _OAUTH_AUTHORIZE_URL = "https://www.facebook.com/dialog/oauth?" + _OAUTH_NO_CALLBACKS = False + _FACEBOOK_BASE_URL = "https://graph.facebook.com" + + @_auth_return_future + def get_authenticated_user(self, redirect_uri, client_id, client_secret, + code, callback, extra_fields=None): + """Handles the login for the Facebook user, returning a user object. + + Example usage: + + .. 
testcode::
+
+ class FacebookGraphLoginHandler(tornado.web.RequestHandler,
+ tornado.auth.FacebookGraphMixin):
+ @tornado.gen.coroutine
+ def get(self):
+ if self.get_argument("code", False):
+ user = yield self.get_authenticated_user(
+ redirect_uri='/auth/facebookgraph/',
+ client_id=self.settings["facebook_api_key"],
+ client_secret=self.settings["facebook_secret"],
+ code=self.get_argument("code"))
+ # Save the user with e.g. set_secure_cookie
+ else:
+ yield self.authorize_redirect(
+ redirect_uri='/auth/facebookgraph/',
+ client_id=self.settings["facebook_api_key"],
+ extra_params={"scope": "read_stream,offline_access"})
+
+ .. testoutput::
+ :hide:
+
+ """
+ http = self.get_auth_http_client()
+ args = {
+ "redirect_uri": redirect_uri,
+ "code": code,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ }
+
+ fields = set(['id', 'name', 'first_name', 'last_name',
+ 'locale', 'picture', 'link'])
+ if extra_fields:
+ fields.update(extra_fields)
+
+ http.fetch(self._oauth_request_token_url(**args),
+ functools.partial(self._on_access_token, redirect_uri, client_id,
+ client_secret, callback, fields))
+
+ def _on_access_token(self, redirect_uri, client_id, client_secret,
+ future, fields, response):
+ if response.error:
+ future.set_exception(AuthError('Facebook auth error: %s' % str(response)))
+ return
+
+ args = urlparse.parse_qs(escape.native_str(response.body))
+ session = {
+ "access_token": args["access_token"][-1],
+ "expires": args.get("expires")
+ }
+
+ self.facebook_request(
+ path="/me",
+ callback=functools.partial(
+ self._on_get_user_info, future, session, fields),
+ access_token=session["access_token"],
+ fields=",".join(fields)
+ )
+
+ def _on_get_user_info(self, future, session, fields, user):
+ if user is None:
+ future.set_result(None)
+ return
+
+ fieldmap = {}
+ for field in fields:
+ fieldmap[field] = user.get(field)
+
+ fieldmap.update({"access_token": session["access_token"], "session_expires": session.get("expires")})
+ future.set_result(fieldmap)
+
+ @_auth_return_future
+ def facebook_request(self, path, callback, access_token=None,
+ post_args=None, **args):
+ """Fetches the given relative API path, e.g., "/btaylor/picture"
+
+ If the request is a POST, ``post_args`` should be provided. Query
+ string arguments should be given as keyword arguments.
+
+ An introduction to the Facebook Graph API can be found at
+ http://developers.facebook.com/docs/api
+
+ Many methods require an OAuth access token which you can
+ obtain through `~OAuth2Mixin.authorize_redirect` and
+ `get_authenticated_user`. The user returned through that
+ process includes an ``access_token`` attribute that can be
+ used to make authenticated requests via this method.
+
+ Example usage:
+
+ .. testcode::
+
+ class MainHandler(tornado.web.RequestHandler,
+ tornado.auth.FacebookGraphMixin):
+ @tornado.web.authenticated
+ @tornado.gen.coroutine
+ def get(self):
+ new_entry = yield self.facebook_request(
+ "/me/feed",
+ post_args={"message": "I am posting from my Tornado application!"},
+ access_token=self.current_user["access_token"])
+
+ if not new_entry:
+ # Call failed; perhaps missing permission?
+ yield self.authorize_redirect()
+ return
+ self.finish("Posted a message!")
+
+ .. testoutput::
+ :hide:
+
+ The given path is relative to ``self._FACEBOOK_BASE_URL``,
+ by default "https://graph.facebook.com".
+
+ This method is a wrapper around `OAuth2Mixin.oauth2_request`;
+ the only difference is that this method takes a relative path,
+ while ``oauth2_request`` takes a complete url.
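+
+ (Editorial note, not upstream text: as a consequence,
+ ``facebook_request("/me")`` and
+ ``oauth2_request("https://graph.facebook.com/me")`` are
+ equivalent here.)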
+ + .. versionchanged:: 3.1 + Added the ability to override ``self._FACEBOOK_BASE_URL``. + """ + url = self._FACEBOOK_BASE_URL + path + # Thanks to the _auth_return_future decorator, our "callback" + # argument is a Future, which we cannot pass as a callback to + # oauth2_request. Instead, have oauth2_request return a + # future and chain them together. + oauth_future = self.oauth2_request(url, access_token=access_token, + post_args=post_args, **args) + chain_future(oauth_future, callback) + + +def _oauth_signature(consumer_token, method, url, parameters={}, token=None): + """Calculates the HMAC-SHA1 OAuth signature for the given request. + + See http://oauth.net/core/1.0/#signing_process + """ + parts = urlparse.urlparse(url) + scheme, netloc, path = parts[:3] + normalized_url = scheme.lower() + "://" + netloc.lower() + path + + base_elems = [] + base_elems.append(method.upper()) + base_elems.append(normalized_url) + base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v))) + for k, v in sorted(parameters.items()))) + base_string = "&".join(_oauth_escape(e) for e in base_elems) + + key_elems = [escape.utf8(consumer_token["secret"])] + key_elems.append(escape.utf8(token["secret"] if token else "")) + key = b"&".join(key_elems) + + hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1) + return binascii.b2a_base64(hash.digest())[:-1] + + +def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None): + """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request. + + See http://oauth.net/core/1.0a/#signing_process + """ + parts = urlparse.urlparse(url) + scheme, netloc, path = parts[:3] + normalized_url = scheme.lower() + "://" + netloc.lower() + path + + base_elems = [] + base_elems.append(method.upper()) + base_elems.append(normalized_url) + base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v))) + for k, v in sorted(parameters.items()))) + + base_string = "&".join(_oauth_escape(e) for e in base_elems) + key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))] + key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else "")) + key = b"&".join(key_elems) + + hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1) + return binascii.b2a_base64(hash.digest())[:-1] + + +def _oauth_escape(val): + if isinstance(val, unicode_type): + val = val.encode("utf-8") + return urllib_parse.quote(val, safe="~") + + +def _oauth_parse_response(body): + # I can't find an officially-defined encoding for oauth responses and + # have never seen anyone use non-ascii. Leave the response in a byte + # string for python 2, and use utf8 on python 3. + body = escape.native_str(body) + p = urlparse.parse_qs(body, keep_blank_values=False) + token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0]) + + # Add the extra parameters the Provider included to the token + special = ("oauth_token", "oauth_token_secret") + token.update((k, p[k][0]) for k in p if k not in special) + return token diff --git a/server/www/packages/packages-common/tornado/autoreload.py b/server/www/packages/packages-common/tornado/autoreload.py new file mode 100644 index 0000000..1cbf26c --- /dev/null +++ b/server/www/packages/packages-common/tornado/autoreload.py @@ -0,0 +1,339 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Automatically restart the server when a source file is modified. + +Most applications should not access this module directly. Instead, +pass the keyword argument ``autoreload=True`` to the +`tornado.web.Application` constructor (or ``debug=True``, which +enables this setting and several others). This will enable autoreload +mode as well as checking for changes to templates and static +resources. Note that restarting is a destructive operation and any +requests in progress will be aborted when the process restarts. (If +you want to disable autoreload while using other debug-mode features, +pass both ``debug=True`` and ``autoreload=False``). + +This module can also be used as a command-line wrapper around scripts +such as unit test runners. See the `main` method for details. + +The command-line wrapper and Application debug modes can be used together. +This combination is encouraged as the wrapper catches syntax errors and +other import-time failures, while debug mode catches changes once +the server has started. + +This module depends on `.IOLoop`, so it will not work in WSGI applications +and Google App Engine. It also will not work correctly when `.HTTPServer`'s +multi-process mode is used. + +Reloading loses any Python interpreter command-line arguments (e.g. ``-u``) +because it re-executes Python using ``sys.executable`` and ``sys.argv``. +Additionally, modifying these variables will cause reloading to behave +incorrectly. + +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import os +import sys + +# sys.path handling +# ----------------- +# +# If a module is run with "python -m", the current directory (i.e. "") +# is automatically prepended to sys.path, but not if it is run as +# "path/to/file.py". The processing for "-m" rewrites the former to +# the latter, so subsequent executions won't have the same path as the +# original. +# +# Conversely, when run as path/to/file.py, the directory containing +# file.py gets added to the path, which can cause confusion as imports +# may become relative in spite of the future import. +# +# We address the former problem by setting the $PYTHONPATH environment +# variable before re-execution so the new process will see the correct +# path. We attempt to address the latter problem when tornado.autoreload +# is run as __main__, although we can't fix the general case because +# we cannot reliably reconstruct the original command line +# (http://bugs.python.org/issue14208). + +if __name__ == "__main__": + # This sys.path manipulation must come before our imports (as much + # as possible - if we introduced a tornado.sys or tornado.os + # module we'd be in trouble), or else our imports would become + # relative again despite the future import. + # + # There is a separate __main__ block at the end of the file to call main(). 
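+ # (Editorial aside: the check below handles the "run as
+ # path/to/autoreload.py" case described above, where the tornado
+ # package directory itself would otherwise sit at the front of
+ # sys.path and let its modules shadow top-level imports.)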
+ if sys.path[0] == os.path.dirname(__file__): + del sys.path[0] + +import functools +import logging +import os +import pkgutil +import sys +import traceback +import types +import subprocess +import weakref + +from tornado import ioloop +from tornado.log import gen_log +from tornado import process +from tornado.util import exec_in + +try: + import signal +except ImportError: + signal = None + +# os.execv is broken on Windows and can't properly parse command line +# arguments and executable name if they contain whitespaces. subprocess +# fixes that behavior. +# This distinction is also important because when we use execv, we want to +# close the IOLoop and all its file descriptors, to guard against any +# file descriptors that were not set CLOEXEC. When execv is not available, +# we must not close the IOLoop because we want the process to exit cleanly. +_has_execv = sys.platform != 'win32' + +_watched_files = set() +_reload_hooks = [] +_reload_attempted = False +_io_loops = weakref.WeakKeyDictionary() + + +def start(io_loop=None, check_time=500): + """Begins watching source files for changes. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + io_loop = io_loop or ioloop.IOLoop.current() + if io_loop in _io_loops: + return + _io_loops[io_loop] = True + if len(_io_loops) > 1: + gen_log.warning("tornado.autoreload started more than once in the same process") + if _has_execv: + add_reload_hook(functools.partial(io_loop.close, all_fds=True)) + modify_times = {} + callback = functools.partial(_reload_on_update, modify_times) + scheduler = ioloop.PeriodicCallback(callback, check_time, io_loop=io_loop) + scheduler.start() + + +def wait(): + """Wait for a watched file to change, then restart the process. + + Intended to be used at the end of scripts like unit test runners, + to run the tests again after any source file changes (but see also + the command-line interface in `main`) + """ + io_loop = ioloop.IOLoop() + start(io_loop) + io_loop.start() + + +def watch(filename): + """Add a file to the watch list. + + All imported modules are watched by default. + """ + _watched_files.add(filename) + + +def add_reload_hook(fn): + """Add a function to be called before reloading the process. + + Note that for open file and socket handles it is generally + preferable to set the ``FD_CLOEXEC`` flag (using `fcntl` or + ``tornado.platform.auto.set_close_exec``) instead + of using a reload hook to close them. + """ + _reload_hooks.append(fn) + + +def _reload_on_update(modify_times): + if _reload_attempted: + # We already tried to reload and it didn't work, so don't try again. + return + if process.task_id() is not None: + # We're in a child process created by fork_processes. If child + # processes restarted themselves, they'd all restart and then + # all call fork_processes again. + return + for module in list(sys.modules.values()): + # Some modules play games with sys.modules (e.g. email/__init__.py + # in the standard library), and occasionally this can cause strange + # failures in getattr. Just ignore anything that's not an ordinary + # module. 
+ if not isinstance(module, types.ModuleType): + continue + path = getattr(module, "__file__", None) + if not path: + continue + if path.endswith(".pyc") or path.endswith(".pyo"): + path = path[:-1] + _check_file(modify_times, path) + for path in _watched_files: + _check_file(modify_times, path) + + +def _check_file(modify_times, path): + try: + modified = os.stat(path).st_mtime + except Exception: + return + if path not in modify_times: + modify_times[path] = modified + return + if modify_times[path] != modified: + gen_log.info("%s modified; restarting server", path) + _reload() + + +def _reload(): + global _reload_attempted + _reload_attempted = True + for fn in _reload_hooks: + fn() + if hasattr(signal, "setitimer"): + # Clear the alarm signal set by + # ioloop.set_blocking_log_threshold so it doesn't fire + # after the exec. + signal.setitimer(signal.ITIMER_REAL, 0, 0) + # sys.path fixes: see comments at top of file. If sys.path[0] is an empty + # string, we were (probably) invoked with -m and the effective path + # is about to change on re-exec. Add the current directory to $PYTHONPATH + # to ensure that the new process sees the same path we did. + path_prefix = '.' + os.pathsep + if (sys.path[0] == '' and + not os.environ.get("PYTHONPATH", "").startswith(path_prefix)): + os.environ["PYTHONPATH"] = (path_prefix + + os.environ.get("PYTHONPATH", "")) + if not _has_execv: + subprocess.Popen([sys.executable] + sys.argv) + sys.exit(0) + else: + try: + os.execv(sys.executable, [sys.executable] + sys.argv) + except OSError: + # Mac OS X versions prior to 10.6 do not support execv in + # a process that contains multiple threads. Instead of + # re-executing in the current process, start a new one + # and cause the current process to exit. This isn't + # ideal since the new process is detached from the parent + # terminal and thus cannot easily be killed with ctrl-C, + # but it's better than not being able to autoreload at + # all. + # Unfortunately the errno returned in this case does not + # appear to be consistent, so we can't easily check for + # this error specifically. + os.spawnv(os.P_NOWAIT, sys.executable, + [sys.executable] + sys.argv) + # At this point the IOLoop has been closed and finally + # blocks will experience errors if we allow the stack to + # unwind, so just exit uncleanly. + os._exit(0) + +_USAGE = """\ +Usage: + python -m tornado.autoreload -m module.to.run [args...] + python -m tornado.autoreload path/to/script.py [args...] +""" + + +def main(): + """Command-line wrapper to re-run a script whenever its source changes. + + Scripts may be specified by filename or module name:: + + python -m tornado.autoreload -m tornado.test.runtests + python -m tornado.autoreload tornado/test/runtests.py + + Running a script with this wrapper is similar to calling + `tornado.autoreload.wait` at the end of the script, but this wrapper + can catch import-time problems like syntax errors that would otherwise + prevent the script from reaching its call to `wait`. 
+ """ + original_argv = sys.argv + sys.argv = sys.argv[:] + if len(sys.argv) >= 3 and sys.argv[1] == "-m": + mode = "module" + module = sys.argv[2] + del sys.argv[1:3] + elif len(sys.argv) >= 2: + mode = "script" + script = sys.argv[1] + sys.argv = sys.argv[1:] + else: + print(_USAGE, file=sys.stderr) + sys.exit(1) + + try: + if mode == "module": + import runpy + runpy.run_module(module, run_name="__main__", alter_sys=True) + elif mode == "script": + with open(script) as f: + # Execute the script in our namespace instead of creating + # a new one so that something that tries to import __main__ + # (e.g. the unittest module) will see names defined in the + # script instead of just those defined in this module. + global __file__ + __file__ = script + # If __package__ is defined, imports may be incorrectly + # interpreted as relative to this module. + global __package__ + del __package__ + exec_in(f.read(), globals(), globals()) + except SystemExit as e: + logging.basicConfig() + gen_log.info("Script exited with status %s", e.code) + except Exception as e: + logging.basicConfig() + gen_log.warning("Script exited with uncaught exception", exc_info=True) + # If an exception occurred at import time, the file with the error + # never made it into sys.modules and so we won't know to watch it. + # Just to make sure we've covered everything, walk the stack trace + # from the exception and watch every file. + for (filename, lineno, name, line) in traceback.extract_tb(sys.exc_info()[2]): + watch(filename) + if isinstance(e, SyntaxError): + # SyntaxErrors are special: their innermost stack frame is fake + # so extract_tb won't see it and we have to get the filename + # from the exception object. + watch(e.filename) + else: + logging.basicConfig() + gen_log.info("Script exited normally") + # restore sys.argv so subsequent executions will include autoreload + sys.argv = original_argv + + if mode == 'module': + # runpy did a fake import of the module as __main__, but now it's + # no longer in sys.modules. Figure out where it is and watch it. + loader = pkgutil.get_loader(module) + if loader is not None: + watch(loader.get_filename()) + + wait() + + +if __name__ == "__main__": + # See also the other __main__ block at the top of the file, which modifies + # sys.path before our imports + main() diff --git a/server/www/packages/packages-common/tornado/concurrent.py b/server/www/packages/packages-common/tornado/concurrent.py new file mode 100644 index 0000000..5f8cdc4 --- /dev/null +++ b/server/www/packages/packages-common/tornado/concurrent.py @@ -0,0 +1,510 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Utilities for working with threads and ``Futures``. + +``Futures`` are a pattern for concurrent programming introduced in +Python 3.2 in the `concurrent.futures` package. This package defines +a mostly-compatible `Future` class designed for use from coroutines, +as well as some utility functions for interacting with the +`concurrent.futures` package. 
+""" +from __future__ import absolute_import, division, print_function, with_statement + +import functools +import platform +import textwrap +import traceback +import sys + +from tornado.log import app_log +from tornado.stack_context import ExceptionStackContext, wrap +from tornado.util import raise_exc_info, ArgReplacer + +try: + from concurrent import futures +except ImportError: + futures = None + + +# Can the garbage collector handle cycles that include __del__ methods? +# This is true in cpython beginning with version 3.4 (PEP 442). +_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and + sys.version_info >= (3, 4)) + + +class ReturnValueIgnoredError(Exception): + pass + +# This class and associated code in the future object is derived +# from the Trollius project, a backport of asyncio to Python 2.x - 3.x + + +class _TracebackLogger(object): + """Helper to log a traceback upon destruction if not cleared. + + This solves a nasty problem with Futures and Tasks that have an + exception set: if nobody asks for the exception, the exception is + never logged. This violates the Zen of Python: 'Errors should + never pass silently. Unless explicitly silenced.' + + However, we don't want to log the exception as soon as + set_exception() is called: if the calling code is written + properly, it will get the exception and handle it properly. But + we *do* want to log it if result() or exception() was never called + -- otherwise developers waste a lot of time wondering why their + buggy code fails silently. + + An earlier attempt added a __del__() method to the Future class + itself, but this backfired because the presence of __del__() + prevents garbage collection from breaking cycles. A way out of + this catch-22 is to avoid having a __del__() method on the Future + class itself, but instead to have a reference to a helper object + with a __del__() method that logs the traceback, where we ensure + that the helper object doesn't participate in cycles, and only the + Future has a reference to it. + + The helper object is added when set_exception() is called. When + the Future is collected, and the helper is present, the helper + object is also collected, and its __del__() method will log the + traceback. When the Future's result() or exception() method is + called (and a helper object is present), it removes the the helper + object, after calling its clear() method to prevent it from + logging. + + One downside is that we do a fair amount of work to extract the + traceback from the exception, even when it is never logged. It + would seem cheaper to just store the exception object, but that + references the traceback, which references stack frames, which may + reference the Future, which references the _TracebackLogger, and + then the _TracebackLogger would be included in a cycle, which is + what we're trying to avoid! As an optimization, we don't + immediately format the exception; we only do the work when + activate() is called, which call is delayed until after all the + Future's callbacks have run. Since usually a Future has at least + one callback (typically set by 'yield From') and usually that + callback extracts the callback, thereby removing the need to + format the exception. + + PS. I don't claim credit for this solution. I first heard of it + in a discussion about closing files when they are collected. 
+ """ + + __slots__ = ('exc_info', 'formatted_tb') + + def __init__(self, exc_info): + self.exc_info = exc_info + self.formatted_tb = None + + def activate(self): + exc_info = self.exc_info + if exc_info is not None: + self.exc_info = None + self.formatted_tb = traceback.format_exception(*exc_info) + + def clear(self): + self.exc_info = None + self.formatted_tb = None + + def __del__(self): + if self.formatted_tb: + app_log.error('Future exception was never retrieved: %s', + ''.join(self.formatted_tb).rstrip()) + + +class Future(object): + """Placeholder for an asynchronous result. + + A ``Future`` encapsulates the result of an asynchronous + operation. In synchronous applications ``Futures`` are used + to wait for the result from a thread or process pool; in + Tornado they are normally used with `.IOLoop.add_future` or by + yielding them in a `.gen.coroutine`. + + `tornado.concurrent.Future` is similar to + `concurrent.futures.Future`, but not thread-safe (and therefore + faster for use with single-threaded event loops). + + In addition to ``exception`` and ``set_exception``, methods ``exc_info`` + and ``set_exc_info`` are supported to capture tracebacks in Python 2. + The traceback is automatically available in Python 3, but in the + Python 2 futures backport this information is discarded. + This functionality was previously available in a separate class + ``TracebackFuture``, which is now a deprecated alias for this class. + + .. versionchanged:: 4.0 + `tornado.concurrent.Future` is always a thread-unsafe ``Future`` + with support for the ``exc_info`` methods. Previously it would + be an alias for the thread-safe `concurrent.futures.Future` + if that package was available and fall back to the thread-unsafe + implementation if it was not. + + .. versionchanged:: 4.1 + If a `.Future` contains an error but that error is never observed + (by calling ``result()``, ``exception()``, or ``exc_info()``), + a stack trace will be logged when the `.Future` is garbage collected. + This normally indicates an error in the application, but in cases + where it results in undesired logging it may be necessary to + suppress the logging by ensuring that the exception is observed: + ``f.add_done_callback(lambda f: f.exception())``. + """ + def __init__(self): + self._done = False + self._result = None + self._exc_info = None + + self._log_traceback = False # Used for Python >= 3.4 + self._tb_logger = None # Used for Python <= 3.3 + + self._callbacks = [] + + # Implement the Python 3.5 Awaitable protocol if possible + # (we can't use return and yield together until py33). + if sys.version_info >= (3, 3): + exec(textwrap.dedent(""" + def __await__(self): + return (yield self) + """)) + else: + # Py2-compatible version for use with cython. + def __await__(self): + result = yield self + # StopIteration doesn't take args before py33, + # but Cython recognizes the args tuple. + e = StopIteration() + e.args = (result,) + raise e + + def cancel(self): + """Cancel the operation, if possible. + + Tornado ``Futures`` do not support cancellation, so this method always + returns False. + """ + return False + + def cancelled(self): + """Returns True if the operation has been cancelled. + + Tornado ``Futures`` do not support cancellation, so this method + always returns False. 
+ """ + return False + + def running(self): + """Returns True if this operation is currently running.""" + return not self._done + + def done(self): + """Returns True if the future has finished running.""" + return self._done + + def _clear_tb_log(self): + self._log_traceback = False + if self._tb_logger is not None: + self._tb_logger.clear() + self._tb_logger = None + + def result(self, timeout=None): + """If the operation succeeded, return its result. If it failed, + re-raise its exception. + + This method takes a ``timeout`` argument for compatibility with + `concurrent.futures.Future` but it is an error to call it + before the `Future` is done, so the ``timeout`` is never used. + """ + self._clear_tb_log() + if self._result is not None: + return self._result + if self._exc_info is not None: + raise_exc_info(self._exc_info) + self._check_done() + return self._result + + def exception(self, timeout=None): + """If the operation raised an exception, return the `Exception` + object. Otherwise returns None. + + This method takes a ``timeout`` argument for compatibility with + `concurrent.futures.Future` but it is an error to call it + before the `Future` is done, so the ``timeout`` is never used. + """ + self._clear_tb_log() + if self._exc_info is not None: + return self._exc_info[1] + else: + self._check_done() + return None + + def add_done_callback(self, fn): + """Attaches the given callback to the `Future`. + + It will be invoked with the `Future` as its argument when the Future + has finished running and its result is available. In Tornado + consider using `.IOLoop.add_future` instead of calling + `add_done_callback` directly. + """ + if self._done: + fn(self) + else: + self._callbacks.append(fn) + + def set_result(self, result): + """Sets the result of a ``Future``. + + It is undefined to call any of the ``set`` methods more than once + on the same object. + """ + self._result = result + self._set_done() + + def set_exception(self, exception): + """Sets the exception of a ``Future.``""" + self.set_exc_info( + (exception.__class__, + exception, + getattr(exception, '__traceback__', None))) + + def exc_info(self): + """Returns a tuple in the same format as `sys.exc_info` or None. + + .. versionadded:: 4.0 + """ + self._clear_tb_log() + return self._exc_info + + def set_exc_info(self, exc_info): + """Sets the exception information of a ``Future.`` + + Preserves tracebacks on Python 2. + + .. versionadded:: 4.0 + """ + self._exc_info = exc_info + self._log_traceback = True + if not _GC_CYCLE_FINALIZERS: + self._tb_logger = _TracebackLogger(exc_info) + + try: + self._set_done() + finally: + # Activate the logger after all callbacks have had a + # chance to call result() or exception(). + if self._log_traceback and self._tb_logger is not None: + self._tb_logger.activate() + self._exc_info = exc_info + + def _check_done(self): + if not self._done: + raise Exception("DummyFuture does not support blocking for results") + + def _set_done(self): + self._done = True + for cb in self._callbacks: + try: + cb(self) + except Exception: + app_log.exception('Exception in callback %r for %r', + cb, self) + self._callbacks = None + + # On Python 3.3 or older, objects with a destructor part of a reference + # cycle are never destroyed. It's no longer the case on Python 3.4 thanks to + # the PEP 442. 
+ if _GC_CYCLE_FINALIZERS:
+ def __del__(self):
+ if not self._log_traceback:
+ # set_exception() was not called, or result() or exception()
+ # has consumed the exception
+ return
+
+ tb = traceback.format_exception(*self._exc_info)
+
+ app_log.error('Future %r exception was never retrieved: %s',
+ self, ''.join(tb).rstrip())
+
+TracebackFuture = Future
+
+if futures is None:
+ FUTURES = Future
+else:
+ FUTURES = (futures.Future, Future)
+
+
+def is_future(x):
+ return isinstance(x, FUTURES)
+
+
+class DummyExecutor(object):
+ def submit(self, fn, *args, **kwargs):
+ future = TracebackFuture()
+ try:
+ future.set_result(fn(*args, **kwargs))
+ except Exception:
+ future.set_exc_info(sys.exc_info())
+ return future
+
+ def shutdown(self, wait=True):
+ pass
+
+dummy_executor = DummyExecutor()
+
+
+def run_on_executor(*args, **kwargs):
+ """Decorator to run a synchronous method asynchronously on an executor.
+
+ The decorated method may be called with a ``callback`` keyword
+ argument and returns a future.
+
+ The `.IOLoop` and executor to be used are determined by the ``io_loop``
+ and ``executor`` attributes of ``self``. To use different attributes,
+ pass keyword arguments to the decorator::
+
+ @run_on_executor(executor='_thread_pool')
+ def foo(self):
+ pass
+
+ .. versionchanged:: 4.2
+ Added keyword arguments to use alternative attributes.
+ """
+ def run_on_executor_decorator(fn):
+ executor = kwargs.get("executor", "executor")
+ io_loop = kwargs.get("io_loop", "io_loop")
+
+ @functools.wraps(fn)
+ def wrapper(self, *args, **kwargs):
+ callback = kwargs.pop("callback", None)
+ future = getattr(self, executor).submit(fn, self, *args, **kwargs)
+ if callback:
+ getattr(self, io_loop).add_future(
+ future, lambda future: callback(future.result()))
+ return future
+ return wrapper
+ if args and kwargs:
+ raise ValueError("cannot combine positional and keyword args")
+ if len(args) == 1:
+ return run_on_executor_decorator(args[0])
+ elif len(args) != 0:
+ raise ValueError("expected 1 argument, got %d" % len(args))
+ return run_on_executor_decorator
+
+
+_NO_RESULT = object()
+
+
+def return_future(f):
+ """Decorator to make a function that returns via callback return a
+ `Future`.
+
+ The wrapped function should take a ``callback`` keyword argument
+ and invoke it with one argument when it has finished. To signal failure,
+ the function can simply raise an exception (which will be
+ captured by the `.StackContext` and passed along to the ``Future``).
+
+ From the caller's perspective, the callback argument is optional.
+ If one is given, it will be invoked when the function is complete
+ with `Future.result()` as an argument. If the function fails, the
+ callback will not be run and an exception will be raised into the
+ surrounding `.StackContext`.
+
+ If no callback is given, the caller should use the ``Future`` to
+ wait for the function to complete (perhaps by yielding it in a
+ `.gen.engine` function, or passing it to `.IOLoop.add_future`).
+
+ Usage:
+
+ .. testcode::
+
+ @return_future
+ def future_func(arg1, arg2, callback):
+ # Do stuff (possibly asynchronous)
+ callback(result)
+
+ @gen.engine
+ def caller(callback):
+ yield future_func(arg1, arg2)
+ callback()
+
+ ..
+
+ Note that ``@return_future`` and ``@gen.engine`` can be applied to the
+ same function, provided ``@return_future`` appears first. However,
+ consider using ``@gen.coroutine`` instead of this combination.
+ """ + replacer = ArgReplacer(f, 'callback') + + @functools.wraps(f) + def wrapper(*args, **kwargs): + future = TracebackFuture() + callback, args, kwargs = replacer.replace( + lambda value=_NO_RESULT: future.set_result(value), + args, kwargs) + + def handle_error(typ, value, tb): + future.set_exc_info((typ, value, tb)) + return True + exc_info = None + with ExceptionStackContext(handle_error): + try: + result = f(*args, **kwargs) + if result is not None: + raise ReturnValueIgnoredError( + "@return_future should not be used with functions " + "that return values") + except: + exc_info = sys.exc_info() + raise + if exc_info is not None: + # If the initial synchronous part of f() raised an exception, + # go ahead and raise it to the caller directly without waiting + # for them to inspect the Future. + future.result() + + # If the caller passed in a callback, schedule it to be called + # when the future resolves. It is important that this happens + # just before we return the future, or else we risk confusing + # stack contexts with multiple exceptions (one here with the + # immediate exception, and again when the future resolves and + # the callback triggers its exception by calling future.result()). + if callback is not None: + def run_callback(future): + result = future.result() + if result is _NO_RESULT: + callback() + else: + callback(future.result()) + future.add_done_callback(wrap(run_callback)) + return future + return wrapper + + +def chain_future(a, b): + """Chain two futures together so that when one completes, so does the other. + + The result (success or failure) of ``a`` will be copied to ``b``, unless + ``b`` has already been completed or cancelled by the time ``a`` finishes. + """ + def copy(future): + assert future is a + if b.done(): + return + if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture) + and a.exc_info() is not None): + b.set_exc_info(a.exc_info()) + elif a.exception() is not None: + b.set_exception(a.exception()) + else: + b.set_result(a.result()) + a.add_done_callback(copy) diff --git a/server/www/packages/packages-common/tornado/curl_httpclient.py b/server/www/packages/packages-common/tornado/curl_httpclient.py new file mode 100644 index 0000000..22f2502 --- /dev/null +++ b/server/www/packages/packages-common/tornado/curl_httpclient.py @@ -0,0 +1,500 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Non-blocking HTTP client implementation using pycurl.""" + +from __future__ import absolute_import, division, print_function, with_statement + +import collections +import functools +import logging +import pycurl +import threading +import time +from io import BytesIO + +from tornado import httputil +from tornado import ioloop +from tornado import stack_context + +from tornado.escape import utf8, native_str +from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main + +curl_log = logging.getLogger('tornado.curl_httpclient') + + +class CurlAsyncHTTPClient(AsyncHTTPClient): + def initialize(self, io_loop, max_clients=10, defaults=None): + super(CurlAsyncHTTPClient, self).initialize(io_loop, defaults=defaults) + self._multi = pycurl.CurlMulti() + self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout) + self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket) + self._curls = [self._curl_create() for i in range(max_clients)] + self._free_list = self._curls[:] + self._requests = collections.deque() + self._fds = {} + self._timeout = None + + # libcurl has bugs that sometimes cause it to not report all + # relevant file descriptors and timeouts to TIMERFUNCTION/ + # SOCKETFUNCTION. Mitigate the effects of such bugs by + # forcing a periodic scan of all active requests. + self._force_timeout_callback = ioloop.PeriodicCallback( + self._handle_force_timeout, 1000, io_loop=io_loop) + self._force_timeout_callback.start() + + # Work around a bug in libcurl 7.29.0: Some fields in the curl + # multi object are initialized lazily, and its destructor will + # segfault if it is destroyed without having been used. Add + # and remove a dummy handle to make sure everything is + # initialized. + dummy_curl_handle = pycurl.Curl() + self._multi.add_handle(dummy_curl_handle) + self._multi.remove_handle(dummy_curl_handle) + + def close(self): + self._force_timeout_callback.stop() + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + for curl in self._curls: + curl.close() + self._multi.close() + super(CurlAsyncHTTPClient, self).close() + + def fetch_impl(self, request, callback): + self._requests.append((request, callback)) + self._process_queue() + self._set_timeout(0) + + def _handle_socket(self, event, fd, multi, data): + """Called by libcurl when it wants to change the file descriptors + it cares about. + """ + event_map = { + pycurl.POLL_NONE: ioloop.IOLoop.NONE, + pycurl.POLL_IN: ioloop.IOLoop.READ, + pycurl.POLL_OUT: ioloop.IOLoop.WRITE, + pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE + } + if event == pycurl.POLL_REMOVE: + if fd in self._fds: + self.io_loop.remove_handler(fd) + del self._fds[fd] + else: + ioloop_event = event_map[event] + # libcurl sometimes closes a socket and then opens a new + # one using the same FD without giving us a POLL_NONE in + # between. This is a problem with the epoll IOLoop, + # because the kernel can tell when a socket is closed and + # removes it from the epoll automatically, causing future + # update_handler calls to fail. Since we can't tell when + # this has happened, always use remove and re-add + # instead of update. 
+ if fd in self._fds: + self.io_loop.remove_handler(fd) + self.io_loop.add_handler(fd, self._handle_events, + ioloop_event) + self._fds[fd] = ioloop_event + + def _set_timeout(self, msecs): + """Called by libcurl to schedule a timeout.""" + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = self.io_loop.add_timeout( + self.io_loop.time() + msecs / 1000.0, self._handle_timeout) + + def _handle_events(self, fd, events): + """Called by IOLoop when there is activity on one of our + file descriptors. + """ + action = 0 + if events & ioloop.IOLoop.READ: + action |= pycurl.CSELECT_IN + if events & ioloop.IOLoop.WRITE: + action |= pycurl.CSELECT_OUT + while True: + try: + ret, num_handles = self._multi.socket_action(fd, action) + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + def _handle_timeout(self): + """Called by IOLoop when the requested timeout has passed.""" + with stack_context.NullContext(): + self._timeout = None + while True: + try: + ret, num_handles = self._multi.socket_action( + pycurl.SOCKET_TIMEOUT, 0) + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + # In theory, we shouldn't have to do this because curl will + # call _set_timeout whenever the timeout changes. However, + # sometimes after _handle_timeout we will need to reschedule + # immediately even though nothing has changed from curl's + # perspective. This is because when socket_action is + # called with SOCKET_TIMEOUT, libcurl decides internally which + # timeouts need to be processed by using a monotonic clock + # (where available) while tornado uses python's time.time() + # to decide when timeouts have occurred. When those clocks + # disagree on elapsed time (as they will whenever there is an + # NTP adjustment), tornado might call _handle_timeout before + # libcurl is ready. After each timeout, resync the scheduled + # timeout with libcurl's current state. + new_timeout = self._multi.timeout() + if new_timeout >= 0: + self._set_timeout(new_timeout) + + def _handle_force_timeout(self): + """Called by IOLoop periodically to ask libcurl to process any + events it may have forgotten about. + """ + with stack_context.NullContext(): + while True: + try: + ret, num_handles = self._multi.socket_all() + except pycurl.error as e: + ret = e.args[0] + if ret != pycurl.E_CALL_MULTI_PERFORM: + break + self._finish_pending_requests() + + def _finish_pending_requests(self): + """Process any requests that were completed by the last + call to multi.socket_action. + """ + while True: + num_q, ok_list, err_list = self._multi.info_read() + for curl in ok_list: + self._finish(curl) + for curl, errnum, errmsg in err_list: + self._finish(curl, errnum, errmsg) + if num_q == 0: + break + self._process_queue() + + def _process_queue(self): + with stack_context.NullContext(): + while True: + started = 0 + while self._free_list and self._requests: + started += 1 + curl = self._free_list.pop() + (request, callback) = self._requests.popleft() + curl.info = { + "headers": httputil.HTTPHeaders(), + "buffer": BytesIO(), + "request": request, + "callback": callback, + "curl_start_time": time.time(), + } + try: + self._curl_setup_request( + curl, request, curl.info["buffer"], + curl.info["headers"]) + except Exception as e: + # If there was an error in setup, pass it on + # to the callback. 
Note that allowing the + # error to escape here will appear to work + # most of the time since we are still in the + # caller's original stack frame, but when + # _process_queue() is called from + # _finish_pending_requests the exceptions have + # nowhere to go. + callback(HTTPResponse( + request=request, + code=599, + error=e)) + else: + self._multi.add_handle(curl) + + if not started: + break + + def _finish(self, curl, curl_error=None, curl_message=None): + info = curl.info + curl.info = None + self._multi.remove_handle(curl) + self._free_list.append(curl) + buffer = info["buffer"] + if curl_error: + error = CurlError(curl_error, curl_message) + code = error.code + effective_url = None + buffer.close() + buffer = None + else: + error = None + code = curl.getinfo(pycurl.HTTP_CODE) + effective_url = curl.getinfo(pycurl.EFFECTIVE_URL) + buffer.seek(0) + # the various curl timings are documented at + # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html + time_info = dict( + queue=info["curl_start_time"] - info["request"].start_time, + namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME), + connect=curl.getinfo(pycurl.CONNECT_TIME), + pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME), + starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME), + total=curl.getinfo(pycurl.TOTAL_TIME), + redirect=curl.getinfo(pycurl.REDIRECT_TIME), + ) + try: + info["callback"](HTTPResponse( + request=info["request"], code=code, headers=info["headers"], + buffer=buffer, effective_url=effective_url, error=error, + reason=info['headers'].get("X-Http-Reason", None), + request_time=time.time() - info["curl_start_time"], + time_info=time_info)) + except Exception: + self.handle_callback_exception(info["callback"]) + + def handle_callback_exception(self, callback): + self.io_loop.handle_callback_exception(callback) + + def _curl_create(self): + curl = pycurl.Curl() + if curl_log.isEnabledFor(logging.DEBUG): + curl.setopt(pycurl.VERBOSE, 1) + curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug) + return curl + + def _curl_setup_request(self, curl, request, buffer, headers): + curl.setopt(pycurl.URL, native_str(request.url)) + + # libcurl's magic "Expect: 100-continue" behavior causes delays + # with servers that don't support it (which include, among others, + # Google's OpenID endpoint). Additionally, this behavior has + # a bug in conjunction with the curl_multi_socket_action API + # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976), + # which increases the delays. It's more trouble than it's worth, + # so just turn off the feature (yes, setting Expect: to an empty + # value is the official way to disable this) + if "Expect" not in request.headers: + request.headers["Expect"] = "" + + # libcurl adds Pragma: no-cache by default; disable that too + if "Pragma" not in request.headers: + request.headers["Pragma"] = "" + + curl.setopt(pycurl.HTTPHEADER, + ["%s: %s" % (native_str(k), native_str(v)) + for k, v in request.headers.get_all()]) + + curl.setopt(pycurl.HEADERFUNCTION, + functools.partial(self._curl_header_callback, + headers, request.header_callback)) + if request.streaming_callback: + def write_function(chunk): + self.io_loop.add_callback(request.streaming_callback, chunk) + else: + write_function = buffer.write + if bytes is str: # py2 + curl.setopt(pycurl.WRITEFUNCTION, write_function) + else: # py3 + # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes + # a fork/port. That version has a bug in which it passes unicode + # strings instead of bytes to the WRITEFUNCTION. 
This means that + # if you use a WRITEFUNCTION (which tornado always does), you cannot + # download arbitrary binary data. This needs to be fixed in the + # ported pycurl package, but in the meantime this lambda will + # make it work for downloading (utf8) text. + curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s))) + curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects) + curl.setopt(pycurl.MAXREDIRS, request.max_redirects) + curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout)) + curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout)) + if request.user_agent: + curl.setopt(pycurl.USERAGENT, native_str(request.user_agent)) + else: + curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)") + if request.network_interface: + curl.setopt(pycurl.INTERFACE, request.network_interface) + if request.decompress_response: + curl.setopt(pycurl.ENCODING, "gzip,deflate") + else: + curl.setopt(pycurl.ENCODING, "none") + if request.proxy_host and request.proxy_port: + curl.setopt(pycurl.PROXY, request.proxy_host) + curl.setopt(pycurl.PROXYPORT, request.proxy_port) + if request.proxy_username: + credentials = '%s:%s' % (request.proxy_username, + request.proxy_password) + curl.setopt(pycurl.PROXYUSERPWD, credentials) + else: + curl.setopt(pycurl.PROXY, '') + curl.unsetopt(pycurl.PROXYUSERPWD) + if request.validate_cert: + curl.setopt(pycurl.SSL_VERIFYPEER, 1) + curl.setopt(pycurl.SSL_VERIFYHOST, 2) + else: + curl.setopt(pycurl.SSL_VERIFYPEER, 0) + curl.setopt(pycurl.SSL_VERIFYHOST, 0) + if request.ca_certs is not None: + curl.setopt(pycurl.CAINFO, request.ca_certs) + else: + # There is no way to restore pycurl.CAINFO to its default value + # (Using unsetopt makes it reject all certificates). + # I don't see any way to read the default value from python so it + # can be restored later. We'll have to just leave CAINFO untouched + # if no ca_certs file was specified, and require that if any + # request uses a custom ca_certs file, they all must. + pass + + if request.allow_ipv6 is False: + # Curl behaves reasonably when DNS resolution gives an ipv6 address + # that we can't reach, so allow ipv6 unless the user asks to disable. + curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4) + else: + curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER) + + # Set the request method through curl's irritating interface which makes + # up names for almost every single method + curl_options = { + "GET": pycurl.HTTPGET, + "POST": pycurl.POST, + "PUT": pycurl.UPLOAD, + "HEAD": pycurl.NOBODY, + } + custom_methods = set(["DELETE", "OPTIONS", "PATCH"]) + for o in curl_options.values(): + curl.setopt(o, False) + if request.method in curl_options: + curl.unsetopt(pycurl.CUSTOMREQUEST) + curl.setopt(curl_options[request.method], True) + elif request.allow_nonstandard_methods or request.method in custom_methods: + curl.setopt(pycurl.CUSTOMREQUEST, request.method) + else: + raise KeyError('unknown method ' + request.method) + + body_expected = request.method in ("POST", "PATCH", "PUT") + body_present = request.body is not None + if not request.allow_nonstandard_methods: + # Some HTTP methods nearly always have bodies while others + # almost never do. Fail in this case unless the user has + # opted out of sanity checks with allow_nonstandard_methods. 
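+            # For example (an illustrative note, not upstream): a POST with
+            # body=None, or a GET carrying a body, fails the check below.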
+ if ((body_expected and not body_present) or + (body_present and not body_expected)): + raise ValueError( + 'Body must %sbe None for method %s (unless ' + 'allow_nonstandard_methods is true)' % + ('not ' if body_expected else '', request.method)) + + if body_expected or body_present: + if request.method == "GET": + # Even with `allow_nonstandard_methods` we disallow + # GET with a body (because libcurl doesn't allow it + # unless we use CUSTOMREQUEST). While the spec doesn't + # forbid clients from sending a body, it arguably + # disallows the server from doing anything with them. + raise ValueError('Body must be None for GET request') + request_buffer = BytesIO(utf8(request.body or '')) + + def ioctl(cmd): + if cmd == curl.IOCMD_RESTARTREAD: + request_buffer.seek(0) + curl.setopt(pycurl.READFUNCTION, request_buffer.read) + curl.setopt(pycurl.IOCTLFUNCTION, ioctl) + if request.method == "POST": + curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or '')) + else: + curl.setopt(pycurl.UPLOAD, True) + curl.setopt(pycurl.INFILESIZE, len(request.body or '')) + + if request.auth_username is not None: + userpwd = "%s:%s" % (request.auth_username, request.auth_password or '') + + if request.auth_mode is None or request.auth_mode == "basic": + curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC) + elif request.auth_mode == "digest": + curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST) + else: + raise ValueError("Unsupported auth_mode %s" % request.auth_mode) + + curl.setopt(pycurl.USERPWD, native_str(userpwd)) + curl_log.debug("%s %s (username: %r)", request.method, request.url, + request.auth_username) + else: + curl.unsetopt(pycurl.USERPWD) + curl_log.debug("%s %s", request.method, request.url) + + if request.client_cert is not None: + curl.setopt(pycurl.SSLCERT, request.client_cert) + + if request.client_key is not None: + curl.setopt(pycurl.SSLKEY, request.client_key) + + if request.ssl_options is not None: + raise ValueError("ssl_options not supported in curl_httpclient") + + if threading.activeCount() > 1: + # libcurl/pycurl is not thread-safe by default. When multiple threads + # are used, signals should be disabled. This has the side effect + # of disabling DNS timeouts in some environments (when libcurl is + # not linked against ares), so we don't do it when there is only one + # thread. Applications that use many short-lived threads may need + # to set NOSIGNAL manually in a prepare_curl_callback since + # there may not be any other threads running at the time we call + # threading.activeCount. + curl.setopt(pycurl.NOSIGNAL, 1) + if request.prepare_curl_callback is not None: + request.prepare_curl_callback(curl) + + def _curl_header_callback(self, headers, header_callback, header_line): + header_line = native_str(header_line) + if header_callback is not None: + self.io_loop.add_callback(header_callback, header_line) + # header_line as returned by curl includes the end-of-line characters. 
+        # whitespace at the start should be preserved to allow multi-line headers
+        header_line = header_line.rstrip()
+        if header_line.startswith("HTTP/"):
+            headers.clear()
+            try:
+                (__, __, reason) = httputil.parse_response_start_line(header_line)
+                header_line = "X-Http-Reason: %s" % reason
+            except httputil.HTTPInputError:
+                return
+        if not header_line:
+            return
+        headers.parse_line(header_line)
+
+    def _curl_debug(self, debug_type, debug_msg):
+        debug_types = ('I', '<', '>', '<', '>')
+        if debug_type == 0:
+            curl_log.debug('%s', debug_msg.strip())
+        elif debug_type in (1, 2):
+            for line in debug_msg.splitlines():
+                curl_log.debug('%s %s', debug_types[debug_type], line)
+        elif debug_type == 4:
+            curl_log.debug('%s %r', debug_types[debug_type], debug_msg)
+
+
+class CurlError(HTTPError):
+    def __init__(self, errno, message):
+        HTTPError.__init__(self, 599, message)
+        self.errno = errno
+
+
+if __name__ == "__main__":
+    AsyncHTTPClient.configure(CurlAsyncHTTPClient)
+    main()
diff --git a/server/www/packages/packages-common/tornado/escape.py b/server/www/packages/packages-common/tornado/escape.py
new file mode 100644
index 0000000..2f04b46
--- /dev/null
+++ b/server/www/packages/packages-common/tornado/escape.py
@@ -0,0 +1,399 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Escaping/unescaping methods for HTML, JSON, URLs, and others.
+
+Also includes a few other miscellaneous string manipulation functions that
+have crept in over time.
+"""
+
+from __future__ import absolute_import, division, print_function, with_statement
+
+import re
+import sys
+
+from tornado.util import unicode_type, basestring_type, u
+
+try:
+    from urllib.parse import parse_qs as _parse_qs  # py3
+except ImportError:
+    from urlparse import parse_qs as _parse_qs  # Python 2.6+
+
+try:
+    import htmlentitydefs  # py2
+except ImportError:
+    import html.entities as htmlentitydefs  # py3
+
+try:
+    import urllib.parse as urllib_parse  # py3
+except ImportError:
+    import urllib as urllib_parse  # py2
+
+import json
+
+try:
+    unichr
+except NameError:
+    unichr = chr
+
+_XHTML_ESCAPE_RE = re.compile('[&<>"\']')
+_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;',
+                      '\'': '&#39;'}
+
+
+def xhtml_escape(value):
+    """Escapes a string so it is valid within HTML or XML.
+
+    Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``.
+    When used in attribute values the escaped strings must be enclosed
+    in quotes.
+
+    .. versionchanged:: 3.2
+
+       Added the single quote to the list of escaped characters.
+    """
+    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
+                                to_basestring(value))
+
+
+def xhtml_unescape(value):
+    """Un-escapes an XML-escaped string."""
+    return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value))
+
+
+# The fact that json_encode wraps json.dumps is an implementation detail.
+# Please see https://github.com/tornadoweb/tornado/pull/706
+# before sending a pull request that adds **kwargs to this function.
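+# As an illustrative aside (not part of the module): the replace() below
+# means json_encode({"x": "</script>"}) returns '{"x": "<\\/script>"}',
+# keeping embedded JSON from closing a surrounding <script> tag.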
+def json_encode(value):
+    """JSON-encodes the given Python object."""
+    # JSON permits but does not require forward slashes to be escaped.
+    # This is useful when json data is emitted in a <script> tag
+    # in HTML, as it prevents </script> tags from prematurely terminating
+    # the javascript.  Some json libraries do this escaping by default,
+    # although python's standard library does not, so we do it here.
+    # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
+    return json.dumps(value).replace("</", "<\\/")
+
+
+_URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""))
+
+
+def linkify(text, shorten=False, extra_params="",
+            require_protocol=False, permitted_protocols=["http", "https"]):
+    """Converts plain text into HTML with links.
+
+    For example: ``linkify("Hello http://tornadoweb.org!")`` would return
+    ``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!``
+
+    Parameters:
+
+    * ``shorten``: Long urls will be shortened for display.
+
+    * ``extra_params``: Extra text to include in the link tag, or a callable
+      taking the link as an argument and returning the extra text
+      e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``,
+      or::
+
+          def extra_params_cb(url):
+              if url.startswith("http://example.com"):
+                  return 'class="internal"'
+              else:
+                  return 'class="external" rel="nofollow"'
+          linkify(text, extra_params=extra_params_cb)
+
+    * ``require_protocol``: Only linkify urls which include a protocol. If
+      this is False, urls such as www.facebook.com will also be linkified.
+
+    * ``permitted_protocols``: List (or set) of protocols which should be
+      linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp",
+      "mailto"])``. It is very unsafe to include protocols such as
+      ``javascript``.
+    """
+    if extra_params and not callable(extra_params):
+        extra_params = " " + extra_params.strip()
+
+    def make_link(m):
+        url = m.group(1)
+        proto = m.group(2)
+        if require_protocol and not proto:
+            return url  # not protocol, no linkify
+
+        if proto and proto not in permitted_protocols:
+            return url  # bad protocol, no linkify
+
+        href = m.group(1)
+        if not proto:
+            href = "http://" + href   # no proto specified, use http
+
+        if callable(extra_params):
+            params = " " + extra_params(href).strip()
+        else:
+            params = extra_params
+
+        # clip long urls. max_len is just an approximation
+        max_len = 30
+        if shorten and len(url) > max_len:
+            before_clip = url
+            if proto:
+                proto_len = len(proto) + 1 + len(m.group(3) or "")  # +1 for :
+            else:
+                proto_len = 0
+
+            parts = url[proto_len:].split("/")
+            if len(parts) > 1:
+                # Grab the whole host part plus the first bit of the path
+                # The path is usually not that interesting once shortened
+                # (no more slug, etc), so it really just provides a little
+                # extra indication of shortening.
+                url = url[:proto_len] + parts[0] + "/" + \
+                    parts[1][:8].split('?')[0].split('.')[0]
+
+            if len(url) > max_len * 1.5:  # still too long
+                url = url[:max_len]
+
+            if url != before_clip:
+                amp = url.rfind('&')
+                # avoid splitting html char entities
+                if amp > max_len - 5:
+                    url = url[:amp]
+                url += "..."
+
+                if len(url) >= len(before_clip):
+                    url = before_clip
+                else:
+                    # full url is visible on mouse-over (for those who don't
+                    # have a status bar, such as Safari by default)
+                    params += ' title="%s"' % href
+
+        return u('<a href="%s"%s>%s</a>') % (href, params, url)
+
+    # First HTML-escape so that our strings are all safe.
+    # The regex is modified to avoid character entities other than &amp; so
+    # that we won't pick up &quot;, etc.
+    text = _unicode(xhtml_escape(text))
+    return _URL_RE.sub(make_link, text)
+
+
+def _convert_entity(m):
+    if m.group(1) == "#":
+        try:
+            if m.group(2)[:1].lower() == 'x':
+                return unichr(int(m.group(2)[1:], 16))
+            else:
+                return unichr(int(m.group(2)))
+        except ValueError:
+            return "&#%s;" % m.group(2)
+    try:
+        return _HTML_UNICODE_MAP[m.group(2)]
+    except KeyError:
+        return "&%s;" % m.group(2)
+
+
+def _build_unicode_map():
+    unicode_map = {}
+    for name, value in htmlentitydefs.name2codepoint.items():
+        unicode_map[name] = unichr(value)
+    return unicode_map
+
+_HTML_UNICODE_MAP = _build_unicode_map()
diff --git a/server/www/packages/packages-common/tornado/gen.py b/server/www/packages/packages-common/tornado/gen.py
new file mode 100644
index 0000000..bf184e5
--- /dev/null
+++ b/server/www/packages/packages-common/tornado/gen.py
@@ -0,0 +1,1241 @@
+"""``tornado.gen`` is a generator-based interface to make it easier to
+work in an asynchronous environment.  Code using the ``gen`` module
+is technically asynchronous, but it is written as a single generator
+instead of a collection of separate functions.
+
+For example, the following asynchronous handler:
+
+.. testcode::
+
+    class AsyncHandler(RequestHandler):
+        @asynchronous
+        def get(self):
+            http_client = AsyncHTTPClient()
+            http_client.fetch("http://example.com",
+                              callback=self.on_fetch)
+
+        def on_fetch(self, response):
+            do_something_with_response(response)
+            self.render("template.html")
+
+.. testoutput::
+   :hide:
+
+could be written with ``gen`` as:
+
+.. testcode::
+
+    class GenAsyncHandler(RequestHandler):
+        @gen.coroutine
+        def get(self):
+            http_client = AsyncHTTPClient()
+            response = yield http_client.fetch("http://example.com")
+            do_something_with_response(response)
+            self.render("template.html")
+
+.. testoutput::
+   :hide:
+
+Most asynchronous functions in Tornado return a `.Future`;
+yielding this object returns its `~.Future.result`.
+
+You can also yield a list or dict of ``Futures``, which will be
+started at the same time and run in parallel; a list or dict of results will
+be returned when they are all finished:
+
+.. testcode::
+
+    @gen.coroutine
+    def get(self):
+        http_client = AsyncHTTPClient()
+        response1, response2 = yield [http_client.fetch(url1),
+                                      http_client.fetch(url2)]
+        response_dict = yield dict(response3=http_client.fetch(url3),
+                                   response4=http_client.fetch(url4))
+        response3 = response_dict['response3']
+        response4 = response_dict['response4']
+
+.. testoutput::
+   :hide:
+
+If the `~functools.singledispatch` library is available (standard in
+Python 3.4, available via the `singledispatch
+<https://pypi.python.org/pypi/singledispatch>`_ package on older
+versions), additional types of objects may be yielded.  Tornado includes
+support for ``asyncio.Future`` and Twisted's ``Deferred`` class when
+``tornado.platform.asyncio`` and ``tornado.platform.twisted`` are imported.
+See the `convert_yielded` function to extend this mechanism.
+
+.. versionchanged:: 3.2
+   Dict support added.
+
+.. versionchanged:: 4.1
+   Support added for yielding ``asyncio`` Futures and Twisted Deferreds
+   via ``singledispatch``.
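+
+As a further illustrative sketch (``fetch_json`` is a hypothetical
+coroutine), coroutines compose: one coroutine can call another, yield the
+returned `.Future`, and catch exceptions raised by it:
+
+.. testcode::
+
+    @gen.coroutine
+    def fetch_or_none(url):
+        try:
+            result = yield fetch_json(url)
+        except Exception:
+            result = None
+        raise gen.Return(result)
+
+.. testoutput::
+   :hide: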
+
+"""
+from __future__ import absolute_import, division, print_function, with_statement
+
+import collections
+import functools
+import itertools
+import os
+import sys
+import textwrap
+import types
+
+from tornado.concurrent import Future, TracebackFuture, is_future, chain_future
+from tornado.ioloop import IOLoop
+from tornado.log import app_log
+from tornado import stack_context
+from tornado.util import raise_exc_info
+
+try:
+    try:
+        from functools import singledispatch  # py34+
+    except ImportError:
+        from singledispatch import singledispatch  # backport
+except ImportError:
+    # In most cases, singledispatch is required (to avoid
+    # difficult-to-diagnose problems in which the functionality
+    # available differs depending on which invisible packages are
+    # installed). However, in Google App Engine third-party
+    # dependencies are more trouble so we allow this module to be
+    # imported without it.
+    if 'APPENGINE_RUNTIME' not in os.environ:
+        raise
+    singledispatch = None
+
+try:
+    try:
+        from collections.abc import Generator as GeneratorType  # py35+
+    except ImportError:
+        from backports_abc import Generator as GeneratorType
+
+    try:
+        from inspect import isawaitable  # py35+
+    except ImportError:
+        from backports_abc import isawaitable
+except ImportError:
+    if 'APPENGINE_RUNTIME' not in os.environ:
+        raise
+    from types import GeneratorType
+
+    def isawaitable(x):
+        return False
+
+try:
+    import builtins  # py3
+except ImportError:
+    import __builtin__ as builtins
+
+
+class KeyReuseError(Exception):
+    pass
+
+
+class UnknownKeyError(Exception):
+    pass
+
+
+class LeakedCallbackError(Exception):
+    pass
+
+
+class BadYieldError(Exception):
+    pass
+
+
+class ReturnValueIgnoredError(Exception):
+    pass
+
+
+class TimeoutError(Exception):
+    """Exception raised by ``with_timeout``."""
+
+
+def _value_from_stopiteration(e):
+    try:
+        # StopIteration has a value attribute beginning in py33.
+        # So does our Return class.
+        return e.value
+    except AttributeError:
+        pass
+    try:
+        # Cython backports coroutine functionality by putting the value in
+        # e.args[0].
+        return e.args[0]
+    except (AttributeError, IndexError):
+        return None
+
+
+def engine(func):
+    """Callback-oriented decorator for asynchronous generators.
+
+    This is an older interface; for new code that does not need to be
+    compatible with versions of Tornado older than 3.0 the
+    `coroutine` decorator is recommended instead.
+
+    This decorator is similar to `coroutine`, except it does not
+    return a `.Future` and the ``callback`` argument is not treated
+    specially.
+
+    In most cases, functions decorated with `engine` should take
+    a ``callback`` argument and invoke it with their result when
+    they are finished.  One notable exception is the
+    `~tornado.web.RequestHandler` :ref:`HTTP verb methods <verbs>`,
+    which use ``self.finish()`` in place of a callback argument.
+    """
+    func = _make_coroutine_wrapper(func, replace_callback=False)
+
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        future = func(*args, **kwargs)
+
+        def final_callback(future):
+            if future.result() is not None:
+                raise ReturnValueIgnoredError(
+                    "@gen.engine functions cannot return values: %r" %
+                    (future.result(),))
+        # The engine interface doesn't give us any way to return
+        # errors but to raise them into the stack context.
+        # Save the stack context here to use when the Future has resolved.
+        future.add_done_callback(stack_context.wrap(final_callback))
+    return wrapper
+
+
+def coroutine(func, replace_callback=True):
+    """Decorator for asynchronous generators.
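+
+    A minimal illustrative sketch::
+
+        @gen.coroutine
+        def add_async(a, b):
+            yield gen.moment   # let the IOLoop run one iteration
+            raise gen.Return(a + b)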
+
+    Any generator that yields objects from this module must be wrapped
+    in either this decorator or `engine`.
+
+    Coroutines may "return" by raising the special exception
+    `Return(value) <Return>`.  In Python 3.3+, it is also possible for
+    the function to simply use the ``return value`` statement (prior to
+    Python 3.3 generators were not allowed to also return values).
+    In all versions of Python a coroutine that simply wishes to exit
+    early may use the ``return`` statement without a value.
+
+    Functions with this decorator return a `.Future`.  Additionally,
+    they may be called with a ``callback`` keyword argument, which
+    will be invoked with the future's result when it resolves.  If the
+    coroutine fails, the callback will not be run and an exception
+    will be raised into the surrounding `.StackContext`.  The
+    ``callback`` argument is not visible inside the decorated
+    function; it is handled by the decorator itself.
+
+    From the caller's perspective, ``@gen.coroutine`` is similar to
+    the combination of ``@return_future`` and ``@gen.engine``.
+
+    .. warning::
+
+       When exceptions occur inside a coroutine, the exception
+       information will be stored in the `.Future` object. You must
+       examine the result of the `.Future` object, or the exception
+       may go unnoticed by your code. This means yielding the function
+       if called from another coroutine, using something like
+       `.IOLoop.run_sync` for top-level calls, or passing the `.Future`
+       to `.IOLoop.add_future`.
+
+    """
+    return _make_coroutine_wrapper(func, replace_callback=True)
+
+
+def _make_coroutine_wrapper(func, replace_callback):
+    """The inner workings of ``@gen.coroutine`` and ``@gen.engine``.
+
+    The two decorators differ in their treatment of the ``callback``
+    argument, so we cannot simply implement ``@engine`` in terms of
+    ``@coroutine``.
+    """
+    # On Python 3.5, set the coroutine flag on our generator, to allow it
+    # to be used with 'await'.
+    if hasattr(types, 'coroutine'):
+        func = types.coroutine(func)
+
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        future = TracebackFuture()
+
+        if replace_callback and 'callback' in kwargs:
+            callback = kwargs.pop('callback')
+            IOLoop.current().add_future(
+                future, lambda future: callback(future.result()))
+
+        try:
+            result = func(*args, **kwargs)
+        except (Return, StopIteration) as e:
+            result = _value_from_stopiteration(e)
+        except Exception:
+            future.set_exc_info(sys.exc_info())
+            return future
+        else:
+            if isinstance(result, GeneratorType):
+                # Inline the first iteration of Runner.run.  This lets us
+                # avoid the cost of creating a Runner when the coroutine
+                # never actually yields, which in turn allows us to
+                # use "optional" coroutines in critical path code without
+                # performance penalty for the synchronous case.
+                try:
+                    orig_stack_contexts = stack_context._state.contexts
+                    yielded = next(result)
+                    if stack_context._state.contexts is not orig_stack_contexts:
+                        yielded = TracebackFuture()
+                        yielded.set_exception(
+                            stack_context.StackContextInconsistentError(
+                                'stack_context inconsistency (probably caused '
+                                'by yield within a "with StackContext" block)'))
+                except (StopIteration, Return) as e:
+                    future.set_result(_value_from_stopiteration(e))
+                except Exception:
+                    future.set_exc_info(sys.exc_info())
+                else:
+                    Runner(result, future, yielded)
+                try:
+                    return future
+                finally:
+                    # Subtle memory optimization: if next() raised an exception,
+                    # the future's exc_info contains a traceback which
+                    # includes this stack frame.
This creates a cycle, + # which will be collected at the next full GC but has + # been shown to greatly increase memory usage of + # benchmarks (relative to the refcount-based scheme + # used in the absence of cycles). We can avoid the + # cycle by clearing the local variable after we return it. + future = None + future.set_result(result) + return future + return wrapper + + +class Return(Exception): + """Special exception to return a value from a `coroutine`. + + If this exception is raised, its value argument is used as the + result of the coroutine:: + + @gen.coroutine + def fetch_json(url): + response = yield AsyncHTTPClient().fetch(url) + raise gen.Return(json_decode(response.body)) + + In Python 3.3, this exception is no longer necessary: the ``return`` + statement can be used directly to return a value (previously + ``yield`` and ``return`` with a value could not be combined in the + same function). + + By analogy with the return statement, the value argument is optional, + but it is never necessary to ``raise gen.Return()``. The ``return`` + statement can be used with no arguments instead. + """ + def __init__(self, value=None): + super(Return, self).__init__() + self.value = value + # Cython recognizes subclasses of StopIteration with a .args tuple. + self.args = (value,) + + +class WaitIterator(object): + """Provides an iterator to yield the results of futures as they finish. + + Yielding a set of futures like this: + + ``results = yield [future1, future2]`` + + pauses the coroutine until both ``future1`` and ``future2`` + return, and then restarts the coroutine with the results of both + futures. If either future is an exception, the expression will + raise that exception and all the results will be lost. + + If you need to get the result of each future as soon as possible, + or if you need the result of some futures even if others produce + errors, you can use ``WaitIterator``:: + + wait_iterator = gen.WaitIterator(future1, future2) + while not wait_iterator.done(): + try: + result = yield wait_iterator.next() + except Exception as e: + print("Error {} from {}".format(e, wait_iterator.current_future)) + else: + print("Result {} received from {} at {}".format( + result, wait_iterator.current_future, + wait_iterator.current_index)) + + Because results are returned as soon as they are available the + output from the iterator *will not be in the same order as the + input arguments*. If you need to know which future produced the + current result, you can use the attributes + ``WaitIterator.current_future``, or ``WaitIterator.current_index`` + to get the index of the future from the input list. (if keyword + arguments were used in the construction of the `WaitIterator`, + ``current_index`` will use the corresponding keyword). + + On Python 3.5, `WaitIterator` implements the async iterator + protocol, so it can be used with the ``async for`` statement (note + that in this version the entire iteration is aborted if any value + raises an exception, while the previous example can continue past + individual errors):: + + async for result in gen.WaitIterator(future1, future2): + print("Result {} received from {} at {}".format( + result, wait_iterator.current_future, + wait_iterator.current_index)) + + .. versionadded:: 4.1 + + .. versionchanged:: 4.3 + Added ``async for`` support in Python 3.5. 
+
+    """
+    def __init__(self, *args, **kwargs):
+        if args and kwargs:
+            raise ValueError(
+                "You must provide args or kwargs, not both")
+
+        if kwargs:
+            self._unfinished = dict((f, k) for (k, f) in kwargs.items())
+            futures = list(kwargs.values())
+        else:
+            self._unfinished = dict((f, i) for (i, f) in enumerate(args))
+            futures = args
+
+        self._finished = collections.deque()
+        self.current_index = self.current_future = None
+        self._running_future = None
+
+        for future in futures:
+            future.add_done_callback(self._done_callback)
+
+    def done(self):
+        """Returns True if this iterator has no more results."""
+        if self._finished or self._unfinished:
+            return False
+        # Clear the 'current' values when iteration is done.
+        self.current_index = self.current_future = None
+        return True
+
+    def next(self):
+        """Returns a `.Future` that will yield the next available result.
+
+        Note that this `.Future` will not be the same object as any of
+        the inputs.
+        """
+        self._running_future = TracebackFuture()
+
+        if self._finished:
+            self._return_result(self._finished.popleft())
+
+        return self._running_future
+
+    def _done_callback(self, done):
+        if self._running_future and not self._running_future.done():
+            self._return_result(done)
+        else:
+            self._finished.append(done)
+
+    def _return_result(self, done):
+        """Called to set the returned future's state to that of the future
+        we yielded, and to set the current future for the iterator.
+        """
+        chain_future(done, self._running_future)
+
+        self.current_future = done
+        self.current_index = self._unfinished.pop(done)
+
+    @coroutine
+    def __aiter__(self):
+        raise Return(self)
+
+    def __anext__(self):
+        if self.done():
+            # Lookup by name to silence pyflakes on older versions.
+            raise getattr(builtins, 'StopAsyncIteration')()
+        return self.next()
+
+
+class YieldPoint(object):
+    """Base class for objects that may be yielded from the generator.
+
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
+    """
+    def start(self, runner):
+        """Called by the runner after the generator has yielded.
+
+        No other methods will be called on this object before ``start``.
+        """
+        raise NotImplementedError()
+
+    def is_ready(self):
+        """Called by the runner to determine whether to resume the generator.
+
+        Returns a boolean; may be called more than once.
+        """
+        raise NotImplementedError()
+
+    def get_result(self):
+        """Returns the value to use as the result of the yield expression.
+
+        This method will only be called once, and only after `is_ready`
+        has returned true.
+        """
+        raise NotImplementedError()
+
+
+class Callback(YieldPoint):
+    """Returns a callable object that will allow a matching `Wait` to proceed.
+
+    The key may be any value suitable for use as a dictionary key, and is
+    used to match ``Callbacks`` to their corresponding ``Waits``.  The key
+    must be unique among outstanding callbacks within a single run of the
+    generator function, but may be reused across different runs of the same
+    function (so constants generally work fine).
+
+    The callback may be called with zero or one arguments; if an argument
+    is given it will be returned by `Wait`.
+
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
+    """
+    def __init__(self, key):
+        self.key = key
+
+    def start(self, runner):
+        self.runner = runner
+        runner.register_callback(self.key)
+
+    def is_ready(self):
+        return True
+
+    def get_result(self):
+        return self.runner.result_callback(self.key)
+
+
+class Wait(YieldPoint):
+    """Returns the argument passed to the result of a previous `Callback`.
+
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
+    """
+    def __init__(self, key):
+        self.key = key
+
+    def start(self, runner):
+        self.runner = runner
+
+    def is_ready(self):
+        return self.runner.is_ready(self.key)
+
+    def get_result(self):
+        return self.runner.pop_result(self.key)
+
+
+class WaitAll(YieldPoint):
+    """Returns the results of multiple previous `Callbacks <Callback>`.
+
+    The argument is a sequence of `Callback` keys, and the result is
+    a list of results in the same order.
+
+    `WaitAll` is equivalent to yielding a list of `Wait` objects.
+
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
+    """
+    def __init__(self, keys):
+        self.keys = keys
+
+    def start(self, runner):
+        self.runner = runner
+
+    def is_ready(self):
+        return all(self.runner.is_ready(key) for key in self.keys)
+
+    def get_result(self):
+        return [self.runner.pop_result(key) for key in self.keys]
+
+
+def Task(func, *args, **kwargs):
+    """Adapts a callback-based asynchronous function for use in coroutines.
+
+    Takes a function (and optional additional arguments) and runs it with
+    those arguments plus a ``callback`` keyword argument.  The argument passed
+    to the callback is returned as the result of the yield expression.
+
+    .. versionchanged:: 4.0
+       ``gen.Task`` is now a function that returns a `.Future`, instead of
+       a subclass of `YieldPoint`.  It still behaves the same way when
+       yielded.
+    """
+    future = Future()
+
+    def handle_exception(typ, value, tb):
+        if future.done():
+            return False
+        future.set_exc_info((typ, value, tb))
+        return True
+
+    def set_result(result):
+        if future.done():
+            return
+        future.set_result(result)
+    with stack_context.ExceptionStackContext(handle_exception):
+        func(*args, callback=_argument_adapter(set_result), **kwargs)
+    return future
+
+
+class YieldFuture(YieldPoint):
+    def __init__(self, future, io_loop=None):
+        """Adapts a `.Future` to the `YieldPoint` interface.
+
+        .. versionchanged:: 4.1
+           The ``io_loop`` argument is deprecated.
+        """
+        self.future = future
+        self.io_loop = io_loop or IOLoop.current()
+
+    def start(self, runner):
+        if not self.future.done():
+            self.runner = runner
+            self.key = object()
+            runner.register_callback(self.key)
+            self.io_loop.add_future(self.future, runner.result_callback(self.key))
+        else:
+            self.runner = None
+            self.result_fn = self.future.result
+
+    def is_ready(self):
+        if self.runner is not None:
+            return self.runner.is_ready(self.key)
+        else:
+            return True
+
+    def get_result(self):
+        if self.runner is not None:
+            return self.runner.pop_result(self.key).result()
+        else:
+            return self.result_fn()
+
+
+def _contains_yieldpoint(children):
+    """Returns True if ``children`` contains any YieldPoints.
+
+    ``children`` may be a dict or a list, as used by `MultiYieldPoint`
+    and `multi_future`.
+    """
+    if isinstance(children, dict):
+        return any(isinstance(i, YieldPoint) for i in children.values())
+    if isinstance(children, list):
+        return any(isinstance(i, YieldPoint) for i in children)
+    return False
+
+
+def multi(children, quiet_exceptions=()):
+    """Runs multiple asynchronous operations in parallel.
+
+    ``children`` may either be a list or a dict whose values are
+    yieldable objects.  ``multi()`` returns a new yieldable
+    object that resolves to a parallel structure containing their
+    results.  If ``children`` is a list, the result is a list of
+    results in the same order; if it is a dict, the result is a dict
+    with the same keys.
+
+    That is, ``results = yield multi(list_of_futures)`` is equivalent
+    to::
+
+        results = []
+        for future in list_of_futures:
+            results.append(yield future)
+
+    If any children raise exceptions, ``multi()`` will raise the first
+    one. All others will be logged, unless they are of types
+    contained in the ``quiet_exceptions`` argument.
+
+    If any of the inputs are `YieldPoints <YieldPoint>`, the returned
+    yieldable object is a `YieldPoint`. Otherwise, returns a `.Future`.
+    This means that the result of `multi` can be used in a native
+    coroutine if and only if all of its children can be.
+
+    In a ``yield``-based coroutine, it is not normally necessary to
+    call this function directly, since the coroutine runner will
+    do it automatically when a list or dict is yielded. However,
+    it is necessary in ``await``-based coroutines, or to pass
+    the ``quiet_exceptions`` argument.
+
+    This function is available under the names ``multi()`` and ``Multi()``
+    for historical reasons.
+
+    .. versionchanged:: 4.2
+       If multiple yieldables fail, any exceptions after the first
+       (which is raised) will be logged. Added the ``quiet_exceptions``
+       argument to suppress this logging for selected exception types.
+
+    .. versionchanged:: 4.3
+       Replaced the class ``Multi`` and the function ``multi_future``
+       with a unified function ``multi``. Added support for yieldables
+       other than `YieldPoint` and `.Future`.
+
+    """
+    if _contains_yieldpoint(children):
+        return MultiYieldPoint(children, quiet_exceptions=quiet_exceptions)
+    else:
+        return multi_future(children, quiet_exceptions=quiet_exceptions)
+
+Multi = multi
+
+
+class MultiYieldPoint(YieldPoint):
+    """Runs multiple asynchronous operations in parallel.
+
+    This class is similar to `multi`, but it always creates a stack
+    context even when no children require it. It is not compatible with
+    native coroutines.
+
+    .. versionchanged:: 4.2
+       If multiple ``YieldPoints`` fail, any exceptions after the first
+       (which is raised) will be logged. Added the ``quiet_exceptions``
+       argument to suppress this logging for selected exception types.
+
+    .. versionchanged:: 4.3
+       Renamed from ``Multi`` to ``MultiYieldPoint``. The name ``Multi``
+       remains as an alias for the equivalent `multi` function.
+
+    .. deprecated:: 4.3
+       Use `multi` instead.
+    """
+    def __init__(self, children, quiet_exceptions=()):
+        self.keys = None
+        if isinstance(children, dict):
+            self.keys = list(children.keys())
+            children = children.values()
+        self.children = []
+        for i in children:
+            if not isinstance(i, YieldPoint):
+                i = convert_yielded(i)
+            if is_future(i):
+                i = YieldFuture(i)
+            self.children.append(i)
+        assert all(isinstance(i, YieldPoint) for i in self.children)
+        self.unfinished_children = set(self.children)
+        self.quiet_exceptions = quiet_exceptions
+
+    def start(self, runner):
+        for i in self.children:
+            i.start(runner)
+
+    def is_ready(self):
+        finished = list(itertools.takewhile(
+            lambda i: i.is_ready(), self.unfinished_children))
+        self.unfinished_children.difference_update(finished)
+        return not self.unfinished_children
+
+    def get_result(self):
+        result_list = []
+        exc_info = None
+        for f in self.children:
+            try:
+                result_list.append(f.get_result())
+            except Exception as e:
+                if exc_info is None:
+                    exc_info = sys.exc_info()
+                else:
+                    if not isinstance(e, self.quiet_exceptions):
+                        app_log.error("Multiple exceptions in yield list",
+                                      exc_info=True)
+        if exc_info is not None:
+            raise_exc_info(exc_info)
+        if self.keys is not None:
+            return dict(zip(self.keys, result_list))
+        else:
+            return list(result_list)
+
+
+def multi_future(children, quiet_exceptions=()):
+    """Wait for multiple asynchronous futures in parallel.
+
+    This function is similar to `multi`, but does not support
+    `YieldPoints <YieldPoint>`.
+
+    .. versionadded:: 4.0
+
+    .. versionchanged:: 4.2
+       If multiple ``Futures`` fail, any exceptions after the first (which is
+       raised) will be logged. Added the ``quiet_exceptions``
+       argument to suppress this logging for selected exception types.
+
+    .. deprecated:: 4.3
+       Use `multi` instead.
+    """
+    if isinstance(children, dict):
+        keys = list(children.keys())
+        children = children.values()
+    else:
+        keys = None
+    children = list(map(convert_yielded, children))
+    assert all(is_future(i) for i in children)
+    unfinished_children = set(children)
+
+    future = Future()
+    if not children:
+        future.set_result({} if keys is not None else [])
+
+    def callback(f):
+        unfinished_children.remove(f)
+        if not unfinished_children:
+            result_list = []
+            for f in children:
+                try:
+                    result_list.append(f.result())
+                except Exception as e:
+                    if future.done():
+                        if not isinstance(e, quiet_exceptions):
+                            app_log.error("Multiple exceptions in yield list",
+                                          exc_info=True)
+                    else:
+                        future.set_exc_info(sys.exc_info())
+            if not future.done():
+                if keys is not None:
+                    future.set_result(dict(zip(keys, result_list)))
+                else:
+                    future.set_result(result_list)
+
+    listening = set()
+    for f in children:
+        if f not in listening:
+            listening.add(f)
+            f.add_done_callback(callback)
+    return future
+
+
+def maybe_future(x):
+    """Converts ``x`` into a `.Future`.
+
+    If ``x`` is already a `.Future`, it is simply returned; otherwise
+    it is wrapped in a new `.Future`.  This is suitable for use as
+    ``result = yield gen.maybe_future(f())`` when you don't know whether
+    ``f()`` returns a `.Future` or not.
+
+    .. deprecated:: 4.3
+       This function only handles ``Futures``, not other yieldable objects.
+       Instead of `maybe_future`, check for the non-future result types
+       you expect (often just ``None``), and ``yield`` anything unknown.
+    """
+    if is_future(x):
+        return x
+    else:
+        fut = Future()
+        fut.set_result(x)
+        return fut
+
+
+def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()):
+    """Wraps a `.Future` in a timeout.
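+
+    An illustrative sketch (``some_future`` here is a hypothetical
+    pending `.Future`)::
+
+        result = yield gen.with_timeout(
+            datetime.timedelta(seconds=5), some_future)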
+ + Raises `TimeoutError` if the input future does not complete before + ``timeout``, which may be specified in any form allowed by + `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time + relative to `.IOLoop.time`) + + If the wrapped `.Future` fails after it has timed out, the exception + will be logged unless it is of a type contained in ``quiet_exceptions`` + (which may be an exception type or a sequence of types). + + Currently only supports Futures, not other `YieldPoint` classes. + + .. versionadded:: 4.0 + + .. versionchanged:: 4.1 + Added the ``quiet_exceptions`` argument and the logging of unhandled + exceptions. + """ + # TODO: allow yield points in addition to futures? + # Tricky to do with stack_context semantics. + # + # It's tempting to optimize this by cancelling the input future on timeout + # instead of creating a new one, but A) we can't know if we are the only + # one waiting on the input future, so cancelling it might disrupt other + # callers and B) concurrent futures can only be cancelled while they are + # in the queue, so cancellation cannot reliably bound our waiting time. + result = Future() + chain_future(future, result) + if io_loop is None: + io_loop = IOLoop.current() + + def error_callback(future): + try: + future.result() + except Exception as e: + if not isinstance(e, quiet_exceptions): + app_log.error("Exception in Future %r after timeout", + future, exc_info=True) + + def timeout_callback(): + result.set_exception(TimeoutError("Timeout")) + # In case the wrapped future goes on to fail, log it. + future.add_done_callback(error_callback) + timeout_handle = io_loop.add_timeout( + timeout, timeout_callback) + if isinstance(future, Future): + # We know this future will resolve on the IOLoop, so we don't + # need the extra thread-safety of IOLoop.add_future (and we also + # don't care about StackContext here. + future.add_done_callback( + lambda future: io_loop.remove_timeout(timeout_handle)) + else: + # concurrent.futures.Futures may resolve on any thread, so we + # need to route them back to the IOLoop. + io_loop.add_future( + future, lambda future: io_loop.remove_timeout(timeout_handle)) + return result + + +def sleep(duration): + """Return a `.Future` that resolves after the given number of seconds. + + When used with ``yield`` in a coroutine, this is a non-blocking + analogue to `time.sleep` (which should not be used in coroutines + because it is blocking):: + + yield gen.sleep(0.5) + + Note that calling this function on its own does nothing; you must + wait on the `.Future` it returns (usually by yielding it). + + .. versionadded:: 4.1 + """ + f = Future() + IOLoop.current().call_later(duration, lambda: f.set_result(None)) + return f + + +_null_future = Future() +_null_future.set_result(None) + +moment = Future() +moment.__doc__ = \ + """A special object which may be yielded to allow the IOLoop to run for +one iteration. + +This is not needed in normal use but it can be helpful in long-running +coroutines that are likely to yield Futures that are ready instantly. + +Usage: ``yield gen.moment`` + +.. versionadded:: 4.0 +""" +moment.set_result(None) + + +class Runner(object): + """Internal implementation of `tornado.gen.engine`. + + Maintains information about pending callbacks and their results. 
+ + The results of the generator are stored in ``result_future`` (a + `.TracebackFuture`) + """ + def __init__(self, gen, result_future, first_yielded): + self.gen = gen + self.result_future = result_future + self.future = _null_future + self.yield_point = None + self.pending_callbacks = None + self.results = None + self.running = False + self.finished = False + self.had_exception = False + self.io_loop = IOLoop.current() + # For efficiency, we do not create a stack context until we + # reach a YieldPoint (stack contexts are required for the historical + # semantics of YieldPoints, but not for Futures). When we have + # done so, this field will be set and must be called at the end + # of the coroutine. + self.stack_context_deactivate = None + if self.handle_yield(first_yielded): + self.run() + + def register_callback(self, key): + """Adds ``key`` to the list of callbacks.""" + if self.pending_callbacks is None: + # Lazily initialize the old-style YieldPoint data structures. + self.pending_callbacks = set() + self.results = {} + if key in self.pending_callbacks: + raise KeyReuseError("key %r is already pending" % (key,)) + self.pending_callbacks.add(key) + + def is_ready(self, key): + """Returns true if a result is available for ``key``.""" + if self.pending_callbacks is None or key not in self.pending_callbacks: + raise UnknownKeyError("key %r is not pending" % (key,)) + return key in self.results + + def set_result(self, key, result): + """Sets the result for ``key`` and attempts to resume the generator.""" + self.results[key] = result + if self.yield_point is not None and self.yield_point.is_ready(): + try: + self.future.set_result(self.yield_point.get_result()) + except: + self.future.set_exc_info(sys.exc_info()) + self.yield_point = None + self.run() + + def pop_result(self, key): + """Returns the result for ``key`` and unregisters it.""" + self.pending_callbacks.remove(key) + return self.results.pop(key) + + def run(self): + """Starts or resumes the generator, running until it reaches a + yield point that is not ready. + """ + if self.running or self.finished: + return + try: + self.running = True + while True: + future = self.future + if not future.done(): + return + self.future = None + try: + orig_stack_contexts = stack_context._state.contexts + exc_info = None + + try: + value = future.result() + except Exception: + self.had_exception = True + exc_info = sys.exc_info() + + if exc_info is not None: + yielded = self.gen.throw(*exc_info) + exc_info = None + else: + yielded = self.gen.send(value) + + if stack_context._state.contexts is not orig_stack_contexts: + self.gen.throw( + stack_context.StackContextInconsistentError( + 'stack_context inconsistency (probably caused ' + 'by yield within a "with StackContext" block)')) + except (StopIteration, Return) as e: + self.finished = True + self.future = _null_future + if self.pending_callbacks and not self.had_exception: + # If we ran cleanly without waiting on all callbacks + # raise an error (really more of a warning). If we + # had an exception then some callbacks may have been + # orphaned, so skip the check in that case. 
+ raise LeakedCallbackError( + "finished without waiting for callbacks %r" % + self.pending_callbacks) + self.result_future.set_result(_value_from_stopiteration(e)) + self.result_future = None + self._deactivate_stack_context() + return + except Exception: + self.finished = True + self.future = _null_future + self.result_future.set_exc_info(sys.exc_info()) + self.result_future = None + self._deactivate_stack_context() + return + if not self.handle_yield(yielded): + return + finally: + self.running = False + + def handle_yield(self, yielded): + # Lists containing YieldPoints require stack contexts; + # other lists are handled in convert_yielded. + if _contains_yieldpoint(yielded): + yielded = multi(yielded) + + if isinstance(yielded, YieldPoint): + # YieldPoints are too closely coupled to the Runner to go + # through the generic convert_yielded mechanism. + self.future = TracebackFuture() + + def start_yield_point(): + try: + yielded.start(self) + if yielded.is_ready(): + self.future.set_result( + yielded.get_result()) + else: + self.yield_point = yielded + except Exception: + self.future = TracebackFuture() + self.future.set_exc_info(sys.exc_info()) + + if self.stack_context_deactivate is None: + # Start a stack context if this is the first + # YieldPoint we've seen. + with stack_context.ExceptionStackContext( + self.handle_exception) as deactivate: + self.stack_context_deactivate = deactivate + + def cb(): + start_yield_point() + self.run() + self.io_loop.add_callback(cb) + return False + else: + start_yield_point() + else: + try: + self.future = convert_yielded(yielded) + except BadYieldError: + self.future = TracebackFuture() + self.future.set_exc_info(sys.exc_info()) + + if not self.future.done() or self.future is moment: + self.io_loop.add_future( + self.future, lambda f: self.run()) + return False + return True + + def result_callback(self, key): + return stack_context.wrap(_argument_adapter( + functools.partial(self.set_result, key))) + + def handle_exception(self, typ, value, tb): + if not self.running and not self.finished: + self.future = TracebackFuture() + self.future.set_exc_info((typ, value, tb)) + self.run() + return True + else: + return False + + def _deactivate_stack_context(self): + if self.stack_context_deactivate is not None: + self.stack_context_deactivate() + self.stack_context_deactivate = None + +Arguments = collections.namedtuple('Arguments', ['args', 'kwargs']) + + +def _argument_adapter(callback): + """Returns a function that when invoked runs ``callback`` with one arg. + + If the function returned by this function is called with exactly + one argument, that argument is passed to ``callback``. Otherwise + the args tuple and kwargs dict are wrapped in an `Arguments` object. + """ + def wrapper(*args, **kwargs): + if kwargs or len(args) > 1: + callback(Arguments(args, kwargs)) + elif args: + callback(args[0]) + else: + callback(None) + return wrapper + +# Convert Awaitables into Futures. It is unfortunately possible +# to have infinite recursion here if those Awaitables assume that +# we're using a different coroutine runner and yield objects +# we don't understand. If that happens, the solution is to +# register that runner's yieldable objects with convert_yielded. +if sys.version_info >= (3, 3): + exec(textwrap.dedent(""" + @coroutine + def _wrap_awaitable(x): + if hasattr(x, '__await__'): + x = x.__await__() + return (yield from x) + """)) +else: + # Py2-compatible version for use with Cython. + # Copied from PEP 380. 
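+    # Descriptive note: the loop below manually implements ``yield from``
+    # semantics: values yielded by the awaitable are re-yielded, send(),
+    # throw() and close() are forwarded to it, and the terminating
+    # StopIteration value is converted into this module's Return.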
+ @coroutine + def _wrap_awaitable(x): + if hasattr(x, '__await__'): + _i = x.__await__() + else: + _i = iter(x) + try: + _y = next(_i) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + else: + while 1: + try: + _s = yield _y + except GeneratorExit as _e: + try: + _m = _i.close + except AttributeError: + pass + else: + _m() + raise _e + except BaseException as _e: + _x = sys.exc_info() + try: + _m = _i.throw + except AttributeError: + raise _e + else: + try: + _y = _m(*_x) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + break + else: + try: + if _s is None: + _y = next(_i) + else: + _y = _i.send(_s) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + break + raise Return(_r) + + +def convert_yielded(yielded): + """Convert a yielded object into a `.Future`. + + The default implementation accepts lists, dictionaries, and Futures. + + If the `~functools.singledispatch` library is available, this function + may be extended to support additional types. For example:: + + @convert_yielded.register(asyncio.Future) + def _(asyncio_future): + return tornado.platform.asyncio.to_tornado_future(asyncio_future) + + .. versionadded:: 4.1 + """ + # Lists and dicts containing YieldPoints were handled earlier. + if isinstance(yielded, (list, dict)): + return multi(yielded) + elif is_future(yielded): + return yielded + elif isawaitable(yielded): + return _wrap_awaitable(yielded) + else: + raise BadYieldError("yielded unknown object %r" % (yielded,)) + +if singledispatch is not None: + convert_yielded = singledispatch(convert_yielded) + + try: + # If we can import t.p.asyncio, do it for its side effect + # (registering asyncio.Future with convert_yielded). + # It's ugly to do this here, but it prevents a cryptic + # infinite recursion in _wrap_awaitable. + # Note that even with this, asyncio integration is unlikely + # to work unless the application also configures AsyncIOLoop, + # but at least the error messages in that case are more + # comprehensible than a stack overflow. + import tornado.platform.asyncio + except ImportError: + pass + else: + # Reference the imported module to make pyflakes happy. + tornado diff --git a/server/www/packages/packages-common/tornado/http1connection.py b/server/www/packages/packages-common/tornado/http1connection.py new file mode 100644 index 0000000..1c57706 --- /dev/null +++ b/server/www/packages/packages-common/tornado/http1connection.py @@ -0,0 +1,722 @@ +#!/usr/bin/env python +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Client and server implementations of HTTP/1.x. + +.. 
versionadded:: 4.0
+"""
+
+from __future__ import absolute_import, division, print_function, with_statement
+
+import re
+
+from tornado.concurrent import Future
+from tornado.escape import native_str, utf8
+from tornado import gen
+from tornado import httputil
+from tornado import iostream
+from tornado.log import gen_log, app_log
+from tornado import stack_context
+from tornado.util import GzipDecompressor
+
+
+class _QuietException(Exception):
+    def __init__(self):
+        pass
+
+
+class _ExceptionLoggingContext(object):
+    """Used with the ``with`` statement when calling delegate methods to
+    log any exceptions with the given logger. Any exceptions caught are
+    converted to _QuietException.
+    """
+    def __init__(self, logger):
+        self.logger = logger
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, typ, value, tb):
+        if value is not None:
+            self.logger.error("Uncaught exception", exc_info=(typ, value, tb))
+            raise _QuietException
+
+
+class HTTP1ConnectionParameters(object):
+    """Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`.
+    """
+    def __init__(self, no_keep_alive=False, chunk_size=None,
+                 max_header_size=None, header_timeout=None, max_body_size=None,
+                 body_timeout=None, decompress=False):
+        """
+        :arg bool no_keep_alive: If true, always close the connection after
+            one request.
+        :arg int chunk_size: how much data to read into memory at once
+        :arg int max_header_size: maximum amount of data for HTTP headers
+        :arg float header_timeout: how long to wait for all headers (seconds)
+        :arg int max_body_size: maximum amount of data for body
+        :arg float body_timeout: how long to wait while reading body (seconds)
+        :arg bool decompress: if true, decode incoming
+            ``Content-Encoding: gzip``
+        """
+        self.no_keep_alive = no_keep_alive
+        self.chunk_size = chunk_size or 65536
+        self.max_header_size = max_header_size or 65536
+        self.header_timeout = header_timeout
+        self.max_body_size = max_body_size
+        self.body_timeout = body_timeout
+        self.decompress = decompress
+
+
+class HTTP1Connection(httputil.HTTPConnection):
+    """Implements the HTTP/1.x protocol.
+
+    This class can be used on its own for clients, or via
+    `HTTP1ServerConnection` for servers.
+    """
+    def __init__(self, stream, is_client, params=None, context=None):
+        """
+        :arg stream: an `.IOStream`
+        :arg bool is_client: client or server
+        :arg params: a `.HTTP1ConnectionParameters` instance or ``None``
+        :arg context: an opaque application-defined object that can be accessed
+            as ``connection.context``.
+        """
+        self.is_client = is_client
+        self.stream = stream
+        if params is None:
+            params = HTTP1ConnectionParameters()
+        self.params = params
+        self.context = context
+        self.no_keep_alive = params.no_keep_alive
+        # The body limits can be altered by the delegate, so save them
+        # here instead of just referencing self.params later.
+        self._max_body_size = (self.params.max_body_size or
+                               self.stream.max_buffer_size)
+        self._body_timeout = self.params.body_timeout
+        # _write_finished is set to True when finish() has been called,
+        # i.e. there will be no more data sent. Data may still be in the
+        # stream's write buffer.
+        self._write_finished = False
+        # True when we have read the entire incoming body.
+        self._read_finished = False
+        # _finish_future resolves when all data has been written and flushed
+        # to the IOStream.
+ self._finish_future = Future() + # If true, the connection should be closed after this request + # (after the response has been written in the server side, + # and after it has been read in the client) + self._disconnect_on_finish = False + self._clear_callbacks() + # Save the start lines after we read or write them; they + # affect later processing (e.g. 304 responses and HEAD methods + # have content-length but no bodies) + self._request_start_line = None + self._response_start_line = None + self._request_headers = None + # True if we are writing output with chunked encoding. + self._chunking_output = None + # While reading a body with a content-length, this is the + # amount left to read. + self._expected_content_remaining = None + # A Future for our outgoing writes, returned by IOStream.write. + self._pending_write = None + + def read_response(self, delegate): + """Read a single HTTP response. + + Typical client-mode usage is to write a request using `write_headers`, + `write`, and `finish`, and then call ``read_response``. + + :arg delegate: a `.HTTPMessageDelegate` + + Returns a `.Future` that resolves to None after the full response has + been read. + """ + if self.params.decompress: + delegate = _GzipMessageDelegate(delegate, self.params.chunk_size) + return self._read_message(delegate) + + @gen.coroutine + def _read_message(self, delegate): + need_delegate_close = False + try: + header_future = self.stream.read_until_regex( + b"\r?\n\r?\n", + max_bytes=self.params.max_header_size) + if self.params.header_timeout is None: + header_data = yield header_future + else: + try: + header_data = yield gen.with_timeout( + self.stream.io_loop.time() + self.params.header_timeout, + header_future, + io_loop=self.stream.io_loop, + quiet_exceptions=iostream.StreamClosedError) + except gen.TimeoutError: + self.close() + raise gen.Return(False) + start_line, headers = self._parse_headers(header_data) + if self.is_client: + start_line = httputil.parse_response_start_line(start_line) + self._response_start_line = start_line + else: + start_line = httputil.parse_request_start_line(start_line) + self._request_start_line = start_line + self._request_headers = headers + + self._disconnect_on_finish = not self._can_keep_alive( + start_line, headers) + need_delegate_close = True + with _ExceptionLoggingContext(app_log): + header_future = delegate.headers_received(start_line, headers) + if header_future is not None: + yield header_future + if self.stream is None: + # We've been detached. + need_delegate_close = False + raise gen.Return(False) + skip_body = False + if self.is_client: + if (self._request_start_line is not None and + self._request_start_line.method == 'HEAD'): + skip_body = True + code = start_line.code + if code == 304: + # 304 responses may include the content-length header + # but do not actually have a body. + # http://tools.ietf.org/html/rfc7230#section-3.3 + skip_body = True + if code >= 100 and code < 200: + # 1xx responses should never indicate the presence of + # a body. + if ('Content-Length' in headers or + 'Transfer-Encoding' in headers): + raise httputil.HTTPInputError( + "Response code %d cannot have body" % code) + # TODO: client delegates will get headers_received twice + # in the case of a 100-continue. Document or change? 
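+                    # Note added for orientation (not an upstream comment):
+                    # a 1xx message is informational only, so recurse to
+                    # read the real response that follows it.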
+ yield self._read_message(delegate) + else: + if (headers.get("Expect") == "100-continue" and + not self._write_finished): + self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n") + if not skip_body: + body_future = self._read_body( + start_line.code if self.is_client else 0, headers, delegate) + if body_future is not None: + if self._body_timeout is None: + yield body_future + else: + try: + yield gen.with_timeout( + self.stream.io_loop.time() + self._body_timeout, + body_future, self.stream.io_loop, + quiet_exceptions=iostream.StreamClosedError) + except gen.TimeoutError: + gen_log.info("Timeout reading body from %s", + self.context) + self.stream.close() + raise gen.Return(False) + self._read_finished = True + if not self._write_finished or self.is_client: + need_delegate_close = False + with _ExceptionLoggingContext(app_log): + delegate.finish() + # If we're waiting for the application to produce an asynchronous + # response, and we're not detached, register a close callback + # on the stream (we didn't need one while we were reading) + if (not self._finish_future.done() and + self.stream is not None and + not self.stream.closed()): + self.stream.set_close_callback(self._on_connection_close) + yield self._finish_future + if self.is_client and self._disconnect_on_finish: + self.close() + if self.stream is None: + raise gen.Return(False) + except httputil.HTTPInputError as e: + gen_log.info("Malformed HTTP message from %s: %s", + self.context, e) + self.close() + raise gen.Return(False) + finally: + if need_delegate_close: + with _ExceptionLoggingContext(app_log): + delegate.on_connection_close() + self._clear_callbacks() + raise gen.Return(True) + + def _clear_callbacks(self): + """Clears the callback attributes. + + This allows the request handler to be garbage collected more + quickly in CPython by breaking up reference cycles. + """ + self._write_callback = None + self._write_future = None + self._close_callback = None + if self.stream is not None: + self.stream.set_close_callback(None) + + def set_close_callback(self, callback): + """Sets a callback that will be run when the connection is closed. + + .. deprecated:: 4.0 + Use `.HTTPMessageDelegate.on_connection_close` instead. + """ + self._close_callback = stack_context.wrap(callback) + + def _on_connection_close(self): + # Note that this callback is only registered on the IOStream + # when we have finished reading the request and are waiting for + # the application to produce its response. + if self._close_callback is not None: + callback = self._close_callback + self._close_callback = None + callback() + if not self._finish_future.done(): + self._finish_future.set_result(None) + self._clear_callbacks() + + def close(self): + if self.stream is not None: + self.stream.close() + self._clear_callbacks() + if not self._finish_future.done(): + self._finish_future.set_result(None) + + def detach(self): + """Take control of the underlying stream. + + Returns the underlying `.IOStream` object and stops all further + HTTP processing. May only be called during + `.HTTPMessageDelegate.headers_received`. Intended for implementing + protocols like websockets that tunnel over an HTTP handshake. + """ + self._clear_callbacks() + stream = self.stream + self.stream = None + if not self._finish_future.done(): + self._finish_future.set_result(None) + return stream + + def set_body_timeout(self, timeout): + """Sets the body timeout for a single request. + + Overrides the value from `.HTTP1ConnectionParameters`. 
+ """ + self._body_timeout = timeout + + def set_max_body_size(self, max_body_size): + """Sets the body size limit for a single request. + + Overrides the value from `.HTTP1ConnectionParameters`. + """ + self._max_body_size = max_body_size + + def write_headers(self, start_line, headers, chunk=None, callback=None): + """Implements `.HTTPConnection.write_headers`.""" + lines = [] + if self.is_client: + self._request_start_line = start_line + lines.append(utf8('%s %s HTTP/1.1' % (start_line[0], start_line[1]))) + # Client requests with a non-empty body must have either a + # Content-Length or a Transfer-Encoding. + self._chunking_output = ( + start_line.method in ('POST', 'PUT', 'PATCH') and + 'Content-Length' not in headers and + 'Transfer-Encoding' not in headers) + else: + self._response_start_line = start_line + lines.append(utf8('HTTP/1.1 %s %s' % (start_line[1], start_line[2]))) + self._chunking_output = ( + # TODO: should this use + # self._request_start_line.version or + # start_line.version? + self._request_start_line.version == 'HTTP/1.1' and + # 304 responses have no body (not even a zero-length body), and so + # should not have either Content-Length or Transfer-Encoding. + # headers. + start_line.code != 304 and + # No need to chunk the output if a Content-Length is specified. + 'Content-Length' not in headers and + # Applications are discouraged from touching Transfer-Encoding, + # but if they do, leave it alone. + 'Transfer-Encoding' not in headers) + # If a 1.0 client asked for keep-alive, add the header. + if (self._request_start_line.version == 'HTTP/1.0' and + (self._request_headers.get('Connection', '').lower() + == 'keep-alive')): + headers['Connection'] = 'Keep-Alive' + if self._chunking_output: + headers['Transfer-Encoding'] = 'chunked' + if (not self.is_client and + (self._request_start_line.method == 'HEAD' or + start_line.code == 304)): + self._expected_content_remaining = 0 + elif 'Content-Length' in headers: + self._expected_content_remaining = int(headers['Content-Length']) + else: + self._expected_content_remaining = None + lines.extend([utf8(n) + b": " + utf8(v) for n, v in headers.get_all()]) + for line in lines: + if b'\n' in line: + raise ValueError('Newline in header: ' + repr(line)) + future = None + if self.stream.closed(): + future = self._write_future = Future() + future.set_exception(iostream.StreamClosedError()) + future.exception() + else: + if callback is not None: + self._write_callback = stack_context.wrap(callback) + else: + future = self._write_future = Future() + data = b"\r\n".join(lines) + b"\r\n\r\n" + if chunk: + data += self._format_chunk(chunk) + self._pending_write = self.stream.write(data) + self._pending_write.add_done_callback(self._on_write_complete) + return future + + def _format_chunk(self, chunk): + if self._expected_content_remaining is not None: + self._expected_content_remaining -= len(chunk) + if self._expected_content_remaining < 0: + # Close the stream now to stop further framing errors. + self.stream.close() + raise httputil.HTTPOutputError( + "Tried to write more data than Content-Length") + if self._chunking_output and chunk: + # Don't write out empty chunks because that means END-OF-STREAM + # with chunked encoding + return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n" + else: + return chunk + + def write(self, chunk, callback=None): + """Implements `.HTTPConnection.write`. 
+
+        For backwards compatibility it is allowed but deprecated to
+        skip `write_headers` and instead call `write()` with a
+        pre-encoded header block.
+        """
+        future = None
+        if self.stream.closed():
+            future = self._write_future = Future()
+            self._write_future.set_exception(iostream.StreamClosedError())
+            self._write_future.exception()
+        else:
+            if callback is not None:
+                self._write_callback = stack_context.wrap(callback)
+            else:
+                future = self._write_future = Future()
+            self._pending_write = self.stream.write(self._format_chunk(chunk))
+            self._pending_write.add_done_callback(self._on_write_complete)
+        return future
+
+    def finish(self):
+        """Implements `.HTTPConnection.finish`."""
+        if (self._expected_content_remaining is not None and
+                self._expected_content_remaining != 0 and
+                not self.stream.closed()):
+            self.stream.close()
+            raise httputil.HTTPOutputError(
+                "Tried to write %d bytes less than Content-Length" %
+                self._expected_content_remaining)
+        if self._chunking_output:
+            if not self.stream.closed():
+                self._pending_write = self.stream.write(b"0\r\n\r\n")
+                self._pending_write.add_done_callback(self._on_write_complete)
+        self._write_finished = True
+        # If the app finished the request while we're still reading,
+        # divert any remaining data away from the delegate and
+        # close the connection when we're done sending our response.
+        # Closing the connection is the only way to avoid reading the
+        # whole input body.
+        if not self._read_finished:
+            self._disconnect_on_finish = True
+        # No more data is coming, so instruct TCP to send any remaining
+        # data immediately instead of waiting for a full packet or ack.
+        self.stream.set_nodelay(True)
+        if self._pending_write is None:
+            self._finish_request(None)
+        else:
+            self._pending_write.add_done_callback(self._finish_request)
+
+    def _on_write_complete(self, future):
+        exc = future.exception()
+        if exc is not None and not isinstance(exc, iostream.StreamClosedError):
+            future.result()
+        if self._write_callback is not None:
+            callback = self._write_callback
+            self._write_callback = None
+            self.stream.io_loop.add_callback(callback)
+        if self._write_future is not None:
+            future = self._write_future
+            self._write_future = None
+            future.set_result(None)
+
+    def _can_keep_alive(self, start_line, headers):
+        if self.params.no_keep_alive:
+            return False
+        connection_header = headers.get("Connection")
+        if connection_header is not None:
+            connection_header = connection_header.lower()
+        if start_line.version == "HTTP/1.1":
+            return connection_header != "close"
+        elif ("Content-Length" in headers
+              or headers.get("Transfer-Encoding", "").lower() == "chunked"
+              or start_line.method in ("HEAD", "GET")):
+            return connection_header == "keep-alive"
+        return False
+
+    def _finish_request(self, future):
+        self._clear_callbacks()
+        if not self.is_client and self._disconnect_on_finish:
+            self.close()
+            return
+        # Turn Nagle's algorithm back on, leaving the stream in its
+        # default state for the next request.
+        self.stream.set_nodelay(False)
+        if not self._finish_future.done():
+            self._finish_future.set_result(None)
+
+    def _parse_headers(self, data):
+        # The lstrip removes newlines that some implementations sometimes
+        # insert between messages of a reused connection. Per RFC 7230,
+        # we SHOULD ignore at least one empty line before the request.
+        # http://tools.ietf.org/html/rfc7230#section-3.5
+        data = native_str(data.decode('latin1')).lstrip("\r\n")
+        # RFC 7230 allows for both CRLF and bare LF line endings.
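+        # Illustrative sketch (ours, not upstream; the input is hypothetical):
+        # for b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n" the code below
+        # yields start_line "GET / HTTP/1.1" and a single Host header.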
+        eol = data.find("\n")
+        start_line = data[:eol].rstrip("\r")
+        try:
+            headers = httputil.HTTPHeaders.parse(data[eol:])
+        except ValueError:
+            # probably from split() if there was no ':' in the line
+            raise httputil.HTTPInputError("Malformed HTTP headers: %r" %
                                          data[eol:100])
+        return start_line, headers
+
+    def _read_body(self, code, headers, delegate):
+        if "Content-Length" in headers:
+            if "Transfer-Encoding" in headers:
+                # Response cannot contain both Content-Length and
+                # Transfer-Encoding headers.
+                # http://tools.ietf.org/html/rfc7230#section-3.3.3
+                raise httputil.HTTPInputError(
+                    "Response with both Transfer-Encoding and Content-Length")
+            if "," in headers["Content-Length"]:
+                # Proxies sometimes cause Content-Length headers to get
+                # duplicated. If all the values are identical then we can
+                # use them but if they differ it's an error.
+                pieces = re.split(r',\s*', headers["Content-Length"])
+                if any(i != pieces[0] for i in pieces):
+                    raise httputil.HTTPInputError(
+                        "Multiple unequal Content-Lengths: %r" %
+                        headers["Content-Length"])
+                headers["Content-Length"] = pieces[0]
+            content_length = int(headers["Content-Length"])
+
+            if content_length > self._max_body_size:
+                raise httputil.HTTPInputError("Content-Length too long")
+        else:
+            content_length = None
+
+        if code == 204:
+            # This response code is not allowed to have a non-empty body,
+            # and has an implicit length of zero instead of read-until-close.
+            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
+            if ("Transfer-Encoding" in headers or
+                    content_length not in (None, 0)):
+                raise httputil.HTTPInputError(
+                    "Response with code %d should not have body" % code)
+            content_length = 0
+
+        if content_length is not None:
+            return self._read_fixed_body(content_length, delegate)
+        if headers.get("Transfer-Encoding") == "chunked":
+            return self._read_chunked_body(delegate)
+        if self.is_client:
+            return self._read_body_until_close(delegate)
+        return None
+
+    @gen.coroutine
+    def _read_fixed_body(self, content_length, delegate):
+        while content_length > 0:
+            body = yield self.stream.read_bytes(
+                min(self.params.chunk_size, content_length), partial=True)
+            content_length -= len(body)
+            if not self._write_finished or self.is_client:
+                with _ExceptionLoggingContext(app_log):
+                    ret = delegate.data_received(body)
+                    if ret is not None:
+                        yield ret
+
+    @gen.coroutine
+    def _read_chunked_body(self, delegate):
+        # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1
+        total_size = 0
+        while True:
+            chunk_len = yield self.stream.read_until(b"\r\n", max_bytes=64)
+            chunk_len = int(chunk_len.strip(), 16)
+            if chunk_len == 0:
+                return
+            total_size += chunk_len
+            if total_size > self._max_body_size:
+                raise httputil.HTTPInputError("chunked body too large")
+            bytes_to_read = chunk_len
+            while bytes_to_read:
+                chunk = yield self.stream.read_bytes(
+                    min(bytes_to_read, self.params.chunk_size), partial=True)
+                bytes_to_read -= len(chunk)
+                if not self._write_finished or self.is_client:
+                    with _ExceptionLoggingContext(app_log):
+                        ret = delegate.data_received(chunk)
+                        if ret is not None:
+                            yield ret
+            # chunk ends with \r\n
+            crlf = yield self.stream.read_bytes(2)
+            assert crlf == b"\r\n"
+
+    @gen.coroutine
+    def _read_body_until_close(self, delegate):
+        body = yield self.stream.read_until_close()
+        if not self._write_finished or self.is_client:
+            with _ExceptionLoggingContext(app_log):
+                delegate.data_received(body)
+
+
+class _GzipMessageDelegate(httputil.HTTPMessageDelegate):
+    """Wraps an
`HTTPMessageDelegate` to decode ``Content-Encoding: gzip``. + """ + def __init__(self, delegate, chunk_size): + self._delegate = delegate + self._chunk_size = chunk_size + self._decompressor = None + + def headers_received(self, start_line, headers): + if headers.get("Content-Encoding") == "gzip": + self._decompressor = GzipDecompressor() + # Downstream delegates will only see uncompressed data, + # so rename the content-encoding header. + # (but note that curl_httpclient doesn't do this). + headers.add("X-Consumed-Content-Encoding", + headers["Content-Encoding"]) + del headers["Content-Encoding"] + return self._delegate.headers_received(start_line, headers) + + @gen.coroutine + def data_received(self, chunk): + if self._decompressor: + compressed_data = chunk + while compressed_data: + decompressed = self._decompressor.decompress( + compressed_data, self._chunk_size) + if decompressed: + ret = self._delegate.data_received(decompressed) + if ret is not None: + yield ret + compressed_data = self._decompressor.unconsumed_tail + else: + ret = self._delegate.data_received(chunk) + if ret is not None: + yield ret + + def finish(self): + if self._decompressor is not None: + tail = self._decompressor.flush() + if tail: + # I believe the tail will always be empty (i.e. + # decompress will return all it can). The purpose + # of the flush call is to detect errors such + # as truncated input. But in case it ever returns + # anything, treat it as an extra chunk + self._delegate.data_received(tail) + return self._delegate.finish() + + def on_connection_close(self): + return self._delegate.on_connection_close() + + +class HTTP1ServerConnection(object): + """An HTTP/1.x server.""" + def __init__(self, stream, params=None, context=None): + """ + :arg stream: an `.IOStream` + :arg params: a `.HTTP1ConnectionParameters` or None + :arg context: an opaque application-defined object that is accessible + as ``connection.context`` + """ + self.stream = stream + if params is None: + params = HTTP1ConnectionParameters() + self.params = params + self.context = context + self._serving_future = None + + @gen.coroutine + def close(self): + """Closes the connection. + + Returns a `.Future` that resolves after the serving loop has exited. + """ + self.stream.close() + # Block until the serving loop is done, but ignore any exceptions + # (start_serving is already responsible for logging them). + try: + yield self._serving_future + except Exception: + pass + + def start_serving(self, delegate): + """Starts serving requests on this connection. + + :arg delegate: a `.HTTPServerConnectionDelegate` + """ + assert isinstance(delegate, httputil.HTTPServerConnectionDelegate) + self._serving_future = self._server_request_loop(delegate) + # Register the future on the IOLoop so its errors get logged. + self.stream.io_loop.add_future(self._serving_future, + lambda f: f.result()) + + @gen.coroutine + def _server_request_loop(self, delegate): + try: + while True: + conn = HTTP1Connection(self.stream, False, + self.params, self.context) + request_delegate = delegate.start_request(self, conn) + try: + ret = yield conn.read_response(request_delegate) + except (iostream.StreamClosedError, + iostream.UnsatisfiableReadError): + return + except _QuietException: + # This exception was already logged. 
+                    conn.close()
+                    return
+                except Exception:
+                    gen_log.error("Uncaught exception", exc_info=True)
+                    conn.close()
+                    return
+                if not ret:
+                    return
+                yield gen.moment
+        finally:
+            delegate.on_close(self)
diff --git a/server/www/packages/packages-common/tornado/httpclient.py b/server/www/packages/packages-common/tornado/httpclient.py
new file mode 100644
index 0000000..9179227
--- /dev/null
+++ b/server/www/packages/packages-common/tornado/httpclient.py
@@ -0,0 +1,659 @@
+"""Blocking and non-blocking HTTP client interfaces.
+
+This module defines a common interface shared by two implementations,
+``simple_httpclient`` and ``curl_httpclient``. Applications may either
+instantiate their chosen implementation class directly or use the
+`AsyncHTTPClient` class from this module, which selects an implementation
+that can be overridden with the `AsyncHTTPClient.configure` method.
+
+The default implementation is ``simple_httpclient``, and this is expected
+to be suitable for most users' needs. However, some applications may wish
+to switch to ``curl_httpclient`` for reasons such as the following:
+
+* ``curl_httpclient`` has some features not found in ``simple_httpclient``,
+  including support for HTTP proxies and the ability to use a specified
+  network interface.
+
+* ``curl_httpclient`` is more likely to be compatible with sites that are
+  not-quite-compliant with the HTTP spec, or sites that use little-exercised
+  features of HTTP.
+
+* ``curl_httpclient`` is faster.
+
+* ``curl_httpclient`` was the default prior to Tornado 2.0.
+
+Note that if you are using ``curl_httpclient``, it is highly
+recommended that you use a recent version of ``libcurl`` and
+``pycurl``. Currently the minimum supported version of libcurl is
+7.21.1, and the minimum version of pycurl is 7.18.2. It is highly
+recommended that your ``libcurl`` installation is built with
+asynchronous DNS resolver (threaded or c-ares), otherwise you may
+encounter various problems with request timeouts (for more
+information, see
+http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
+and comments in curl_httpclient.py).
+
+To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::
+
+    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
+"""
+
+from __future__ import absolute_import, division, print_function, with_statement
+
+import functools
+import time
+import weakref
+
+from tornado.concurrent import TracebackFuture
+from tornado.escape import utf8, native_str
+from tornado import httputil, stack_context
+from tornado.ioloop import IOLoop
+from tornado.util import Configurable
+
+
+class HTTPClient(object):
+    """A blocking HTTP client.
+
+    This interface is provided for convenience and testing; most applications
+    that are running an IOLoop will want to use `AsyncHTTPClient` instead.
+    Typical usage looks like this::
+
+        http_client = httpclient.HTTPClient()
+        try:
+            response = http_client.fetch("http://www.google.com/")
+            print(response.body)
+        except httpclient.HTTPError as e:
+            # HTTPError is raised for non-200 responses; the response
+            # can be found in e.response.
+            print("Error: " + str(e))
+        except Exception as e:
+            # Other errors are possible, such as IOError.
+ print("Error: " + str(e)) + http_client.close() + """ + def __init__(self, async_client_class=None, **kwargs): + self._io_loop = IOLoop(make_current=False) + if async_client_class is None: + async_client_class = AsyncHTTPClient + self._async_client = async_client_class(self._io_loop, **kwargs) + self._closed = False + + def __del__(self): + self.close() + + def close(self): + """Closes the HTTPClient, freeing any resources used.""" + if not self._closed: + self._async_client.close() + self._io_loop.close() + self._closed = True + + def fetch(self, request, **kwargs): + """Executes a request, returning an `HTTPResponse`. + + The request may be either a string URL or an `HTTPRequest` object. + If it is a string, we construct an `HTTPRequest` using any additional + kwargs: ``HTTPRequest(request, **kwargs)`` + + If an error occurs during the fetch, we raise an `HTTPError` unless + the ``raise_error`` keyword argument is set to False. + """ + response = self._io_loop.run_sync(functools.partial( + self._async_client.fetch, request, **kwargs)) + return response + + +class AsyncHTTPClient(Configurable): + """An non-blocking HTTP client. + + Example usage:: + + def handle_request(response): + if response.error: + print "Error:", response.error + else: + print response.body + + http_client = AsyncHTTPClient() + http_client.fetch("http://www.google.com/", handle_request) + + The constructor for this class is magic in several respects: It + actually creates an instance of an implementation-specific + subclass, and instances are reused as a kind of pseudo-singleton + (one per `.IOLoop`). The keyword argument ``force_instance=True`` + can be used to suppress this singleton behavior. Unless + ``force_instance=True`` is used, no arguments other than + ``io_loop`` should be passed to the `AsyncHTTPClient` constructor. + The implementation subclass as well as arguments to its + constructor can be set with the static method `configure()` + + All `AsyncHTTPClient` implementations support a ``defaults`` + keyword argument, which can be used to set default values for + `HTTPRequest` attributes. For example:: + + AsyncHTTPClient.configure( + None, defaults=dict(user_agent="MyUserAgent")) + # or with force_instance: + client = AsyncHTTPClient(force_instance=True, + defaults=dict(user_agent="MyUserAgent")) + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + @classmethod + def configurable_base(cls): + return AsyncHTTPClient + + @classmethod + def configurable_default(cls): + from tornado.simple_httpclient import SimpleAsyncHTTPClient + return SimpleAsyncHTTPClient + + @classmethod + def _async_clients(cls): + attr_name = '_async_client_dict_' + cls.__name__ + if not hasattr(cls, attr_name): + setattr(cls, attr_name, weakref.WeakKeyDictionary()) + return getattr(cls, attr_name) + + def __new__(cls, io_loop=None, force_instance=False, **kwargs): + io_loop = io_loop or IOLoop.current() + if force_instance: + instance_cache = None + else: + instance_cache = cls._async_clients() + if instance_cache is not None and io_loop in instance_cache: + return instance_cache[io_loop] + instance = super(AsyncHTTPClient, cls).__new__(cls, io_loop=io_loop, + **kwargs) + # Make sure the instance knows which cache to remove itself from. + # It can't simply call _async_clients() because we may be in + # __new__(AsyncHTTPClient) but instance.__class__ may be + # SimpleAsyncHTTPClient. 
+ instance._instance_cache = instance_cache + if instance_cache is not None: + instance_cache[instance.io_loop] = instance + return instance + + def initialize(self, io_loop, defaults=None): + self.io_loop = io_loop + self.defaults = dict(HTTPRequest._DEFAULTS) + if defaults is not None: + self.defaults.update(defaults) + self._closed = False + + def close(self): + """Destroys this HTTP client, freeing any file descriptors used. + + This method is **not needed in normal use** due to the way + that `AsyncHTTPClient` objects are transparently reused. + ``close()`` is generally only necessary when either the + `.IOLoop` is also being closed, or the ``force_instance=True`` + argument was used when creating the `AsyncHTTPClient`. + + No other methods may be called on the `AsyncHTTPClient` after + ``close()``. + + """ + if self._closed: + return + self._closed = True + if self._instance_cache is not None: + if self._instance_cache.get(self.io_loop) is not self: + raise RuntimeError("inconsistent AsyncHTTPClient cache") + del self._instance_cache[self.io_loop] + + def fetch(self, request, callback=None, raise_error=True, **kwargs): + """Executes a request, asynchronously returning an `HTTPResponse`. + + The request may be either a string URL or an `HTTPRequest` object. + If it is a string, we construct an `HTTPRequest` using any additional + kwargs: ``HTTPRequest(request, **kwargs)`` + + This method returns a `.Future` whose result is an + `HTTPResponse`. By default, the ``Future`` will raise an `HTTPError` + if the request returned a non-200 response code. Instead, if + ``raise_error`` is set to False, the response will always be + returned regardless of the response code. + + If a ``callback`` is given, it will be invoked with the `HTTPResponse`. + In the callback interface, `HTTPError` is not automatically raised. + Instead, you must check the response's ``error`` attribute or + call its `~HTTPResponse.rethrow` method. + """ + if self._closed: + raise RuntimeError("fetch() called on closed AsyncHTTPClient") + if not isinstance(request, HTTPRequest): + request = HTTPRequest(url=request, **kwargs) + # We may modify this (to add Host, Accept-Encoding, etc), + # so make sure we don't modify the caller's object. This is also + # where normal dicts get converted to HTTPHeaders objects. + request.headers = httputil.HTTPHeaders(request.headers) + request = _RequestProxy(request, self.defaults) + future = TracebackFuture() + if callback is not None: + callback = stack_context.wrap(callback) + + def handle_future(future): + exc = future.exception() + if isinstance(exc, HTTPError) and exc.response is not None: + response = exc.response + elif exc is not None: + response = HTTPResponse( + request, 599, error=exc, + request_time=time.time() - request.start_time) + else: + response = future.result() + self.io_loop.add_callback(callback, response) + future.add_done_callback(handle_future) + + def handle_response(response): + if raise_error and response.error: + future.set_exception(response.error) + else: + future.set_result(response) + self.fetch_impl(request, handle_response) + return future + + def fetch_impl(self, request, callback): + raise NotImplementedError() + + @classmethod + def configure(cls, impl, **kwargs): + """Configures the `AsyncHTTPClient` subclass to use. + + ``AsyncHTTPClient()`` actually creates an instance of a subclass. 
+ This method may be called with either a class object or the + fully-qualified name of such a class (or ``None`` to use the default, + ``SimpleAsyncHTTPClient``) + + If additional keyword arguments are given, they will be passed + to the constructor of each subclass instance created. The + keyword argument ``max_clients`` determines the maximum number + of simultaneous `~AsyncHTTPClient.fetch()` operations that can + execute in parallel on each `.IOLoop`. Additional arguments + may be supported depending on the implementation class in use. + + Example:: + + AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") + """ + super(AsyncHTTPClient, cls).configure(impl, **kwargs) + + +class HTTPRequest(object): + """HTTP client request object.""" + + # Default values for HTTPRequest parameters. + # Merged with the values on the request object by AsyncHTTPClient + # implementations. + _DEFAULTS = dict( + connect_timeout=20.0, + request_timeout=20.0, + follow_redirects=True, + max_redirects=5, + decompress_response=True, + proxy_password='', + allow_nonstandard_methods=False, + validate_cert=True) + + def __init__(self, url, method="GET", headers=None, body=None, + auth_username=None, auth_password=None, auth_mode=None, + connect_timeout=None, request_timeout=None, + if_modified_since=None, follow_redirects=None, + max_redirects=None, user_agent=None, use_gzip=None, + network_interface=None, streaming_callback=None, + header_callback=None, prepare_curl_callback=None, + proxy_host=None, proxy_port=None, proxy_username=None, + proxy_password=None, allow_nonstandard_methods=None, + validate_cert=None, ca_certs=None, + allow_ipv6=None, + client_key=None, client_cert=None, body_producer=None, + expect_100_continue=False, decompress_response=None, + ssl_options=None): + r"""All parameters except ``url`` are optional. + + :arg string url: URL to fetch + :arg string method: HTTP method, e.g. "GET" or "POST" + :arg headers: Additional HTTP headers to pass on the request + :type headers: `~tornado.httputil.HTTPHeaders` or `dict` + :arg body: HTTP request body as a string (byte or unicode; if unicode + the utf-8 encoding will be used) + :arg body_producer: Callable used for lazy/asynchronous request bodies. + It is called with one argument, a ``write`` function, and should + return a `.Future`. It should call the write function with new + data as it becomes available. The write function returns a + `.Future` which can be used for flow control. + Only one of ``body`` and ``body_producer`` may + be specified. ``body_producer`` is not supported on + ``curl_httpclient``. When using ``body_producer`` it is recommended + to pass a ``Content-Length`` in the headers as otherwise chunked + encoding will be used, and many servers do not support chunked + encoding on requests. New in Tornado 4.0 + :arg string auth_username: Username for HTTP authentication + :arg string auth_password: Password for HTTP authentication + :arg string auth_mode: Authentication mode; default is "basic". + Allowed values are implementation-defined; ``curl_httpclient`` + supports "basic" and "digest"; ``simple_httpclient`` only supports + "basic" + :arg float connect_timeout: Timeout for initial connection in seconds + :arg float request_timeout: Timeout for entire request in seconds + :arg if_modified_since: Timestamp for ``If-Modified-Since`` header + :type if_modified_since: `datetime` or `float` + :arg bool follow_redirects: Should redirects be followed automatically + or return the 3xx response? 
+        :arg int max_redirects: Limit for ``follow_redirects``
+        :arg string user_agent: String to send as ``User-Agent`` header
+        :arg bool decompress_response: Request a compressed response from
+            the server and decompress it after downloading. Default is True.
+            New in Tornado 4.0.
+        :arg bool use_gzip: Deprecated alias for ``decompress_response``
+            since Tornado 4.0.
+        :arg string network_interface: Network interface to use for request.
+            ``curl_httpclient`` only; see note below.
+        :arg callable streaming_callback: If set, ``streaming_callback`` will
+            be run with each chunk of data as it is received, and
+            ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in
+            the final response.
+        :arg callable header_callback: If set, ``header_callback`` will
+            be run with each header line as it is received (including the
+            first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
+            containing only ``\r\n``; all lines include the trailing newline
+            characters). ``HTTPResponse.headers`` will be empty in the final
+            response. This is most useful in conjunction with
+            ``streaming_callback``, because it's the only way to get access to
+            header data while the request is in progress.
+        :arg callable prepare_curl_callback: If set, will be called with
+            a ``pycurl.Curl`` object to allow the application to make additional
+            ``setopt`` calls.
+        :arg string proxy_host: HTTP proxy hostname. To use proxies,
+            ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username``
+            and ``proxy_password`` are optional. Proxies are currently only
+            supported with ``curl_httpclient``.
+        :arg int proxy_port: HTTP proxy port
+        :arg string proxy_username: HTTP proxy username
+        :arg string proxy_password: HTTP proxy password
+        :arg bool allow_nonstandard_methods: Allow unknown values for ``method``
+            argument?
+        :arg bool validate_cert: For HTTPS requests, validate the server's
+            certificate?
+        :arg string ca_certs: filename of CA certificates in PEM format,
+            or None to use defaults. See note below when used with
+            ``curl_httpclient``.
+        :arg string client_key: Filename for client SSL key, if any. See
+            note below when used with ``curl_httpclient``.
+        :arg string client_cert: Filename for client SSL certificate, if any.
+            See note below when used with ``curl_httpclient``.
+        :arg ssl.SSLContext ssl_options: `ssl.SSLContext` object for use in
+            ``simple_httpclient`` (unsupported by ``curl_httpclient``).
+            Overrides ``validate_cert``, ``ca_certs``, ``client_key``,
+            and ``client_cert``.
+        :arg bool allow_ipv6: Use IPv6 when available? Default is true.
+        :arg bool expect_100_continue: If true, send the
+            ``Expect: 100-continue`` header and wait for a continue response
+            before sending the request body. Only supported with
+            simple_httpclient.
+
+        .. note::
+
+            When using ``curl_httpclient`` certain options may be
+            inherited by subsequent fetches because ``pycurl`` does
+            not allow them to be cleanly reset. This applies to the
+            ``ca_certs``, ``client_key``, ``client_cert``, and
+            ``network_interface`` arguments. If you use these
+            options, you should pass them on every request (you don't
+            have to always use the same values, but it's not possible
+            to mix requests that specify these options with ones that
+            use the defaults).
+
+        .. versionadded:: 3.1
+           The ``auth_mode`` argument.
+
+        .. versionadded:: 4.0
+           The ``body_producer`` and ``expect_100_continue`` arguments.
+
+        .. versionadded:: 4.2
+           The ``ssl_options`` argument.
+        """
+        # Note that some of these attributes go through property setters
+        # defined below.
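+        # Orientation note (ours, not upstream): for example, the ``headers``
+        # assignment below runs the property setter, which replaces ``None``
+        # with an empty `httputil.HTTPHeaders`.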
+ self.headers = headers + if if_modified_since: + self.headers["If-Modified-Since"] = httputil.format_timestamp( + if_modified_since) + self.proxy_host = proxy_host + self.proxy_port = proxy_port + self.proxy_username = proxy_username + self.proxy_password = proxy_password + self.url = url + self.method = method + self.body = body + self.body_producer = body_producer + self.auth_username = auth_username + self.auth_password = auth_password + self.auth_mode = auth_mode + self.connect_timeout = connect_timeout + self.request_timeout = request_timeout + self.follow_redirects = follow_redirects + self.max_redirects = max_redirects + self.user_agent = user_agent + if decompress_response is not None: + self.decompress_response = decompress_response + else: + self.decompress_response = use_gzip + self.network_interface = network_interface + self.streaming_callback = streaming_callback + self.header_callback = header_callback + self.prepare_curl_callback = prepare_curl_callback + self.allow_nonstandard_methods = allow_nonstandard_methods + self.validate_cert = validate_cert + self.ca_certs = ca_certs + self.allow_ipv6 = allow_ipv6 + self.client_key = client_key + self.client_cert = client_cert + self.ssl_options = ssl_options + self.expect_100_continue = expect_100_continue + self.start_time = time.time() + + @property + def headers(self): + return self._headers + + @headers.setter + def headers(self, value): + if value is None: + self._headers = httputil.HTTPHeaders() + else: + self._headers = value + + @property + def body(self): + return self._body + + @body.setter + def body(self, value): + self._body = utf8(value) + + @property + def body_producer(self): + return self._body_producer + + @body_producer.setter + def body_producer(self, value): + self._body_producer = stack_context.wrap(value) + + @property + def streaming_callback(self): + return self._streaming_callback + + @streaming_callback.setter + def streaming_callback(self, value): + self._streaming_callback = stack_context.wrap(value) + + @property + def header_callback(self): + return self._header_callback + + @header_callback.setter + def header_callback(self, value): + self._header_callback = stack_context.wrap(value) + + @property + def prepare_curl_callback(self): + return self._prepare_curl_callback + + @prepare_curl_callback.setter + def prepare_curl_callback(self, value): + self._prepare_curl_callback = stack_context.wrap(value) + + +class HTTPResponse(object): + """HTTP Response object. + + Attributes: + + * request: HTTPRequest object + + * code: numeric HTTP status code, e.g. 200 or 404 + + * reason: human-readable reason phrase describing the status code + + * headers: `tornado.httputil.HTTPHeaders` object + + * effective_url: final location of the resource after following any + redirects + + * buffer: ``cStringIO`` object for response body + + * body: response body as string (created on demand from ``self.buffer``) + + * error: Exception object, if any + + * request_time: seconds from request start to finish + + * time_info: dictionary of diagnostic timing information from the request. + Available data are subject to change, but currently uses timings + available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html, + plus ``queue``, which is the delay (if any) introduced by waiting for + a slot under `AsyncHTTPClient`'s ``max_clients`` setting. 
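+
+    A minimal illustrative sketch (ours, not from the upstream docs; the
+    URL is hypothetical and assumes the blocking `HTTPClient`)::
+
+        response = HTTPClient().fetch("http://example.com/")
+        print(response.code, response.request_time)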
+ """ + def __init__(self, request, code, headers=None, buffer=None, + effective_url=None, error=None, request_time=None, + time_info=None, reason=None): + if isinstance(request, _RequestProxy): + self.request = request.request + else: + self.request = request + self.code = code + self.reason = reason or httputil.responses.get(code, "Unknown") + if headers is not None: + self.headers = headers + else: + self.headers = httputil.HTTPHeaders() + self.buffer = buffer + self._body = None + if effective_url is None: + self.effective_url = request.url + else: + self.effective_url = effective_url + if error is None: + if self.code < 200 or self.code >= 300: + self.error = HTTPError(self.code, message=self.reason, + response=self) + else: + self.error = None + else: + self.error = error + self.request_time = request_time + self.time_info = time_info or {} + + def _get_body(self): + if self.buffer is None: + return None + elif self._body is None: + self._body = self.buffer.getvalue() + + return self._body + + body = property(_get_body) + + def rethrow(self): + """If there was an error on the request, raise an `HTTPError`.""" + if self.error: + raise self.error + + def __repr__(self): + args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items())) + return "%s(%s)" % (self.__class__.__name__, args) + + +class HTTPError(Exception): + """Exception thrown for an unsuccessful HTTP request. + + Attributes: + + * ``code`` - HTTP error integer error code, e.g. 404. Error code 599 is + used when no HTTP response was received, e.g. for a timeout. + + * ``response`` - `HTTPResponse` object, if any. + + Note that if ``follow_redirects`` is False, redirects become HTTPErrors, + and you can look at ``error.response.headers['Location']`` to see the + destination of the redirect. + """ + def __init__(self, code, message=None, response=None): + self.code = code + self.message = message or httputil.responses.get(code, "Unknown") + self.response = response + super(HTTPError, self).__init__(code, message, response) + + def __str__(self): + return "HTTP %d: %s" % (self.code, self.message) + + +class _RequestProxy(object): + """Combines an object with a dictionary of defaults. + + Used internally by AsyncHTTPClient implementations. 
+ """ + def __init__(self, request, defaults): + self.request = request + self.defaults = defaults + + def __getattr__(self, name): + request_attr = getattr(self.request, name) + if request_attr is not None: + return request_attr + elif self.defaults is not None: + return self.defaults.get(name, None) + else: + return None + + +def main(): + from tornado.options import define, options, parse_command_line + define("print_headers", type=bool, default=False) + define("print_body", type=bool, default=True) + define("follow_redirects", type=bool, default=True) + define("validate_cert", type=bool, default=True) + args = parse_command_line() + client = HTTPClient() + for arg in args: + try: + response = client.fetch(arg, + follow_redirects=options.follow_redirects, + validate_cert=options.validate_cert, + ) + except HTTPError as e: + if e.response is not None: + response = e.response + else: + raise + if options.print_headers: + print(response.headers) + if options.print_body: + print(native_str(response.body)) + client.close() + +if __name__ == "__main__": + main() diff --git a/server/www/packages/packages-common/tornado/httpserver.py b/server/www/packages/packages-common/tornado/httpserver.py new file mode 100644 index 0000000..ff235fe --- /dev/null +++ b/server/www/packages/packages-common/tornado/httpserver.py @@ -0,0 +1,304 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking, single-threaded HTTP server. + +Typical applications have little direct interaction with the `HTTPServer` +class except to start a server at the beginning of the process +(and even that is often done indirectly via `tornado.web.Application.listen`). + +.. versionchanged:: 4.0 + + The ``HTTPRequest`` class that used to live in this module has been moved + to `tornado.httputil.HTTPServerRequest`. The old name remains as an alias. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import socket + +from tornado.escape import native_str +from tornado.http1connection import HTTP1ServerConnection, HTTP1ConnectionParameters +from tornado import gen +from tornado import httputil +from tornado import iostream +from tornado import netutil +from tornado.tcpserver import TCPServer +from tornado.util import Configurable + + +class HTTPServer(TCPServer, Configurable, + httputil.HTTPServerConnectionDelegate): + r"""A non-blocking, single-threaded HTTP server. + + A server is defined by a subclass of `.HTTPServerConnectionDelegate`, + or, for backwards compatibility, a callback that takes an + `.HTTPServerRequest` as an argument. The delegate is usually a + `tornado.web.Application`. + + `HTTPServer` supports keep-alive connections by default + (automatically for HTTP/1.1, or for HTTP/1.0 when the client + requests ``Connection: keep-alive``). 
+
+    If ``xheaders`` is ``True``, we support the
+    ``X-Real-Ip``/``X-Forwarded-For`` and
+    ``X-Scheme``/``X-Forwarded-Proto`` headers, which override the
+    remote IP and URI scheme/protocol for all requests. These headers
+    are useful when running Tornado behind a reverse proxy or load
+    balancer. The ``protocol`` argument can also be set to ``https``
+    if Tornado is run behind an SSL-decoding proxy that does not set one of
+    the supported ``xheaders``.
+
+    To make this server serve SSL traffic, send the ``ssl_options`` keyword
+    argument with an `ssl.SSLContext` object. For compatibility with older
+    versions of Python ``ssl_options`` may also be a dictionary of keyword
+    arguments for the `ssl.wrap_socket` method::
+
+        ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
+        ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"),
+                                os.path.join(data_dir, "mydomain.key"))
+        HTTPServer(application, ssl_options=ssl_ctx)
+
+    `HTTPServer` initialization follows one of three patterns (the
+    initialization methods are defined on `tornado.tcpserver.TCPServer`):
+
+    1. `~tornado.tcpserver.TCPServer.listen`: simple single-process::
+
+            server = HTTPServer(app)
+            server.listen(8888)
+            IOLoop.current().start()
+
+       In many cases, `tornado.web.Application.listen` can be used to avoid
+       the need to explicitly create the `HTTPServer`.
+
+    2. `~tornado.tcpserver.TCPServer.bind`/`~tornado.tcpserver.TCPServer.start`:
+       simple multi-process::
+
+            server = HTTPServer(app)
+            server.bind(8888)
+            server.start(0)  # Forks multiple sub-processes
+            IOLoop.current().start()
+
+       When using this interface, an `.IOLoop` must *not* be passed
+       to the `HTTPServer` constructor. `~.TCPServer.start` will always start
+       the server on the default singleton `.IOLoop`.
+
+    3. `~tornado.tcpserver.TCPServer.add_sockets`: advanced multi-process::
+
+            sockets = tornado.netutil.bind_sockets(8888)
+            tornado.process.fork_processes(0)
+            server = HTTPServer(app)
+            server.add_sockets(sockets)
+            IOLoop.current().start()
+
+       The `~.TCPServer.add_sockets` interface is more complicated,
+       but it can be used with `tornado.process.fork_processes` to
+       give you more flexibility in when the fork happens.
+       `~.TCPServer.add_sockets` can also be used in single-process
+       servers if you want to create your listening sockets in some
+       way other than `tornado.netutil.bind_sockets`.
+
+    .. versionchanged:: 4.0
+       Added ``decompress_request``, ``chunk_size``, ``max_header_size``,
+       ``idle_connection_timeout``, ``body_timeout``, ``max_body_size``
+       arguments. Added support for `.HTTPServerConnectionDelegate`
+       instances as ``request_callback``.
+
+    .. versionchanged:: 4.1
+       `.HTTPServerConnectionDelegate.start_request` is now called with
+       two arguments ``(server_conn, request_conn)`` (in accordance with the
+       documentation) instead of one ``(request_conn)``.
+
+    .. versionchanged:: 4.2
+       `HTTPServer` is now a subclass of `tornado.util.Configurable`.
+    """
+    def __init__(self, *args, **kwargs):
+        # Ignore args to __init__; real initialization belongs in
+        # initialize since we're Configurable.
(there's something + # weird in initialization order between this class, + # Configurable, and TCPServer so we can't leave __init__ out + # completely) + pass + + def initialize(self, request_callback, no_keep_alive=False, io_loop=None, + xheaders=False, ssl_options=None, protocol=None, + decompress_request=False, + chunk_size=None, max_header_size=None, + idle_connection_timeout=None, body_timeout=None, + max_body_size=None, max_buffer_size=None): + self.request_callback = request_callback + self.no_keep_alive = no_keep_alive + self.xheaders = xheaders + self.protocol = protocol + self.conn_params = HTTP1ConnectionParameters( + decompress=decompress_request, + chunk_size=chunk_size, + max_header_size=max_header_size, + header_timeout=idle_connection_timeout or 3600, + max_body_size=max_body_size, + body_timeout=body_timeout) + TCPServer.__init__(self, io_loop=io_loop, ssl_options=ssl_options, + max_buffer_size=max_buffer_size, + read_chunk_size=chunk_size) + self._connections = set() + + @classmethod + def configurable_base(cls): + return HTTPServer + + @classmethod + def configurable_default(cls): + return HTTPServer + + @gen.coroutine + def close_all_connections(self): + while self._connections: + # Peek at an arbitrary element of the set + conn = next(iter(self._connections)) + yield conn.close() + + def handle_stream(self, stream, address): + context = _HTTPRequestContext(stream, address, + self.protocol) + conn = HTTP1ServerConnection( + stream, self.conn_params, context) + self._connections.add(conn) + conn.start_serving(self) + + def start_request(self, server_conn, request_conn): + return _ServerRequestAdapter(self, server_conn, request_conn) + + def on_close(self, server_conn): + self._connections.remove(server_conn) + + +class _HTTPRequestContext(object): + def __init__(self, stream, address, protocol): + self.address = address + # Save the socket's address family now so we know how to + # interpret self.address even after the stream is closed + # and its socket attribute replaced with None. + if stream.socket is not None: + self.address_family = stream.socket.family + else: + self.address_family = None + # In HTTPServerRequest we want an IP, not a full socket address. + if (self.address_family in (socket.AF_INET, socket.AF_INET6) and + address is not None): + self.remote_ip = address[0] + else: + # Unix (or other) socket; fake the remote address. + self.remote_ip = '0.0.0.0' + if protocol: + self.protocol = protocol + elif isinstance(stream, iostream.SSLIOStream): + self.protocol = "https" + else: + self.protocol = "http" + self._orig_remote_ip = self.remote_ip + self._orig_protocol = self.protocol + + def __str__(self): + if self.address_family in (socket.AF_INET, socket.AF_INET6): + return self.remote_ip + elif isinstance(self.address, bytes): + # Python 3 with the -bb option warns about str(bytes), + # so convert it explicitly. + # Unix socket addresses are str on mac but bytes on linux. 
+ return native_str(self.address) + else: + return str(self.address) + + def _apply_xheaders(self, headers): + """Rewrite the ``remote_ip`` and ``protocol`` fields.""" + # Squid uses X-Forwarded-For, others use X-Real-Ip + ip = headers.get("X-Forwarded-For", self.remote_ip) + ip = ip.split(',')[-1].strip() + ip = headers.get("X-Real-Ip", ip) + if netutil.is_valid_ip(ip): + self.remote_ip = ip + # AWS uses X-Forwarded-Proto + proto_header = headers.get( + "X-Scheme", headers.get("X-Forwarded-Proto", + self.protocol)) + if proto_header in ("http", "https"): + self.protocol = proto_header + + def _unapply_xheaders(self): + """Undo changes from `_apply_xheaders`. + + Xheaders are per-request so they should not leak to the next + request on the same connection. + """ + self.remote_ip = self._orig_remote_ip + self.protocol = self._orig_protocol + + +class _ServerRequestAdapter(httputil.HTTPMessageDelegate): + """Adapts the `HTTPMessageDelegate` interface to the interface expected + by our clients. + """ + def __init__(self, server, server_conn, request_conn): + self.server = server + self.connection = request_conn + self.request = None + if isinstance(server.request_callback, + httputil.HTTPServerConnectionDelegate): + self.delegate = server.request_callback.start_request( + server_conn, request_conn) + self._chunks = None + else: + self.delegate = None + self._chunks = [] + + def headers_received(self, start_line, headers): + if self.server.xheaders: + self.connection.context._apply_xheaders(headers) + if self.delegate is None: + self.request = httputil.HTTPServerRequest( + connection=self.connection, start_line=start_line, + headers=headers) + else: + return self.delegate.headers_received(start_line, headers) + + def data_received(self, chunk): + if self.delegate is None: + self._chunks.append(chunk) + else: + return self.delegate.data_received(chunk) + + def finish(self): + if self.delegate is None: + self.request.body = b''.join(self._chunks) + self.request._parse_body() + self.server.request_callback(self.request) + else: + self.delegate.finish() + self._cleanup() + + def on_connection_close(self): + if self.delegate is None: + self._chunks = None + else: + self.delegate.on_connection_close() + self._cleanup() + + def _cleanup(self): + if self.server.xheaders: + self.connection.context._unapply_xheaders() + + +HTTPRequest = httputil.HTTPServerRequest diff --git a/server/www/packages/packages-common/tornado/httputil.py b/server/www/packages/packages-common/tornado/httputil.py new file mode 100644 index 0000000..471df54 --- /dev/null +++ b/server/www/packages/packages-common/tornado/httputil.py @@ -0,0 +1,897 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""HTTP utility code shared by clients and servers. + +This module also defines the `HTTPServerRequest` class which is exposed +via `tornado.web.RequestHandler.request`. 
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import calendar +import collections +import copy +import datetime +import email.utils +import numbers +import re +import time + +from tornado.escape import native_str, parse_qs_bytes, utf8 +from tornado.log import gen_log +from tornado.util import ObjectDict + +try: + import Cookie # py2 +except ImportError: + import http.cookies as Cookie # py3 + +try: + from httplib import responses # py2 +except ImportError: + from http.client import responses # py3 + +# responses is unused in this file, but we re-export it to other files. +# Reference it so pyflakes doesn't complain. +responses + +try: + from urllib import urlencode # py2 +except ImportError: + from urllib.parse import urlencode # py3 + +try: + from ssl import SSLError +except ImportError: + # ssl is unavailable on app engine. + class SSLError(Exception): + pass + + +# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line +# terminator and ignore any preceding CR. +_CRLF_RE = re.compile(r'\r?\n') + + +class _NormalizedHeaderCache(dict): + """Dynamic cached mapping of header names to Http-Header-Case. + + Implemented as a dict subclass so that cache hits are as fast as a + normal dict lookup, without the overhead of a python function + call. + + >>> normalized_headers = _NormalizedHeaderCache(10) + >>> normalized_headers["coNtent-TYPE"] + 'Content-Type' + """ + def __init__(self, size): + super(_NormalizedHeaderCache, self).__init__() + self.size = size + self.queue = collections.deque() + + def __missing__(self, key): + normalized = "-".join([w.capitalize() for w in key.split("-")]) + self[key] = normalized + self.queue.append(key) + if len(self.queue) > self.size: + # Limit the size of the cache. LRU would be better, but this + # simpler approach should be fine. In Python 2.7+ we could + # use OrderedDict (or in 3.2+, @functools.lru_cache). + old_key = self.queue.popleft() + del self[old_key] + return normalized + +_normalized_headers = _NormalizedHeaderCache(1000) + + +class HTTPHeaders(collections.MutableMapping): + """A dictionary that maintains ``Http-Header-Case`` for all keys. + + Supports multiple values per key via a pair of new methods, + `add()` and `get_list()`. The regular dictionary interface + returns a single value per key, with multiple values joined by a + comma. + + >>> h = HTTPHeaders({"content-type": "text/html"}) + >>> list(h.keys()) + ['Content-Type'] + >>> h["Content-Type"] + 'text/html' + + >>> h.add("Set-Cookie", "A=B") + >>> h.add("Set-Cookie", "C=D") + >>> h["set-cookie"] + 'A=B,C=D' + >>> h.get_list("set-cookie") + ['A=B', 'C=D'] + + >>> for (k,v) in sorted(h.get_all()): + ... print('%s: %s' % (k,v)) + ... 
+ Content-Type: text/html + Set-Cookie: A=B + Set-Cookie: C=D + """ + def __init__(self, *args, **kwargs): + self._dict = {} + self._as_list = {} + self._last_key = None + if (len(args) == 1 and len(kwargs) == 0 and + isinstance(args[0], HTTPHeaders)): + # Copy constructor + for k, v in args[0].get_all(): + self.add(k, v) + else: + # Dict-style initialization + self.update(*args, **kwargs) + + # new public methods + + def add(self, name, value): + """Adds a new value for the given key.""" + norm_name = _normalized_headers[name] + self._last_key = norm_name + if norm_name in self: + self._dict[norm_name] = (native_str(self[norm_name]) + ',' + + native_str(value)) + self._as_list[norm_name].append(value) + else: + self[norm_name] = value + + def get_list(self, name): + """Returns all values for the given header as a list.""" + norm_name = _normalized_headers[name] + return self._as_list.get(norm_name, []) + + def get_all(self): + """Returns an iterable of all (name, value) pairs. + + If a header has multiple values, multiple pairs will be + returned with the same name. + """ + for name, values in self._as_list.items(): + for value in values: + yield (name, value) + + def parse_line(self, line): + """Updates the dictionary with a single header line. + + >>> h = HTTPHeaders() + >>> h.parse_line("Content-Type: text/html") + >>> h.get('content-type') + 'text/html' + """ + if line[0].isspace(): + # continuation of a multi-line header + new_part = ' ' + line.lstrip() + self._as_list[self._last_key][-1] += new_part + self._dict[self._last_key] += new_part + else: + name, value = line.split(":", 1) + self.add(name, value.strip()) + + @classmethod + def parse(cls, headers): + """Returns a dictionary from HTTP header text. + + >>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n") + >>> sorted(h.items()) + [('Content-Length', '42'), ('Content-Type', 'text/html')] + """ + h = cls() + for line in _CRLF_RE.split(headers): + if line: + h.parse_line(line) + return h + + # MutableMapping abstract method implementations. + + def __setitem__(self, name, value): + norm_name = _normalized_headers[name] + self._dict[norm_name] = value + self._as_list[norm_name] = [value] + + def __getitem__(self, name): + return self._dict[_normalized_headers[name]] + + def __delitem__(self, name): + norm_name = _normalized_headers[name] + del self._dict[norm_name] + del self._as_list[norm_name] + + def __len__(self): + return len(self._dict) + + def __iter__(self): + return iter(self._dict) + + def copy(self): + # defined in dict but not in MutableMapping. + return HTTPHeaders(self) + + # Use our overridden copy method for the copy.copy module. + # This makes shallow copies one level deeper, but preserves + # the appearance that HTTPHeaders is a single container. + __copy__ = copy + + +class HTTPServerRequest(object): + """A single HTTP request. + + All attributes are type `str` unless otherwise noted. + + .. attribute:: method + + HTTP request method, e.g. "GET" or "POST" + + .. attribute:: uri + + The requested uri. + + .. attribute:: path + + The path portion of `uri` + + .. attribute:: query + + The query portion of `uri` + + .. attribute:: version + + HTTP version specified in request, e.g. "HTTP/1.1" + + .. attribute:: headers + + `.HTTPHeaders` dictionary-like object for request headers. Acts like + a case-insensitive dictionary with additional methods for repeated + headers. + + .. attribute:: body + + Request body, if present, as a byte string. + + .. 
attribute:: remote_ip + + Client's IP address as a string. If ``HTTPServer.xheaders`` is set, + will pass along the real IP address provided by a load balancer + in the ``X-Real-Ip`` or ``X-Forwarded-For`` header. + + .. versionchanged:: 3.1 + The list format of ``X-Forwarded-For`` is now supported. + + .. attribute:: protocol + + The protocol used, either "http" or "https". If ``HTTPServer.xheaders`` + is set, will pass along the protocol used by a load balancer if + reported via an ``X-Scheme`` header. + + .. attribute:: host + + The requested hostname, usually taken from the ``Host`` header. + + .. attribute:: arguments + + GET/POST arguments are available in the arguments property, which + maps argument names to lists of values (to support multiple values + for individual names). Names are of type `str`, while arguments + are byte strings. Note that this is different from + `.RequestHandler.get_argument`, which returns argument values as + unicode strings. + + .. attribute:: query_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the query string. + + .. versionadded:: 3.2 + + .. attribute:: body_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the request body. + + .. versionadded:: 3.2 + + .. attribute:: files + + File uploads are available in the files property, which maps file + names to lists of `.HTTPFile`. + + .. attribute:: connection + + An HTTP request is attached to a single HTTP connection, which can + be accessed through the "connection" attribute. Since connections + are typically kept open in HTTP/1.1, multiple requests can be handled + sequentially on a single connection. + + .. versionchanged:: 4.0 + Moved from ``tornado.httpserver.HTTPRequest``. + """ + def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None, + body=None, host=None, files=None, connection=None, + start_line=None): + if start_line is not None: + method, uri, version = start_line + self.method = method + self.uri = uri + self.version = version + self.headers = headers or HTTPHeaders() + self.body = body or b"" + + # set remote IP and protocol + context = getattr(connection, 'context', None) + self.remote_ip = getattr(context, 'remote_ip', None) + self.protocol = getattr(context, 'protocol', "http") + + self.host = host or self.headers.get("Host") or "127.0.0.1" + self.files = files or {} + self.connection = connection + self._start_time = time.time() + self._finish_time = None + + self.path, sep, self.query = uri.partition('?') + self.arguments = parse_qs_bytes(self.query, keep_blank_values=True) + self.query_arguments = copy.deepcopy(self.arguments) + self.body_arguments = {} + + def supports_http_1_1(self): + """Returns True if this request supports HTTP/1.1 semantics. + + .. deprecated:: 4.0 + Applications are less likely to need this information with the + introduction of `.HTTPConnection`. If you still need it, access + the ``version`` attribute directly. + """ + return self.version == "HTTP/1.1" + + @property + def cookies(self): + """A dictionary of Cookie.Morsel objects.""" + if not hasattr(self, "_cookies"): + self._cookies = Cookie.SimpleCookie() + if "Cookie" in self.headers: + try: + self._cookies.load( + native_str(self.headers["Cookie"])) + except Exception: + self._cookies = {} + return self._cookies + + def write(self, chunk, callback=None): + """Writes the given chunk to the response stream. + + ..
deprecated:: 4.0 + Use ``request.connection`` and the `.HTTPConnection` methods + to write the response. + """ + assert isinstance(chunk, bytes) + assert self.version.startswith("HTTP/1."), \ + "deprecated interface only supported in HTTP/1.x" + self.connection.write(chunk, callback=callback) + + def finish(self): + """Finishes this HTTP request on the open connection. + + .. deprecated:: 4.0 + Use ``request.connection`` and the `.HTTPConnection` methods + to write the response. + """ + self.connection.finish() + self._finish_time = time.time() + + def full_url(self): + """Reconstructs the full URL for this request.""" + return self.protocol + "://" + self.host + self.uri + + def request_time(self): + """Returns the amount of time it took for this request to execute.""" + if self._finish_time is None: + return time.time() - self._start_time + else: + return self._finish_time - self._start_time + + def get_ssl_certificate(self, binary_form=False): + """Returns the client's SSL certificate, if any. + + To use client certificates, the HTTPServer's + `ssl.SSLContext.verify_mode` field must be set, e.g.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain("foo.crt", "foo.key") + ssl_ctx.load_verify_locations("cacerts.pem") + ssl_ctx.verify_mode = ssl.CERT_REQUIRED + server = HTTPServer(app, ssl_options=ssl_ctx) + + By default, the return value is a dictionary (or None, if no + client certificate is present). If ``binary_form`` is true, a + DER-encoded form of the certificate is returned instead. See + SSLSocket.getpeercert() in the standard library for more + details. + http://docs.python.org/library/ssl.html#sslsocket-objects + """ + try: + return self.connection.stream.socket.getpeercert( + binary_form=binary_form) + except SSLError: + return None + + def _parse_body(self): + parse_body_arguments( + self.headers.get("Content-Type", ""), self.body, + self.body_arguments, self.files, + self.headers) + + for k, v in self.body_arguments.items(): + self.arguments.setdefault(k, []).extend(v) + + def __repr__(self): + attrs = ("protocol", "host", "method", "uri", "version", "remote_ip") + args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs]) + return "%s(%s, headers=%s)" % ( + self.__class__.__name__, args, dict(self.headers)) + + +class HTTPInputError(Exception): + """Exception class for malformed HTTP requests or responses + from remote sources. + + .. versionadded:: 4.0 + """ + pass + + +class HTTPOutputError(Exception): + """Exception class for errors in HTTP output. + + .. versionadded:: 4.0 + """ + pass + + +class HTTPServerConnectionDelegate(object): + """Implement this interface to handle requests from `.HTTPServer`. + + .. versionadded:: 4.0 + """ + def start_request(self, server_conn, request_conn): + """This method is called by the server when a new request has started. + + :arg server_conn: is an opaque object representing the long-lived + (e.g. tcp-level) connection. + :arg request_conn: is a `.HTTPConnection` object for a single + request/response exchange. + + This method should return a `.HTTPMessageDelegate`. + """ + raise NotImplementedError() + + def on_close(self, server_conn): + """This method is called when a connection has been closed. + + :arg server_conn: is a server connection that has previously been + passed to ``start_request``. + """ + pass + + +class HTTPMessageDelegate(object): + """Implement this interface to handle an HTTP request or response. + + .. 
versionadded:: 4.0 + """ + def headers_received(self, start_line, headers): + """Called when the HTTP headers have been received and parsed. + + :arg start_line: a `.RequestStartLine` or `.ResponseStartLine` + depending on whether this is a client or server message. + :arg headers: a `.HTTPHeaders` instance. + + Some `.HTTPConnection` methods can only be called during + ``headers_received``. + + May return a `.Future`; if it does the body will not be read + until it is done. + """ + pass + + def data_received(self, chunk): + """Called when a chunk of data has been received. + + May return a `.Future` for flow control. + """ + pass + + def finish(self): + """Called after the last chunk of data has been received.""" + pass + + def on_connection_close(self): + """Called if the connection is closed without finishing the request. + + If ``headers_received`` is called, either ``finish`` or + ``on_connection_close`` will be called, but not both. + """ + pass + + +class HTTPConnection(object): + """Applications use this interface to write their responses. + + .. versionadded:: 4.0 + """ + def write_headers(self, start_line, headers, chunk=None, callback=None): + """Write an HTTP header block. + + :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`. + :arg headers: a `.HTTPHeaders` instance. + :arg chunk: the first (optional) chunk of data. This is an optimization + so that small responses can be written in the same call as their + headers. + :arg callback: a callback to be run when the write is complete. + + The ``version`` field of ``start_line`` is ignored. + + Returns a `.Future` if no callback is given. + """ + raise NotImplementedError() + + def write(self, chunk, callback=None): + """Writes a chunk of body data. + + The callback will be run when the write is complete. If no callback + is given, returns a Future. + """ + raise NotImplementedError() + + def finish(self): + """Indicates that the last body data has been written. + """ + raise NotImplementedError() + + +def url_concat(url, args): + """Concatenate url and arguments regardless of whether + url has existing query parameters. + + ``args`` may be either a dictionary or a list of key-value pairs + (the latter allows for multiple values with the same key). + + >>> url_concat("http://example.com/foo", dict(c="d")) + 'http://example.com/foo?c=d' + >>> url_concat("http://example.com/foo?a=b", dict(c="d")) + 'http://example.com/foo?a=b&c=d' + >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")]) + 'http://example.com/foo?a=b&c=d&c=d2' + """ + if not args: + return url + if url[-1] not in ('?', '&'): + url += '&' if ('?' in url) else '?' + return url + urlencode(args) + + +class HTTPFile(ObjectDict): + """Represents a file uploaded via a form. + + For backwards compatibility, its instance attributes are also + accessible as dictionary keys. + + * ``filename`` + * ``body`` + * ``content_type`` + """ + pass + + +def _parse_request_range(range_header): + """Parses a Range header. + + Returns either ``None`` or tuple ``(start, end)``. + Note that while the HTTP headers use inclusive byte positions, + this method returns indexes suitable for use in slices.
+ + >>> start, end = _parse_request_range("bytes=1-2") + >>> start, end + (1, 3) + >>> [0, 1, 2, 3, 4][start:end] + [1, 2] + >>> _parse_request_range("bytes=6-") + (6, None) + >>> _parse_request_range("bytes=-6") + (-6, None) + >>> _parse_request_range("bytes=-0") + (None, 0) + >>> _parse_request_range("bytes=") + (None, None) + >>> _parse_request_range("foo=42") + >>> _parse_request_range("bytes=1-2,6-10") + + Note: only supports one range (ex, ``bytes=1-2,6-10`` is not allowed). + + See [0] for the details of the range header. + + [0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges + """ + unit, _, value = range_header.partition("=") + unit, value = unit.strip(), value.strip() + if unit != "bytes": + return None + start_b, _, end_b = value.partition("-") + try: + start = _int_or_none(start_b) + end = _int_or_none(end_b) + except ValueError: + return None + if end is not None: + if start is None: + if end != 0: + start = -end + end = None + else: + end += 1 + return (start, end) + + +def _get_content_range(start, end, total): + """Returns a suitable Content-Range header: + + >>> print(_get_content_range(None, 1, 4)) + bytes 0-0/4 + >>> print(_get_content_range(1, 3, 4)) + bytes 1-2/4 + >>> print(_get_content_range(None, None, 4)) + bytes 0-3/4 + """ + start = start or 0 + end = (end or total) - 1 + return "bytes %s-%s/%s" % (start, end, total) + + +def _int_or_none(val): + val = val.strip() + if val == "": + return None + return int(val) + + +def parse_body_arguments(content_type, body, arguments, files, headers=None): + """Parses a form request body. + + Supports ``application/x-www-form-urlencoded`` and + ``multipart/form-data``. The ``content_type`` parameter should be + a string and ``body`` should be a byte string. The ``arguments`` + and ``files`` parameters are dictionaries that will be updated + with the parsed contents. + """ + if headers and 'Content-Encoding' in headers: + gen_log.warning("Unsupported Content-Encoding: %s", + headers['Content-Encoding']) + return + if content_type.startswith("application/x-www-form-urlencoded"): + try: + uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True) + except Exception as e: + gen_log.warning('Invalid x-www-form-urlencoded body: %s', e) + uri_arguments = {} + for name, values in uri_arguments.items(): + if values: + arguments.setdefault(name, []).extend(values) + elif content_type.startswith("multipart/form-data"): + try: + fields = content_type.split(";") + for field in fields: + k, sep, v = field.strip().partition("=") + if k == "boundary" and v: + parse_multipart_form_data(utf8(v), body, arguments, files) + break + else: + raise ValueError("multipart boundary not found") + except Exception as e: + gen_log.warning("Invalid multipart/form-data: %s", e) + + +def parse_multipart_form_data(boundary, data, arguments, files): + """Parses a ``multipart/form-data`` body. + + The ``boundary`` and ``data`` parameters are both byte strings. + The dictionaries given in the arguments and files parameters + will be updated with the contents of the body. + """ + # The standard allows for the boundary to be quoted in the header, + # although it's rare (it happens at least for google app engine + # xmpp). I think we're also supposed to handle backslash-escapes + # here but I'll save that until we see a client that uses them + # in the wild. 
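    # Illustrative sketch (not from the original source): the body being
    # split below looks like this, with CRLF line endings and BOUNDARY
    # coming from the Content-Type header:
    #
    #   --BOUNDARY
    #   Content-Disposition: form-data; name="caption"
    #
    #   hello world
    #   --BOUNDARY
    #   Content-Disposition: form-data; name="upload"; filename="a.txt"
    #   Content-Type: text/plain
    #
    #   ...file bytes...
    #   --BOUNDARY--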
+ if boundary.startswith(b'"') and boundary.endswith(b'"'): + boundary = boundary[1:-1] + final_boundary_index = data.rfind(b"--" + boundary + b"--") + if final_boundary_index == -1: + gen_log.warning("Invalid multipart/form-data: no final boundary") + return + parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n") + for part in parts: + if not part: + continue + eoh = part.find(b"\r\n\r\n") + if eoh == -1: + gen_log.warning("multipart/form-data missing headers") + continue + headers = HTTPHeaders.parse(part[:eoh].decode("utf-8")) + disp_header = headers.get("Content-Disposition", "") + disposition, disp_params = _parse_header(disp_header) + if disposition != "form-data" or not part.endswith(b"\r\n"): + gen_log.warning("Invalid multipart/form-data") + continue + value = part[eoh + 4:-2] + if not disp_params.get("name"): + gen_log.warning("multipart/form-data value missing name") + continue + name = disp_params["name"] + if disp_params.get("filename"): + ctype = headers.get("Content-Type", "application/unknown") + files.setdefault(name, []).append(HTTPFile( + filename=disp_params["filename"], body=value, + content_type=ctype)) + else: + arguments.setdefault(name, []).append(value) + + +def format_timestamp(ts): + """Formats a timestamp in the format used by HTTP. + + The argument may be a numeric timestamp as returned by `time.time`, + a time tuple as returned by `time.gmtime`, or a `datetime.datetime` + object. + + >>> format_timestamp(1359312200) + 'Sun, 27 Jan 2013 18:43:20 GMT' + """ + if isinstance(ts, numbers.Real): + pass + elif isinstance(ts, (tuple, time.struct_time)): + ts = calendar.timegm(ts) + elif isinstance(ts, datetime.datetime): + ts = calendar.timegm(ts.utctimetuple()) + else: + raise TypeError("unknown timestamp type: %r" % ts) + return email.utils.formatdate(ts, usegmt=True) + + +RequestStartLine = collections.namedtuple( + 'RequestStartLine', ['method', 'path', 'version']) + + +def parse_request_start_line(line): + """Returns a (method, path, version) tuple for an HTTP 1.x request line. + + The response is a `collections.namedtuple`. + + >>> parse_request_start_line("GET /foo HTTP/1.1") + RequestStartLine(method='GET', path='/foo', version='HTTP/1.1') + """ + try: + method, path, version = line.split(" ") + except ValueError: + raise HTTPInputError("Malformed HTTP request line") + if not re.match(r"^HTTP/1\.[0-9]$", version): + raise HTTPInputError( + "Malformed HTTP version in HTTP Request-Line: %r" % version) + return RequestStartLine(method, path, version) + + +ResponseStartLine = collections.namedtuple( + 'ResponseStartLine', ['version', 'code', 'reason']) + + +def parse_response_start_line(line): + """Returns a (version, code, reason) tuple for an HTTP 1.x response line. + + The response is a `collections.namedtuple`. + + >>> parse_response_start_line("HTTP/1.1 200 OK") + ResponseStartLine(version='HTTP/1.1', code=200, reason='OK') + """ + line = native_str(line) + match = re.match("(HTTP/1.[0-9]) ([0-9]+) ([^\r]*)", line) + if not match: + raise HTTPInputError("Error parsing response start line") + return ResponseStartLine(match.group(1), int(match.group(2)), + match.group(3)) + +# _parseparam and _parse_header are copied and modified from python2.7's cgi.py +# The original 2.7 version of this code did not correctly support some +# combinations of semicolons and double quotes. +# It has also been modified to support valueless parameters as seen in +# websocket extension negotiations. 
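# Illustrative examples of the two helpers that follow (assumed
# behavior, consistent with the _encode_header doctest below):
#
#   _parse_header('form-data; name="f"; filename="a.txt"')
#     -> ('form-data', {'name': 'f', 'filename': 'a.txt'})
#   _parse_header('permessage-deflate; client_no_context_takeover')
#     -> ('permessage-deflate', {'client_no_context_takeover': None})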
+ + +def _parseparam(s): + while s[:1] == ';': + s = s[1:] + end = s.find(';') + while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2: + end = s.find(';', end + 1) + if end < 0: + end = len(s) + f = s[:end] + yield f.strip() + s = s[end:] + + +def _parse_header(line): + """Parse a Content-type like header. + + Return the main content-type and a dictionary of options. + + """ + parts = _parseparam(';' + line) + key = next(parts) + pdict = {} + for p in parts: + i = p.find('=') + if i >= 0: + name = p[:i].strip().lower() + value = p[i + 1:].strip() + if len(value) >= 2 and value[0] == value[-1] == '"': + value = value[1:-1] + value = value.replace('\\\\', '\\').replace('\\"', '"') + pdict[name] = value + else: + pdict[p] = None + return key, pdict + + +def _encode_header(key, pdict): + """Inverse of _parse_header. + + >>> _encode_header('permessage-deflate', + ... {'client_max_window_bits': 15, 'client_no_context_takeover': None}) + 'permessage-deflate; client_max_window_bits=15; client_no_context_takeover' + """ + if not pdict: + return key + out = [key] + # Sort the parameters just to make it easy to test. + for k, v in sorted(pdict.items()): + if v is None: + out.append(k) + else: + # TODO: quote if necessary. + out.append('%s=%s' % (k, v)) + return '; '.join(out) + + +def doctests(): + import doctest + return doctest.DocTestSuite() + + +def split_host_and_port(netloc): + """Returns ``(host, port)`` tuple from ``netloc``. + + Returned ``port`` will be ``None`` if not present. + + .. versionadded:: 4.1 + """ + match = re.match(r'^(.+):(\d+)$', netloc) + if match: + host = match.group(1) + port = int(match.group(2)) + else: + host = netloc + port = None + return (host, port) diff --git a/server/www/packages/packages-common/tornado/ioloop.py b/server/www/packages/packages-common/tornado/ioloop.py new file mode 100644 index 0000000..c23cb33 --- /dev/null +++ b/server/www/packages/packages-common/tornado/ioloop.py @@ -0,0 +1,1053 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""An I/O event loop for non-blocking sockets. + +Typical applications will use a single `IOLoop` object, in the +`IOLoop.instance` singleton. The `IOLoop.start` method should usually +be called at the end of the ``main()`` function. Atypical applications may +use more than one `IOLoop`, such as one `IOLoop` per thread, or per `unittest` +case. + +In addition to I/O events, the `IOLoop` can also schedule time-based events. +`IOLoop.add_timeout` is a non-blocking alternative to `time.sleep`. 
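To make that last point concrete, a minimal sketch (illustrative only; it assumes the Tornado 4.x API defined in this file) of scheduling work instead of sleeping:

    from tornado.ioloop import IOLoop

    def on_timer():
        print("fired two seconds later, without blocking the loop")
        IOLoop.current().stop()

    loop = IOLoop.current()
    loop.call_later(2.0, on_timer)  # non-blocking alternative to time.sleep(2)
    loop.start()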
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import errno +import functools +import heapq +import itertools +import logging +import numbers +import os +import select +import sys +import threading +import time +import traceback +import math + +from tornado.concurrent import TracebackFuture, is_future +from tornado.log import app_log, gen_log +from tornado import stack_context +from tornado.util import Configurable, errno_from_exception, timedelta_to_seconds + +try: + import signal +except ImportError: + signal = None + +try: + import thread # py2 +except ImportError: + import _thread as thread # py3 + +from tornado.platform.auto import set_close_exec, Waker + + +_POLL_TIMEOUT = 3600.0 + + +class TimeoutError(Exception): + pass + + +class IOLoop(Configurable): + """A level-triggered I/O loop. + + We use ``epoll`` (Linux) or ``kqueue`` (BSD and Mac OS X) if they + are available, or else we fall back on select(). If you are + implementing a system that needs to handle thousands of + simultaneous connections, you should use a system that supports + either ``epoll`` or ``kqueue``. + + Example usage for a simple TCP server: + + .. testcode:: + + import errno + import functools + import tornado.ioloop + import socket + + def connection_ready(sock, fd, events): + while True: + try: + connection, address = sock.accept() + except socket.error as e: + if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN): + raise + return + connection.setblocking(0) + handle_connection(connection, address) + + if __name__ == '__main__': + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.setblocking(0) + sock.bind(("", port)) + sock.listen(128) + + io_loop = tornado.ioloop.IOLoop.current() + callback = functools.partial(connection_ready, sock) + io_loop.add_handler(sock.fileno(), callback, io_loop.READ) + io_loop.start() + + .. testoutput:: + :hide: + + By default, a newly-constructed `IOLoop` becomes the thread's current + `IOLoop`, unless there already is a current `IOLoop`. This behavior + can be controlled with the ``make_current`` argument to the `IOLoop` + constructor: if ``make_current=True``, the new `IOLoop` will always + try to become current and it raises an error if there is already a + current instance. If ``make_current=False``, the new `IOLoop` will + not try to become current. + + .. versionchanged:: 4.2 + Added the ``make_current`` keyword argument to the `IOLoop` + constructor. + """ + # Constants from the epoll module + _EPOLLIN = 0x001 + _EPOLLPRI = 0x002 + _EPOLLOUT = 0x004 + _EPOLLERR = 0x008 + _EPOLLHUP = 0x010 + _EPOLLRDHUP = 0x2000 + _EPOLLONESHOT = (1 << 30) + _EPOLLET = (1 << 31) + + # Our events map exactly to the epoll events + NONE = 0 + READ = _EPOLLIN + WRITE = _EPOLLOUT + ERROR = _EPOLLERR | _EPOLLHUP + + # Global lock for creating global IOLoop instance + _instance_lock = threading.Lock() + + _current = threading.local() + + @staticmethod + def instance(): + """Returns a global `IOLoop` instance. + + Most applications have a single, global `IOLoop` running on the + main thread. Use this method to get this instance from + another thread. In most other cases, it is better to use `current()` + to get the current thread's `IOLoop`. 
+ """ + if not hasattr(IOLoop, "_instance"): + with IOLoop._instance_lock: + if not hasattr(IOLoop, "_instance"): + # New instance after double check + IOLoop._instance = IOLoop() + return IOLoop._instance + + @staticmethod + def initialized(): + """Returns true if the singleton instance has been created.""" + return hasattr(IOLoop, "_instance") + + def install(self): + """Installs this `IOLoop` object as the singleton instance. + + This is normally not necessary as `instance()` will create + an `IOLoop` on demand, but you may want to call `install` to use + a custom subclass of `IOLoop`. + """ + assert not IOLoop.initialized() + IOLoop._instance = self + + @staticmethod + def clear_instance(): + """Clear the global `IOLoop` instance. + + .. versionadded:: 4.0 + """ + if hasattr(IOLoop, "_instance"): + del IOLoop._instance + + @staticmethod + def current(instance=True): + """Returns the current thread's `IOLoop`. + + If an `IOLoop` is currently running or has been marked as + current by `make_current`, returns that instance. If there is + no current `IOLoop`, returns `IOLoop.instance()` (i.e. the + main thread's `IOLoop`, creating one if necessary) if ``instance`` + is true. + + In general you should use `IOLoop.current` as the default when + constructing an asynchronous object, and use `IOLoop.instance` + when you mean to communicate to the main thread from a different + one. + + .. versionchanged:: 4.1 + Added ``instance`` argument to control the fallback to + `IOLoop.instance()`. + """ + current = getattr(IOLoop._current, "instance", None) + if current is None and instance: + return IOLoop.instance() + return current + + def make_current(self): + """Makes this the `IOLoop` for the current thread. + + An `IOLoop` automatically becomes current for its thread + when it is started, but it is sometimes useful to call + `make_current` explicitly before starting the `IOLoop`, + so that code run at startup time can find the right + instance. + + .. versionchanged:: 4.1 + An `IOLoop` created while there is no current `IOLoop` + will automatically become current. + """ + IOLoop._current.instance = self + + @staticmethod + def clear_current(): + IOLoop._current.instance = None + + @classmethod + def configurable_base(cls): + return IOLoop + + @classmethod + def configurable_default(cls): + if hasattr(select, "epoll"): + from tornado.platform.epoll import EPollIOLoop + return EPollIOLoop + if hasattr(select, "kqueue"): + # Python 2.6+ on BSD or Mac + from tornado.platform.kqueue import KQueueIOLoop + return KQueueIOLoop + from tornado.platform.select import SelectIOLoop + return SelectIOLoop + + def initialize(self, make_current=None): + if make_current is None: + if IOLoop.current(instance=False) is None: + self.make_current() + elif make_current: + if IOLoop.current(instance=False) is not None: + raise RuntimeError("current IOLoop already exists") + self.make_current() + + def close(self, all_fds=False): + """Closes the `IOLoop`, freeing any resources used. + + If ``all_fds`` is true, all file descriptors registered on the + IOLoop will be closed (not just the ones created by the + `IOLoop` itself). + + Many applications will only use a single `IOLoop` that runs for the + entire lifetime of the process. In that case closing the `IOLoop` + is not necessary since everything will be cleaned up when the + process exits. `IOLoop.close` is provided mainly for scenarios + such as unit tests, which create and destroy a large number of + ``IOLoops``. 
+ + An `IOLoop` must be completely stopped before it can be closed. This + means that `IOLoop.stop()` must be called *and* `IOLoop.start()` must + be allowed to return before attempting to call `IOLoop.close()`. + Therefore the call to `close` will usually appear just after + the call to `start` rather than near the call to `stop`. + + .. versionchanged:: 3.1 + If the `IOLoop` implementation supports non-integer objects + for "file descriptors", those objects will have their + ``close`` method called when ``all_fds`` is true. + """ + raise NotImplementedError() + + def add_handler(self, fd, handler, events): + """Registers the given handler to receive the given events for ``fd``. + + The ``fd`` argument may either be an integer file descriptor or + a file-like object with a ``fileno()`` method (and optionally a + ``close()`` method, which may be called when the `IOLoop` is shut + down). + + The ``events`` argument is a bitwise or of the constants + ``IOLoop.READ``, ``IOLoop.WRITE``, and ``IOLoop.ERROR``. + + When an event occurs, ``handler(fd, events)`` will be run. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def update_handler(self, fd, events): + """Changes the events we listen for on ``fd``. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def remove_handler(self, fd): + """Stop listening for events on ``fd``. + + .. versionchanged:: 4.0 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ + raise NotImplementedError() + + def set_blocking_signal_threshold(self, seconds, action): + """Sends a signal if the `IOLoop` is blocked for more than + ``seconds`` seconds. + + Pass ``seconds=None`` to disable. Requires Python 2.6 on a unixy + platform. + + The action parameter is a Python signal handler. Read the + documentation for the `signal` module for more information. + If ``action`` is None, the process will be killed if it is + blocked for too long. + """ + raise NotImplementedError() + + def set_blocking_log_threshold(self, seconds): + """Logs a stack trace if the `IOLoop` is blocked for more than + ``seconds`` seconds. + + Equivalent to ``set_blocking_signal_threshold(seconds, + self.log_stack)`` + """ + self.set_blocking_signal_threshold(seconds, self.log_stack) + + def log_stack(self, signal, frame): + """Signal handler to log the stack trace of the current thread. + + For use with `set_blocking_signal_threshold`. + """ + gen_log.warning('IOLoop blocked for %f seconds in\n%s', + self._blocking_signal_threshold, + ''.join(traceback.format_stack(frame))) + + def start(self): + """Starts the I/O loop. + + The loop will run until one of the callbacks calls `stop()`, which + will make the loop stop after the current event iteration completes. + """ + raise NotImplementedError() + + def _setup_logging(self): + """The IOLoop catches and logs exceptions, so it's + important that log output be visible. However, python's + default behavior for non-root loggers (prior to python + 3.2) is to print an unhelpful "no handlers could be + found" message rather than the actual log entry, so we + must explicitly configure logging if we've made it this + far without anything. + + This method should be called from start() in subclasses.
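Pulling the start/stop/close contract documented above into one minimal sketch (illustrative only):

    from tornado.ioloop import IOLoop

    loop = IOLoop()               # becomes current if none is current yet
    loop.add_callback(loop.stop)  # stop() takes effect inside start()
    loop.start()                  # returns once the loop has stopped
    loop.close()                  # close only after start() has returned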
+ """ + if not any([logging.getLogger().handlers, + logging.getLogger('tornado').handlers, + logging.getLogger('tornado.application').handlers]): + logging.basicConfig() + + def stop(self): + """Stop the I/O loop. + + If the event loop is not currently running, the next call to `start()` + will return immediately. + + To use asynchronous methods from otherwise-synchronous code (such as + unit tests), you can start and stop the event loop like this:: + + ioloop = IOLoop() + async_method(ioloop=ioloop, callback=ioloop.stop) + ioloop.start() + + ``ioloop.start()`` will return after ``async_method`` has run + its callback, whether that callback was invoked before or + after ``ioloop.start``. + + Note that even after `stop` has been called, the `IOLoop` is not + completely stopped until `IOLoop.start` has also returned. + Some work that was scheduled before the call to `stop` may still + be run before the `IOLoop` shuts down. + """ + raise NotImplementedError() + + def run_sync(self, func, timeout=None): + """Starts the `IOLoop`, runs the given function, and stops the loop. + + The function must return either a yieldable object or + ``None``. If the function returns a yieldable object, the + `IOLoop` will run until the yieldable is resolved (and + `run_sync()` will return the yieldable's result). If it raises + an exception, the `IOLoop` will stop and the exception will be + re-raised to the caller. + + The keyword-only argument ``timeout`` may be used to set + a maximum duration for the function. If the timeout expires, + a `TimeoutError` is raised. + + This method is useful in conjunction with `tornado.gen.coroutine` + to allow asynchronous calls in a ``main()`` function:: + + @gen.coroutine + def main(): + # do stuff... + + if __name__ == '__main__': + IOLoop.current().run_sync(main) + + .. versionchanged:: 4.3 + Returning a non-``None``, non-yieldable value is now an error. + """ + future_cell = [None] + + def run(): + try: + result = func() + if result is not None: + from tornado.gen import convert_yielded + result = convert_yielded(result) + except Exception: + future_cell[0] = TracebackFuture() + future_cell[0].set_exc_info(sys.exc_info()) + else: + if is_future(result): + future_cell[0] = result + else: + future_cell[0] = TracebackFuture() + future_cell[0].set_result(result) + self.add_future(future_cell[0], lambda future: self.stop()) + self.add_callback(run) + if timeout is not None: + timeout_handle = self.add_timeout(self.time() + timeout, self.stop) + self.start() + if timeout is not None: + self.remove_timeout(timeout_handle) + if not future_cell[0].done(): + raise TimeoutError('Operation timed out after %s seconds' % timeout) + return future_cell[0].result() + + def time(self): + """Returns the current time according to the `IOLoop`'s clock. + + The return value is a floating-point number relative to an + unspecified time in the past. + + By default, the `IOLoop`'s time function is `time.time`. However, + it may be configured to use e.g. `time.monotonic` instead. + Calls to `add_timeout` that pass a number instead of a + `datetime.timedelta` should use this function to compute the + appropriate time, so they can work no matter what time function + is chosen. + """ + return time.time() + + def add_timeout(self, deadline, callback, *args, **kwargs): + """Runs the ``callback`` at the time ``deadline`` from the I/O loop. + + Returns an opaque handle that may be passed to + `remove_timeout` to cancel. 
+ + ``deadline`` may be a number denoting a time (on the same + scale as `IOLoop.time`, normally `time.time`), or a + `datetime.timedelta` object for a deadline relative to the + current time. Since Tornado 4.0, `call_later` is a more + convenient alternative for the relative case since it does not + require a timedelta object. + + Note that it is not safe to call `add_timeout` from other threads. + Instead, you must use `add_callback` to transfer control to the + `IOLoop`'s thread, and then call `add_timeout` from there. + + Subclasses of IOLoop must implement either `add_timeout` or + `call_at`; the default implementations of each will call + the other. `call_at` is usually easier to implement, but + subclasses that wish to maintain compatibility with Tornado + versions prior to 4.0 must use `add_timeout` instead. + + .. versionchanged:: 4.0 + Now passes through ``*args`` and ``**kwargs`` to the callback. + """ + if isinstance(deadline, numbers.Real): + return self.call_at(deadline, callback, *args, **kwargs) + elif isinstance(deadline, datetime.timedelta): + return self.call_at(self.time() + timedelta_to_seconds(deadline), + callback, *args, **kwargs) + else: + raise TypeError("Unsupported deadline %r" % deadline) + + def call_later(self, delay, callback, *args, **kwargs): + """Runs the ``callback`` after ``delay`` seconds have passed. + + Returns an opaque handle that may be passed to `remove_timeout` + to cancel. Note that unlike the `asyncio` method of the same + name, the returned object does not have a ``cancel()`` method. + + See `add_timeout` for comments on thread-safety and subclassing. + + .. versionadded:: 4.0 + """ + return self.call_at(self.time() + delay, callback, *args, **kwargs) + + def call_at(self, when, callback, *args, **kwargs): + """Runs the ``callback`` at the absolute time designated by ``when``. + + ``when`` must be a number using the same reference point as + `IOLoop.time`. + + Returns an opaque handle that may be passed to `remove_timeout` + to cancel. Note that unlike the `asyncio` method of the same + name, the returned object does not have a ``cancel()`` method. + + See `add_timeout` for comments on thread-safety and subclassing. + + .. versionadded:: 4.0 + """ + return self.add_timeout(when, callback, *args, **kwargs) + + def remove_timeout(self, timeout): + """Cancels a pending timeout. + + The argument is a handle as returned by `add_timeout`. It is + safe to call `remove_timeout` even if the callback has already + been run. + """ + raise NotImplementedError() + + def add_callback(self, callback, *args, **kwargs): + """Calls the given callback on the next I/O loop iteration. + + It is safe to call this method from any thread at any time, + except from a signal handler. Note that this is the **only** + method in `IOLoop` that makes this thread-safety guarantee; all + other interaction with the `IOLoop` must be done from that + `IOLoop`'s thread. `add_callback()` may be used to transfer + control from other threads to the `IOLoop`'s thread. + + To add a callback from a signal handler, see + `add_callback_from_signal`. + """ + raise NotImplementedError() + + def add_callback_from_signal(self, callback, *args, **kwargs): + """Calls the given callback on the next I/O loop iteration. + + Safe for use from a Python signal handler; should not be used + otherwise. + + Callbacks added with this method will be run without any + `.stack_context`, to avoid picking up the context of the function + that was interrupted by the signal. 
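A sketch of the intended use (illustrative; the handler is invented and a Unix platform is assumed):

    import signal
    from tornado.ioloop import IOLoop

    loop = IOLoop.current()

    def on_sigterm(signum, frame):
        # A plain add_callback is not safe here; use the signal-safe variant.
        loop.add_callback_from_signal(loop.stop)

    signal.signal(signal.SIGTERM, on_sigterm)
    loop.start()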
+ """ + raise NotImplementedError() + + def spawn_callback(self, callback, *args, **kwargs): + """Calls the given callback on the next IOLoop iteration. + + Unlike all other callback-related methods on IOLoop, + ``spawn_callback`` does not associate the callback with its caller's + ``stack_context``, so it is suitable for fire-and-forget callbacks + that should not interfere with the caller. + + .. versionadded:: 4.0 + """ + with stack_context.NullContext(): + self.add_callback(callback, *args, **kwargs) + + def add_future(self, future, callback): + """Schedules a callback on the ``IOLoop`` when the given + `.Future` is finished. + + The callback is invoked with one argument, the + `.Future`. + """ + assert is_future(future) + callback = stack_context.wrap(callback) + future.add_done_callback( + lambda future: self.add_callback(callback, future)) + + def _run_callback(self, callback): + """Runs a callback with error handling. + + For use in subclasses. + """ + try: + ret = callback() + if ret is not None: + from tornado import gen + # Functions that return Futures typically swallow all + # exceptions and store them in the Future. If a Future + # makes it out to the IOLoop, ensure its exception (if any) + # gets logged too. + try: + ret = gen.convert_yielded(ret) + except gen.BadYieldError: + # It's not unusual for add_callback to be used with + # methods returning a non-None and non-yieldable + # result, which should just be ignored. + pass + else: + self.add_future(ret, lambda f: f.result()) + except Exception: + self.handle_callback_exception(callback) + + def handle_callback_exception(self, callback): + """This method is called whenever a callback run by the `IOLoop` + throws an exception. + + By default simply logs the exception as an error. Subclasses + may override this method to customize reporting of exceptions. + + The exception itself is not passed explicitly, but is available + in `sys.exc_info`. + """ + app_log.error("Exception in callback %r", callback, exc_info=True) + + def split_fd(self, fd): + """Returns an (fd, obj) pair from an ``fd`` parameter. + + We accept both raw file descriptors and file-like objects as + input to `add_handler` and related methods. When a file-like + object is passed, we must retain the object itself so we can + close it correctly when the `IOLoop` shuts down, but the + poller interfaces favor file descriptors (they will accept + file-like objects and call ``fileno()`` for you, but they + always return the descriptor itself). + + This method is provided for use by `IOLoop` subclasses and should + not generally be used by application code. + + .. versionadded:: 4.0 + """ + try: + return fd.fileno(), fd + except AttributeError: + return fd, fd + + def close_fd(self, fd): + """Utility method to close an ``fd``. + + If ``fd`` is a file-like object, we close it directly; otherwise + we use `os.close`. + + This method is provided for use by `IOLoop` subclasses (in + implementations of ``IOLoop.close(all_fds=True)`` and should + not generally be used by application code. + + .. versionadded:: 4.0 + """ + try: + try: + fd.close() + except AttributeError: + os.close(fd) + except OSError: + pass + + +class PollIOLoop(IOLoop): + """Base class for IOLoops built around a select-like function. + + For concrete implementations, see `tornado.platform.epoll.EPollIOLoop` + (Linux), `tornado.platform.kqueue.KQueueIOLoop` (BSD and Mac), or + `tornado.platform.select.SelectIOLoop` (all platforms). 
+ """ + def initialize(self, impl, time_func=None, **kwargs): + super(PollIOLoop, self).initialize(**kwargs) + self._impl = impl + if hasattr(self._impl, 'fileno'): + set_close_exec(self._impl.fileno()) + self.time_func = time_func or time.time + self._handlers = {} + self._events = {} + self._callbacks = [] + self._callback_lock = threading.Lock() + self._timeouts = [] + self._cancellations = 0 + self._running = False + self._stopped = False + self._closing = False + self._thread_ident = None + self._blocking_signal_threshold = None + self._timeout_counter = itertools.count() + + # Create a pipe that we send bogus data to when we want to wake + # the I/O loop when it is idle + self._waker = Waker() + self.add_handler(self._waker.fileno(), + lambda fd, events: self._waker.consume(), + self.READ) + + def close(self, all_fds=False): + with self._callback_lock: + self._closing = True + self.remove_handler(self._waker.fileno()) + if all_fds: + for fd, handler in self._handlers.values(): + self.close_fd(fd) + self._waker.close() + self._impl.close() + self._callbacks = None + self._timeouts = None + + def add_handler(self, fd, handler, events): + fd, obj = self.split_fd(fd) + self._handlers[fd] = (obj, stack_context.wrap(handler)) + self._impl.register(fd, events | self.ERROR) + + def update_handler(self, fd, events): + fd, obj = self.split_fd(fd) + self._impl.modify(fd, events | self.ERROR) + + def remove_handler(self, fd): + fd, obj = self.split_fd(fd) + self._handlers.pop(fd, None) + self._events.pop(fd, None) + try: + self._impl.unregister(fd) + except Exception: + gen_log.debug("Error deleting fd from IOLoop", exc_info=True) + + def set_blocking_signal_threshold(self, seconds, action): + if not hasattr(signal, "setitimer"): + gen_log.error("set_blocking_signal_threshold requires a signal module " + "with the setitimer method") + return + self._blocking_signal_threshold = seconds + if seconds is not None: + signal.signal(signal.SIGALRM, + action if action is not None else signal.SIG_DFL) + + def start(self): + if self._running: + raise RuntimeError("IOLoop is already running") + self._setup_logging() + if self._stopped: + self._stopped = False + return + old_current = getattr(IOLoop._current, "instance", None) + IOLoop._current.instance = self + self._thread_ident = thread.get_ident() + self._running = True + + # signal.set_wakeup_fd closes a race condition in event loops: + # a signal may arrive at the beginning of select/poll/etc + # before it goes into its interruptible sleep, so the signal + # will be consumed without waking the select. The solution is + # for the (C, synchronous) signal handler to write to a pipe, + # which will then be seen by select. + # + # In python's signal handling semantics, this only matters on the + # main thread (fortunately, set_wakeup_fd only works on the main + # thread and will raise a ValueError otherwise). + # + # If someone has already set a wakeup fd, we don't want to + # disturb it. This is an issue for twisted, which does its + # SIGCHLD processing in response to its own wakeup fd being + # written to. As long as the wakeup fd is registered on the IOLoop, + # the loop will still wake up and everything should work. + old_wakeup_fd = None + if hasattr(signal, 'set_wakeup_fd') and os.name == 'posix': + # requires python 2.6+, unix. set_wakeup_fd exists but crashes + # the python process on windows. + try: + old_wakeup_fd = signal.set_wakeup_fd(self._waker.write_fileno()) + if old_wakeup_fd != -1: + # Already set, restore previous value. 
This is a little racy, + # but there's no clean get_wakeup_fd and in real use the + # IOLoop is just started once at the beginning. + signal.set_wakeup_fd(old_wakeup_fd) + old_wakeup_fd = None + except ValueError: + # Non-main thread, or the previous value of wakeup_fd + # is no longer valid. + old_wakeup_fd = None + + try: + while True: + # Prevent IO event starvation by delaying new callbacks + # to the next iteration of the event loop. + with self._callback_lock: + callbacks = self._callbacks + self._callbacks = [] + + # Add any timeouts that have come due to the callback list. + # Do not run anything until we have determined which ones + # are ready, so timeouts that call add_timeout cannot + # schedule anything in this iteration. + due_timeouts = [] + if self._timeouts: + now = self.time() + while self._timeouts: + if self._timeouts[0].callback is None: + # The timeout was cancelled. Note that the + # cancellation check is repeated below for timeouts + # that are cancelled by another timeout or callback. + heapq.heappop(self._timeouts) + self._cancellations -= 1 + elif self._timeouts[0].deadline <= now: + due_timeouts.append(heapq.heappop(self._timeouts)) + else: + break + if (self._cancellations > 512 + and self._cancellations > (len(self._timeouts) >> 1)): + # Clean up the timeout queue when it gets large and it's + # more than half cancellations. + self._cancellations = 0 + self._timeouts = [x for x in self._timeouts + if x.callback is not None] + heapq.heapify(self._timeouts) + + for callback in callbacks: + self._run_callback(callback) + for timeout in due_timeouts: + if timeout.callback is not None: + self._run_callback(timeout.callback) + # Closures may be holding on to a lot of memory, so allow + # them to be freed before we go into our poll wait. + callbacks = callback = due_timeouts = timeout = None + + if self._callbacks: + # If any callbacks or timeouts called add_callback, + # we don't want to wait in poll() before we run them. + poll_timeout = 0.0 + elif self._timeouts: + # If there are any timeouts, schedule the first one. + # Use self.time() instead of 'now' to account for time + # spent running callbacks. + poll_timeout = self._timeouts[0].deadline - self.time() + poll_timeout = max(0, min(poll_timeout, _POLL_TIMEOUT)) + else: + # No timeouts and no callbacks, so use the default. + poll_timeout = _POLL_TIMEOUT + + if not self._running: + break + + if self._blocking_signal_threshold is not None: + # clear alarm so it doesn't fire while poll is waiting for + # events. + signal.setitimer(signal.ITIMER_REAL, 0, 0) + + try: + event_pairs = self._impl.poll(poll_timeout) + except Exception as e: + # Depending on python version and IOLoop implementation, + # different exception types may be thrown and there are + # two ways EINTR might be signaled: + # * e.errno == errno.EINTR + # * e.args is like (errno.EINTR, 'Interrupted system call') + if errno_from_exception(e) == errno.EINTR: + continue + else: + raise + + if self._blocking_signal_threshold is not None: + signal.setitimer(signal.ITIMER_REAL, + self._blocking_signal_threshold, 0) + + # Pop one fd at a time from the set of pending fds and run + # its handler. 
Since that handler may perform actions on + # other file descriptors, there may be reentrant calls to + # this IOLoop that update self._events + self._events.update(event_pairs) + while self._events: + fd, events = self._events.popitem() + try: + fd_obj, handler_func = self._handlers[fd] + handler_func(fd_obj, events) + except (OSError, IOError) as e: + if errno_from_exception(e) == errno.EPIPE: + # Happens when the client closes the connection + pass + else: + self.handle_callback_exception(self._handlers.get(fd)) + except Exception: + self.handle_callback_exception(self._handlers.get(fd)) + fd_obj = handler_func = None + + finally: + # reset the stopped flag so another start/stop pair can be issued + self._stopped = False + if self._blocking_signal_threshold is not None: + signal.setitimer(signal.ITIMER_REAL, 0, 0) + IOLoop._current.instance = old_current + if old_wakeup_fd is not None: + signal.set_wakeup_fd(old_wakeup_fd) + + def stop(self): + self._running = False + self._stopped = True + self._waker.wake() + + def time(self): + return self.time_func() + + def call_at(self, deadline, callback, *args, **kwargs): + timeout = _Timeout( + deadline, + functools.partial(stack_context.wrap(callback), *args, **kwargs), + self) + heapq.heappush(self._timeouts, timeout) + return timeout + + def remove_timeout(self, timeout): + # Removing from a heap is complicated, so just leave the defunct + # timeout object in the queue (see discussion in + # http://docs.python.org/library/heapq.html). + # If this turns out to be a problem, we could add a garbage + # collection pass whenever there are too many dead timeouts. + timeout.callback = None + self._cancellations += 1 + + def add_callback(self, callback, *args, **kwargs): + if thread.get_ident() != self._thread_ident: + # If we're not on the IOLoop's thread, we need to synchronize + # with other threads, or waking logic will induce a race. + with self._callback_lock: + if self._closing: + return + list_empty = not self._callbacks + self._callbacks.append(functools.partial( + stack_context.wrap(callback), *args, **kwargs)) + if list_empty: + # If we're not in the IOLoop's thread, and we added the + # first callback to an empty list, we may need to wake it + # up (it may wake up on its own, but an occasional extra + # wake is harmless). Waking up a polling IOLoop is + # relatively expensive, so we try to avoid it when we can. + self._waker.wake() + else: + if self._closing: + return + # If we're on the IOLoop's thread, we don't need the lock, + # since we don't need to wake anyone, just add the + # callback. Blindly insert into self._callbacks. This is + # safe even from signal handlers because the GIL makes + # list.append atomic. One subtlety is that if the signal + # is interrupting another thread holding the + # _callback_lock block in IOLoop.start, we may modify + # either the old or new version of self._callbacks, but + # either way will work. 
+ self._callbacks.append(functools.partial( + stack_context.wrap(callback), *args, **kwargs)) + + def add_callback_from_signal(self, callback, *args, **kwargs): + with stack_context.NullContext(): + self.add_callback(callback, *args, **kwargs) + + +class _Timeout(object): + """An IOLoop timeout, a UNIX timestamp and a callback""" + + # Reduce memory overhead when there are lots of pending callbacks + __slots__ = ['deadline', 'callback', 'tiebreaker'] + + def __init__(self, deadline, callback, io_loop): + if not isinstance(deadline, numbers.Real): + raise TypeError("Unsupported deadline %r" % deadline) + self.deadline = deadline + self.callback = callback + self.tiebreaker = next(io_loop._timeout_counter) + + # Comparison methods to sort by deadline, with object id as a tiebreaker + # to guarantee a consistent ordering. The heapq module uses __le__ + # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons + # use __lt__). + def __lt__(self, other): + return ((self.deadline, self.tiebreaker) < + (other.deadline, other.tiebreaker)) + + def __le__(self, other): + return ((self.deadline, self.tiebreaker) <= + (other.deadline, other.tiebreaker)) + + +class PeriodicCallback(object): + """Schedules the given callback to be called periodically. + + The callback is called every ``callback_time`` milliseconds. + Note that the timeout is given in milliseconds, while most other + time-related functions in Tornado use seconds. + + If the callback runs for longer than ``callback_time`` milliseconds, + subsequent invocations will be skipped to get back on schedule. + + `start` must be called after the `PeriodicCallback` is created. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def __init__(self, callback, callback_time, io_loop=None): + self.callback = callback + if callback_time <= 0: + raise ValueError("Periodic callback must have a positive callback_time") + self.callback_time = callback_time + self.io_loop = io_loop or IOLoop.current() + self._running = False + self._timeout = None + + def start(self): + """Starts the timer.""" + self._running = True + self._next_timeout = self.io_loop.time() + self._schedule_next() + + def stop(self): + """Stops the timer.""" + self._running = False + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = None + + def is_running(self): + """Return True if this `.PeriodicCallback` has been started. + + .. versionadded:: 4.1 + """ + return self._running + + def _run(self): + if not self._running: + return + try: + return self.callback() + except Exception: + self.io_loop.handle_callback_exception(self.callback) + finally: + self._schedule_next() + + def _schedule_next(self): + if self._running: + current_time = self.io_loop.time() + + if self._next_timeout <= current_time: + callback_time_sec = self.callback_time / 1000.0 + self._next_timeout += (math.floor((current_time - self._next_timeout) / callback_time_sec) + 1) * callback_time_sec + + self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run) diff --git a/server/www/packages/packages-common/tornado/iostream.py b/server/www/packages/packages-common/tornado/iostream.py new file mode 100644 index 0000000..4e304f8 --- /dev/null +++ b/server/www/packages/packages-common/tornado/iostream.py @@ -0,0 +1,1550 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Utility classes to write to and read from non-blocking files and sockets. + +Contents: + +* `BaseIOStream`: Generic interface for reading and writing. +* `IOStream`: Implementation of BaseIOStream using non-blocking sockets. +* `SSLIOStream`: SSL-aware version of IOStream. +* `PipeIOStream`: Pipe-based IOStream implementation. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import collections +import errno +import numbers +import os +import socket +import sys +import re + +from tornado.concurrent import TracebackFuture +from tornado import ioloop +from tornado.log import gen_log, app_log +from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError, _client_ssl_defaults, _server_ssl_defaults +from tornado import stack_context +from tornado.util import errno_from_exception + +try: + from tornado.platform.posix import _set_nonblocking +except ImportError: + _set_nonblocking = None + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine + ssl = None + +# These errnos indicate that a non-blocking operation must be retried +# at a later time. On most platforms they're the same value, but on +# some they differ. +_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) + +if hasattr(errno, "WSAEWOULDBLOCK"): + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + +# These errnos indicate that a connection has been abruptly terminated. +# They should be caught and handled less noisily than other errors. +_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, + errno.ETIMEDOUT) + +if hasattr(errno, "WSAECONNRESET"): + _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT) + +if sys.platform == 'darwin': + # OSX appears to have a race condition that causes send(2) to return + # EPROTOTYPE if called while a socket is being torn down: + # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + # Since the socket is being closed anyway, treat this as an ECONNRESET + # instead of an unexpected error. + _ERRNO_CONNRESET += (errno.EPROTOTYPE,) + +# More non-portable errnos: +_ERRNO_INPROGRESS = (errno.EINPROGRESS,) + +if hasattr(errno, "WSAEINPROGRESS"): + _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) + + +class StreamClosedError(IOError): + """Exception raised by `IOStream` methods when the stream is closed. + + Note that the close callback is scheduled to run *after* other + callbacks on the stream (to allow for buffered data to be processed), + so you may see this error before you see the close callback. + + The ``real_error`` attribute contains the underlying error that caused + the stream to close (if any). + + .. versionchanged:: 4.3 + Added the ``real_error`` attribute. + """ + def __init__(self, real_error=None): + super(StreamClosedError, self).__init__('Stream is closed') + self.real_error = real_error + + +class UnsatisfiableReadError(Exception): + """Exception raised when a read cannot be satisfied. + + Raised by ``read_until`` and ``read_until_regex`` with a ``max_bytes`` + argument. 
+ """ + pass + + +class StreamBufferFullError(Exception): + """Exception raised by `IOStream` methods when the buffer is full. + """ + + +class BaseIOStream(object): + """A utility class to write to and read from a non-blocking file or socket. + + We support a non-blocking ``write()`` and a family of ``read_*()`` methods. + All of the methods take an optional ``callback`` argument and return a + `.Future` only if no callback is given. When the operation completes, + the callback will be run or the `.Future` will resolve with the data + read (or ``None`` for ``write()``). All outstanding ``Futures`` will + resolve with a `StreamClosedError` when the stream is closed; users + of the callback interface will be notified via + `.BaseIOStream.set_close_callback` instead. + + When a stream is closed due to an error, the IOStream's ``error`` + attribute contains the exception object. + + Subclasses must implement `fileno`, `close_fd`, `write_to_fd`, + `read_from_fd`, and optionally `get_fd_error`. + """ + def __init__(self, io_loop=None, max_buffer_size=None, + read_chunk_size=None, max_write_buffer_size=None): + """`BaseIOStream` constructor. + + :arg io_loop: The `.IOLoop` to use; defaults to `.IOLoop.current`. + Deprecated since Tornado 4.1. + :arg max_buffer_size: Maximum amount of incoming data to buffer; + defaults to 100MB. + :arg read_chunk_size: Amount of data to read at one time from the + underlying transport; defaults to 64KB. + :arg max_write_buffer_size: Amount of outgoing data to buffer; + defaults to unlimited. + + .. versionchanged:: 4.0 + Add the ``max_write_buffer_size`` parameter. Changed default + ``read_chunk_size`` to 64KB. + """ + self.io_loop = io_loop or ioloop.IOLoop.current() + self.max_buffer_size = max_buffer_size or 104857600 + # A chunk size that is too close to max_buffer_size can cause + # spurious failures. + self.read_chunk_size = min(read_chunk_size or 65536, + self.max_buffer_size // 2) + self.max_write_buffer_size = max_write_buffer_size + self.error = None + self._read_buffer = collections.deque() + self._write_buffer = collections.deque() + self._read_buffer_size = 0 + self._write_buffer_size = 0 + self._write_buffer_frozen = False + self._read_delimiter = None + self._read_regex = None + self._read_max_bytes = None + self._read_bytes = None + self._read_partial = False + self._read_until_close = False + self._read_callback = None + self._read_future = None + self._streaming_callback = None + self._write_callback = None + self._write_future = None + self._close_callback = None + self._connect_callback = None + self._connect_future = None + # _ssl_connect_future should be defined in SSLIOStream + # but it's here so we can clean it up in maybe_run_close_callback. + # TODO: refactor that so subclasses can add additional futures + # to be cancelled. + self._ssl_connect_future = None + self._connecting = False + self._state = None + self._pending_callbacks = 0 + self._closed = False + + def fileno(self): + """Returns the file descriptor for this stream.""" + raise NotImplementedError() + + def close_fd(self): + """Closes the file underlying this stream. + + ``close_fd`` is called by `BaseIOStream` and should not be called + elsewhere; other users should call `close` instead. + """ + raise NotImplementedError() + + def write_to_fd(self, data): + """Attempts to write ``data`` to the underlying file. + + Returns the number of bytes written. + """ + raise NotImplementedError() + + def read_from_fd(self): + """Attempts to read from the underlying file. 
+ + Returns ``None`` if there was nothing to read (the socket + returned `~errno.EWOULDBLOCK` or equivalent), otherwise + returns the data. When possible, should return no more than + ``self.read_chunk_size`` bytes at a time. + """ + raise NotImplementedError() + + def get_fd_error(self): + """Returns information about any error on the underlying file. + + This method is called after the `.IOLoop` has signaled an error on the + file descriptor, and should return an Exception (such as `socket.error` + with additional information, or None if no such information is + available. + """ + return None + + def read_until_regex(self, regex, callback=None, max_bytes=None): + """Asynchronously read until we have matched the given regex. + + The result includes the data that matches the regex and anything + that came before it. If a callback is given, it will be run + with the data as an argument; if not, this method returns a + `.Future`. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the regex is + not satisfied. + + .. versionchanged:: 4.0 + Added the ``max_bytes`` argument. The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. + """ + future = self._set_read_callback(callback) + self._read_regex = re.compile(regex) + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + return future + except: + if future is not None: + # Ensure that the future doesn't log an error because its + # failure was never examined. + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_until(self, delimiter, callback=None, max_bytes=None): + """Asynchronously read until we have found the given delimiter. + + The result includes all the data read including the delimiter. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the delimiter + is not found. + + .. versionchanged:: 4.0 + Added the ``max_bytes`` argument. The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. + """ + future = self._set_read_callback(callback) + self._read_delimiter = delimiter + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + return future + except: + if future is not None: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_bytes(self, num_bytes, callback=None, streaming_callback=None, + partial=False): + """Asynchronously read a number of bytes. + + If a ``streaming_callback`` is given, it will be called with chunks + of data as they become available, and the final result will be empty. + Otherwise, the result is all the data that was read. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + If ``partial`` is true, the callback is run as soon as we have + any bytes to return (but never more than ``num_bytes``) + + .. versionchanged:: 4.0 + Added the ``partial`` argument. 
The callback argument is now + optional and a `.Future` will be returned if it is omitted. + """ + future = self._set_read_callback(callback) + assert isinstance(num_bytes, numbers.Integral) + self._read_bytes = num_bytes + self._read_partial = partial + self._streaming_callback = stack_context.wrap(streaming_callback) + try: + self._try_inline_read() + except: + if future is not None: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def read_until_close(self, callback=None, streaming_callback=None): + """Asynchronously reads all data from the socket until it is closed. + + If a ``streaming_callback`` is given, it will be called with chunks + of data as they become available, and the final result will be empty. + Otherwise, the result is all the data that was read. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + Note that if a ``streaming_callback`` is used, data will be + read from the socket as quickly as it becomes available; there + is no way to apply backpressure or cancel the reads. If flow + control or cancellation are desired, use a loop with + `read_bytes(partial=True) <.read_bytes>` instead. + + .. versionchanged:: 4.0 + The callback argument is now optional and a `.Future` will + be returned if it is omitted. + + """ + future = self._set_read_callback(callback) + self._streaming_callback = stack_context.wrap(streaming_callback) + if self.closed(): + if self._streaming_callback is not None: + self._run_read_callback(self._read_buffer_size, True) + self._run_read_callback(self._read_buffer_size, False) + return future + self._read_until_close = True + try: + self._try_inline_read() + except: + if future is not None: + future.add_done_callback(lambda f: f.exception()) + raise + return future + + def write(self, data, callback=None): + """Asynchronously write the given data to this stream. + + If ``callback`` is given, we call it when all of the buffered write + data has been successfully written to the stream. If there was + previously buffered write data and an old write callback, that + callback is simply overwritten with this new callback. + + If no ``callback`` is given, this method returns a `.Future` that + resolves (with a result of ``None``) when the write has been + completed. If `write` is called again before that `.Future` has + resolved, the previous future will be orphaned and will never resolve. + + .. versionchanged:: 4.0 + Now returns a `.Future` if no callback is given. + """ + assert isinstance(data, bytes) + self._check_closed() + # We use bool(_write_buffer) as a proxy for write_buffer_size>0, + # so never put empty strings in the buffer. + if data: + if (self.max_write_buffer_size is not None and + self._write_buffer_size + len(data) > self.max_write_buffer_size): + raise StreamBufferFullError("Reached maximum write buffer size") + # Break up large contiguous strings before inserting them in the + # write buffer, so we don't have to recopy the entire thing + # as we slice off pieces to send to the socket. 
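+ # An illustrative worked example of the chunking below (comment
+ # only, not executed): a single 300KB write is stored as 128KB,
+ # 128KB and 44KB pieces, so a partially-sent prefix can be sliced
+ # off without re-copying the whole payload.
+ #
+ #     >>> data = b"x" * (300 * 1024)
+ #     >>> [len(data[i:i + 128 * 1024])
+ #     ...  for i in range(0, len(data), 128 * 1024)]
+ #     [131072, 131072, 45056]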
+ WRITE_BUFFER_CHUNK_SIZE = 128 * 1024 + for i in range(0, len(data), WRITE_BUFFER_CHUNK_SIZE): + self._write_buffer.append(data[i:i + WRITE_BUFFER_CHUNK_SIZE]) + self._write_buffer_size += len(data) + if callback is not None: + self._write_callback = stack_context.wrap(callback) + future = None + else: + future = self._write_future = TracebackFuture() + future.add_done_callback(lambda f: f.exception()) + if not self._connecting: + self._handle_write() + if self._write_buffer: + self._add_io_state(self.io_loop.WRITE) + self._maybe_add_error_listener() + return future + + def set_close_callback(self, callback): + """Call the given callback when the stream is closed. + + This is not necessary for applications that use the `.Future` + interface; all outstanding ``Futures`` will resolve with a + `StreamClosedError` when the stream is closed. + """ + self._close_callback = stack_context.wrap(callback) + self._maybe_add_error_listener() + + def close(self, exc_info=False): + """Close this stream. + + If ``exc_info`` is true, set the ``error`` attribute to the current + exception from `sys.exc_info` (or if ``exc_info`` is a tuple, + use that instead of `sys.exc_info`). + """ + if not self.closed(): + if exc_info: + if not isinstance(exc_info, tuple): + exc_info = sys.exc_info() + if any(exc_info): + self.error = exc_info[1] + if self._read_until_close: + if (self._streaming_callback is not None and + self._read_buffer_size): + self._run_read_callback(self._read_buffer_size, True) + self._read_until_close = False + self._run_read_callback(self._read_buffer_size, False) + if self._state is not None: + self.io_loop.remove_handler(self.fileno()) + self._state = None + self.close_fd() + self._closed = True + self._maybe_run_close_callback() + + def _maybe_run_close_callback(self): + # If there are pending callbacks, don't run the close callback + # until they're done (see _maybe_add_error_handler) + if self.closed() and self._pending_callbacks == 0: + futures = [] + if self._read_future is not None: + futures.append(self._read_future) + self._read_future = None + if self._write_future is not None: + futures.append(self._write_future) + self._write_future = None + if self._connect_future is not None: + futures.append(self._connect_future) + self._connect_future = None + if self._ssl_connect_future is not None: + futures.append(self._ssl_connect_future) + self._ssl_connect_future = None + for future in futures: + future.set_exception(StreamClosedError(real_error=self.error)) + if self._close_callback is not None: + cb = self._close_callback + self._close_callback = None + self._run_callback(cb) + # Delete any unfinished callbacks to break up reference cycles. + self._read_callback = self._write_callback = None + # Clear the buffers so they can be cleared immediately even + # if the IOStream object is kept alive by a reference cycle. + # TODO: Clear the read buffer too; it currently breaks some tests. + self._write_buffer = None + + def reading(self): + """Returns true if we are currently reading from the stream.""" + return self._read_callback is not None or self._read_future is not None + + def writing(self): + """Returns true if we are currently writing to the stream.""" + return bool(self._write_buffer) + + def closed(self): + """Returns true if the stream has been closed.""" + return self._closed + + def set_nodelay(self, value): + """Sets the no-delay flag for this stream. 
+ + By default, data written to TCP streams may be held for a time + to make the most efficient use of bandwidth (according to + Nagle's algorithm). The no-delay flag requests that data be + written as soon as possible, even if doing so would consume + additional bandwidth. + + This flag is currently defined only for TCP-based ``IOStreams``. + + .. versionadded:: 3.1 + """ + pass + + def _handle_events(self, fd, events): + if self.closed(): + gen_log.warning("Got events for closed stream %s", fd) + return + try: + if self._connecting: + # Most IOLoops will report a write failed connect + # with the WRITE event, but SelectIOLoop reports a + # READ as well so we must check for connecting before + # either. + self._handle_connect() + if self.closed(): + return + if events & self.io_loop.READ: + self._handle_read() + if self.closed(): + return + if events & self.io_loop.WRITE: + self._handle_write() + if self.closed(): + return + if events & self.io_loop.ERROR: + self.error = self.get_fd_error() + # We may have queued up a user callback in _handle_read or + # _handle_write, so don't close the IOStream until those + # callbacks have had a chance to run. + self.io_loop.add_callback(self.close) + return + state = self.io_loop.ERROR + if self.reading(): + state |= self.io_loop.READ + if self.writing(): + state |= self.io_loop.WRITE + if state == self.io_loop.ERROR and self._read_buffer_size == 0: + # If the connection is idle, listen for reads too so + # we can tell if the connection is closed. If there is + # data in the read buffer we won't run the close callback + # yet anyway, so we don't need to listen in this case. + state |= self.io_loop.READ + if state != self._state: + assert self._state is not None, \ + "shouldn't happen: _handle_events without self._state" + self._state = state + self.io_loop.update_handler(self.fileno(), self._state) + except UnsatisfiableReadError as e: + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + except Exception: + gen_log.error("Uncaught exception, closing connection.", + exc_info=True) + self.close(exc_info=True) + raise + + def _run_callback(self, callback, *args): + def wrapper(): + self._pending_callbacks -= 1 + try: + return callback(*args) + except Exception: + app_log.error("Uncaught exception, closing connection.", + exc_info=True) + # Close the socket on an uncaught exception from a user callback + # (It would eventually get closed when the socket object is + # gc'd, but we don't want to rely on gc happening before we + # run out of file descriptors) + self.close(exc_info=True) + # Re-raise the exception so that IOLoop.handle_callback_exception + # can see it and log the error + raise + finally: + self._maybe_add_error_listener() + # We schedule callbacks to be run on the next IOLoop iteration + # rather than running them directly for several reasons: + # * Prevents unbounded stack growth when a callback calls an + # IOLoop operation that immediately runs another callback + # * Provides a predictable execution context for e.g. + # non-reentrant mutexes + # * Ensures that the try/except in wrapper() is run outside + # of the application's StackContexts + with stack_context.NullContext(): + # stack_context was already captured in callback, we don't need to + # capture it again for IOStream's wrapper. This is especially + # important if the callback was pre-wrapped before entry to + # IOStream (as in HTTPConnection._header_callback), as we could + # capture and leak the wrong context here. 
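+ # To illustrate the stack growth this scheduling avoids
+ # (hypothetical user code): a callback that immediately issues
+ # another read whose data is already buffered would otherwise
+ # re-enter the callback machinery synchronously and recurse:
+ #
+ #     def on_line(data):
+ #         stream.read_until(b"\r\n", on_line)  # may complete inline
+ #
+ # Bouncing through io_loop.add_callback restarts each invocation
+ # from a fresh stack frame on the next IOLoop iteration.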
+ self._pending_callbacks += 1 + self.io_loop.add_callback(wrapper) + + def _read_to_buffer_loop(self): + # This method is called from _handle_read and _try_inline_read. + try: + if self._read_bytes is not None: + target_bytes = self._read_bytes + elif self._read_max_bytes is not None: + target_bytes = self._read_max_bytes + elif self.reading(): + # For read_until without max_bytes, or + # read_until_close, read as much as we can before + # scanning for the delimiter. + target_bytes = None + else: + target_bytes = 0 + next_find_pos = 0 + # Pretend to have a pending callback so that an EOF in + # _read_to_buffer doesn't trigger an immediate close + # callback. At the end of this method we'll either + # establish a real pending callback via + # _read_from_buffer or run the close callback. + # + # We need two try statements here so that + # pending_callbacks is decremented before the `except` + # clause below (which calls `close` and does need to + # trigger the callback) + self._pending_callbacks += 1 + while not self.closed(): + # Read from the socket until we get EWOULDBLOCK or equivalent. + # SSL sockets do some internal buffering, and if the data is + # sitting in the SSL object's buffer select() and friends + # can't see it; the only way to find out if it's there is to + # try to read it. + if self._read_to_buffer() == 0: + break + + self._run_streaming_callback() + + # If we've read all the bytes we can use, break out of + # this loop. We can't just call read_from_buffer here + # because of subtle interactions with the + # pending_callback and error_listener mechanisms. + # + # If we've reached target_bytes, we know we're done. + if (target_bytes is not None and + self._read_buffer_size >= target_bytes): + break + + # Otherwise, we need to call the more expensive find_read_pos. + # It's inefficient to do this on every read, so instead + # do it on the first read and whenever the read buffer + # size has doubled. + if self._read_buffer_size >= next_find_pos: + pos = self._find_read_pos() + if pos is not None: + return pos + next_find_pos = self._read_buffer_size * 2 + return self._find_read_pos() + finally: + self._pending_callbacks -= 1 + + def _handle_read(self): + try: + pos = self._read_to_buffer_loop() + except UnsatisfiableReadError: + raise + except Exception as e: + gen_log.warning("error on read: %s" % e) + self.close(exc_info=True) + return + if pos is not None: + self._read_from_buffer(pos) + return + else: + self._maybe_run_close_callback() + + def _set_read_callback(self, callback): + assert self._read_callback is None, "Already reading" + assert self._read_future is None, "Already reading" + if callback is not None: + self._read_callback = stack_context.wrap(callback) + else: + self._read_future = TracebackFuture() + return self._read_future + + def _run_read_callback(self, size, streaming): + if streaming: + callback = self._streaming_callback + else: + callback = self._read_callback + self._read_callback = self._streaming_callback = None + if self._read_future is not None: + assert callback is None + future = self._read_future + self._read_future = None + future.set_result(self._consume(size)) + if callback is not None: + assert (self._read_future is None) or streaming + self._run_callback(callback, self._consume(size)) + else: + # If we scheduled a callback, we will add the error listener + # afterwards. If we didn't, we have to do it now. 
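+ # For reference, the two read interfaces this method serves
+ # (hypothetical user code): exactly one of _read_callback and
+ # _read_future is set per pending read by _set_read_callback.
+ #
+ #     stream.read_bytes(4, callback=on_data)  # runs on_data(b"....")
+ #     future = stream.read_bytes(4)           # resolves with the bytes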
+ self._maybe_add_error_listener() + + def _try_inline_read(self): + """Attempt to complete the current read operation from buffered data. + + If the read can be completed without blocking, schedules the + read callback on the next IOLoop iteration; otherwise starts + listening for reads on the socket. + """ + # See if we've already got the data from a previous read + self._run_streaming_callback() + pos = self._find_read_pos() + if pos is not None: + self._read_from_buffer(pos) + return + self._check_closed() + try: + pos = self._read_to_buffer_loop() + except Exception: + # If there was an error in _read_to_buffer, we called close() already, + # but couldn't run the close callback because of _pending_callbacks. + # Before we escape from this function, run the close callback if + # applicable. + self._maybe_run_close_callback() + raise + if pos is not None: + self._read_from_buffer(pos) + return + # We couldn't satisfy the read inline, so either close the stream + # or listen for new data. + if self.closed(): + self._maybe_run_close_callback() + else: + self._add_io_state(ioloop.IOLoop.READ) + + def _read_to_buffer(self): + """Reads from the socket and appends the result to the read buffer. + + Returns the number of bytes read. Returns 0 if there is nothing + to read (i.e. the read returns EWOULDBLOCK or equivalent). On + error closes the socket and raises an exception. + """ + while True: + try: + chunk = self.read_from_fd() + except (socket.error, IOError, OSError) as e: + if errno_from_exception(e) == errno.EINTR: + continue + # ssl.SSLError is a subclass of socket.error + if self._is_connreset(e): + # Treat ECONNRESET as a connection close rather than + # an error to minimize log spam (the exception will + # be available on self.error for apps that care). + self.close(exc_info=True) + return + self.close(exc_info=True) + raise + break + if chunk is None: + return 0 + self._read_buffer.append(chunk) + self._read_buffer_size += len(chunk) + if self._read_buffer_size > self.max_buffer_size: + gen_log.error("Reached maximum read buffer size") + self.close() + raise StreamBufferFullError("Reached maximum read buffer size") + return len(chunk) + + def _run_streaming_callback(self): + if self._streaming_callback is not None and self._read_buffer_size: + bytes_to_consume = self._read_buffer_size + if self._read_bytes is not None: + bytes_to_consume = min(self._read_bytes, bytes_to_consume) + self._read_bytes -= bytes_to_consume + self._run_read_callback(bytes_to_consume, True) + + def _read_from_buffer(self, pos): + """Attempts to complete the currently-pending read from the buffer. + + The argument is either a position in the read buffer or None, + as returned by _find_read_pos. + """ + self._read_bytes = self._read_delimiter = self._read_regex = None + self._read_partial = False + self._run_read_callback(pos, False) + + def _find_read_pos(self): + """Attempts to find a position in the read buffer that satisfies + the currently-pending read. + + Returns a position in the buffer if the current read can be satisfied, + or None if it cannot. + """ + if (self._read_bytes is not None and + (self._read_buffer_size >= self._read_bytes or + (self._read_partial and self._read_buffer_size > 0))): + num_bytes = min(self._read_bytes, self._read_buffer_size) + return num_bytes + elif self._read_delimiter is not None: + # Multi-byte delimiters (e.g. '\r\n') may straddle two + # chunks in the read buffer, so we can't easily find them + # without collapsing the buffer.
However, since protocols + # using delimited reads (as opposed to reads of a known + # length) tend to be "line" oriented, the delimiter is likely + # to be in the first few chunks. Merge the buffer gradually + # since large merges are relatively expensive and get undone in + # _consume(). + if self._read_buffer: + while True: + loc = self._read_buffer[0].find(self._read_delimiter) + if loc != -1: + delimiter_len = len(self._read_delimiter) + self._check_max_bytes(self._read_delimiter, + loc + delimiter_len) + return loc + delimiter_len + if len(self._read_buffer) == 1: + break + _double_prefix(self._read_buffer) + self._check_max_bytes(self._read_delimiter, + len(self._read_buffer[0])) + elif self._read_regex is not None: + if self._read_buffer: + while True: + m = self._read_regex.search(self._read_buffer[0]) + if m is not None: + self._check_max_bytes(self._read_regex, m.end()) + return m.end() + if len(self._read_buffer) == 1: + break + _double_prefix(self._read_buffer) + self._check_max_bytes(self._read_regex, + len(self._read_buffer[0])) + return None + + def _check_max_bytes(self, delimiter, size): + if (self._read_max_bytes is not None and + size > self._read_max_bytes): + raise UnsatisfiableReadError( + "delimiter %r not found within %d bytes" % ( + delimiter, self._read_max_bytes)) + + def _handle_write(self): + while self._write_buffer: + try: + if not self._write_buffer_frozen: + # On windows, socket.send blows up if given a + # write buffer that's too large, instead of just + # returning the number of bytes it was able to + # process. Therefore we must not call socket.send + # with more than 128KB at a time. + _merge_prefix(self._write_buffer, 128 * 1024) + num_bytes = self.write_to_fd(self._write_buffer[0]) + if num_bytes == 0: + # With OpenSSL, if we couldn't write the entire buffer, + # the very same string object must be used on the + # next call to send. Therefore we suppress + # merging the write buffer after an incomplete send. + # A cleaner solution would be to set + # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER, but this is + # not yet accessible from python + # (http://bugs.python.org/issue8240) + self._write_buffer_frozen = True + break + self._write_buffer_frozen = False + _merge_prefix(self._write_buffer, num_bytes) + self._write_buffer.popleft() + self._write_buffer_size -= num_bytes + except (socket.error, IOError, OSError) as e: + if e.args[0] in _ERRNO_WOULDBLOCK: + self._write_buffer_frozen = True + break + else: + if not self._is_connreset(e): + # Broken pipe errors are usually caused by connection + # reset, and its better to not log EPIPE errors to + # minimize log spam + gen_log.warning("Write error on %s: %s", + self.fileno(), e) + self.close(exc_info=True) + return + if not self._write_buffer: + if self._write_callback: + callback = self._write_callback + self._write_callback = None + self._run_callback(callback) + if self._write_future: + future = self._write_future + self._write_future = None + future.set_result(None) + + def _consume(self, loc): + if loc == 0: + return b"" + _merge_prefix(self._read_buffer, loc) + self._read_buffer_size -= loc + return self._read_buffer.popleft() + + def _check_closed(self): + if self.closed(): + raise StreamClosedError(real_error=self.error) + + def _maybe_add_error_listener(self): + # This method is part of an optimization: to detect a connection that + # is closed when we're not actively reading or writing, we must listen + # for read events. 
However, it is inefficient to do this when the + # connection is first established because we are going to read or write + # immediately anyway. Instead, we insert checks at various times to + # see if the connection is idle and add the read listener then. + if self._pending_callbacks != 0: + return + if self._state is None or self._state == ioloop.IOLoop.ERROR: + if self.closed(): + self._maybe_run_close_callback() + elif (self._read_buffer_size == 0 and + self._close_callback is not None): + self._add_io_state(ioloop.IOLoop.READ) + + def _add_io_state(self, state): + """Adds `state` (IOLoop.{READ,WRITE} flags) to our event handler. + + Implementation notes: Reads and writes have a fast path and a + slow path. The fast path reads synchronously from socket + buffers, while the slow path uses `_add_io_state` to schedule + an IOLoop callback. Note that in both cases, the callback is + run asynchronously with `_run_callback`. + + To detect closed connections, we must have called + `_add_io_state` at some point, but we want to delay this as + much as possible so we don't have to set an `IOLoop.ERROR` + listener that will be overwritten by the next slow-path + operation. As long as there are callbacks scheduled for + fast-path ops, those callbacks may do more reads. + If a sequence of fast-path ops do not end in a slow-path op, + (e.g. for an @asynchronous long-poll request), we must add + the error handler. This is done in `_run_callback` and `write` + (since the write callback is optional so we can have a + fast-path write with no `_run_callback`) + """ + if self.closed(): + # connection has been closed, so there can be no future events + return + if self._state is None: + self._state = ioloop.IOLoop.ERROR | state + with stack_context.NullContext(): + self.io_loop.add_handler( + self.fileno(), self._handle_events, self._state) + elif not self._state & state: + self._state = self._state | state + self.io_loop.update_handler(self.fileno(), self._state) + + def _is_connreset(self, exc): + """Return true if exc is ECONNRESET or equivalent. + + May be overridden in subclasses. + """ + return (isinstance(exc, (socket.error, IOError)) and + errno_from_exception(exc) in _ERRNO_CONNRESET) + + +class IOStream(BaseIOStream): + r"""Socket-based `IOStream` implementation. + + This class supports the read and write methods from `BaseIOStream` + plus a `connect` method. + + The ``socket`` parameter may either be connected or unconnected. + For server operations the socket is the result of calling + `socket.accept `. For client operations the + socket is created with `socket.socket`, and may either be + connected before passing it to the `IOStream` or connected with + `IOStream.connect`. + + A very simple (and broken) HTTP client using this class: + + .. 
testcode:: + + import tornado.ioloop + import tornado.iostream + import socket + + def send_request(): + stream.write(b"GET / HTTP/1.0\r\nHost: friendfeed.com\r\n\r\n") + stream.read_until(b"\r\n\r\n", on_headers) + + def on_headers(data): + headers = {} + for line in data.split(b"\r\n"): + parts = line.split(b":") + if len(parts) == 2: + headers[parts[0].strip()] = parts[1].strip() + stream.read_bytes(int(headers[b"Content-Length"]), on_body) + + def on_body(data): + print(data) + stream.close() + tornado.ioloop.IOLoop.current().stop() + + if __name__ == '__main__': + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) + stream = tornado.iostream.IOStream(s) + stream.connect(("friendfeed.com", 80), send_request) + tornado.ioloop.IOLoop.current().start() + + .. testoutput:: + :hide: + + """ + def __init__(self, socket, *args, **kwargs): + self.socket = socket + self.socket.setblocking(False) + super(IOStream, self).__init__(*args, **kwargs) + + def fileno(self): + return self.socket + + def close_fd(self): + self.socket.close() + self.socket = None + + def get_fd_error(self): + errno = self.socket.getsockopt(socket.SOL_SOCKET, + socket.SO_ERROR) + return socket.error(errno, os.strerror(errno)) + + def read_from_fd(self): + try: + chunk = self.socket.recv(self.read_chunk_size) + except socket.error as e: + if e.args[0] in _ERRNO_WOULDBLOCK: + return None + else: + raise + if not chunk: + self.close() + return None + return chunk + + def write_to_fd(self, data): + return self.socket.send(data) + + def connect(self, address, callback=None, server_hostname=None): + """Connects the socket to a remote address without blocking. + + May only be called if the socket passed to the constructor was + not previously connected. The address parameter is in the + same format as for `socket.connect ` for + the type of socket passed to the IOStream constructor, + e.g. an ``(ip, port)`` tuple. Hostnames are accepted here, + but will be resolved synchronously and block the IOLoop. + If you have a hostname instead of an IP address, the `.TCPClient` + class is recommended instead of calling this method directly. + `.TCPClient` will do asynchronous DNS resolution and handle + both IPv4 and IPv6. + + If ``callback`` is specified, it will be called with no + arguments when the connection is completed; if not this method + returns a `.Future` (whose result after a successful + connection will be the stream itself). + + In SSL mode, the ``server_hostname`` parameter will be used + for certificate validation (unless disabled in the + ``ssl_options``) and SNI (if supported; requires Python + 2.7.9+). + + Note that it is safe to call `IOStream.write + ` while the connection is pending, in + which case the data will be written as soon as the connection + is ready. Calling `IOStream` read methods before the socket is + connected works on some platforms but is non-portable. + + .. versionchanged:: 4.0 + If no callback is given, returns a `.Future`. + + .. versionchanged:: 4.2 + SSL certificates are validated by default; pass + ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a + suitably-configured `ssl.SSLContext` to the + `SSLIOStream` constructor to disable. + """ + self._connecting = True + if callback is not None: + self._connect_callback = stack_context.wrap(callback) + future = None + else: + future = self._connect_future = TracebackFuture() + try: + self.socket.connect(address) + except socket.error as e: + # In non-blocking mode we expect connect() to raise an + # exception with EINPROGRESS or EWOULDBLOCK. 
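+ # For illustration, the bare non-blocking idiom being wrapped
+ # here, sketched outside Tornado with a hypothetical address:
+ #
+ #     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ #     sock.setblocking(False)
+ #     try:
+ #         sock.connect(("127.0.0.1", 8888))
+ #     except socket.error as e:
+ #         # EINPROGRESS (or EWOULDBLOCK on Windows) means the connect
+ #         # is in flight, not failed; the real result arrives when
+ #         # the fd becomes writable.
+ #         assert errno_from_exception(e) in (
+ #             _ERRNO_INPROGRESS + _ERRNO_WOULDBLOCK)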
+ # + # On freebsd, other errors such as ECONNREFUSED may be + # returned immediately when attempting to connect to + # localhost, so handle them the same way as an error + # reported later in _handle_connect. + if (errno_from_exception(e) not in _ERRNO_INPROGRESS and + errno_from_exception(e) not in _ERRNO_WOULDBLOCK): + if future is None: + gen_log.warning("Connect error on fd %s: %s", + self.socket.fileno(), e) + self.close(exc_info=True) + return future + self._add_io_state(self.io_loop.WRITE) + return future + + def start_tls(self, server_side, ssl_options=None, server_hostname=None): + """Convert this `IOStream` to an `SSLIOStream`. + + This enables protocols that begin in clear-text mode and + switch to SSL after some initial negotiation (such as the + ``STARTTLS`` extension to SMTP and IMAP). + + This method cannot be used if there are outstanding reads + or writes on the stream, or if there is any data in the + IOStream's buffer (data in the operating system's socket + buffer is allowed). This means it must generally be used + immediately after reading or writing the last clear-text + data. It can also be used immediately after connecting, + before any reads or writes. + + The ``ssl_options`` argument may be either an `ssl.SSLContext` + object or a dictionary of keyword arguments for the + `ssl.wrap_socket` function. The ``server_hostname`` argument + will be used for certificate validation unless disabled + in the ``ssl_options``. + + This method returns a `.Future` whose result is the new + `SSLIOStream`. After this method has been called, + any other operation on the original stream is undefined. + + If a close callback is defined on this stream, it will be + transferred to the new stream. + + .. versionadded:: 4.0 + + .. versionchanged:: 4.2 + SSL certificates are validated by default; pass + ``ssl_options=dict(cert_reqs=ssl.CERT_NONE)`` or a + suitably-configured `ssl.SSLContext` to disable. + """ + if (self._read_callback or self._read_future or + self._write_callback or self._write_future or + self._connect_callback or self._connect_future or + self._pending_callbacks or self._closed or + self._read_buffer or self._write_buffer): + raise ValueError("IOStream is not idle; cannot convert to SSL") + if ssl_options is None: + if server_side: + ssl_options = _server_ssl_defaults + else: + ssl_options = _client_ssl_defaults + + socket = self.socket + self.io_loop.remove_handler(socket) + self.socket = None + socket = ssl_wrap_socket(socket, ssl_options, + server_hostname=server_hostname, + server_side=server_side, + do_handshake_on_connect=False) + orig_close_callback = self._close_callback + self._close_callback = None + + future = TracebackFuture() + ssl_stream = SSLIOStream(socket, ssl_options=ssl_options, + io_loop=self.io_loop) + # Wrap the original close callback so we can fail our Future as well. + # If we had an "unwrap" counterpart to this method we would need + # to restore the original callback after our Future resolves + # so that repeated wrap/unwrap calls don't build up layers. + + def close_callback(): + if not future.done(): + # Note that unlike most Futures returned by IOStream, + # this one passes the underlying error through directly + # instead of wrapping everything in a StreamClosedError + # with a real_error attribute. 
This is because once the + # connection is established it's more helpful to raise + # the SSLError directly than to hide it behind a + # StreamClosedError (and the client is expecting SSL + # issues rather than network issues since this method is + # named start_tls). + future.set_exception(ssl_stream.error or StreamClosedError()) + if orig_close_callback is not None: + orig_close_callback() + ssl_stream.set_close_callback(close_callback) + ssl_stream._ssl_connect_callback = lambda: future.set_result(ssl_stream) + ssl_stream.max_buffer_size = self.max_buffer_size + ssl_stream.read_chunk_size = self.read_chunk_size + return future + + def _handle_connect(self): + err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) + if err != 0: + self.error = socket.error(err, os.strerror(err)) + # IOLoop implementations may vary: some of them return + # an error state before the socket becomes writable, so + # in that case a connection failure would be handled by the + # error path in _handle_events instead of here. + if self._connect_future is None: + gen_log.warning("Connect error on fd %s: %s", + self.socket.fileno(), errno.errorcode[err]) + self.close() + return + if self._connect_callback is not None: + callback = self._connect_callback + self._connect_callback = None + self._run_callback(callback) + if self._connect_future is not None: + future = self._connect_future + self._connect_future = None + future.set_result(self) + self._connecting = False + + def set_nodelay(self, value): + if (self.socket is not None and + self.socket.family in (socket.AF_INET, socket.AF_INET6)): + try: + self.socket.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, 1 if value else 0) + except socket.error as e: + # Sometimes setsockopt will fail if the socket is closed + # at the wrong time. This can happen with HTTPServer + # resetting the value to false between requests. + if e.errno != errno.EINVAL and not self._is_connreset(e): + raise + + +class SSLIOStream(IOStream): + """A utility class to write to and read from a non-blocking SSL socket. + + If the socket passed to the constructor is already connected, + it should be wrapped with:: + + ssl.wrap_socket(sock, do_handshake_on_connect=False, **kwargs) + + before constructing the `SSLIOStream`. Unconnected sockets will be + wrapped when `IOStream.connect` is finished. + """ + def __init__(self, *args, **kwargs): + """The ``ssl_options`` keyword argument may either be an + `ssl.SSLContext` object or a dictionary of keywords arguments + for `ssl.wrap_socket` + """ + self._ssl_options = kwargs.pop('ssl_options', _client_ssl_defaults) + super(SSLIOStream, self).__init__(*args, **kwargs) + self._ssl_accepting = True + self._handshake_reading = False + self._handshake_writing = False + self._ssl_connect_callback = None + self._server_hostname = None + + # If the socket is already connected, attempt to start the handshake. + try: + self.socket.getpeername() + except socket.error: + pass + else: + # Indirectly start the handshake, which will run on the next + # IOLoop iteration and then the real IO state will be set in + # _handle_events. 
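+ # A sketch of the already-connected case handled here
+ # (hypothetical server-side code; `listener` and `ctx` are
+ # assumed to be a listening socket and an ssl.SSLContext):
+ #
+ #     conn, addr = listener.accept()
+ #     conn = ctx.wrap_socket(conn, server_side=True,
+ #                            do_handshake_on_connect=False)
+ #     # getpeername() succeeds, so construction schedules the
+ #     # handshake; wait_for_handshake reports its completion.
+ #     stream = SSLIOStream(conn)
+ #     future = stream.wait_for_handshake()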
+ self._add_io_state(self.io_loop.WRITE) + + def reading(self): + return self._handshake_reading or super(SSLIOStream, self).reading() + + def writing(self): + return self._handshake_writing or super(SSLIOStream, self).writing() + + def _do_ssl_handshake(self): + # Based on code from test_ssl.py in the python stdlib + try: + self._handshake_reading = False + self._handshake_writing = False + self.socket.do_handshake() + except ssl.SSLError as err: + if err.args[0] == ssl.SSL_ERROR_WANT_READ: + self._handshake_reading = True + return + elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE: + self._handshake_writing = True + return + elif err.args[0] in (ssl.SSL_ERROR_EOF, + ssl.SSL_ERROR_ZERO_RETURN): + return self.close(exc_info=True) + elif err.args[0] == ssl.SSL_ERROR_SSL: + try: + peer = self.socket.getpeername() + except Exception: + peer = '(not connected)' + gen_log.warning("SSL Error on %s %s: %s", + self.socket.fileno(), peer, err) + return self.close(exc_info=True) + raise + except socket.error as err: + # Some port scans (e.g. nmap in -sT mode) have been known + # to cause do_handshake to raise EBADF and ENOTCONN, so make + # those errors quiet as well. + # https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0 + if (self._is_connreset(err) or + err.args[0] in (errno.EBADF, errno.ENOTCONN)): + return self.close(exc_info=True) + raise + except AttributeError: + # On Linux, if the connection was reset before the call to + # wrap_socket, do_handshake will fail with an + # AttributeError. + return self.close(exc_info=True) + else: + self._ssl_accepting = False + if not self._verify_cert(self.socket.getpeercert()): + self.close() + return + self._run_ssl_connect_callback() + + def _run_ssl_connect_callback(self): + if self._ssl_connect_callback is not None: + callback = self._ssl_connect_callback + self._ssl_connect_callback = None + self._run_callback(callback) + if self._ssl_connect_future is not None: + future = self._ssl_connect_future + self._ssl_connect_future = None + future.set_result(self) + + def _verify_cert(self, peercert): + """Returns True if peercert is valid according to the configured + validation mode and hostname. + + The ssl handshake already tested the certificate for a valid + CA signature; the only thing that remains is to check + the hostname. + """ + if isinstance(self._ssl_options, dict): + verify_mode = self._ssl_options.get('cert_reqs', ssl.CERT_NONE) + elif isinstance(self._ssl_options, ssl.SSLContext): + verify_mode = self._ssl_options.verify_mode + assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL) + if verify_mode == ssl.CERT_NONE or self._server_hostname is None: + return True + cert = self.socket.getpeercert() + if cert is None and verify_mode == ssl.CERT_REQUIRED: + gen_log.warning("No SSL certificate given") + return False + try: + ssl_match_hostname(peercert, self._server_hostname) + except SSLCertificateError as e: + gen_log.warning("Invalid SSL certificate: %s" % e) + return False + else: + return True + + def _handle_read(self): + if self._ssl_accepting: + self._do_ssl_handshake() + return + super(SSLIOStream, self)._handle_read() + + def _handle_write(self): + if self._ssl_accepting: + self._do_ssl_handshake() + return + super(SSLIOStream, self)._handle_write() + + def connect(self, address, callback=None, server_hostname=None): + self._server_hostname = server_hostname + # Pass a dummy callback to super.connect(), which is slightly + # more efficient than letting it return a Future we ignore. 
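+ # A client-side sketch (hypothetical host and port): the future
+ # returned below resolves only after both the TCP connect and
+ # the TLS handshake have completed.
+ #
+ #     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ #     stream = SSLIOStream(sock)
+ #     future = stream.connect(("example.com", 443),
+ #                             server_hostname="example.com")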
+ super(SSLIOStream, self).connect(address, callback=lambda: None) + return self.wait_for_handshake(callback) + + def _handle_connect(self): + # Call the superclass method to check for errors. + super(SSLIOStream, self)._handle_connect() + if self.closed(): + return + # When the connection is complete, wrap the socket for SSL + # traffic. Note that we do this by overriding _handle_connect + # instead of by passing a callback to super().connect because + # user callbacks are enqueued asynchronously on the IOLoop, + # but since _handle_events calls _handle_connect immediately + # followed by _handle_write we need this to be synchronous. + # + # The IOLoop will get confused if we swap out self.socket while the + # fd is registered, so remove it now and re-register after + # wrap_socket(). + self.io_loop.remove_handler(self.socket) + old_state = self._state + self._state = None + self.socket = ssl_wrap_socket(self.socket, self._ssl_options, + server_hostname=self._server_hostname, + do_handshake_on_connect=False) + self._add_io_state(old_state) + + def wait_for_handshake(self, callback=None): + """Wait for the initial SSL handshake to complete. + + If a ``callback`` is given, it will be called with no + arguments once the handshake is complete; otherwise this + method returns a `.Future` which will resolve to the + stream itself after the handshake is complete. + + Once the handshake is complete, information such as + the peer's certificate and NPN/ALPN selections may be + accessed on ``self.socket``. + + This method is intended for use on server-side streams + or after using `IOStream.start_tls`; it should not be used + with `IOStream.connect` (which already waits for the + handshake to complete). It may only be called once per stream. + + .. versionadded:: 4.2 + """ + if (self._ssl_connect_callback is not None or + self._ssl_connect_future is not None): + raise RuntimeError("Already waiting") + if callback is not None: + self._ssl_connect_callback = stack_context.wrap(callback) + future = None + else: + future = self._ssl_connect_future = TracebackFuture() + if not self._ssl_accepting: + self._run_ssl_connect_callback() + return future + + def write_to_fd(self, data): + try: + return self.socket.send(data) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_WANT_WRITE: + # In Python 3.5+, SSLSocket.send raises a WANT_WRITE error if + # the socket is not writeable; we need to transform this into + # an EWOULDBLOCK socket.error or a zero return value, + # either of which will be recognized by the caller of this + # method. Prior to Python 3.5, an unwriteable socket would + # simply return 0 bytes written. + return 0 + raise + + def read_from_fd(self): + if self._ssl_accepting: + # If the handshake hasn't finished yet, there can't be anything + # to read (attempting to read may or may not raise an exception + # depending on the SSL version) + return None + try: + # SSLSocket objects have both a read() and recv() method, + # while regular sockets only have recv(). + # The recv() method blocks (at least in python 2.6) if it is + # called when there is nothing to read, so we have to use + # read() instead. + chunk = self.socket.read(self.read_chunk_size) + except ssl.SSLError as e: + # SSLError is a subclass of socket.error, so this except + # block must come first. 
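+ # For illustration (comment only): reversing the clauses would
+ # let the socket.error branch catch ssl.SSLError too, so a
+ # WANT_READ "no data yet" condition would be re-raised as an
+ # error instead of returning None.
+ #
+ #     >>> import ssl, socket
+ #     >>> issubclass(ssl.SSLError, socket.error)
+ #     True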
+ if e.args[0] == ssl.SSL_ERROR_WANT_READ: + return None + else: + raise + except socket.error as e: + if e.args[0] in _ERRNO_WOULDBLOCK: + return None + else: + raise + if not chunk: + self.close() + return None + return chunk + + def _is_connreset(self, e): + if isinstance(e, ssl.SSLError) and e.args[0] == ssl.SSL_ERROR_EOF: + return True + return super(SSLIOStream, self)._is_connreset(e) + + +class PipeIOStream(BaseIOStream): + """Pipe-based `IOStream` implementation. + + The constructor takes an integer file descriptor (such as one returned + by `os.pipe`) rather than an open file object. Pipes are generally + one-way, so a `PipeIOStream` can be used for reading or writing but not + both. + """ + def __init__(self, fd, *args, **kwargs): + self.fd = fd + _set_nonblocking(fd) + super(PipeIOStream, self).__init__(*args, **kwargs) + + def fileno(self): + return self.fd + + def close_fd(self): + os.close(self.fd) + + def write_to_fd(self, data): + return os.write(self.fd, data) + + def read_from_fd(self): + try: + chunk = os.read(self.fd, self.read_chunk_size) + except (IOError, OSError) as e: + if errno_from_exception(e) in _ERRNO_WOULDBLOCK: + return None + elif errno_from_exception(e) == errno.EBADF: + # If the writing half of a pipe is closed, select will + # report it as readable but reads will fail with EBADF. + self.close(exc_info=True) + return None + else: + raise + if not chunk: + self.close() + return None + return chunk + + +def _double_prefix(deque): + """Grow by doubling, but don't split the second chunk just because the + first one is small. + """ + new_len = max(len(deque[0]) * 2, + (len(deque[0]) + len(deque[1]))) + _merge_prefix(deque, new_len) + + +def _merge_prefix(deque, size): + """Replace the first entries in a deque of strings with a single + string of up to size bytes. + + >>> d = collections.deque(['abc', 'de', 'fghi', 'j']) + >>> _merge_prefix(d, 5); print(d) + deque(['abcde', 'fghi', 'j']) + + Strings will be split as necessary to reach the desired size. + >>> _merge_prefix(d, 7); print(d) + deque(['abcdefg', 'hi', 'j']) + + >>> _merge_prefix(d, 3); print(d) + deque(['abc', 'defg', 'hi', 'j']) + + >>> _merge_prefix(d, 100); print(d) + deque(['abcdefghij']) + """ + if len(deque) == 1 and len(deque[0]) <= size: + return + prefix = [] + remaining = size + while deque and remaining > 0: + chunk = deque.popleft() + if len(chunk) > remaining: + deque.appendleft(chunk[remaining:]) + chunk = chunk[:remaining] + prefix.append(chunk) + remaining -= len(chunk) + # This data structure normally just contains byte strings, but + # the unittest gets messy if it doesn't use the default str() type, + # so do the merge based on the type of data that's actually present. + if prefix: + deque.appendleft(type(prefix[0])().join(prefix)) + if not deque: + deque.appendleft(b"") + + +def doctests(): + import doctest + return doctest.DocTestSuite() diff --git a/server/www/packages/packages-common/tornado/locale.py b/server/www/packages/packages-common/tornado/locale.py new file mode 100644 index 0000000..8310c4d --- /dev/null +++ b/server/www/packages/packages-common/tornado/locale.py @@ -0,0 +1,521 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Translation methods for generating localized strings. + +To load a locale and generate a translated string:: + + user_locale = tornado.locale.get("es_LA") + print user_locale.translate("Sign out") + +`tornado.locale.get()` returns the closest matching locale, not necessarily the +specific locale you requested. You can support pluralization with +additional arguments to `~Locale.translate()`, e.g.:: + + people = [...] + message = user_locale.translate( + "%(list)s is online", "%(list)s are online", len(people)) + print message % {"list": user_locale.list(people)} + +The first string is chosen if ``len(people) == 1``, otherwise the second +string is chosen. + +Applications should call one of `load_translations` (which uses a simple +CSV format) or `load_gettext_translations` (which uses the ``.mo`` format +supported by `gettext` and related tools). If neither method is called, +the `Locale.translate` method will simply return the original string. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import codecs +import csv +import datetime +from io import BytesIO +import numbers +import os +import re + +from tornado import escape +from tornado.log import gen_log +from tornado.util import u + +from tornado._locale_data import LOCALE_NAMES + +_default_locale = "en_US" +_translations = {} +_supported_locales = frozenset([_default_locale]) +_use_gettext = False +CONTEXT_SEPARATOR = "\x04" + + +def get(*locale_codes): + """Returns the closest match for the given locale codes. + + We iterate over all given locale codes in order. If we have a tight + or a loose match for the code (e.g., "en" for "en_US"), we return + the locale. Otherwise we move to the next code in the list. + + By default we return ``en_US`` if no translations are found for any of + the specified locales. You can change the default locale with + `set_default_locale()`. + """ + return Locale.get_closest(*locale_codes) + + +def set_default_locale(code): + """Sets the default locale. + + The default locale is assumed to be the language used for all strings + in the system. The translations loaded from disk are mappings from + the default locale to the destination locale. Consequently, you don't + need to create a translation file for the default locale. + """ + global _default_locale + global _supported_locales + _default_locale = code + _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) + + +def load_translations(directory, encoding=None): + """Loads translations from CSV files in a directory. + + Translations are strings with optional Python-style named placeholders + (e.g., ``My name is %(name)s``) and their associated translations. + + The directory should have translation files of the form ``LOCALE.csv``, + e.g. ``es_GT.csv``. The CSV files should have two or three columns: string, + translation, and an optional plural indicator. Plural indicators should + be one of "plural" or "singular". A given string can have both singular + and plural forms. 
For example ``%(name)s liked this`` may have a + different verb conjugation depending on whether %(name)s is one + name or a list of names. There should be two rows in the CSV file for + that string, one with plural indicator "singular", and one "plural". + For strings with no verbs that would change on translation, simply + use "unknown" or the empty string (or don't include the column at all). + + The file is read using the `csv` module in the default "excel" dialect. + In this format there should not be spaces after the commas. + + If no ``encoding`` parameter is given, the encoding will be + detected automatically (among UTF-8 and UTF-16) if the file + contains a byte-order marker (BOM), defaulting to UTF-8 if no BOM + is present. + + Example translation ``es_LA.csv``:: + + "I love you","Te amo" + "%(name)s liked this","A %(name)s les gustó esto","plural" + "%(name)s liked this","A %(name)s le gustó esto","singular" + + .. versionchanged:: 4.3 + Added ``encoding`` parameter. Added support for BOM-based encoding + detection, UTF-16, and UTF-8-with-BOM. + """ + global _translations + global _supported_locales + _translations = {} + for path in os.listdir(directory): + if not path.endswith(".csv"): + continue + locale, extension = path.split(".") + if not re.match("[a-z]+(_[A-Z]+)?$", locale): + gen_log.error("Unrecognized locale %r (path: %s)", locale, + os.path.join(directory, path)) + continue + full_path = os.path.join(directory, path) + if encoding is None: + # Try to autodetect encoding based on the BOM. + with open(full_path, 'rb') as f: + data = f.read(len(codecs.BOM_UTF16_LE)) + if data in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + encoding = 'utf-16' + else: + # utf-8-sig is "utf-8 with optional BOM". It's discouraged + # in most cases but is common with CSV files because Excel + # cannot read utf-8 files without a BOM. + encoding = 'utf-8-sig' + try: + # python 3: csv.reader requires a file open in text mode. + # Force utf8 to avoid dependence on $LANG environment variable. + f = open(full_path, "r", encoding=encoding) + except TypeError: + # python 2: csv can only handle byte strings (in ascii-compatible + # encodings), which we decode below. Transcode everything into + # utf8 before passing it to csv.reader. + f = BytesIO() + with codecs.open(full_path, "r", encoding=encoding) as infile: + f.write(escape.utf8(infile.read())) + f.seek(0) + _translations[locale] = {} + for i, row in enumerate(csv.reader(f)): + if not row or len(row) < 2: + continue + row = [escape.to_unicode(c).strip() for c in row] + english, translation = row[:2] + if len(row) > 2: + plural = row[2] or "unknown" + else: + plural = "unknown" + if plural not in ("plural", "singular", "unknown"): + gen_log.error("Unrecognized plural indicator %r in %s line %d", + plural, path, i + 1) + continue + _translations[locale].setdefault(plural, {})[english] = translation + f.close() + _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) + gen_log.debug("Supported locales: %s", sorted(_supported_locales)) + + +def load_gettext_translations(directory, domain): + """Loads translations from `gettext`'s locale tree + + Locale tree is similar to system's ``/usr/share/locale``, like:: + + {directory}/{lang}/LC_MESSAGES/{domain}.mo + + Three steps are required to have your app translated: + + 1. Generate POT translation file:: + + xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc + + 2. Merge against existing POT file:: + + msgmerge old.po mydomain.po > new.po + + 3.
Compile:: + + msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo + """ + import gettext + global _translations + global _supported_locales + global _use_gettext + _translations = {} + for lang in os.listdir(directory): + if lang.startswith('.'): + continue # skip .svn, etc + if os.path.isfile(os.path.join(directory, lang)): + continue + try: + os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo")) + _translations[lang] = gettext.translation(domain, directory, + languages=[lang]) + except Exception as e: + gen_log.error("Cannot load translation for '%s': %s", lang, str(e)) + continue + _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) + _use_gettext = True + gen_log.debug("Supported locales: %s", sorted(_supported_locales)) + + +def get_supported_locales(): + """Returns a list of all the supported locale codes.""" + return _supported_locales + + +class Locale(object): + """Object representing a locale. + + After calling one of `load_translations` or `load_gettext_translations`, + call `get` or `get_closest` to get a Locale object. + """ + @classmethod + def get_closest(cls, *locale_codes): + """Returns the closest match for the given locale code.""" + for code in locale_codes: + if not code: + continue + code = code.replace("-", "_") + parts = code.split("_") + if len(parts) > 2: + continue + elif len(parts) == 2: + code = parts[0].lower() + "_" + parts[1].upper() + if code in _supported_locales: + return cls.get(code) + if parts[0].lower() in _supported_locales: + return cls.get(parts[0].lower()) + return cls.get(_default_locale) + + @classmethod + def get(cls, code): + """Returns the Locale for the given locale code. + + If it is not supported, we raise an exception. + """ + if not hasattr(cls, "_cache"): + cls._cache = {} + if code not in cls._cache: + assert code in _supported_locales + translations = _translations.get(code, None) + if translations is None: + locale = CSVLocale(code, {}) + elif _use_gettext: + locale = GettextLocale(code, translations) + else: + locale = CSVLocale(code, translations) + cls._cache[code] = locale + return cls._cache[code] + + def __init__(self, code, translations): + self.code = code + self.name = LOCALE_NAMES.get(code, {}).get("name", u("Unknown")) + self.rtl = False + for prefix in ["fa", "ar", "he"]: + if self.code.startswith(prefix): + self.rtl = True + break + self.translations = translations + + # Initialize strings for date formatting + _ = self.translate + self._months = [ + _("January"), _("February"), _("March"), _("April"), + _("May"), _("June"), _("July"), _("August"), + _("September"), _("October"), _("November"), _("December")] + self._weekdays = [ + _("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), + _("Friday"), _("Saturday"), _("Sunday")] + + def translate(self, message, plural_message=None, count=None): + """Returns the translation for the given message for this locale. + + If ``plural_message`` is given, you must also provide + ``count``. We return ``plural_message`` when ``count != 1``, + and we return the singular form for the given message when + ``count == 1``. + """ + raise NotImplementedError() + + def pgettext(self, context, message, plural_message=None, count=None): + raise NotImplementedError() + + def format_date(self, date, gmt_offset=0, relative=True, shorter=False, + full_format=False): + """Formats the given date (which should be GMT). + + By default, we return a relative time (e.g., "2 minutes ago"). 
You + can return an absolute date string with ``relative=False``. + + You can force a full format date ("July 10, 1980") with + ``full_format=True``. + + This method is primarily intended for dates in the past. + For dates in the future, we fall back to full format. + """ + if isinstance(date, numbers.Real): + date = datetime.datetime.utcfromtimestamp(date) + now = datetime.datetime.utcnow() + if date > now: + if relative and (date - now).seconds < 60: + # Due to clock skew, some things are slightly + # in the future. Round timestamps in the immediate + # future down to now in relative mode. + date = now + else: + # Otherwise, future dates always use the full format. + full_format = True + local_date = date - datetime.timedelta(minutes=gmt_offset) + local_now = now - datetime.timedelta(minutes=gmt_offset) + local_yesterday = local_now - datetime.timedelta(hours=24) + difference = now - date + seconds = difference.seconds + days = difference.days + + _ = self.translate + format = None + if not full_format: + if relative and days == 0: + if seconds < 50: + return _("1 second ago", "%(seconds)d seconds ago", + seconds) % {"seconds": seconds} + + if seconds < 50 * 60: + minutes = round(seconds / 60.0) + return _("1 minute ago", "%(minutes)d minutes ago", + minutes) % {"minutes": minutes} + + hours = round(seconds / (60.0 * 60)) + return _("1 hour ago", "%(hours)d hours ago", + hours) % {"hours": hours} + + if days == 0: + format = _("%(time)s") + elif days == 1 and local_date.day == local_yesterday.day and \ + relative: + format = _("yesterday") if shorter else \ + _("yesterday at %(time)s") + elif days < 5: + format = _("%(weekday)s") if shorter else \ + _("%(weekday)s at %(time)s") + elif days < 334: # 11mo, since confusing for same month last year + format = _("%(month_name)s %(day)s") if shorter else \ + _("%(month_name)s %(day)s at %(time)s") + + if format is None: + format = _("%(month_name)s %(day)s, %(year)s") if shorter else \ + _("%(month_name)s %(day)s, %(year)s at %(time)s") + + tfhour_clock = self.code not in ("en", "en_US", "zh_CN") + if tfhour_clock: + str_time = "%d:%02d" % (local_date.hour, local_date.minute) + elif self.code == "zh_CN": + str_time = "%s%d:%02d" % ( + (u('\u4e0a\u5348'), u('\u4e0b\u5348'))[local_date.hour >= 12], + local_date.hour % 12 or 12, local_date.minute) + else: + str_time = "%d:%02d %s" % ( + local_date.hour % 12 or 12, local_date.minute, + ("am", "pm")[local_date.hour >= 12]) + + return format % { + "month_name": self._months[local_date.month - 1], + "weekday": self._weekdays[local_date.weekday()], + "day": str(local_date.day), + "year": str(local_date.year), + "time": str_time + } + + def format_day(self, date, gmt_offset=0, dow=True): + """Formats the given date as a day of week. + + Example: "Monday, January 22". You can remove the day of week with + ``dow=False``. + """ + local_date = date - datetime.timedelta(minutes=gmt_offset) + _ = self.translate + if dow: + return _("%(weekday)s, %(month_name)s %(day)s") % { + "month_name": self._months[local_date.month - 1], + "weekday": self._weekdays[local_date.weekday()], + "day": str(local_date.day), + } + else: + return _("%(month_name)s %(day)s") % { + "month_name": self._months[local_date.month - 1], + "day": str(local_date.day), + } + + def list(self, parts): + """Returns a comma-separated list for the given list of parts. + + The format is, e.g., "A, B and C", "A and B" or just "A" for lists + of size 1.
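+
+        For example (an illustrative call, assuming the default ``en_US``
+        locale with no translation files loaded)::
+
+            locale = tornado.locale.get("en_US")
+            locale.list(["apples", "oranges", "pears"])
+            # -> "apples, oranges and pears"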
+ """ + _ = self.translate + if len(parts) == 0: + return "" + if len(parts) == 1: + return parts[0] + comma = u(' \u0648 ') if self.code.startswith("fa") else u(", ") + return _("%(commas)s and %(last)s") % { + "commas": comma.join(parts[:-1]), + "last": parts[len(parts) - 1], + } + + def friendly_number(self, value): + """Returns a comma-separated number for the given integer.""" + if self.code not in ("en", "en_US"): + return str(value) + value = str(value) + parts = [] + while value: + parts.append(value[-3:]) + value = value[:-3] + return ",".join(reversed(parts)) + + +class CSVLocale(Locale): + """Locale implementation using tornado's CSV translation format.""" + def translate(self, message, plural_message=None, count=None): + if plural_message is not None: + assert count is not None + if count != 1: + message = plural_message + message_dict = self.translations.get("plural", {}) + else: + message_dict = self.translations.get("singular", {}) + else: + message_dict = self.translations.get("unknown", {}) + return message_dict.get(message, message) + + def pgettext(self, context, message, plural_message=None, count=None): + if self.translations: + gen_log.warning('pgettext is not supported by CSVLocale') + return self.translate(message, plural_message, count) + + +class GettextLocale(Locale): + """Locale implementation using the `gettext` module.""" + def __init__(self, code, translations): + try: + # python 2 + self.ngettext = translations.ungettext + self.gettext = translations.ugettext + except AttributeError: + # python 3 + self.ngettext = translations.ngettext + self.gettext = translations.gettext + # self.gettext must exist before __init__ is called, since it + # calls into self.translate + super(GettextLocale, self).__init__(code, translations) + + def translate(self, message, plural_message=None, count=None): + if plural_message is not None: + assert count is not None + return self.ngettext(message, plural_message, count) + else: + return self.gettext(message) + + def pgettext(self, context, message, plural_message=None, count=None): + """Allows to set context for translation, accepts plural forms. + + Usage example:: + + pgettext("law", "right") + pgettext("good", "right") + + Plural message example:: + + pgettext("organization", "club", "clubs", len(clubs)) + pgettext("stick", "club", "clubs", len(clubs)) + + To generate POT file with context, add following options to step 1 + of `load_gettext_translations` sequence:: + + xgettext [basic options] --keyword=pgettext:1c,2 --keyword=pgettext:1c,2,3 + + .. 
versionadded:: 4.2 + """ + if plural_message is not None: + assert count is not None + msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, message), + "%s%s%s" % (context, CONTEXT_SEPARATOR, plural_message), + count) + result = self.ngettext(*msgs_with_ctxt) + if CONTEXT_SEPARATOR in result: + # Translation not found + result = self.ngettext(message, plural_message, count) + return result + else: + msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message) + result = self.gettext(msg_with_ctxt) + if CONTEXT_SEPARATOR in result: + # Translation not found + result = message + return result diff --git a/server/www/packages/packages-common/tornado/locks.py b/server/www/packages/packages-common/tornado/locks.py new file mode 100644 index 0000000..a181772 --- /dev/null +++ b/server/www/packages/packages-common/tornado/locks.py @@ -0,0 +1,512 @@ +# Copyright 2015 The Tornado Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, with_statement + +__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] + +import collections + +from tornado import gen, ioloop +from tornado.concurrent import Future + + +class _TimeoutGarbageCollector(object): + """Base class for objects that periodically clean up timed-out waiters. + + Avoids memory leak in a common pattern like: + + while True: + yield condition.wait(short_timeout) + print('looping....') + """ + def __init__(self): + self._waiters = collections.deque() # Futures. + self._timeouts = 0 + + def _garbage_collect(self): + # Occasionally clear timed-out waiters. + self._timeouts += 1 + if self._timeouts > 100: + self._timeouts = 0 + self._waiters = collections.deque( + w for w in self._waiters if not w.done()) + + +class Condition(_TimeoutGarbageCollector): + """A condition allows one or more coroutines to wait until notified. + + Like a standard `threading.Condition`, but does not need an underlying lock + that is acquired and released. + + With a `Condition`, coroutines can wait to be notified by other coroutines: + + .. testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Condition + + condition = Condition() + + @gen.coroutine + def waiter(): + print("I'll wait right here") + yield condition.wait() # Yield a Future. + print("I'm done waiting") + + @gen.coroutine + def notifier(): + print("About to notify") + condition.notify() + print("Done notifying") + + @gen.coroutine + def runner(): + # Yield two Futures; wait for waiter() and notifier() to finish. + yield [waiter(), notifier()] + + IOLoop.current().run_sync(runner) + + .. testoutput:: + + I'll wait right here + About to notify + Done notifying + I'm done waiting + + `wait` takes an optional ``timeout`` argument, which is either an absolute + timestamp:: + + io_loop = IOLoop.current() + + # Wait up to 1 second for a notification. 
+ yield condition.wait(timeout=io_loop.time() + 1) + + ...or a `datetime.timedelta` for a timeout relative to the current time:: + + # Wait up to 1 second. + yield condition.wait(timeout=datetime.timedelta(seconds=1)) + + The method raises `tornado.gen.TimeoutError` if there's no notification + before the deadline. + """ + + def __init__(self): + super(Condition, self).__init__() + self.io_loop = ioloop.IOLoop.current() + + def __repr__(self): + result = '<%s' % (self.__class__.__name__, ) + if self._waiters: + result += ' waiters[%s]' % len(self._waiters) + return result + '>' + + def wait(self, timeout=None): + """Wait for `.notify`. + + Returns a `.Future` that resolves ``True`` if the condition is notified, + or ``False`` after a timeout. + """ + waiter = Future() + self._waiters.append(waiter) + if timeout: + def on_timeout(): + waiter.set_result(False) + self._garbage_collect() + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + waiter.add_done_callback( + lambda _: io_loop.remove_timeout(timeout_handle)) + return waiter + + def notify(self, n=1): + """Wake ``n`` waiters.""" + waiters = [] # Waiters we plan to run right now. + while n and self._waiters: + waiter = self._waiters.popleft() + if not waiter.done(): # Might have timed out. + n -= 1 + waiters.append(waiter) + + for waiter in waiters: + waiter.set_result(True) + + def notify_all(self): + """Wake all waiters.""" + self.notify(len(self._waiters)) + + +class Event(object): + """An event blocks coroutines until its internal flag is set to True. + + Similar to `threading.Event`. + + A coroutine can wait for an event to be set. Once it is set, calls to + ``yield event.wait()`` will not block unless the event has been cleared: + + .. testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Event + + event = Event() + + @gen.coroutine + def waiter(): + print("Waiting for event") + yield event.wait() + print("Not waiting this time") + yield event.wait() + print("Done") + + @gen.coroutine + def setter(): + print("About to set the event") + event.set() + + @gen.coroutine + def runner(): + yield [waiter(), setter()] + + IOLoop.current().run_sync(runner) + + .. testoutput:: + + Waiting for event + About to set the event + Not waiting this time + Done + """ + def __init__(self): + self._future = Future() + + def __repr__(self): + return '<%s %s>' % ( + self.__class__.__name__, 'set' if self.is_set() else 'clear') + + def is_set(self): + """Return ``True`` if the internal flag is true.""" + return self._future.done() + + def set(self): + """Set the internal flag to ``True``. All waiters are awakened. + + Calling `.wait` once the flag is set will not block. + """ + if not self._future.done(): + self._future.set_result(None) + + def clear(self): + """Reset the internal flag to ``False``. + + Calls to `.wait` will block until `.set` is called. + """ + if self._future.done(): + self._future = Future() + + def wait(self, timeout=None): + """Block until the internal flag is true. + + Returns a Future, which raises `tornado.gen.TimeoutError` after a + timeout. + """ + if timeout is None: + return self._future + else: + return gen.with_timeout(timeout, self._future) + + +class _ReleasingContextManager(object): + """Releases a Lock or Semaphore at the end of a "with" statement. + + with (yield semaphore.acquire()): + pass + + # Now semaphore.release() has been called. 
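+
+    Instances of this class are what `Semaphore.acquire` and `Lock.acquire`
+    resolve their Futures with; the "with" block above works because
+    ``__exit__`` calls ``release()`` on the wrapped object.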
+ """ + def __init__(self, obj): + self._obj = obj + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + self._obj.release() + + +class Semaphore(_TimeoutGarbageCollector): + """A lock that can be acquired a fixed number of times before blocking. + + A Semaphore manages a counter representing the number of `.release` calls + minus the number of `.acquire` calls, plus an initial value. The `.acquire` + method blocks if necessary until it can return without making the counter + negative. + + Semaphores limit access to a shared resource. To allow access for two + workers at a time: + + .. testsetup:: semaphore + + from collections import deque + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.concurrent import Future + + # Ensure reliable doctest output: resolve Futures one at a time. + futures_q = deque([Future() for _ in range(3)]) + + @gen.coroutine + def simulator(futures): + for f in futures: + yield gen.moment + f.set_result(None) + + IOLoop.current().add_callback(simulator, list(futures_q)) + + def use_some_resource(): + return futures_q.popleft() + + .. testcode:: semaphore + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Semaphore + + sem = Semaphore(2) + + @gen.coroutine + def worker(worker_id): + yield sem.acquire() + try: + print("Worker %d is working" % worker_id) + yield use_some_resource() + finally: + print("Worker %d is done" % worker_id) + sem.release() + + @gen.coroutine + def runner(): + # Join all workers. + yield [worker(i) for i in range(3)] + + IOLoop.current().run_sync(runner) + + .. testoutput:: semaphore + + Worker 0 is working + Worker 1 is working + Worker 0 is done + Worker 2 is working + Worker 1 is done + Worker 2 is done + + Workers 0 and 1 are allowed to run concurrently, but worker 2 waits until + the semaphore has been released once, by worker 0. + + `.acquire` is a context manager, so ``worker`` could be written as:: + + @gen.coroutine + def worker(worker_id): + with (yield sem.acquire()): + print("Worker %d is working" % worker_id) + yield use_some_resource() + + # Now the semaphore has been released. + print("Worker %d is done" % worker_id) + + In Python 3.5, the semaphore itself can be used as an async context + manager:: + + async def worker(worker_id): + async with sem: + print("Worker %d is working" % worker_id) + await use_some_resource() + + # Now the semaphore has been released. + print("Worker %d is done" % worker_id) + + .. versionchanged:: 4.3 + Added ``async with`` support in Python 3.5. + """ + def __init__(self, value=1): + super(Semaphore, self).__init__() + if value < 0: + raise ValueError('semaphore initial value must be >= 0') + + self._value = value + + def __repr__(self): + res = super(Semaphore, self).__repr__() + extra = 'locked' if self._value == 0 else 'unlocked,value:{0}'.format( + self._value) + if self._waiters: + extra = '{0},waiters:{1}'.format(extra, len(self._waiters)) + return '<{0} [{1}]>'.format(res[1:-1], extra) + + def release(self): + """Increment the counter and wake one waiter.""" + self._value += 1 + while self._waiters: + waiter = self._waiters.popleft() + if not waiter.done(): + self._value -= 1 + + # If the waiter is a coroutine paused at + # + # with (yield semaphore.acquire()): + # + # then the context manager's __exit__ calls release() at the end + # of the "with" block. + waiter.set_result(_ReleasingContextManager(self)) + break + + def acquire(self, timeout=None): + """Decrement the counter. 
Returns a Future. + + Block if the counter is zero and wait for a `.release`. The Future + raises `.TimeoutError` after the deadline. + """ + waiter = Future() + if self._value > 0: + self._value -= 1 + waiter.set_result(_ReleasingContextManager(self)) + else: + self._waiters.append(waiter) + if timeout: + def on_timeout(): + waiter.set_exception(gen.TimeoutError()) + self._garbage_collect() + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + waiter.add_done_callback( + lambda _: io_loop.remove_timeout(timeout_handle)) + return waiter + + def __enter__(self): + raise RuntimeError( + "Use Semaphore like 'with (yield semaphore.acquire())', not like" + " 'with semaphore'") + + __exit__ = __enter__ + + @gen.coroutine + def __aenter__(self): + yield self.acquire() + + @gen.coroutine + def __aexit__(self, typ, value, tb): + self.release() + + +class BoundedSemaphore(Semaphore): + """A semaphore that prevents release() being called too many times. + + If `.release` would increment the semaphore's value past the initial + value, it raises `ValueError`. Semaphores are mostly used to guard + resources with limited capacity, so a semaphore released too many times + is a sign of a bug. + """ + def __init__(self, value=1): + super(BoundedSemaphore, self).__init__(value=value) + self._initial_value = value + + def release(self): + """Increment the counter and wake one waiter.""" + if self._value >= self._initial_value: + raise ValueError("Semaphore released too many times") + super(BoundedSemaphore, self).release() + + +class Lock(object): + """A lock for coroutines. + + A Lock begins unlocked, and `acquire` locks it immediately. While it is + locked, a coroutine that yields `acquire` waits until another coroutine + calls `release`. + + Releasing an unlocked lock raises `RuntimeError`. + + `acquire` supports the context manager protocol in all Python versions: + + >>> from tornado import gen, locks + >>> lock = locks.Lock() + >>> + >>> @gen.coroutine + ... def f(): + ... with (yield lock.acquire()): + ... # Do something holding the lock. + ... pass + ... + ... # Now the lock is released. + + In Python 3.5, `Lock` also supports the async context manager + protocol. Note that in this case there is no `acquire`, because + ``async with`` includes both the ``yield`` and the ``acquire`` + (just as it does with `threading.Lock`): + + >>> async def f(): # doctest: +SKIP + ... async with lock: + ... # Do something holding the lock. + ... pass + ... + ... # Now the lock is released. + + .. versionchanged:: 4.3 + Added ``async with`` support in Python 3.5. + + """ + def __init__(self): + self._block = BoundedSemaphore(value=1) + + def __repr__(self): + return "<%s _block=%s>" % ( + self.__class__.__name__, + self._block) + + def acquire(self, timeout=None): + """Attempt to lock. + + Returns a Future, which raises `tornado.gen.TimeoutError` after a + timeout. + """ + return self._block.acquire(timeout) + + def release(self): + """Unlock. + + The first coroutine in line waiting for `acquire` gets the lock. + + If not locked, raise a `RuntimeError`.
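+
+        A minimal sketch of the explicit acquire/release pattern (equivalent
+        to the context-manager form shown in the class docstring)::
+
+            @gen.coroutine
+            def f():
+                yield lock.acquire()
+                try:
+                    pass  # Do something holding the lock.
+                finally:
+                    lock.release()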
+ """ + try: + self._block.release() + except ValueError: + raise RuntimeError('release unlocked lock') + + def __enter__(self): + raise RuntimeError( + "Use Lock like 'with (yield lock)', not like 'with lock'") + + __exit__ = __enter__ + + @gen.coroutine + def __aenter__(self): + yield self.acquire() + + @gen.coroutine + def __aexit__(self, typ, value, tb): + self.release() diff --git a/server/www/packages/packages-common/tornado/log.py b/server/www/packages/packages-common/tornado/log.py new file mode 100644 index 0000000..040889a --- /dev/null +++ b/server/www/packages/packages-common/tornado/log.py @@ -0,0 +1,259 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Logging support for Tornado. + +Tornado uses three logger streams: + +* ``tornado.access``: Per-request logging for Tornado's HTTP servers (and + potentially other servers in the future) +* ``tornado.application``: Logging of errors from application code (i.e. + uncaught exceptions from callbacks) +* ``tornado.general``: General-purpose logging, including any errors + or warnings from Tornado itself. + +These streams may be configured independently using the standard library's +`logging` module. For example, you may wish to send ``tornado.access`` logs +to a separate file for analysis. +""" +from __future__ import absolute_import, division, print_function, with_statement + +import logging +import logging.handlers +import sys + +from tornado.escape import _unicode +from tornado.util import unicode_type, basestring_type + +try: + import curses +except ImportError: + curses = None + +# Logger objects for internal tornado use +access_log = logging.getLogger("tornado.access") +app_log = logging.getLogger("tornado.application") +gen_log = logging.getLogger("tornado.general") + + +def _stderr_supports_color(): + color = False + if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): + try: + curses.setupterm() + if curses.tigetnum("colors") > 0: + color = True + except Exception: + pass + return color + + +def _safe_unicode(s): + try: + return _unicode(s) + except UnicodeDecodeError: + return repr(s) + + +class LogFormatter(logging.Formatter): + """Log formatter used in Tornado. + + Key features of this formatter are: + + * Color support when logging to a terminal that supports it. + * Timestamps on every log line. + * Robust against str/bytes encoding problems. + + This formatter is enabled automatically by + `tornado.options.parse_command_line` (unless ``--logging=none`` is + used). + """ + DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s' + DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S' + DEFAULT_COLORS = { + logging.DEBUG: 4, # Blue + logging.INFO: 2, # Green + logging.WARNING: 3, # Yellow + logging.ERROR: 1, # Red + } + + def __init__(self, color=True, fmt=DEFAULT_FORMAT, + datefmt=DEFAULT_DATE_FORMAT, colors=DEFAULT_COLORS): + r""" + :arg bool color: Enables color support. + :arg string fmt: Log message format. 
It will be applied to the attributes dict of log records. The + text between ``%(color)s`` and ``%(end_color)s`` will be colored + depending on the level if color support is on. + :arg dict colors: color mappings from logging level to terminal color + code + :arg string datefmt: Datetime format. + Used for formatting the ``%(asctime)s`` placeholder in ``fmt``. + + .. versionchanged:: 3.2 + + Added ``fmt`` and ``datefmt`` arguments. + """ + logging.Formatter.__init__(self, datefmt=datefmt) + self._fmt = fmt + + self._colors = {} + if color and _stderr_supports_color(): + # The curses module has some str/bytes confusion in + # python3. Until version 3.2.3, most methods return + # bytes, but only accept strings. In addition, we want to + # output these strings with the logging module, which + # works with unicode strings. The explicit calls to + # unicode() below are harmless in python2 but will do the + # right conversion in python 3. + fg_color = (curses.tigetstr("setaf") or + curses.tigetstr("setf") or "") + if (3, 0) < sys.version_info < (3, 2, 3): + fg_color = unicode_type(fg_color, "ascii") + + for levelno, code in colors.items(): + self._colors[levelno] = unicode_type(curses.tparm(fg_color, code), "ascii") + self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii") + else: + self._normal = '' + + def format(self, record): + try: + message = record.getMessage() + assert isinstance(message, basestring_type) # guaranteed by logging + # Encoding notes: The logging module prefers to work with character + # strings, but only enforces that log messages are instances of + # basestring. In python 2, non-ascii bytestrings will make + # their way through the logging framework until they blow up with + # an unhelpful decoding error (with this formatter it happens + # when we attach the prefix, but there are other opportunities for + # exceptions further along in the framework). + # + # If a byte string makes it this far, convert it to unicode to + # ensure it will make it out to the logs. Use repr() as a fallback + # to ensure that all byte strings can be converted successfully, + # but don't do it by default so we don't add extra quotes to ascii + # bytestrings. This is a bit of a hacky place to do this, but + # it's worth it since the encoding errors that would otherwise + # result are so useless (and tornado is fond of using utf8-encoded + # byte strings wherever possible). + record.message = _safe_unicode(message) + except Exception as e: + record.message = "Bad message (%r): %r" % (e, record.__dict__) + + record.asctime = self.formatTime(record, self.datefmt) + + if record.levelno in self._colors: + record.color = self._colors[record.levelno] + record.end_color = self._normal + else: + record.color = record.end_color = '' + + formatted = self._fmt % record.__dict__ + + if record.exc_info: + if not record.exc_text: + record.exc_text = self.formatException(record.exc_info) + if record.exc_text: + # exc_text contains multiple lines. We need to _safe_unicode + # each line separately so that non-utf8 bytes don't cause + # all the newlines to turn into '\n'. + lines = [formatted.rstrip()] + lines.extend(_safe_unicode(ln) for ln in record.exc_text.split('\n')) + formatted = '\n'.join(lines) + return formatted.replace("\n", "\n ") + + +def enable_pretty_logging(options=None, logger=None): + """Turns on formatted logging output as configured. + + This is called automatically by `tornado.options.parse_command_line` + and `tornado.options.parse_config_file`.
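+
+    It can also be called directly to configure logging without parsing
+    options, e.g. (a minimal sketch using the default global options)::
+
+        from tornado.log import enable_pretty_logging
+        enable_pretty_logging()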
+ """ + if options is None: + from tornado.options import options + if options.logging is None or options.logging.lower() == 'none': + return + if logger is None: + logger = logging.getLogger() + logger.setLevel(getattr(logging, options.logging.upper())) + if options.log_file_prefix: + rotate_mode = options.log_rotate_mode + if rotate_mode == 'size': + channel = logging.handlers.RotatingFileHandler( + filename=options.log_file_prefix, + maxBytes=options.log_file_max_size, + backupCount=options.log_file_num_backups) + elif rotate_mode == 'time': + channel = logging.handlers.TimedRotatingFileHandler( + filename=options.log_file_prefix, + when=options.log_rotate_when, + interval=options.log_rotate_interval, + backupCount=options.log_file_num_backups) + else: + error_message = 'The value of log_rotate_mode option should be ' +\ + '"size" or "time", not "%s".' % rotate_mode + raise ValueError(error_message) + channel.setFormatter(LogFormatter(color=False)) + logger.addHandler(channel) + + if (options.log_to_stderr or + (options.log_to_stderr is None and not logger.handlers)): + # Set up color if we are in a tty and curses is installed + channel = logging.StreamHandler() + channel.setFormatter(LogFormatter()) + logger.addHandler(channel) + + +def define_logging_options(options=None): + """Add logging-related flags to ``options``. + + These options are present automatically on the default options instance; + this method is only necessary if you have created your own `.OptionParser`. + + .. versionadded:: 4.2 + This function existed in prior versions but was broken and undocumented until 4.2. + """ + if options is None: + # late import to prevent cycle + from tornado.options import options + options.define("logging", default="info", + help=("Set the Python log level. If 'none', tornado won't touch the " + "logging configuration."), + metavar="debug|info|warning|error|none") + options.define("log_to_stderr", type=bool, default=None, + help=("Send log output to stderr (colorized if possible). " + "By default use stderr if --log_file_prefix is not set and " + "no other logging is configured.")) + options.define("log_file_prefix", type=str, default=None, metavar="PATH", + help=("Path prefix for log files. " + "Note that if you are running multiple tornado processes, " + "log_file_prefix must be different for each of them (e.g. " + "include the port number)")) + options.define("log_file_max_size", type=int, default=100 * 1000 * 1000, + help="max size of log files before rollover") + options.define("log_file_num_backups", type=int, default=10, + help="number of log files to keep") + + options.define("log_rotate_when", type=str, default='midnight', + help=("specify the type of TimedRotatingFileHandler interval " + "other options:('S', 'M', 'H', 'D', 'W0'-'W6')")) + options.define("log_rotate_interval", type=int, default=1, + help="The interval value of timed rotating") + + options.define("log_rotate_mode", type=str, default='size', + help="The mode of rotating files(time or size)") + + options.add_parse_callback(lambda: enable_pretty_logging(options)) diff --git a/server/www/packages/packages-common/tornado/netutil.py b/server/www/packages/packages-common/tornado/netutil.py new file mode 100644 index 0000000..4fc8d04 --- /dev/null +++ b/server/www/packages/packages-common/tornado/netutil.py @@ -0,0 +1,521 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Miscellaneous network utility code.""" + +from __future__ import absolute_import, division, print_function, with_statement + +import errno +import os +import sys +import socket +import stat + +from tornado.concurrent import dummy_executor, run_on_executor +from tornado.ioloop import IOLoop +from tornado.platform.auto import set_close_exec +from tornado.util import u, Configurable, errno_from_exception + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine + ssl = None + +try: + import certifi +except ImportError: + # certifi is optional as long as we have ssl.create_default_context. + if ssl is None or hasattr(ssl, 'create_default_context'): + certifi = None + else: + raise + +try: + xrange # py2 +except NameError: + xrange = range # py3 + +if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+ + ssl_match_hostname = ssl.match_hostname + SSLCertificateError = ssl.CertificateError +elif ssl is None: + ssl_match_hostname = SSLCertificateError = None +else: + import backports.ssl_match_hostname + ssl_match_hostname = backports.ssl_match_hostname.match_hostname + SSLCertificateError = backports.ssl_match_hostname.CertificateError + +if hasattr(ssl, 'SSLContext'): + if hasattr(ssl, 'create_default_context'): + # Python 2.7.9+, 3.4+ + # Note that the naming of ssl.Purpose is confusing; the purpose + # of a context is to authenticate the opposite side of the connection. + _client_ssl_defaults = ssl.create_default_context( + ssl.Purpose.SERVER_AUTH) + _server_ssl_defaults = ssl.create_default_context( + ssl.Purpose.CLIENT_AUTH) + else: + # Python 3.2-3.3 + _client_ssl_defaults = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + _client_ssl_defaults.verify_mode = ssl.CERT_REQUIRED + _client_ssl_defaults.load_verify_locations(certifi.where()) + _server_ssl_defaults = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_COMPRESSION'): + # Disable TLS compression to avoid CRIME and related attacks. + # This constant wasn't added until python 3.3. + _client_ssl_defaults.options |= ssl.OP_NO_COMPRESSION + _server_ssl_defaults.options |= ssl.OP_NO_COMPRESSION + +elif ssl: + # Python 2.6-2.7.8 + _client_ssl_defaults = dict(cert_reqs=ssl.CERT_REQUIRED, + ca_certs=certifi.where()) + _server_ssl_defaults = {} +else: + # Google App Engine + _client_ssl_defaults = dict(cert_reqs=None, + ca_certs=None) + _server_ssl_defaults = {} + +# ThreadedResolver runs getaddrinfo on a thread. If the hostname is unicode, +# getaddrinfo attempts to import encodings.idna. If this is done at +# module-import time, the import lock is already held by the main thread, +# leading to deadlock. Avoid it by caching the idna encoder on the main +# thread now. +u('foo').encode('idna') + +# These errnos indicate that a non-blocking operation must be retried +# at a later time. On most platforms they're the same value, but on +# some they differ.
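+# For example, a recv() on a non-blocking socket with no data available
+# fails with one of these errnos instead of blocking.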
+_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) + +if hasattr(errno, "WSAEWOULDBLOCK"): + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + +# Default backlog used when calling sock.listen() +_DEFAULT_BACKLOG = 128 + + +def bind_sockets(port, address=None, family=socket.AF_UNSPEC, + backlog=_DEFAULT_BACKLOG, flags=None, reuse_port=False): + """Creates listening sockets bound to the given port and address. + + Returns a list of socket objects (multiple sockets are returned if + the given address maps to multiple IP addresses, which is most common + for mixed IPv4 and IPv6 use). + + Address may be either an IP address or hostname. If it's a hostname, + the server will listen on all IP addresses associated with the + name. Address may be an empty string or None to listen on all + available interfaces. Family may be set to either `socket.AF_INET` + or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise + both will be used if available. + + The ``backlog`` argument has the same meaning as for + `socket.listen() <socket.socket.listen>`. + + ``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like + ``socket.AI_PASSIVE | socket.AI_NUMERICHOST``. + + ``reuse_port`` option sets ``SO_REUSEPORT`` option for every socket + in the list. If your platform doesn't support this option, ValueError will + be raised. + """ + if reuse_port and not hasattr(socket, "SO_REUSEPORT"): + raise ValueError("the platform doesn't support SO_REUSEPORT") + + sockets = [] + if address == "": + address = None + if not socket.has_ipv6 and family == socket.AF_UNSPEC: + # Python can be compiled with --disable-ipv6, which causes + # operations on AF_INET6 sockets to fail, but does not + # automatically exclude those results from getaddrinfo + # results. + # http://bugs.python.org/issue16208 + family = socket.AF_INET + if flags is None: + flags = socket.AI_PASSIVE + bound_port = None + for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM, + 0, flags)): + af, socktype, proto, canonname, sockaddr = res + if (sys.platform == 'darwin' and address == 'localhost' and + af == socket.AF_INET6 and sockaddr[3] != 0): + # Mac OS X includes a link-local address fe80::1%lo0 in the + # getaddrinfo results for 'localhost'. However, the firewall + # doesn't understand that this is a local address and will + # prompt for access (often repeatedly, due to an apparent + # bug in its ability to remember granting access to an + # application). Skip these addresses. + continue + try: + sock = socket.socket(af, socktype, proto) + except socket.error as e: + if errno_from_exception(e) == errno.EAFNOSUPPORT: + continue + raise + set_close_exec(sock.fileno()) + if os.name != 'nt': + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if reuse_port: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + if af == socket.AF_INET6: + # On linux, ipv6 sockets accept ipv4 too by default, + # but this makes it impossible to bind to both + # 0.0.0.0 in ipv4 and :: in ipv6. On other systems, + # separate sockets *must* be used to listen for both ipv4 + # and ipv6. For consistency, always disable ipv4 on our + # ipv6 sockets and use a separate ipv4 socket when needed. + # + # Python 2.x on windows doesn't have IPPROTO_IPV6.
+ if hasattr(socket, "IPPROTO_IPV6"): + sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # automatic port allocation with port=None + # should bind on the same port on IPv4 and IPv6 + host, requested_port = sockaddr[:2] + if requested_port == 0 and bound_port is not None: + sockaddr = tuple([host, bound_port] + list(sockaddr[2:])) + + sock.setblocking(0) + sock.bind(sockaddr) + bound_port = sock.getsockname()[1] + sock.listen(backlog) + sockets.append(sock) + return sockets + +if hasattr(socket, 'AF_UNIX'): + def bind_unix_socket(file, mode=0o600, backlog=_DEFAULT_BACKLOG): + """Creates a listening unix socket. + + If a socket with the given name already exists, it will be deleted. + If any other file with that name exists, an exception will be + raised. + + Returns a socket object (not a list of socket objects like + `bind_sockets`) + """ + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + set_close_exec(sock.fileno()) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.setblocking(0) + try: + st = os.stat(file) + except OSError as err: + if errno_from_exception(err) != errno.ENOENT: + raise + else: + if stat.S_ISSOCK(st.st_mode): + os.remove(file) + else: + raise ValueError("File %s exists and is not a socket" % file) + sock.bind(file) + os.chmod(file, mode) + sock.listen(backlog) + return sock + + +def add_accept_handler(sock, callback, io_loop=None): + """Adds an `.IOLoop` event handler to accept new connections on ``sock``. + + When a connection is accepted, ``callback(connection, address)`` will + be run (``connection`` is a socket object, and ``address`` is the + address of the other end of the connection). Note that this signature + is different from the ``callback(fd, events)`` signature used for + `.IOLoop` handlers. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + if io_loop is None: + io_loop = IOLoop.current() + + def accept_handler(fd, events): + # More connections may come in while we're handling callbacks; + # to prevent starvation of other tasks we must limit the number + # of connections we accept at a time. Ideally we would accept + # up to the number of connections that were waiting when we + # entered this method, but this information is not available + # (and rearranging this method to call accept() as many times + # as possible before running any callbacks would have adverse + # effects on load balancing in multiprocess configurations). + # Instead, we use the (default) listen backlog as a rough + # heuristic for the number of connections we can reasonably + # accept at once. + for i in xrange(_DEFAULT_BACKLOG): + try: + connection, address = sock.accept() + except socket.error as e: + # _ERRNO_WOULDBLOCK indicates we have accepted every + # connection that is available. + if errno_from_exception(e) in _ERRNO_WOULDBLOCK: + return + # ECONNABORTED indicates that there was a connection + # but it was closed while still in the accept queue. + # (observed on FreeBSD). + if errno_from_exception(e) == errno.ECONNABORTED: + continue + raise + callback(connection, address) + io_loop.add_handler(sock, accept_handler, IOLoop.READ) + + +def is_valid_ip(ip): + """Returns true if the given string is a well-formed IP address. + + Supports IPv4 and IPv6. + """ + if not ip or '\x00' in ip: + # getaddrinfo resolves empty strings to localhost, and truncates + # on zero bytes.
+ return False + try: + res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC, + socket.SOCK_STREAM, + 0, socket.AI_NUMERICHOST) + return bool(res) + except socket.gaierror as e: + if e.args[0] == socket.EAI_NONAME: + return False + raise + return True + + +class Resolver(Configurable): + """Configurable asynchronous DNS resolver interface. + + By default, a blocking implementation is used (which simply calls + `socket.getaddrinfo`). An alternative implementation can be + chosen with the `Resolver.configure <.Configurable.configure>` + class method:: + + Resolver.configure('tornado.netutil.ThreadedResolver') + + The implementations of this interface included with Tornado are + + * `tornado.netutil.BlockingResolver` + * `tornado.netutil.ThreadedResolver` + * `tornado.netutil.OverrideResolver` + * `tornado.platform.twisted.TwistedResolver` + * `tornado.platform.caresresolver.CaresResolver` + """ + @classmethod + def configurable_base(cls): + return Resolver + + @classmethod + def configurable_default(cls): + return BlockingResolver + + def resolve(self, host, port, family=socket.AF_UNSPEC, callback=None): + """Resolves an address. + + The ``host`` argument is a string which may be a hostname or a + literal IP address. + + Returns a `.Future` whose result is a list of (family, + address) pairs, where address is a tuple suitable to pass to + `socket.connect <socket.socket.connect>` (i.e. a ``(host, + port)`` pair for IPv4; additional fields may be present for + IPv6). If a ``callback`` is passed, it will be run with the + result as an argument when it is complete. + """ + raise NotImplementedError() + + def close(self): + """Closes the `Resolver`, freeing any resources used. + + .. versionadded:: 3.1 + + """ + pass + + +class ExecutorResolver(Resolver): + """Resolver implementation using a `concurrent.futures.Executor`. + + Use this instead of `ThreadedResolver` when you require additional + control over the executor being used. + + The executor will be shut down when the resolver is closed unless + ``close_executor=False``; use this if you want to reuse the same + executor elsewhere. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def initialize(self, io_loop=None, executor=None, close_executor=True): + self.io_loop = io_loop or IOLoop.current() + if executor is not None: + self.executor = executor + self.close_executor = close_executor + else: + self.executor = dummy_executor + self.close_executor = False + + def close(self): + if self.close_executor: + self.executor.shutdown() + self.executor = None + + @run_on_executor + def resolve(self, host, port, family=socket.AF_UNSPEC): + # On Solaris, getaddrinfo fails if the given port is not found + # in /etc/services and no socket type is given, so we must pass + # one here. The socket type used here doesn't seem to actually + # matter (we discard the one we get back in the results), + # so the addresses we return should still be usable with SOCK_DGRAM. + addrinfo = socket.getaddrinfo(host, port, family, socket.SOCK_STREAM) + results = [] + for family, socktype, proto, canonname, address in addrinfo: + results.append((family, address)) + return results + + +class BlockingResolver(ExecutorResolver): + """Default `Resolver` implementation, using `socket.getaddrinfo`. + + The `.IOLoop` will be blocked during the resolution, although the + callback will not be run until the next `.IOLoop` iteration.
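+
+    A minimal usage sketch (inside a coroutine; host and port are
+    illustrative)::
+
+        resolver = BlockingResolver()
+        addrinfo = yield resolver.resolve("localhost", 80, socket.AF_INET)
+        # e.g. [(socket.AF_INET, ('127.0.0.1', 80))]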
+ """ + def initialize(self, io_loop=None): + super(BlockingResolver, self).initialize(io_loop=io_loop) + + +class ThreadedResolver(ExecutorResolver): + """Multithreaded non-blocking `Resolver` implementation. + + Requires the `concurrent.futures` package to be installed + (available in the standard library since Python 3.2, + installable with ``pip install futures`` in older versions). + + The thread pool size can be configured with:: + + Resolver.configure('tornado.netutil.ThreadedResolver', + num_threads=10) + + .. versionchanged:: 3.1 + All ``ThreadedResolvers`` share a single thread pool, whose + size is set by the first one to be created. + """ + _threadpool = None + _threadpool_pid = None + + def initialize(self, io_loop=None, num_threads=10): + threadpool = ThreadedResolver._create_threadpool(num_threads) + super(ThreadedResolver, self).initialize( + io_loop=io_loop, executor=threadpool, close_executor=False) + + @classmethod + def _create_threadpool(cls, num_threads): + pid = os.getpid() + if cls._threadpool_pid != pid: + # Threads cannot survive after a fork, so if our pid isn't what it + # was when we created the pool then delete it. + cls._threadpool = None + if cls._threadpool is None: + from concurrent.futures import ThreadPoolExecutor + cls._threadpool = ThreadPoolExecutor(num_threads) + cls._threadpool_pid = pid + return cls._threadpool + + +class OverrideResolver(Resolver): + """Wraps a resolver with a mapping of overrides. + + This can be used to make local DNS changes (e.g. for testing) + without modifying system-wide settings. + + The mapping can contain either host strings or host-port pairs. + """ + def initialize(self, resolver, mapping): + self.resolver = resolver + self.mapping = mapping + + def close(self): + self.resolver.close() + + def resolve(self, host, port, *args, **kwargs): + if (host, port) in self.mapping: + host, port = self.mapping[(host, port)] + elif host in self.mapping: + host = self.mapping[host] + return self.resolver.resolve(host, port, *args, **kwargs) + + +# These are the keyword arguments to ssl.wrap_socket that must be translated +# to their SSLContext equivalents (the other arguments are still passed +# to SSLContext.wrap_socket). +_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile', + 'cert_reqs', 'ca_certs', 'ciphers']) + + +def ssl_options_to_context(ssl_options): + """Try to convert an ``ssl_options`` dictionary to an + `~ssl.SSLContext` object. + + The ``ssl_options`` dictionary contains keywords to be passed to + `ssl.wrap_socket`. In Python 2.7.9+, `ssl.SSLContext` objects can + be used instead. This function converts the dict form to its + `~ssl.SSLContext` equivalent, and may be used when a component which + accepts both forms needs to upgrade to the `~ssl.SSLContext` version + to use features like SNI or NPN. 
+ """ + if isinstance(ssl_options, dict): + assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options + if (not hasattr(ssl, 'SSLContext') or + isinstance(ssl_options, ssl.SSLContext)): + return ssl_options + context = ssl.SSLContext( + ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23)) + if 'certfile' in ssl_options: + context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None)) + if 'cert_reqs' in ssl_options: + context.verify_mode = ssl_options['cert_reqs'] + if 'ca_certs' in ssl_options: + context.load_verify_locations(ssl_options['ca_certs']) + if 'ciphers' in ssl_options: + context.set_ciphers(ssl_options['ciphers']) + if hasattr(ssl, 'OP_NO_COMPRESSION'): + # Disable TLS compression to avoid CRIME and related attacks. + # This constant wasn't added until python 3.3. + context.options |= ssl.OP_NO_COMPRESSION + return context + + +def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs): + """Returns an ``ssl.SSLSocket`` wrapping the given socket. + + ``ssl_options`` may be either an `ssl.SSLContext` object or a + dictionary (as accepted by `ssl_options_to_context`). Additional + keyword arguments are passed to ``wrap_socket`` (either the + `~ssl.SSLContext` method or the `ssl` module function as + appropriate). + """ + context = ssl_options_to_context(ssl_options) + if hasattr(ssl, 'SSLContext') and isinstance(context, ssl.SSLContext): + if server_hostname is not None and getattr(ssl, 'HAS_SNI'): + # Python doesn't have server-side SNI support so we can't + # really unittest this, but it can be manually tested with + # python3.2 -m tornado.httpclient https://sni.velox.ch + return context.wrap_socket(socket, server_hostname=server_hostname, + **kwargs) + else: + return context.wrap_socket(socket, **kwargs) + else: + return ssl.wrap_socket(socket, **dict(context, **kwargs)) diff --git a/server/www/packages/packages-common/tornado/options.py b/server/www/packages/packages-common/tornado/options.py new file mode 100644 index 0000000..ba16b1a --- /dev/null +++ b/server/www/packages/packages-common/tornado/options.py @@ -0,0 +1,582 @@ +#!/usr/bin/env python +# +# Copyright 2009 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A command line parsing module that lets modules define their own options. + +Each module defines its own options which are added to the global +option namespace, e.g.:: + + from tornado.options import define, options + + define("mysql_host", default="127.0.0.1:3306", help="Main user DB") + define("memcache_hosts", default="127.0.0.1:11011", multiple=True, + help="Main user memcache servers") + + def connect(): + db = database.Connection(options.mysql_host) + ... + +The ``main()`` method of your application does not need to be aware of all of +the options used throughout your program; they are all automatically loaded +when the modules are loaded. However, all modules that define options +must have been imported before the command line is parsed. 
+ +Your ``main()`` method can parse the command line or parse a config file with +either:: + + tornado.options.parse_command_line() + # or + tornado.options.parse_config_file("/etc/server.conf") + +Command line formats are what you would expect (``--myoption=myvalue``). +Config files are just Python files. Global names become options, e.g.:: + + myoption = "myvalue" + myotheroption = "myothervalue" + +We support `datetimes `, `timedeltas +`, ints, and floats (just pass a ``type`` kwarg to +`define`). We also accept multi-value options. See the documentation for +`define()` below. + +`tornado.options.options` is a singleton instance of `OptionParser`, and +the top-level functions in this module (`define`, `parse_command_line`, etc) +simply call methods on it. You may create additional `OptionParser` +instances to define isolated sets of options, such as for subcommands. + +.. note:: + + By default, several options are defined that will configure the + standard `logging` module when `parse_command_line` or `parse_config_file` + are called. If you want Tornado to leave the logging configuration + alone so you can manage it yourself, either pass ``--logging=none`` + on the command line or do the following to disable it in code:: + + from tornado.options import options, parse_command_line + options.logging = None + parse_command_line() + +.. versionchanged:: 4.3 + Dashes and underscores are fully interchangeable in option names; + options can be defined, set, and read with any mix of the two. + Dashes are typical for command-line usage while config files require + underscores. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import numbers +import re +import sys +import os +import textwrap + +from tornado.escape import _unicode, native_str +from tornado.log import define_logging_options +from tornado import stack_context +from tornado.util import basestring_type, exec_in + + +class Error(Exception): + """Exception raised by errors in the options module.""" + pass + + +class OptionParser(object): + """A collection of options, a dictionary with object-like access. + + Normally accessed via static functions in the `tornado.options` module, + which reference a global instance. + """ + def __init__(self): + # we have to use self.__dict__ because we override setattr. + self.__dict__['_options'] = {} + self.__dict__['_parse_callbacks'] = [] + self.define("help", type=bool, help="show this help information", + callback=self._help_callback) + + def _normalize_name(self, name): + return name.replace('_', '-') + + def __getattr__(self, name): + name = self._normalize_name(name) + if isinstance(self._options.get(name), _Option): + return self._options[name].value() + raise AttributeError("Unrecognized option %r" % name) + + def __setattr__(self, name, value): + name = self._normalize_name(name) + if isinstance(self._options.get(name), _Option): + return self._options[name].set(value) + raise AttributeError("Unrecognized option %r" % name) + + def __iter__(self): + return (opt.name for opt in self._options.values()) + + def __contains__(self, name): + name = self._normalize_name(name) + return name in self._options + + def __getitem__(self, name): + name = self._normalize_name(name) + return self._options[name].value() + + def items(self): + """A sequence of (name, value) pairs. + + .. 
versionadded:: 3.1 + """ + return [(opt.name, opt.value()) for name, opt in self._options.items()] + + def groups(self): + """The set of option-groups created by ``define``. + + .. versionadded:: 3.1 + """ + return set(opt.group_name for opt in self._options.values()) + + def group_dict(self, group): + """The names and values of options in a group. + + Useful for copying options into Application settings:: + + from tornado.options import define, parse_command_line, options + + define('template_path', group='application') + define('static_path', group='application') + + parse_command_line() + + application = Application( + handlers, **options.group_dict('application')) + + .. versionadded:: 3.1 + """ + return dict( + (opt.name, opt.value()) for name, opt in self._options.items() + if not group or group == opt.group_name) + + def as_dict(self): + """The names and values of all options. + + .. versionadded:: 3.1 + """ + return dict( + (opt.name, opt.value()) for name, opt in self._options.items()) + + def define(self, name, default=None, type=None, help=None, metavar=None, + multiple=False, group=None, callback=None): + """Defines a new command line option. + + If ``type`` is given (one of str, float, int, datetime, or timedelta) + or can be inferred from the ``default``, we parse the command line + arguments based on the given type. If ``multiple`` is True, we accept + comma-separated values, and the option value is always a list. + + For multi-value integers, we also accept the syntax ``x:y``, which + turns into ``range(x, y)`` - very useful for long integer ranges. + + ``help`` and ``metavar`` are used to construct the + automatically generated command line help string. The help + message is formatted like:: + + --name=METAVAR help string + + ``group`` is used to group the defined options in logical + groups. By default, command line options are grouped by the + file in which they are defined. + + Command line option names must be unique globally. They can be parsed + from the command line with `parse_command_line` or parsed from a + config file with `parse_config_file`. + + If a ``callback`` is given, it will be run with the new value whenever + the option is changed. This can be used to combine command-line + and file-based options:: + + define("config", type=str, help="path to config file", + callback=lambda path: parse_config_file(path, final=False)) + + With this definition, options in the file specified by ``--config`` will + override options set earlier on the command line, but can be overridden + by later flags. + """ + if name in self._options: + raise Error("Option %r already defined in %s" % + (name, self._options[name].file_name)) + frame = sys._getframe(0) + options_file = frame.f_code.co_filename + + # Can be called directly, or through top level define() fn, in which + # case, step up above that frame to look for real caller. 
+ if (frame.f_back.f_code.co_filename == options_file and + frame.f_back.f_code.co_name == 'define'): + frame = frame.f_back + + file_name = frame.f_back.f_code.co_filename + if file_name == options_file: + file_name = "" + if type is None: + if not multiple and default is not None: + type = default.__class__ + else: + type = str + if group: + group_name = group + else: + group_name = file_name + normalized = self._normalize_name(name) + option = _Option(name, file_name=file_name, + default=default, type=type, help=help, + metavar=metavar, multiple=multiple, + group_name=group_name, + callback=callback) + self._options[normalized] = option + + def parse_command_line(self, args=None, final=True): + """Parses all options given on the command line (defaults to + `sys.argv`). + + Note that ``args[0]`` is ignored since it is the program name + in `sys.argv`. + + We return a list of all arguments that are not parsed as options. + + If ``final`` is ``False``, parse callbacks will not be run. + This is useful for applications that wish to combine configurations + from multiple sources. + """ + if args is None: + args = sys.argv + remaining = [] + for i in range(1, len(args)): + # All things after the last option are command line arguments + if not args[i].startswith("-"): + remaining = args[i:] + break + if args[i] == "--": + remaining = args[i + 1:] + break + arg = args[i].lstrip("-") + name, equals, value = arg.partition("=") + name = self._normalize_name(name) + if name not in self._options: + self.print_help() + raise Error('Unrecognized command line option: %r' % name) + option = self._options[name] + if not equals: + if option.type == bool: + value = "true" + else: + raise Error('Option %r requires a value' % name) + option.parse(value) + + if final: + self.run_parse_callbacks() + + return remaining + + def parse_config_file(self, path, final=True): + """Parses and loads the Python config file at the given path. + + If ``final`` is ``False``, parse callbacks will not be run. + This is useful for applications that wish to combine configurations + from multiple sources. + + .. versionchanged:: 4.1 + Config files are now always interpreted as utf-8 instead of + the system default encoding. + """ + config = {} + with open(path, 'rb') as f: + exec_in(native_str(f.read()), config, config) + for name in config: + normalized = self._normalize_name(name) + if normalized in self._options: + self._options[normalized].set(config[name]) + + if final: + self.run_parse_callbacks() + + def print_help(self, file=None): + """Prints all the command line options to stderr (or another file).""" + if file is None: + file = sys.stderr + print("Usage: %s [OPTIONS]" % sys.argv[0], file=file) + print("\nOptions:\n", file=file) + by_group = {} + for option in self._options.values(): + by_group.setdefault(option.group_name, []).append(option) + + for filename, o in sorted(by_group.items()): + if filename: + print("\n%s options:\n" % os.path.normpath(filename), file=file) + o.sort(key=lambda option: option.name) + for option in o: + # Always print names with dashes in a CLI context. 
+ prefix = self._normalize_name(option.name) + if option.metavar: + prefix += "=" + option.metavar + description = option.help or "" + if option.default is not None and option.default != '': + description += " (default %s)" % option.default + lines = textwrap.wrap(description, 79 - 35) + if len(prefix) > 30 or len(lines) == 0: + lines.insert(0, '') + print(" --%-30s %s" % (prefix, lines[0]), file=file) + for line in lines[1:]: + print("%-34s %s" % (' ', line), file=file) + print(file=file) + + def _help_callback(self, value): + if value: + self.print_help() + sys.exit(0) + + def add_parse_callback(self, callback): + """Adds a parse callback, to be invoked when option parsing is done.""" + self._parse_callbacks.append(stack_context.wrap(callback)) + + def run_parse_callbacks(self): + for callback in self._parse_callbacks: + callback() + + def mockable(self): + """Returns a wrapper around self that is compatible with + `mock.patch <unittest.mock.patch>`. + + The `mock.patch <unittest.mock.patch>` function (included in + the standard library `unittest.mock` package since Python 3.3, + or in the third-party ``mock`` package for older versions of + Python) is incompatible with objects like ``options`` that + override ``__getattr__`` and ``__setattr__``. This function + returns an object that can be used with `mock.patch.object + <unittest.mock.patch.object>` to modify option values:: + + with mock.patch.object(options.mockable(), 'name', value): + assert options.name == value + """ + return _Mockable(self) + + +class _Mockable(object): + """`mock.patch` compatible wrapper for `OptionParser`. + + As of ``mock`` version 1.0.1, when an object uses ``__getattr__`` + hooks instead of ``__dict__``, ``patch.__exit__`` tries to delete + the attribute it set instead of setting a new one (assuming that + the object does not capture ``__setattr__``, so the patch + created a new attribute in ``__dict__``). + + _Mockable's getattr and setattr pass through to the underlying + OptionParser, and delattr undoes the effect of a previous setattr.
+ """ + def __init__(self, options): + # Modify __dict__ directly to bypass __setattr__ + self.__dict__['_options'] = options + self.__dict__['_originals'] = {} + + def __getattr__(self, name): + return getattr(self._options, name) + + def __setattr__(self, name, value): + assert name not in self._originals, "don't reuse mockable objects" + self._originals[name] = getattr(self._options, name) + setattr(self._options, name, value) + + def __delattr__(self, name): + setattr(self._options, name, self._originals.pop(name)) + + +class _Option(object): + UNSET = object() + + def __init__(self, name, default=None, type=basestring_type, help=None, + metavar=None, multiple=False, file_name=None, group_name=None, + callback=None): + if default is None and multiple: + default = [] + self.name = name + self.type = type + self.help = help + self.metavar = metavar + self.multiple = multiple + self.file_name = file_name + self.group_name = group_name + self.callback = callback + self.default = default + self._value = _Option.UNSET + + def value(self): + return self.default if self._value is _Option.UNSET else self._value + + def parse(self, value): + _parse = { + datetime.datetime: self._parse_datetime, + datetime.timedelta: self._parse_timedelta, + bool: self._parse_bool, + basestring_type: self._parse_string, + }.get(self.type, self.type) + if self.multiple: + self._value = [] + for part in value.split(","): + if issubclass(self.type, numbers.Integral): + # allow ranges of the form X:Y (inclusive at both ends) + lo, _, hi = part.partition(":") + lo = _parse(lo) + hi = _parse(hi) if hi else lo + self._value.extend(range(lo, hi + 1)) + else: + self._value.append(_parse(part)) + else: + self._value = _parse(value) + if self.callback is not None: + self.callback(self._value) + return self.value() + + def set(self, value): + if self.multiple: + if not isinstance(value, list): + raise Error("Option %r is required to be a list of %s" % + (self.name, self.type.__name__)) + for item in value: + if item is not None and not isinstance(item, self.type): + raise Error("Option %r is required to be a list of %s" % + (self.name, self.type.__name__)) + else: + if value is not None and not isinstance(value, self.type): + raise Error("Option %r is required to be a %s (%s given)" % + (self.name, self.type.__name__, type(value))) + self._value = value + if self.callback is not None: + self.callback(self._value) + + # Supported date/time formats in our options + _DATETIME_FORMATS = [ + "%a %b %d %H:%M:%S %Y", + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%d %H:%M", + "%Y-%m-%dT%H:%M", + "%Y%m%d %H:%M:%S", + "%Y%m%d %H:%M", + "%Y-%m-%d", + "%Y%m%d", + "%H:%M:%S", + "%H:%M", + ] + + def _parse_datetime(self, value): + for format in self._DATETIME_FORMATS: + try: + return datetime.datetime.strptime(value, format) + except ValueError: + pass + raise Error('Unrecognized date/time format: %r' % value) + + _TIMEDELTA_ABBREV_DICT = { + 'h': 'hours', + 'm': 'minutes', + 'min': 'minutes', + 's': 'seconds', + 'sec': 'seconds', + 'ms': 'milliseconds', + 'us': 'microseconds', + 'd': 'days', + 'w': 'weeks', + } + + _FLOAT_PATTERN = r'[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?' 
+ + _TIMEDELTA_PATTERN = re.compile( + r'\s*(%s)\s*(\w*)\s*' % _FLOAT_PATTERN, re.IGNORECASE) + + def _parse_timedelta(self, value): + try: + sum = datetime.timedelta() + start = 0 + while start < len(value): + m = self._TIMEDELTA_PATTERN.match(value, start) + if not m: + raise Exception() + num = float(m.group(1)) + units = m.group(2) or 'seconds' + units = self._TIMEDELTA_ABBREV_DICT.get(units, units) + sum += datetime.timedelta(**{units: num}) + start = m.end() + return sum + except Exception: + raise + + def _parse_bool(self, value): + return value.lower() not in ("false", "0", "f") + + def _parse_string(self, value): + return _unicode(value) + + +options = OptionParser() +"""Global options object. + +All defined options are available as attributes on this object. +""" + + +def define(name, default=None, type=None, help=None, metavar=None, + multiple=False, group=None, callback=None): + """Defines an option in the global namespace. + + See `OptionParser.define`. + """ + return options.define(name, default=default, type=type, help=help, + metavar=metavar, multiple=multiple, group=group, + callback=callback) + + +def parse_command_line(args=None, final=True): + """Parses global options from the command line. + + See `OptionParser.parse_command_line`. + """ + return options.parse_command_line(args, final=final) + + +def parse_config_file(path, final=True): + """Parses global options from a config file. + + See `OptionParser.parse_config_file`. + """ + return options.parse_config_file(path, final=final) + + +def print_help(file=None): + """Prints all the command line options to stderr (or another file). + + See `OptionParser.print_help`. + """ + return options.print_help(file) + + +def add_parse_callback(callback): + """Adds a parse callback, to be invoked when option parsing is done. + + See `OptionParser.add_parse_callback` + """ + options.add_parse_callback(callback) + + +# Default options +define_logging_options(options) diff --git a/server/www/packages/packages-common/tornado/platform/__init__.py b/server/www/packages/packages-common/tornado/platform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/www/packages/packages-common/tornado/platform/asyncio.py b/server/www/packages/packages-common/tornado/platform/asyncio.py new file mode 100644 index 0000000..bf0428e --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/asyncio.py @@ -0,0 +1,216 @@ +"""Bridges between the `asyncio` module and Tornado IOLoop. + +.. versionadded:: 3.2 + +This module integrates Tornado with the ``asyncio`` module introduced +in Python 3.4 (and available `as a separate download +<https://pypi.python.org/pypi/asyncio>`_ for Python 3.3). This makes +it possible to combine the two libraries on the same event loop. + +Most applications should use `AsyncIOMainLoop` to run Tornado on the +default ``asyncio`` event loop. Applications that need to run event +loops on multiple threads may use `AsyncIOLoop` to create multiple +loops. + +.. note:: + + Tornado requires the `~asyncio.BaseEventLoop.add_reader` family of methods, + so it is not compatible with the `~asyncio.ProactorEventLoop` on Windows. + Use the `~asyncio.SelectorEventLoop` instead. +""" + +from __future__ import absolute_import, division, print_function, with_statement +import functools + +import tornado.concurrent +from tornado.gen import convert_yielded +from tornado.ioloop import IOLoop +from tornado import stack_context + +try: + # Import the real asyncio module for py33+ first. Older versions of the + # trollius backport also use this name.
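Putting the parsing machinery above together, a self-contained sketch on a throwaway ``OptionParser`` (names and values are illustrative; note the inclusive ``x:y`` integer-range syntax and the dash/underscore equivalence)::

    from datetime import timedelta
    from tornado.options import OptionParser

    p = OptionParser()
    p.define("port", default=8000, type=int, help="listen port")
    p.define("poll_interval", type=timedelta, default=timedelta(seconds=5))
    p.define("shards", type=int, multiple=True)

    # args[0] is skipped as the program name; trailing non-options come back.
    rest = p.parse_command_line(
        ["prog", "--port=8080", "--poll-interval=1h 30m",
         "--shards=1,3:5", "run"])
    assert p.port == 8080
    assert p.poll_interval == timedelta(hours=1, minutes=30)
    assert p.shards == [1, 3, 4, 5]  # "3:5" expands inclusively
    assert rest == ["run"]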
+ import asyncio +except ImportError as e: + # Asyncio itself isn't available; see if trollius is (backport to py26+). + try: + import trollius as asyncio + except ImportError: + # Re-raise the original asyncio error, not the trollius one. + raise e + + +class BaseAsyncIOLoop(IOLoop): + def initialize(self, asyncio_loop, close_loop=False, **kwargs): + super(BaseAsyncIOLoop, self).initialize(**kwargs) + self.asyncio_loop = asyncio_loop + self.close_loop = close_loop + # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler) + self.handlers = {} + # Set of fds listening for reads/writes + self.readers = set() + self.writers = set() + self.closing = False + + def close(self, all_fds=False): + self.closing = True + for fd in list(self.handlers): + fileobj, handler_func = self.handlers[fd] + self.remove_handler(fd) + if all_fds: + self.close_fd(fileobj) + if self.close_loop: + self.asyncio_loop.close() + + def add_handler(self, fd, handler, events): + fd, fileobj = self.split_fd(fd) + if fd in self.handlers: + raise ValueError("fd %s added twice" % fd) + self.handlers[fd] = (fileobj, stack_context.wrap(handler)) + if events & IOLoop.READ: + self.asyncio_loop.add_reader( + fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + if events & IOLoop.WRITE: + self.asyncio_loop.add_writer( + fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + + def update_handler(self, fd, events): + fd, fileobj = self.split_fd(fd) + if events & IOLoop.READ: + if fd not in self.readers: + self.asyncio_loop.add_reader( + fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + else: + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if events & IOLoop.WRITE: + if fd not in self.writers: + self.asyncio_loop.add_writer( + fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + else: + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + self.writers.remove(fd) + + def remove_handler(self, fd): + fd, fileobj = self.split_fd(fd) + if fd not in self.handlers: + return + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + self.writers.remove(fd) + del self.handlers[fd] + + def _handle_events(self, fd, events): + fileobj, handler_func = self.handlers[fd] + handler_func(fileobj, events) + + def start(self): + old_current = IOLoop.current(instance=False) + try: + self._setup_logging() + self.make_current() + self.asyncio_loop.run_forever() + finally: + if old_current is None: + IOLoop.clear_current() + else: + old_current.make_current() + + def stop(self): + self.asyncio_loop.stop() + + def call_at(self, when, callback, *args, **kwargs): + # asyncio.call_at supports *args but not **kwargs, so bind them here. + # We do not synchronize self.time and asyncio_loop.time, so + # convert from absolute to relative. 
+ return self.asyncio_loop.call_later( + max(0, when - self.time()), self._run_callback, + functools.partial(stack_context.wrap(callback), *args, **kwargs)) + + def remove_timeout(self, timeout): + timeout.cancel() + + def add_callback(self, callback, *args, **kwargs): + if self.closing: + raise RuntimeError("IOLoop is closing") + self.asyncio_loop.call_soon_threadsafe( + self._run_callback, + functools.partial(stack_context.wrap(callback), *args, **kwargs)) + + add_callback_from_signal = add_callback + + +class AsyncIOMainLoop(BaseAsyncIOLoop): + """``AsyncIOMainLoop`` creates an `.IOLoop` that corresponds to the + current ``asyncio`` event loop (i.e. the one returned by + ``asyncio.get_event_loop()``). Recommended usage:: + + from tornado.platform.asyncio import AsyncIOMainLoop + import asyncio + AsyncIOMainLoop().install() + asyncio.get_event_loop().run_forever() + """ + def initialize(self, **kwargs): + super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), + close_loop=False, **kwargs) + + +class AsyncIOLoop(BaseAsyncIOLoop): + """``AsyncIOLoop`` is an `.IOLoop` that runs on an ``asyncio`` event loop. + This class follows the usual Tornado semantics for creating new + ``IOLoops``; these loops are not necessarily related to the + ``asyncio`` default event loop. Recommended usage:: + + from tornado.ioloop import IOLoop + IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop') + IOLoop.current().start() + + Each ``AsyncIOLoop`` creates a new ``asyncio.EventLoop``; this object + can be accessed with the ``asyncio_loop`` attribute. + """ + def initialize(self, **kwargs): + loop = asyncio.new_event_loop() + try: + super(AsyncIOLoop, self).initialize(loop, close_loop=True, **kwargs) + except Exception: + # If initialize() does not succeed (taking ownership of the loop), + # we have to close it. + loop.close() + raise + + +def to_tornado_future(asyncio_future): + """Convert an `asyncio.Future` to a `tornado.concurrent.Future`. + + .. versionadded:: 4.1 + """ + tf = tornado.concurrent.Future() + tornado.concurrent.chain_future(asyncio_future, tf) + return tf + + +def to_asyncio_future(tornado_future): + """Convert a Tornado yieldable object to an `asyncio.Future`. + + .. versionadded:: 4.1 + + .. versionchanged:: 4.3 + Now accepts any yieldable object, not just + `tornado.concurrent.Future`. + """ + tornado_future = convert_yielded(tornado_future) + af = asyncio.Future() + tornado.concurrent.chain_future(tornado_future, af) + return af + +if hasattr(convert_yielded, 'register'): + convert_yielded.register(asyncio.Future, to_tornado_future) diff --git a/server/www/packages/packages-common/tornado/platform/auto.py b/server/www/packages/packages-common/tornado/platform/auto.py new file mode 100644 index 0000000..fc40c9d --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/auto.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Implementation of platform-specific functionality. 
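On Python 3.4+ (real ``asyncio`` rather than the trollius backport), the pieces above combine into a short bridging sketch::

    import asyncio

    from tornado import gen
    from tornado.platform.asyncio import AsyncIOMainLoop, to_asyncio_future

    AsyncIOMainLoop().install()  # Tornado callbacks now run on asyncio's loop

    @gen.coroutine
    def tornado_task():
        raise gen.Return(42)

    # Drive a Tornado coroutine to completion from the asyncio side:
    loop = asyncio.get_event_loop()
    assert loop.run_until_complete(to_asyncio_future(tornado_task())) == 42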
+ +For each function or class described in `tornado.platform.interface`, +the appropriate platform-specific implementation exists in this module. +Most code that needs access to this functionality should do e.g.:: + + from tornado.platform.auto import set_close_exec +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import os + +if 'APPENGINE_RUNTIME' in os.environ: + from tornado.platform.common import Waker + + def set_close_exec(fd): + pass +elif os.name == 'nt': + from tornado.platform.common import Waker + from tornado.platform.windows import set_close_exec +else: + from tornado.platform.posix import set_close_exec, Waker + +try: + # monotime monkey-patches the time module to have a monotonic function + # in versions of python before 3.3. + import monotime + # Silence pyflakes warning about this unused import + monotime +except ImportError: + pass +try: + from time import monotonic as monotonic_time +except ImportError: + monotonic_time = None + +__all__ = ['Waker', 'set_close_exec', 'monotonic_time'] diff --git a/server/www/packages/packages-common/tornado/platform/caresresolver.py b/server/www/packages/packages-common/tornado/platform/caresresolver.py new file mode 100644 index 0000000..5559614 --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/caresresolver.py @@ -0,0 +1,79 @@ +from __future__ import absolute_import, division, print_function, with_statement +import pycares +import socket + +from tornado import gen +from tornado.ioloop import IOLoop +from tornado.netutil import Resolver, is_valid_ip + + +class CaresResolver(Resolver): + """Name resolver based on the c-ares library. + + This is a non-blocking and non-threaded resolver. It may not produce + the same results as the system resolver, but can be used for non-blocking + resolution when threads cannot be used. + + c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``, + so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is + the default for ``tornado.simple_httpclient``, but other libraries + may default to ``AF_UNSPEC``. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. 
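A small POSIX-side sketch of the auto-selected helper (on Windows the implementation operates on handles instead, so this is not portable as written)::

    import os
    from tornado.platform.auto import set_close_exec

    r, w = os.pipe()
    set_close_exec(r)  # these descriptors won't leak into exec()'d children
    set_close_exec(w)
    os.close(r)
    os.close(w)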
+ """ + def initialize(self, io_loop=None): + self.io_loop = io_loop or IOLoop.current() + self.channel = pycares.Channel(sock_state_cb=self._sock_state_cb) + self.fds = {} + + def _sock_state_cb(self, fd, readable, writable): + state = ((IOLoop.READ if readable else 0) | + (IOLoop.WRITE if writable else 0)) + if not state: + self.io_loop.remove_handler(fd) + del self.fds[fd] + elif fd in self.fds: + self.io_loop.update_handler(fd, state) + self.fds[fd] = state + else: + self.io_loop.add_handler(fd, self._handle_events, state) + self.fds[fd] = state + + def _handle_events(self, fd, events): + read_fd = pycares.ARES_SOCKET_BAD + write_fd = pycares.ARES_SOCKET_BAD + if events & IOLoop.READ: + read_fd = fd + if events & IOLoop.WRITE: + write_fd = fd + self.channel.process_fd(read_fd, write_fd) + + @gen.coroutine + def resolve(self, host, port, family=0): + if is_valid_ip(host): + addresses = [host] + else: + # gethostbyname doesn't take callback as a kwarg + self.channel.gethostbyname(host, family, (yield gen.Callback(1))) + callback_args = yield gen.Wait(1) + assert isinstance(callback_args, gen.Arguments) + assert not callback_args.kwargs + result, error = callback_args.args + if error: + raise Exception('C-Ares returned error %s: %s while resolving %s' % + (error, pycares.errno.strerror(error), host)) + addresses = result.addresses + addrinfo = [] + for address in addresses: + if '.' in address: + address_family = socket.AF_INET + elif ':' in address: + address_family = socket.AF_INET6 + else: + address_family = socket.AF_UNSPEC + if family != socket.AF_UNSPEC and family != address_family: + raise Exception('Requested socket family %d but got %d' % + (family, address_family)) + addrinfo.append((address_family, (address, port))) + raise gen.Return(addrinfo) diff --git a/server/www/packages/packages-common/tornado/platform/common.py b/server/www/packages/packages-common/tornado/platform/common.py new file mode 100644 index 0000000..b409a90 --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/common.py @@ -0,0 +1,92 @@ +"""Lowest-common-denominator implementations of platform functionality.""" +from __future__ import absolute_import, division, print_function, with_statement + +import errno +import socket + +from tornado.platform import interface + + +class Waker(interface.Waker): + """Create an OS independent asynchronous pipe. + + For use on platforms that don't have os.pipe() (or where pipes cannot + be passed to select()), but do have sockets. This includes Windows + and Jython. + """ + def __init__(self): + # Based on Zope select_trigger.py: + # https://github.com/zopefoundation/Zope/blob/master/src/ZServer/medusa/thread/select_trigger.py + + self.writer = socket.socket() + # Disable buffering -- pulling the trigger sends 1 byte, + # and we want that sent immediately, to wake up ASAP. + self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + count = 0 + while 1: + count += 1 + # Bind to a local port; for efficiency, let the OS pick + # a free port for us. + # Unfortunately, stress tests showed that we may not + # be able to connect to that port ("Address already in + # use") despite that the OS picked it. This appears + # to be a race bug in the Windows socket implementation. + # So we loop until a connect() succeeds (almost always + # on the first try). See the long thread at + # http://mail.zope.org/pipermail/zope/2005-July/160433.html + # for hideous details. 
+ a = socket.socket() + a.bind(("127.0.0.1", 0)) + a.listen(1) + connect_address = a.getsockname() # assigned (host, port) pair + try: + self.writer.connect(connect_address) + break # success + except socket.error as detail: + if (not hasattr(errno, 'WSAEADDRINUSE') or + detail[0] != errno.WSAEADDRINUSE): + # "Address already in use" is the only error + # I've seen on two WinXP Pro SP2 boxes, under + # Pythons 2.3.5 and 2.4.1. + raise + # (10048, 'Address already in use') + # assert count <= 2 # never triggered in Tim's tests + if count >= 10: # I've never seen it go above 2 + a.close() + self.writer.close() + raise socket.error("Cannot bind trigger!") + # Close `a` and try again. Note: I originally put a short + # sleep() here, but it didn't appear to help or hurt. + a.close() + + self.reader, addr = a.accept() + self.reader.setblocking(0) + self.writer.setblocking(0) + a.close() + self.reader_fd = self.reader.fileno() + + def fileno(self): + return self.reader.fileno() + + def write_fileno(self): + return self.writer.fileno() + + def wake(self): + try: + self.writer.send(b"x") + except (IOError, socket.error): + pass + + def consume(self): + try: + while True: + result = self.reader.recv(1024) + if not result: + break + except (IOError, socket.error): + pass + + def close(self): + self.reader.close() + self.writer.close() diff --git a/server/www/packages/packages-common/tornado/platform/epoll.py b/server/www/packages/packages-common/tornado/platform/epoll.py new file mode 100644 index 0000000..b08cc62 --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/epoll.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""EPoll-based IOLoop implementation for Linux systems.""" +from __future__ import absolute_import, division, print_function, with_statement + +import select + +from tornado.ioloop import PollIOLoop + + +class EPollIOLoop(PollIOLoop): + def initialize(self, **kwargs): + super(EPollIOLoop, self).initialize(impl=select.epoll(), **kwargs) diff --git a/server/www/packages/packages-common/tornado/platform/interface.py b/server/www/packages/packages-common/tornado/platform/interface.py new file mode 100644 index 0000000..07da6ba --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/interface.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Interfaces for platform-specific functionality. 
+ +This module exists primarily for documentation purposes and as base classes +for other tornado.platform modules. Most code should import the appropriate +implementation from `tornado.platform.auto`. +""" + +from __future__ import absolute_import, division, print_function, with_statement + + +def set_close_exec(fd): + """Sets the close-on-exec bit (``FD_CLOEXEC``) for a file descriptor.""" + raise NotImplementedError() + + +class Waker(object): + """A socket-like object that can wake another thread from ``select()``. + + The `~tornado.ioloop.IOLoop` will add the Waker's `fileno()` to + its ``select`` (or ``epoll`` or ``kqueue``) calls. When another + thread wants to wake up the loop, it calls `wake`. Once it has woken + up, it will call `consume` to do any necessary per-wake cleanup. When + the ``IOLoop`` is closed, it closes its waker too. + """ + def fileno(self): + """Returns the read file descriptor for this waker. + + Must be suitable for use with ``select()`` or equivalent on the + local platform. + """ + raise NotImplementedError() + + def write_fileno(self): + """Returns the write file descriptor for this waker.""" + raise NotImplementedError() + + def wake(self): + """Triggers activity on the waker's file descriptor.""" + raise NotImplementedError() + + def consume(self): + """Called after the loop has woken up to do any necessary cleanup.""" + raise NotImplementedError() + + def close(self): + """Closes the waker's file descriptor(s).""" + raise NotImplementedError() diff --git a/server/www/packages/packages-common/tornado/platform/kqueue.py b/server/www/packages/packages-common/tornado/platform/kqueue.py new file mode 100644 index 0000000..f8f3e4a --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/kqueue.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License.
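The contract these methods describe can be exercised through the auto-selected implementation; a sketch (single-threaded here purely for illustration)::

    from tornado.platform.auto import Waker

    w = Waker()
    w.wake()     # normally called from another thread: writes one byte
    w.consume()  # called on the loop thread after select() returns: drains it
    w.close()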
+"""KQueue-based IOLoop implementation for BSD/Mac systems.""" +from __future__ import absolute_import, division, print_function, with_statement + +import select + +from tornado.ioloop import IOLoop, PollIOLoop + +assert hasattr(select, 'kqueue'), 'kqueue not supported' + + +class _KQueue(object): + """A kqueue-based event loop for BSD/Mac systems.""" + def __init__(self): + self._kqueue = select.kqueue() + self._active = {} + + def fileno(self): + return self._kqueue.fileno() + + def close(self): + self._kqueue.close() + + def register(self, fd, events): + if fd in self._active: + raise IOError("fd %s already registered" % fd) + self._control(fd, events, select.KQ_EV_ADD) + self._active[fd] = events + + def modify(self, fd, events): + self.unregister(fd) + self.register(fd, events) + + def unregister(self, fd): + events = self._active.pop(fd) + self._control(fd, events, select.KQ_EV_DELETE) + + def _control(self, fd, events, flags): + kevents = [] + if events & IOLoop.WRITE: + kevents.append(select.kevent( + fd, filter=select.KQ_FILTER_WRITE, flags=flags)) + if events & IOLoop.READ: + kevents.append(select.kevent( + fd, filter=select.KQ_FILTER_READ, flags=flags)) + # Even though control() takes a list, it seems to return EINVAL + # on Mac OS X (10.6) when there is more than one event in the list. + for kevent in kevents: + self._kqueue.control([kevent], 0) + + def poll(self, timeout): + kevents = self._kqueue.control(None, 1000, timeout) + events = {} + for kevent in kevents: + fd = kevent.ident + if kevent.filter == select.KQ_FILTER_READ: + events[fd] = events.get(fd, 0) | IOLoop.READ + if kevent.filter == select.KQ_FILTER_WRITE: + if kevent.flags & select.KQ_EV_EOF: + # If an asynchronous connection is refused, kqueue + # returns a write event with the EOF flag set. + # Turn this into an error for consistency with the + # other IOLoop implementations. + # Note that for read events, EOF may be returned before + # all data has been consumed from the socket buffer, + # so we only check for EOF on write events. + events[fd] = IOLoop.ERROR + else: + events[fd] = events.get(fd, 0) | IOLoop.WRITE + if kevent.flags & select.KQ_EV_ERROR: + events[fd] = events.get(fd, 0) | IOLoop.ERROR + return events.items() + + +class KQueueIOLoop(PollIOLoop): + def initialize(self, **kwargs): + super(KQueueIOLoop, self).initialize(impl=_KQueue(), **kwargs) diff --git a/server/www/packages/packages-common/tornado/platform/posix.py b/server/www/packages/packages-common/tornado/platform/posix.py new file mode 100644 index 0000000..41a5794 --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/posix.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Posix implementations of platform-specific functionality.""" + +from __future__ import absolute_import, division, print_function, with_statement + +import fcntl +import os + +from tornado.platform import interface + + +def set_close_exec(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) + + +def _set_nonblocking(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) + + +class Waker(interface.Waker): + def __init__(self): + r, w = os.pipe() + _set_nonblocking(r) + _set_nonblocking(w) + set_close_exec(r) + set_close_exec(w) + self.reader = os.fdopen(r, "rb", 0) + self.writer = os.fdopen(w, "wb", 0) + + def fileno(self): + return self.reader.fileno() + + def write_fileno(self): + return self.writer.fileno() + + def wake(self): + try: + self.writer.write(b"x") + except IOError: + pass + + def consume(self): + try: + while True: + result = self.reader.read() + if not result: + break + except IOError: + pass + + def close(self): + self.reader.close() + self.writer.close() diff --git a/server/www/packages/packages-common/tornado/platform/select.py b/server/www/packages/packages-common/tornado/platform/select.py new file mode 100644 index 0000000..db52ef9 --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/select.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Select-based IOLoop implementation. + +Used as a fallback for systems that don't support epoll or kqueue. +""" +from __future__ import absolute_import, division, print_function, with_statement + +import select + +from tornado.ioloop import IOLoop, PollIOLoop + + +class _Select(object): + """A simple, select()-based IOLoop implementation for non-Linux systems""" + def __init__(self): + self.read_fds = set() + self.write_fds = set() + self.error_fds = set() + self.fd_sets = (self.read_fds, self.write_fds, self.error_fds) + + def close(self): + pass + + def register(self, fd, events): + if fd in self.read_fds or fd in self.write_fds or fd in self.error_fds: + raise IOError("fd %s already registered" % fd) + if events & IOLoop.READ: + self.read_fds.add(fd) + if events & IOLoop.WRITE: + self.write_fds.add(fd) + if events & IOLoop.ERROR: + self.error_fds.add(fd) + # Closed connections are reported as errors by epoll and kqueue, + # but as zero-byte reads by select, so when errors are requested + # we need to listen for both read and error. 
+ self.read_fds.add(fd) + + def modify(self, fd, events): + self.unregister(fd) + self.register(fd, events) + + def unregister(self, fd): + self.read_fds.discard(fd) + self.write_fds.discard(fd) + self.error_fds.discard(fd) + + def poll(self, timeout): + readable, writeable, errors = select.select( + self.read_fds, self.write_fds, self.error_fds, timeout) + events = {} + for fd in readable: + events[fd] = events.get(fd, 0) | IOLoop.READ + for fd in writeable: + events[fd] = events.get(fd, 0) | IOLoop.WRITE + for fd in errors: + events[fd] = events.get(fd, 0) | IOLoop.ERROR + return events.items() + + +class SelectIOLoop(PollIOLoop): + def initialize(self, **kwargs): + super(SelectIOLoop, self).initialize(impl=_Select(), **kwargs) diff --git a/server/www/packages/packages-common/tornado/platform/twisted.py b/server/www/packages/packages-common/tornado/platform/twisted.py new file mode 100644 index 0000000..d3a4e75 --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/twisted.py @@ -0,0 +1,585 @@ +# Author: Ovidiu Predescu +# Date: July 2011 +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Bridges between the Twisted reactor and Tornado IOLoop. + +This module lets you run applications and libraries written for +Twisted in a Tornado application. It can be used in two modes, +depending on which library's underlying event loop you want to use. + +This module has been tested with Twisted versions 11.0.0 and newer.
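The fallback loop above can also be forced explicitly, which is occasionally useful when debugging platform-specific event-loop behavior (normally the automatic epoll/kqueue choice is preferable)::

    from tornado.ioloop import IOLoop

    # Must run before the first IOLoop is instantiated.
    IOLoop.configure('tornado.platform.select.SelectIOLoop')

    loop = IOLoop.current()
    loop.call_later(0, loop.stop)  # trivial smoke test: start, then stop
    loop.start()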
+""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import functools +import numbers +import socket +import sys + +import twisted.internet.abstract +from twisted.internet.defer import Deferred +from twisted.internet.posixbase import PosixReactorBase +from twisted.internet.interfaces import \ + IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor +from twisted.python import failure, log +from twisted.internet import error +import twisted.names.cache +import twisted.names.client +import twisted.names.hosts +import twisted.names.resolve + +from zope.interface import implementer + +from tornado.concurrent import Future +from tornado.escape import utf8 +from tornado import gen +import tornado.ioloop +from tornado.log import app_log +from tornado.netutil import Resolver +from tornado.stack_context import NullContext, wrap +from tornado.ioloop import IOLoop +from tornado.util import timedelta_to_seconds + + +@implementer(IDelayedCall) +class TornadoDelayedCall(object): + """DelayedCall object for Tornado.""" + def __init__(self, reactor, seconds, f, *args, **kw): + self._reactor = reactor + self._func = functools.partial(f, *args, **kw) + self._time = self._reactor.seconds() + seconds + self._timeout = self._reactor._io_loop.add_timeout(self._time, + self._called) + self._active = True + + def _called(self): + self._active = False + self._reactor._removeDelayedCall(self) + try: + self._func() + except: + app_log.error("_called caught exception", exc_info=True) + + def getTime(self): + return self._time + + def cancel(self): + self._active = False + self._reactor._io_loop.remove_timeout(self._timeout) + self._reactor._removeDelayedCall(self) + + def delay(self, seconds): + self._reactor._io_loop.remove_timeout(self._timeout) + self._time += seconds + self._timeout = self._reactor._io_loop.add_timeout(self._time, + self._called) + + def reset(self, seconds): + self._reactor._io_loop.remove_timeout(self._timeout) + self._time = self._reactor.seconds() + seconds + self._timeout = self._reactor._io_loop.add_timeout(self._time, + self._called) + + def active(self): + return self._active + + +@implementer(IReactorTime, IReactorFDSet) +class TornadoReactor(PosixReactorBase): + """Twisted reactor built on the Tornado IOLoop. + + `TornadoReactor` implements the Twisted reactor interface on top of + the Tornado IOLoop. To use it, simply call `install` at the beginning + of the application:: + + import tornado.platform.twisted + tornado.platform.twisted.install() + from twisted.internet import reactor + + When the app is ready to start, call ``IOLoop.current().start()`` + instead of ``reactor.run()``. + + It is also possible to create a non-global reactor by calling + ``tornado.platform.twisted.TornadoReactor(io_loop)``. However, if + the `.IOLoop` and reactor are to be short-lived (such as those used in + unit tests), additional cleanup may be required. Specifically, it is + recommended to call:: + + reactor.fireSystemEvent('shutdown') + reactor.disconnectAll() + + before closing the `.IOLoop`. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. 
+ """ + def __init__(self, io_loop=None): + if not io_loop: + io_loop = tornado.ioloop.IOLoop.current() + self._io_loop = io_loop + self._readers = {} # map of reader objects to fd + self._writers = {} # map of writer objects to fd + self._fds = {} # a map of fd to a (reader, writer) tuple + self._delayedCalls = {} + PosixReactorBase.__init__(self) + self.addSystemEventTrigger('during', 'shutdown', self.crash) + + # IOLoop.start() bypasses some of the reactor initialization. + # Fire off the necessary events if they weren't already triggered + # by reactor.run(). + def start_if_necessary(): + if not self._started: + self.fireSystemEvent('startup') + self._io_loop.add_callback(start_if_necessary) + + # IReactorTime + def seconds(self): + return self._io_loop.time() + + def callLater(self, seconds, f, *args, **kw): + dc = TornadoDelayedCall(self, seconds, f, *args, **kw) + self._delayedCalls[dc] = True + return dc + + def getDelayedCalls(self): + return [x for x in self._delayedCalls if x._active] + + def _removeDelayedCall(self, dc): + if dc in self._delayedCalls: + del self._delayedCalls[dc] + + # IReactorThreads + def callFromThread(self, f, *args, **kw): + assert callable(f), "%s is not callable" % f + with NullContext(): + # This NullContext is mainly for an edge case when running + # TwistedIOLoop on top of a TornadoReactor. + # TwistedIOLoop.add_callback uses reactor.callFromThread and + # should not pick up additional StackContexts along the way. + self._io_loop.add_callback(f, *args, **kw) + + # We don't need the waker code from the super class, Tornado uses + # its own waker. + def installWaker(self): + pass + + def wakeUp(self): + pass + + # IReactorFDSet + def _invoke_callback(self, fd, events): + if fd not in self._fds: + return + (reader, writer) = self._fds[fd] + if reader: + err = None + if reader.fileno() == -1: + err = error.ConnectionLost() + elif events & IOLoop.READ: + err = log.callWithLogger(reader, reader.doRead) + if err is None and events & IOLoop.ERROR: + err = error.ConnectionLost() + if err is not None: + self.removeReader(reader) + reader.readConnectionLost(failure.Failure(err)) + if writer: + err = None + if writer.fileno() == -1: + err = error.ConnectionLost() + elif events & IOLoop.WRITE: + err = log.callWithLogger(writer, writer.doWrite) + if err is None and events & IOLoop.ERROR: + err = error.ConnectionLost() + if err is not None: + self.removeWriter(writer) + writer.writeConnectionLost(failure.Failure(err)) + + def addReader(self, reader): + if reader in self._readers: + # Don't add the reader if it's already there + return + fd = reader.fileno() + self._readers[reader] = fd + if fd in self._fds: + (_, writer) = self._fds[fd] + self._fds[fd] = (reader, writer) + if writer: + # We already registered this fd for write events, + # update it for read events as well. + self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE) + else: + with NullContext(): + self._fds[fd] = (reader, None) + self._io_loop.add_handler(fd, self._invoke_callback, + IOLoop.READ) + + def addWriter(self, writer): + if writer in self._writers: + return + fd = writer.fileno() + self._writers[writer] = fd + if fd in self._fds: + (reader, _) = self._fds[fd] + self._fds[fd] = (reader, writer) + if reader: + # We already registered this fd for read events, + # update it for write events as well. 
+ self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE) + else: + with NullContext(): + self._fds[fd] = (None, writer) + self._io_loop.add_handler(fd, self._invoke_callback, + IOLoop.WRITE) + + def removeReader(self, reader): + if reader in self._readers: + fd = self._readers.pop(reader) + (_, writer) = self._fds[fd] + if writer: + # We have a writer so we need to update the IOLoop for + # write events only. + self._fds[fd] = (None, writer) + self._io_loop.update_handler(fd, IOLoop.WRITE) + else: + # Since we have no writer registered, we remove the + # entry from _fds and unregister the handler from the + # IOLoop + del self._fds[fd] + self._io_loop.remove_handler(fd) + + def removeWriter(self, writer): + if writer in self._writers: + fd = self._writers.pop(writer) + (reader, _) = self._fds[fd] + if reader: + # We have a reader so we need to update the IOLoop for + # read events only. + self._fds[fd] = (reader, None) + self._io_loop.update_handler(fd, IOLoop.READ) + else: + # Since we have no reader registered, we remove the + # entry from the _fds and unregister the handler from + # the IOLoop. + del self._fds[fd] + self._io_loop.remove_handler(fd) + + def removeAll(self): + return self._removeAll(self._readers, self._writers) + + def getReaders(self): + return self._readers.keys() + + def getWriters(self): + return self._writers.keys() + + # The following functions are mainly used in twisted-style test cases; + # it is expected that most users of the TornadoReactor will call + # IOLoop.start() instead of Reactor.run(). + def stop(self): + PosixReactorBase.stop(self) + fire_shutdown = functools.partial(self.fireSystemEvent, "shutdown") + self._io_loop.add_callback(fire_shutdown) + + def crash(self): + PosixReactorBase.crash(self) + self._io_loop.stop() + + def doIteration(self, delay): + raise NotImplementedError("doIteration") + + def mainLoop(self): + # Since this class is intended to be used in applications + # where the top-level event loop is ``io_loop.start()`` rather + # than ``reactor.run()``, it is implemented a little + # differently than other Twisted reactors. We override + # ``mainLoop`` instead of ``doIteration`` and must implement + # timed call functionality on top of `.IOLoop.add_timeout` + # rather than using the implementation in + # ``PosixReactorBase``. + self._io_loop.start() + + +class _TestReactor(TornadoReactor): + """Subclass of TornadoReactor for use in unittests. + + This can't go in the test.py file because of import-order dependencies + with the Twisted reactor test builder. + """ + def __init__(self): + # always use a new ioloop + super(_TestReactor, self).__init__(IOLoop()) + + def listenTCP(self, port, factory, backlog=50, interface=''): + # default to localhost to avoid firewall prompts on the mac + if not interface: + interface = '127.0.0.1' + return super(_TestReactor, self).listenTCP( + port, factory, backlog=backlog, interface=interface) + + def listenUDP(self, port, protocol, interface='', maxPacketSize=8192): + if not interface: + interface = '127.0.0.1' + return super(_TestReactor, self).listenUDP( + port, protocol, interface=interface, maxPacketSize=maxPacketSize) + + +def install(io_loop=None): + """Install this package as the default Twisted reactor. + + ``install()`` must be called very early in the startup process, + before most other twisted-related imports. Conversely, because it + initializes the `.IOLoop`, it cannot be called before + `.fork_processes` or multi-process `~.TCPServer.start`. 
These + conflicting requirements make it difficult to use `.TornadoReactor` + in multi-process mode, and an external process manager such as + ``supervisord`` is recommended instead. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + + """ + if not io_loop: + io_loop = tornado.ioloop.IOLoop.current() + reactor = TornadoReactor(io_loop) + from twisted.internet.main import installReactor + installReactor(reactor) + return reactor + + +@implementer(IReadDescriptor, IWriteDescriptor) +class _FD(object): + def __init__(self, fd, fileobj, handler): + self.fd = fd + self.fileobj = fileobj + self.handler = handler + self.reading = False + self.writing = False + self.lost = False + + def fileno(self): + return self.fd + + def doRead(self): + if not self.lost: + self.handler(self.fileobj, tornado.ioloop.IOLoop.READ) + + def doWrite(self): + if not self.lost: + self.handler(self.fileobj, tornado.ioloop.IOLoop.WRITE) + + def connectionLost(self, reason): + if not self.lost: + self.handler(self.fileobj, tornado.ioloop.IOLoop.ERROR) + self.lost = True + + def logPrefix(self): + return '' + + +class TwistedIOLoop(tornado.ioloop.IOLoop): + """IOLoop implementation that runs on Twisted. + + `TwistedIOLoop` implements the Tornado IOLoop interface on top of + the Twisted reactor. Recommended usage:: + + from tornado.platform.twisted import TwistedIOLoop + from twisted.internet import reactor + TwistedIOLoop().install() + # Set up your tornado application as usual using `IOLoop.instance` + reactor.run() + + Uses the global Twisted reactor by default. To create multiple + ``TwistedIOLoops`` in the same process, you must pass a unique reactor + when constructing each one. + + Not compatible with `tornado.process.Subprocess.set_exit_callback` + because the ``SIGCHLD`` handlers used by Tornado and Twisted conflict + with each other. 
+ """ + def initialize(self, reactor=None, **kwargs): + super(TwistedIOLoop, self).initialize(**kwargs) + if reactor is None: + import twisted.internet.reactor + reactor = twisted.internet.reactor + self.reactor = reactor + self.fds = {} + + def close(self, all_fds=False): + fds = self.fds + self.reactor.removeAll() + for c in self.reactor.getDelayedCalls(): + c.cancel() + if all_fds: + for fd in fds.values(): + self.close_fd(fd.fileobj) + + def add_handler(self, fd, handler, events): + if fd in self.fds: + raise ValueError('fd %s added twice' % fd) + fd, fileobj = self.split_fd(fd) + self.fds[fd] = _FD(fd, fileobj, wrap(handler)) + if events & tornado.ioloop.IOLoop.READ: + self.fds[fd].reading = True + self.reactor.addReader(self.fds[fd]) + if events & tornado.ioloop.IOLoop.WRITE: + self.fds[fd].writing = True + self.reactor.addWriter(self.fds[fd]) + + def update_handler(self, fd, events): + fd, fileobj = self.split_fd(fd) + if events & tornado.ioloop.IOLoop.READ: + if not self.fds[fd].reading: + self.fds[fd].reading = True + self.reactor.addReader(self.fds[fd]) + else: + if self.fds[fd].reading: + self.fds[fd].reading = False + self.reactor.removeReader(self.fds[fd]) + if events & tornado.ioloop.IOLoop.WRITE: + if not self.fds[fd].writing: + self.fds[fd].writing = True + self.reactor.addWriter(self.fds[fd]) + else: + if self.fds[fd].writing: + self.fds[fd].writing = False + self.reactor.removeWriter(self.fds[fd]) + + def remove_handler(self, fd): + fd, fileobj = self.split_fd(fd) + if fd not in self.fds: + return + self.fds[fd].lost = True + if self.fds[fd].reading: + self.reactor.removeReader(self.fds[fd]) + if self.fds[fd].writing: + self.reactor.removeWriter(self.fds[fd]) + del self.fds[fd] + + def start(self): + old_current = IOLoop.current(instance=False) + try: + self._setup_logging() + self.make_current() + self.reactor.run() + finally: + if old_current is None: + IOLoop.clear_current() + else: + old_current.make_current() + + def stop(self): + self.reactor.crash() + + def add_timeout(self, deadline, callback, *args, **kwargs): + # This method could be simplified (since tornado 4.0) by + # overriding call_at instead of add_timeout, but we leave it + # for now as a test of backwards-compatibility. + if isinstance(deadline, numbers.Real): + delay = max(deadline - self.time(), 0) + elif isinstance(deadline, datetime.timedelta): + delay = timedelta_to_seconds(deadline) + else: + raise TypeError("Unsupported deadline %r") + return self.reactor.callLater( + delay, self._run_callback, + functools.partial(wrap(callback), *args, **kwargs)) + + def remove_timeout(self, timeout): + if timeout.active(): + timeout.cancel() + + def add_callback(self, callback, *args, **kwargs): + self.reactor.callFromThread( + self._run_callback, + functools.partial(wrap(callback), *args, **kwargs)) + + def add_callback_from_signal(self, callback, *args, **kwargs): + self.add_callback(callback, *args, **kwargs) + + +class TwistedResolver(Resolver): + """Twisted-based asynchronous resolver. + + This is a non-blocking and non-threaded resolver. It is + recommended only when threads cannot be used, since it has + limitations compared to the standard ``getaddrinfo``-based + `~tornado.netutil.Resolver` and + `~tornado.netutil.ThreadedResolver`. Specifically, it returns at + most one result, and arguments other than ``host`` and ``family`` + are ignored. It may fail to resolve when ``family`` is not + ``socket.AF_UNSPEC``. + + Requires Twisted 12.1 or newer. + + .. 
versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def initialize(self, io_loop=None): + self.io_loop = io_loop or IOLoop.current() + # partial copy of twisted.names.client.createResolver, which doesn't + # allow for a reactor to be passed in. + self.reactor = tornado.platform.twisted.TornadoReactor(io_loop) + + host_resolver = twisted.names.hosts.Resolver('/etc/hosts') + cache_resolver = twisted.names.cache.CacheResolver(reactor=self.reactor) + real_resolver = twisted.names.client.Resolver('/etc/resolv.conf', + reactor=self.reactor) + self.resolver = twisted.names.resolve.ResolverChain( + [host_resolver, cache_resolver, real_resolver]) + + @gen.coroutine + def resolve(self, host, port, family=0): + # getHostByName doesn't accept IP addresses, so if the input + # looks like an IP address just return it immediately. + if twisted.internet.abstract.isIPAddress(host): + resolved = host + resolved_family = socket.AF_INET + elif twisted.internet.abstract.isIPv6Address(host): + resolved = host + resolved_family = socket.AF_INET6 + else: + deferred = self.resolver.getHostByName(utf8(host)) + resolved = yield gen.Task(deferred.addBoth) + if isinstance(resolved, failure.Failure): + resolved.raiseException() + elif twisted.internet.abstract.isIPAddress(resolved): + resolved_family = socket.AF_INET + elif twisted.internet.abstract.isIPv6Address(resolved): + resolved_family = socket.AF_INET6 + else: + resolved_family = socket.AF_UNSPEC + if family != socket.AF_UNSPEC and family != resolved_family: + raise Exception('Requested socket family %d but got %d' % + (family, resolved_family)) + result = [ + (resolved_family, (resolved, port)), + ] + raise gen.Return(result) + +if hasattr(gen.convert_yielded, 'register'): + @gen.convert_yielded.register(Deferred) + def _(d): + f = Future() + + def errback(failure): + try: + failure.raiseException() + # Should never happen, but just in case + raise Exception("errback called without error") + except: + f.set_exc_info(sys.exc_info()) + d.addCallbacks(f.set_result, errback) + return f diff --git a/server/www/packages/packages-common/tornado/platform/windows.py b/server/www/packages/packages-common/tornado/platform/windows.py new file mode 100644 index 0000000..817bdca --- /dev/null +++ b/server/www/packages/packages-common/tornado/platform/windows.py @@ -0,0 +1,20 @@ +# NOTE: win32 support is currently experimental, and not recommended +# for production use. + + +from __future__ import absolute_import, division, print_function, with_statement +import ctypes +import ctypes.wintypes + +# See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx +SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation +SetHandleInformation.argtypes = (ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD, ctypes.wintypes.DWORD) +SetHandleInformation.restype = ctypes.wintypes.BOOL + +HANDLE_FLAG_INHERIT = 0x00000001 + + +def set_close_exec(fd): + success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0) + if not success: + raise ctypes.WinError() diff --git a/server/www/packages/packages-common/tornado/process.py b/server/www/packages/packages-common/tornado/process.py new file mode 100644 index 0000000..daa9677 --- /dev/null +++ b/server/www/packages/packages-common/tornado/process.py @@ -0,0 +1,357 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License.
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Utilities for working with multiple processes, including both forking +the server into multiple processes and managing subprocesses. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import errno +import os +import signal +import subprocess +import sys +import time + +from binascii import hexlify + +from tornado.concurrent import Future +from tornado import ioloop +from tornado.iostream import PipeIOStream +from tornado.log import gen_log +from tornado.platform.auto import set_close_exec +from tornado import stack_context +from tornado.util import errno_from_exception + +try: + import multiprocessing +except ImportError: + # Multiprocessing is not available on Google App Engine. + multiprocessing = None + +try: + long # py2 +except NameError: + long = int # py3 + + +# Re-export this exception for convenience. +try: + CalledProcessError = subprocess.CalledProcessError +except AttributeError: + # The subprocess module exists in Google App Engine, but is empty. + # This module isn't very useful in that case, but it should + # at least be importable. + if 'APPENGINE_RUNTIME' not in os.environ: + raise + + +def cpu_count(): + """Returns the number of processors on this machine.""" + if multiprocessing is None: + return 1 + try: + return multiprocessing.cpu_count() + except NotImplementedError: + pass + try: + return os.sysconf("SC_NPROCESSORS_CONF") + except ValueError: + pass + gen_log.error("Could not detect number of processors; assuming 1") + return 1 + + +def _reseed_random(): + if 'random' not in sys.modules: + return + import random + # If os.urandom is available, this method does the same thing as + # random.seed (at least as of python 2.6). If os.urandom is not + # available, we mix in the pid in addition to a timestamp. + try: + seed = long(hexlify(os.urandom(16)), 16) + except NotImplementedError: + seed = int(time.time() * 1000) ^ os.getpid() + random.seed(seed) + + +def _pipe_cloexec(): + r, w = os.pipe() + set_close_exec(r) + set_close_exec(w) + return r, w + + +_task_id = None + + +def fork_processes(num_processes, max_restarts=100): + """Starts multiple worker processes. + + If ``num_processes`` is None or <= 0, we detect the number of cores + available on this machine and fork that number of child + processes. If ``num_processes`` is given and > 0, we fork that + specific number of sub-processes. + + Since we use processes and not threads, there is no shared memory + between any server code. + + Note that multiple processes are not compatible with the autoreload + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). + When using multiple processes, no IOLoops can be created or + referenced until after the call to ``fork_processes``. + + In each child process, ``fork_processes`` returns its *task id*, a + number between 0 and ``num_processes``. Processes that exit + abnormally (due to a signal or non-zero exit status) are restarted + with the same id (up to ``max_restarts`` times). 
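+
+    A minimal sketch of the intended pattern (``make_app`` is a
+    hypothetical application factory; any per-child setup can take
+    its place)::
+
+        sockets = tornado.netutil.bind_sockets(8888)
+        task_id = tornado.process.fork_processes(0)
+        # each child resumes here with its own task id
+        server = tornado.httpserver.HTTPServer(make_app())
+        server.add_sockets(sockets)
+        tornado.ioloop.IOLoop.current().start()
+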
In the parent + process, ``fork_processes`` returns None if all child processes + have exited normally, but will otherwise only exit by throwing an + exception. + """ + global _task_id + assert _task_id is None + if num_processes is None or num_processes <= 0: + num_processes = cpu_count() + if ioloop.IOLoop.initialized(): + raise RuntimeError("Cannot run in multiple processes: IOLoop instance " + "has already been initialized. You cannot call " + "IOLoop.instance() before calling start_processes()") + gen_log.info("Starting %d processes", num_processes) + children = {} + + def start_child(i): + pid = os.fork() + if pid == 0: + # child process + _reseed_random() + global _task_id + _task_id = i + return i + else: + children[pid] = i + return None + for i in range(num_processes): + id = start_child(i) + if id is not None: + return id + num_restarts = 0 + while children: + try: + pid, status = os.wait() + except OSError as e: + if errno_from_exception(e) == errno.EINTR: + continue + raise + if pid not in children: + continue + id = children.pop(pid) + if os.WIFSIGNALED(status): + gen_log.warning("child %d (pid %d) killed by signal %d, restarting", + id, pid, os.WTERMSIG(status)) + elif os.WEXITSTATUS(status) != 0: + gen_log.warning("child %d (pid %d) exited with status %d, restarting", + id, pid, os.WEXITSTATUS(status)) + else: + gen_log.info("child %d (pid %d) exited normally", id, pid) + continue + num_restarts += 1 + if num_restarts > max_restarts: + raise RuntimeError("Too many child restarts, giving up") + new_id = start_child(id) + if new_id is not None: + return new_id + # All child processes exited cleanly, so exit the master process + # instead of just returning to right after the call to + # fork_processes (which will probably just start up another IOLoop + # unless the caller checks the return value). + sys.exit(0) + + +def task_id(): + """Returns the current task id, if any. + + Returns None if this process was not created by `fork_processes`. + """ + global _task_id + return _task_id + + +class Subprocess(object): + """Wraps ``subprocess.Popen`` with IOStream support. + + The constructor is the same as ``subprocess.Popen`` with the following + additions: + + * ``stdin``, ``stdout``, and ``stderr`` may have the value + ``tornado.process.Subprocess.STREAM``, which will make the corresponding + attribute of the resulting Subprocess a `.PipeIOStream`. + * A new keyword argument ``io_loop`` may be used to pass in an IOLoop. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + STREAM = object() + + _initialized = False + _waiting = {} + + def __init__(self, *args, **kwargs): + self.io_loop = kwargs.pop('io_loop', None) or ioloop.IOLoop.current() + # All FDs we create should be closed on error; those in to_close + # should be closed in the parent process on success. 
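+        # Each STREAM argument below is turned into an os.pipe(): the
+        # child's end is handed to Popen via kwargs, while the parent's
+        # end is wrapped in a PipeIOStream on self.stdin/stdout/stderr.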
+ pipe_fds = [] + to_close = [] + if kwargs.get('stdin') is Subprocess.STREAM: + in_r, in_w = _pipe_cloexec() + kwargs['stdin'] = in_r + pipe_fds.extend((in_r, in_w)) + to_close.append(in_r) + self.stdin = PipeIOStream(in_w, io_loop=self.io_loop) + if kwargs.get('stdout') is Subprocess.STREAM: + out_r, out_w = _pipe_cloexec() + kwargs['stdout'] = out_w + pipe_fds.extend((out_r, out_w)) + to_close.append(out_w) + self.stdout = PipeIOStream(out_r, io_loop=self.io_loop) + if kwargs.get('stderr') is Subprocess.STREAM: + err_r, err_w = _pipe_cloexec() + kwargs['stderr'] = err_w + pipe_fds.extend((err_r, err_w)) + to_close.append(err_w) + self.stderr = PipeIOStream(err_r, io_loop=self.io_loop) + try: + self.proc = subprocess.Popen(*args, **kwargs) + except: + for fd in pipe_fds: + os.close(fd) + raise + for fd in to_close: + os.close(fd) + for attr in ['stdin', 'stdout', 'stderr', 'pid']: + if not hasattr(self, attr): # don't clobber streams set above + setattr(self, attr, getattr(self.proc, attr)) + self._exit_callback = None + self.returncode = None + + def set_exit_callback(self, callback): + """Runs ``callback`` when this process exits. + + The callback takes one argument, the return code of the process. + + This method uses a ``SIGCHLD`` handler, which is a global setting + and may conflict if you have other libraries trying to handle the + same signal. If you are using more than one ``IOLoop`` it may + be necessary to call `Subprocess.initialize` first to designate + one ``IOLoop`` to run the signal handlers. + + In many cases a close callback on the stdout or stderr streams + can be used as an alternative to an exit callback if the + signal handler is causing a problem. + """ + self._exit_callback = stack_context.wrap(callback) + Subprocess.initialize(self.io_loop) + Subprocess._waiting[self.pid] = self + Subprocess._try_cleanup_process(self.pid) + + def wait_for_exit(self, raise_error=True): + """Returns a `.Future` which resolves when the process exits. + + Usage:: + + ret = yield proc.wait_for_exit() + + This is a coroutine-friendly alternative to `set_exit_callback` + (and a replacement for the blocking `subprocess.Popen.wait`). + + By default, raises `subprocess.CalledProcessError` if the process + has a non-zero exit status. Use ``wait_for_exit(raise_error=False)`` + to suppress this behavior and return the exit status without raising. + + .. versionadded:: 4.2 + """ + future = Future() + + def callback(ret): + if ret != 0 and raise_error: + # Unfortunately we don't have the original args any more. + future.set_exception(CalledProcessError(ret, None)) + else: + future.set_result(ret) + self.set_exit_callback(callback) + return future + + @classmethod + def initialize(cls, io_loop=None): + """Initializes the ``SIGCHLD`` handler. + + The signal handler is run on an `.IOLoop` to avoid locking issues. + Note that the `.IOLoop` used for signal handling need not be the + same one used by individual Subprocess objects (as long as the + ``IOLoops`` are each running in separate threads). + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. 
+ """ + if cls._initialized: + return + if io_loop is None: + io_loop = ioloop.IOLoop.current() + cls._old_sigchld = signal.signal( + signal.SIGCHLD, + lambda sig, frame: io_loop.add_callback_from_signal(cls._cleanup)) + cls._initialized = True + + @classmethod + def uninitialize(cls): + """Removes the ``SIGCHLD`` handler.""" + if not cls._initialized: + return + signal.signal(signal.SIGCHLD, cls._old_sigchld) + cls._initialized = False + + @classmethod + def _cleanup(cls): + for pid in list(cls._waiting.keys()): # make a copy + cls._try_cleanup_process(pid) + + @classmethod + def _try_cleanup_process(cls, pid): + try: + ret_pid, status = os.waitpid(pid, os.WNOHANG) + except OSError as e: + if errno_from_exception(e) == errno.ECHILD: + return + if ret_pid == 0: + return + assert ret_pid == pid + subproc = cls._waiting.pop(pid) + subproc.io_loop.add_callback_from_signal( + subproc._set_returncode, status) + + def _set_returncode(self, status): + if os.WIFSIGNALED(status): + self.returncode = -os.WTERMSIG(status) + else: + assert os.WIFEXITED(status) + self.returncode = os.WEXITSTATUS(status) + if self._exit_callback: + callback = self._exit_callback + self._exit_callback = None + callback(self.returncode) diff --git a/server/www/packages/packages-common/tornado/queues.py b/server/www/packages/packages-common/tornado/queues.py new file mode 100644 index 0000000..129b204 --- /dev/null +++ b/server/www/packages/packages-common/tornado/queues.py @@ -0,0 +1,357 @@ +# Copyright 2015 The Tornado Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, with_statement + +__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] + +import collections +import heapq + +from tornado import gen, ioloop +from tornado.concurrent import Future +from tornado.locks import Event + + +class QueueEmpty(Exception): + """Raised by `.Queue.get_nowait` when the queue has no items.""" + pass + + +class QueueFull(Exception): + """Raised by `.Queue.put_nowait` when a queue is at its maximum size.""" + pass + + +def _set_timeout(future, timeout): + if timeout: + def on_timeout(): + future.set_exception(gen.TimeoutError()) + io_loop = ioloop.IOLoop.current() + timeout_handle = io_loop.add_timeout(timeout, on_timeout) + future.add_done_callback( + lambda _: io_loop.remove_timeout(timeout_handle)) + + +class _QueueIterator(object): + def __init__(self, q): + self.q = q + + def __anext__(self): + return self.q.get() + + +class Queue(object): + """Coordinate producer and consumer coroutines. + + If maxsize is 0 (the default) the queue size is unbounded. + + .. 
testcode:: + + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.queues import Queue + + q = Queue(maxsize=2) + + @gen.coroutine + def consumer(): + while True: + item = yield q.get() + try: + print('Doing work on %s' % item) + yield gen.sleep(0.01) + finally: + q.task_done() + + @gen.coroutine + def producer(): + for item in range(5): + yield q.put(item) + print('Put %s' % item) + + @gen.coroutine + def main(): + # Start consumer without waiting (since it never finishes). + IOLoop.current().spawn_callback(consumer) + yield producer() # Wait for producer to put all tasks. + yield q.join() # Wait for consumer to finish all tasks. + print('Done') + + IOLoop.current().run_sync(main) + + .. testoutput:: + + Put 0 + Put 1 + Doing work on 0 + Put 2 + Doing work on 1 + Put 3 + Doing work on 2 + Put 4 + Doing work on 3 + Doing work on 4 + Done + + In Python 3.5, `Queue` implements the async iterator protocol, so + ``consumer()`` could be rewritten as:: + + async def consumer(): + async for item in q: + try: + print('Doing work on %s' % item) + yield gen.sleep(0.01) + finally: + q.task_done() + + .. versionchanged:: 4.3 + Added ``async for`` support in Python 3.5. + + """ + def __init__(self, maxsize=0): + if maxsize is None: + raise TypeError("maxsize can't be None") + + if maxsize < 0: + raise ValueError("maxsize can't be negative") + + self._maxsize = maxsize + self._init() + self._getters = collections.deque([]) # Futures. + self._putters = collections.deque([]) # Pairs of (item, Future). + self._unfinished_tasks = 0 + self._finished = Event() + self._finished.set() + + @property + def maxsize(self): + """Number of items allowed in the queue.""" + return self._maxsize + + def qsize(self): + """Number of items in the queue.""" + return len(self._queue) + + def empty(self): + return not self._queue + + def full(self): + if self.maxsize == 0: + return False + else: + return self.qsize() >= self.maxsize + + def put(self, item, timeout=None): + """Put an item into the queue, perhaps waiting until there is room. + + Returns a Future, which raises `tornado.gen.TimeoutError` after a + timeout. + """ + try: + self.put_nowait(item) + except QueueFull: + future = Future() + self._putters.append((item, future)) + _set_timeout(future, timeout) + return future + else: + return gen._null_future + + def put_nowait(self, item): + """Put an item into the queue without blocking. + + If no free slot is immediately available, raise `QueueFull`. + """ + self._consume_expired() + if self._getters: + assert self.empty(), "queue non-empty, why are getters waiting?" + getter = self._getters.popleft() + self.__put_internal(item) + getter.set_result(self._get()) + elif self.full(): + raise QueueFull + else: + self.__put_internal(item) + + def get(self, timeout=None): + """Remove and return an item from the queue. + + Returns a Future which resolves once an item is available, or raises + `tornado.gen.TimeoutError` after a timeout. + """ + future = Future() + try: + future.set_result(self.get_nowait()) + except QueueEmpty: + self._getters.append(future) + _set_timeout(future, timeout) + return future + + def get_nowait(self): + """Remove and return an item from the queue without blocking. + + Return an item if one is immediately available, else raise + `QueueEmpty`. + """ + self._consume_expired() + if self._putters: + assert self.full(), "queue not full, why are putters waiting?" 
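+            # The queue is at capacity with a waiting putter: accept the
+            # putter's item (the queue stays at maxsize), wake the putter,
+            # and hand the oldest item to our caller.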
+ item, putter = self._putters.popleft() + self.__put_internal(item) + putter.set_result(None) + return self._get() + elif self.qsize(): + return self._get() + else: + raise QueueEmpty + + def task_done(self): + """Indicate that a formerly enqueued task is complete. + + Used by queue consumers. For each `.get` used to fetch a task, a + subsequent call to `.task_done` tells the queue that the processing + on the task is complete. + + If a `.join` is blocking, it resumes when all items have been + processed; that is, when every `.put` is matched by a `.task_done`. + + Raises `ValueError` if called more times than `.put`. + """ + if self._unfinished_tasks <= 0: + raise ValueError('task_done() called too many times') + self._unfinished_tasks -= 1 + if self._unfinished_tasks == 0: + self._finished.set() + + def join(self, timeout=None): + """Block until all items in the queue are processed. + + Returns a Future, which raises `tornado.gen.TimeoutError` after a + timeout. + """ + return self._finished.wait(timeout) + + @gen.coroutine + def __aiter__(self): + return _QueueIterator(self) + + # These three are overridable in subclasses. + def _init(self): + self._queue = collections.deque() + + def _get(self): + return self._queue.popleft() + + def _put(self, item): + self._queue.append(item) + # End of the overridable methods. + + def __put_internal(self, item): + self._unfinished_tasks += 1 + self._finished.clear() + self._put(item) + + def _consume_expired(self): + # Remove timed-out waiters. + while self._putters and self._putters[0][1].done(): + self._putters.popleft() + + while self._getters and self._getters[0].done(): + self._getters.popleft() + + def __repr__(self): + return '<%s at %s %s>' % ( + type(self).__name__, hex(id(self)), self._format()) + + def __str__(self): + return '<%s %s>' % (type(self).__name__, self._format()) + + def _format(self): + result = 'maxsize=%r' % (self.maxsize, ) + if getattr(self, '_queue', None): + result += ' queue=%r' % self._queue + if self._getters: + result += ' getters[%s]' % len(self._getters) + if self._putters: + result += ' putters[%s]' % len(self._putters) + if self._unfinished_tasks: + result += ' tasks=%s' % self._unfinished_tasks + return result + + +class PriorityQueue(Queue): + """A `.Queue` that retrieves entries in priority order, lowest first. + + Entries are typically tuples like ``(priority number, data)``. + + .. testcode:: + + from tornado.queues import PriorityQueue + + q = PriorityQueue() + q.put((1, 'medium-priority item')) + q.put((0, 'high-priority item')) + q.put((10, 'low-priority item')) + + print(q.get_nowait()) + print(q.get_nowait()) + print(q.get_nowait()) + + .. testoutput:: + + (0, 'high-priority item') + (1, 'medium-priority item') + (10, 'low-priority item') + """ + def _init(self): + self._queue = [] + + def _put(self, item): + heapq.heappush(self._queue, item) + + def _get(self): + return heapq.heappop(self._queue) + + +class LifoQueue(Queue): + """A `.Queue` that retrieves the most recently put items first. + + .. testcode:: + + from tornado.queues import LifoQueue + + q = LifoQueue() + q.put(3) + q.put(2) + q.put(1) + + print(q.get_nowait()) + print(q.get_nowait()) + print(q.get_nowait()) + + .. 
testoutput:: + + 1 + 2 + 3 + """ + def _init(self): + self._queue = [] + + def _put(self, item): + self._queue.append(item) + + def _get(self): + return self._queue.pop() diff --git a/server/www/packages/packages-common/tornado/simple_httpclient.py b/server/www/packages/packages-common/tornado/simple_httpclient.py new file mode 100644 index 0000000..37b0bc2 --- /dev/null +++ b/server/www/packages/packages-common/tornado/simple_httpclient.py @@ -0,0 +1,549 @@ +#!/usr/bin/env python +from __future__ import absolute_import, division, print_function, with_statement + +from tornado.escape import utf8, _unicode +from tornado import gen +from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy +from tornado import httputil +from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters +from tornado.iostream import StreamClosedError +from tornado.netutil import Resolver, OverrideResolver, _client_ssl_defaults +from tornado.log import gen_log +from tornado import stack_context +from tornado.tcpclient import TCPClient + +import base64 +import collections +import copy +import functools +import re +import socket +import sys +from io import BytesIO + + +try: + import urlparse # py2 +except ImportError: + import urllib.parse as urlparse # py3 + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine. + ssl = None + +try: + import certifi +except ImportError: + certifi = None + + +def _default_ca_certs(): + if certifi is None: + raise Exception("The 'certifi' package is required to use https " + "in simple_httpclient") + return certifi.where() + + +class SimpleAsyncHTTPClient(AsyncHTTPClient): + """Non-blocking HTTP client with no external dependencies. + + This class implements an HTTP 1.1 client on top of Tornado's IOStreams. + Some features found in the curl-based AsyncHTTPClient are not yet + supported. In particular, proxies are not supported, connections + are not reused, and callers cannot select the network interface to be + used. + """ + def initialize(self, io_loop, max_clients=10, + hostname_mapping=None, max_buffer_size=104857600, + resolver=None, defaults=None, max_header_size=None, + max_body_size=None): + """Creates a AsyncHTTPClient. + + Only a single AsyncHTTPClient instance exists per IOLoop + in order to provide limitations on the number of pending connections. + ``force_instance=True`` may be used to suppress this behavior. + + Note that because of this implicit reuse, unless ``force_instance`` + is used, only the first call to the constructor actually uses + its arguments. It is recommended to use the ``configure`` method + instead of the constructor to ensure that arguments take effect. + + ``max_clients`` is the number of concurrent requests that can be + in progress; when this limit is reached additional requests will be + queued. Note that time spent waiting in this queue still counts + against the ``request_timeout``. + + ``hostname_mapping`` is a dictionary mapping hostnames to IP addresses. + It can be used to make local DNS changes when modifying system-wide + settings like ``/etc/hosts`` is not possible or desirable (e.g. in + unittests). + + ``max_buffer_size`` (default 100MB) is the number of bytes + that can be read into memory at once. ``max_body_size`` + (defaults to ``max_buffer_size``) is the largest response body + that the client will accept. Without a + ``streaming_callback``, the smaller of these two limits + applies; with a ``streaming_callback`` only ``max_body_size`` + does. 
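+
+        A hedged example of the recommended ``configure`` pattern (the
+        ``max_clients`` value is an arbitrary illustration)::
+
+            AsyncHTTPClient.configure(SimpleAsyncHTTPClient,
+                                      max_clients=50)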
+ + .. versionchanged:: 4.2 + Added the ``max_body_size`` argument. + """ + super(SimpleAsyncHTTPClient, self).initialize(io_loop, + defaults=defaults) + self.max_clients = max_clients + self.queue = collections.deque() + self.active = {} + self.waiting = {} + self.max_buffer_size = max_buffer_size + self.max_header_size = max_header_size + self.max_body_size = max_body_size + # TCPClient could create a Resolver for us, but we have to do it + # ourselves to support hostname_mapping. + if resolver: + self.resolver = resolver + self.own_resolver = False + else: + self.resolver = Resolver(io_loop=io_loop) + self.own_resolver = True + if hostname_mapping is not None: + self.resolver = OverrideResolver(resolver=self.resolver, + mapping=hostname_mapping) + self.tcp_client = TCPClient(resolver=self.resolver, io_loop=io_loop) + + def close(self): + super(SimpleAsyncHTTPClient, self).close() + if self.own_resolver: + self.resolver.close() + self.tcp_client.close() + + def fetch_impl(self, request, callback): + key = object() + self.queue.append((key, request, callback)) + if not len(self.active) < self.max_clients: + timeout_handle = self.io_loop.add_timeout( + self.io_loop.time() + min(request.connect_timeout, + request.request_timeout), + functools.partial(self._on_timeout, key)) + else: + timeout_handle = None + self.waiting[key] = (request, callback, timeout_handle) + self._process_queue() + if self.queue: + gen_log.debug("max_clients limit reached, request queued. " + "%d active, %d queued requests." % ( + len(self.active), len(self.queue))) + + def _process_queue(self): + with stack_context.NullContext(): + while self.queue and len(self.active) < self.max_clients: + key, request, callback = self.queue.popleft() + if key not in self.waiting: + continue + self._remove_timeout(key) + self.active[key] = (request, callback) + release_callback = functools.partial(self._release_fetch, key) + self._handle_request(request, release_callback, callback) + + def _connection_class(self): + return _HTTPConnection + + def _handle_request(self, request, release_callback, final_callback): + self._connection_class()( + self.io_loop, self, request, release_callback, + final_callback, self.max_buffer_size, self.tcp_client, + self.max_header_size, self.max_body_size) + + def _release_fetch(self, key): + del self.active[key] + self._process_queue() + + def _remove_timeout(self, key): + if key in self.waiting: + request, callback, timeout_handle = self.waiting[key] + if timeout_handle is not None: + self.io_loop.remove_timeout(timeout_handle) + del self.waiting[key] + + def _on_timeout(self, key): + request, callback, timeout_handle = self.waiting[key] + self.queue.remove((key, request, callback)) + timeout_response = HTTPResponse( + request, 599, error=HTTPError(599, "Timeout"), + request_time=self.io_loop.time() - request.start_time) + self.io_loop.add_callback(callback, timeout_response) + del self.waiting[key] + + +class _HTTPConnection(httputil.HTTPMessageDelegate): + _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]) + + def __init__(self, io_loop, client, request, release_callback, + final_callback, max_buffer_size, tcp_client, + max_header_size, max_body_size): + self.start_time = io_loop.time() + self.io_loop = io_loop + self.client = client + self.request = request + self.release_callback = release_callback + self.final_callback = final_callback + self.max_buffer_size = max_buffer_size + self.tcp_client = tcp_client + self.max_header_size = max_header_size + 
self.max_body_size = max_body_size + self.code = None + self.headers = None + self.chunks = [] + self._decompressor = None + # Timeout handle returned by IOLoop.add_timeout + self._timeout = None + self._sockaddr = None + with stack_context.ExceptionStackContext(self._handle_exception): + self.parsed = urlparse.urlsplit(_unicode(self.request.url)) + if self.parsed.scheme not in ("http", "https"): + raise ValueError("Unsupported url scheme: %s" % + self.request.url) + # urlsplit results have hostname and port results, but they + # didn't support ipv6 literals until python 2.7. + netloc = self.parsed.netloc + if "@" in netloc: + userpass, _, netloc = netloc.rpartition("@") + host, port = httputil.split_host_and_port(netloc) + if port is None: + port = 443 if self.parsed.scheme == "https" else 80 + if re.match(r'^\[.*\]$', host): + # raw ipv6 addresses in urls are enclosed in brackets + host = host[1:-1] + self.parsed_hostname = host # save final host for _on_connect + + if request.allow_ipv6 is False: + af = socket.AF_INET + else: + af = socket.AF_UNSPEC + + ssl_options = self._get_ssl_options(self.parsed.scheme) + + timeout = min(self.request.connect_timeout, self.request.request_timeout) + if timeout: + self._timeout = self.io_loop.add_timeout( + self.start_time + timeout, + stack_context.wrap(self._on_timeout)) + self.tcp_client.connect(host, port, af=af, + ssl_options=ssl_options, + max_buffer_size=self.max_buffer_size, + callback=self._on_connect) + + def _get_ssl_options(self, scheme): + if scheme == "https": + if self.request.ssl_options is not None: + return self.request.ssl_options + # If we are using the defaults, don't construct a + # new SSLContext. + if (self.request.validate_cert and + self.request.ca_certs is None and + self.request.client_cert is None and + self.request.client_key is None): + return _client_ssl_defaults + ssl_options = {} + if self.request.validate_cert: + ssl_options["cert_reqs"] = ssl.CERT_REQUIRED + if self.request.ca_certs is not None: + ssl_options["ca_certs"] = self.request.ca_certs + elif not hasattr(ssl, 'create_default_context'): + # When create_default_context is present, + # we can omit the "ca_certs" parameter entirely, + # which avoids the dependency on "certifi" for py34. + ssl_options["ca_certs"] = _default_ca_certs() + if self.request.client_key is not None: + ssl_options["keyfile"] = self.request.client_key + if self.request.client_cert is not None: + ssl_options["certfile"] = self.request.client_cert + + # SSL interoperability is tricky. We want to disable + # SSLv2 for security reasons; it wasn't disabled by default + # until openssl 1.0. The best way to do this is to use + # the SSL_OP_NO_SSLv2, but that wasn't exposed to python + # until 3.2. Python 2.7 adds the ciphers argument, which + # can also be used to disable SSLv2. As a last resort + # on python 2.6, we set ssl_version to TLSv1. This is + # more narrow than we'd like since it also breaks + # compatibility with servers configured for SSLv3 only, + # but nearly all servers support both SSLv3 and TLSv1: + # http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html + if sys.version_info >= (2, 7): + # In addition to disabling SSLv2, we also exclude certain + # classes of insecure ciphers. + ssl_options["ciphers"] = "DEFAULT:!SSLv2:!EXPORT:!DES" + else: + # This is really only necessary for pre-1.0 versions + # of openssl, but python 2.6 doesn't expose version + # information. 
+ ssl_options["ssl_version"] = ssl.PROTOCOL_TLSv1 + return ssl_options + return None + + def _on_timeout(self): + self._timeout = None + if self.final_callback is not None: + raise HTTPError(599, "Timeout") + + def _remove_timeout(self): + if self._timeout is not None: + self.io_loop.remove_timeout(self._timeout) + self._timeout = None + + def _on_connect(self, stream): + if self.final_callback is None: + # final_callback is cleared if we've hit our timeout. + stream.close() + return + self.stream = stream + self.stream.set_close_callback(self.on_connection_close) + self._remove_timeout() + if self.final_callback is None: + return + if self.request.request_timeout: + self._timeout = self.io_loop.add_timeout( + self.start_time + self.request.request_timeout, + stack_context.wrap(self._on_timeout)) + if (self.request.method not in self._SUPPORTED_METHODS and + not self.request.allow_nonstandard_methods): + raise KeyError("unknown method %s" % self.request.method) + for key in ('network_interface', + 'proxy_host', 'proxy_port', + 'proxy_username', 'proxy_password'): + if getattr(self.request, key, None): + raise NotImplementedError('%s not supported' % key) + if "Connection" not in self.request.headers: + self.request.headers["Connection"] = "close" + if "Host" not in self.request.headers: + if '@' in self.parsed.netloc: + self.request.headers["Host"] = self.parsed.netloc.rpartition('@')[-1] + else: + self.request.headers["Host"] = self.parsed.netloc + username, password = None, None + if self.parsed.username is not None: + username, password = self.parsed.username, self.parsed.password + elif self.request.auth_username is not None: + username = self.request.auth_username + password = self.request.auth_password or '' + if username is not None: + if self.request.auth_mode not in (None, "basic"): + raise ValueError("unsupported auth_mode %s", + self.request.auth_mode) + auth = utf8(username) + b":" + utf8(password) + self.request.headers["Authorization"] = (b"Basic " + + base64.b64encode(auth)) + if self.request.user_agent: + self.request.headers["User-Agent"] = self.request.user_agent + if not self.request.allow_nonstandard_methods: + # Some HTTP methods nearly always have bodies while others + # almost never do. Fail in this case unless the user has + # opted out of sanity checks with allow_nonstandard_methods. + body_expected = self.request.method in ("POST", "PATCH", "PUT") + body_present = (self.request.body is not None or + self.request.body_producer is not None) + if ((body_expected and not body_present) or + (body_present and not body_expected)): + raise ValueError( + 'Body must %sbe None for method %s (unless ' + 'allow_nonstandard_methods is true)' % + ('not ' if body_expected else '', self.request.method)) + if self.request.expect_100_continue: + self.request.headers["Expect"] = "100-continue" + if self.request.body is not None: + # When body_producer is used the caller is responsible for + # setting Content-Length (or else chunked encoding will be used). + self.request.headers["Content-Length"] = str(len( + self.request.body)) + if (self.request.method == "POST" and + "Content-Type" not in self.request.headers): + self.request.headers["Content-Type"] = "application/x-www-form-urlencoded" + if self.request.decompress_response: + self.request.headers["Accept-Encoding"] = "gzip" + req_path = ((self.parsed.path or '/') + + (('?' 
+ self.parsed.query) if self.parsed.query else '')) + self.connection = self._create_connection(stream) + start_line = httputil.RequestStartLine(self.request.method, + req_path, '') + self.connection.write_headers(start_line, self.request.headers) + if self.request.expect_100_continue: + self._read_response() + else: + self._write_body(True) + + def _create_connection(self, stream): + stream.set_nodelay(True) + connection = HTTP1Connection( + stream, True, + HTTP1ConnectionParameters( + no_keep_alive=True, + max_header_size=self.max_header_size, + max_body_size=self.max_body_size, + decompress=self.request.decompress_response), + self._sockaddr) + return connection + + def _write_body(self, start_read): + if self.request.body is not None: + self.connection.write(self.request.body) + elif self.request.body_producer is not None: + fut = self.request.body_producer(self.connection.write) + if fut is not None: + fut = gen.convert_yielded(fut) + + def on_body_written(fut): + fut.result() + self.connection.finish() + if start_read: + self._read_response() + self.io_loop.add_future(fut, on_body_written) + return + self.connection.finish() + if start_read: + self._read_response() + + def _read_response(self): + # Ensure that any exception raised in read_response ends up in our + # stack context. + self.io_loop.add_future( + self.connection.read_response(self), + lambda f: f.result()) + + def _release(self): + if self.release_callback is not None: + release_callback = self.release_callback + self.release_callback = None + release_callback() + + def _run_callback(self, response): + self._release() + if self.final_callback is not None: + final_callback = self.final_callback + self.final_callback = None + self.io_loop.add_callback(final_callback, response) + + def _handle_exception(self, typ, value, tb): + if self.final_callback: + self._remove_timeout() + if isinstance(value, StreamClosedError): + if value.real_error is None: + value = HTTPError(599, "Stream closed") + else: + value = value.real_error + self._run_callback(HTTPResponse(self.request, 599, error=value, + request_time=self.io_loop.time() - self.start_time, + )) + + if hasattr(self, "stream"): + # TODO: this may cause a StreamClosedError to be raised + # by the connection's Future. Should we cancel the + # connection more gracefully? + self.stream.close() + return True + else: + # If our callback has already been called, we are probably + # catching an exception that is not caused by us but rather + # some child of our callback. Rather than drop it on the floor, + # pass it along, unless it's just the stream being closed. + return isinstance(value, StreamClosedError) + + def on_connection_close(self): + if self.final_callback is not None: + message = "Connection closed" + if self.stream.error: + raise self.stream.error + try: + raise HTTPError(599, message) + except HTTPError: + self._handle_exception(*sys.exc_info()) + + def headers_received(self, first_line, headers): + if self.request.expect_100_continue and first_line.code == 100: + self._write_body(False) + return + self.code = first_line.code + self.reason = first_line.reason + self.headers = headers + + if self._should_follow_redirect(): + return + + if self.request.header_callback is not None: + # Reassemble the start line. 
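+                # first_line is a (version, code, reason) tuple, so this
+                # reconstructs e.g. "HTTP/1.1 200 OK\r\n".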
+ self.request.header_callback('%s %s %s\r\n' % first_line) + for k, v in self.headers.get_all(): + self.request.header_callback("%s: %s\r\n" % (k, v)) + self.request.header_callback('\r\n') + + def _should_follow_redirect(self): + return (self.request.follow_redirects and + self.request.max_redirects > 0 and + self.code in (301, 302, 303, 307)) + + def finish(self): + data = b''.join(self.chunks) + self._remove_timeout() + original_request = getattr(self.request, "original_request", + self.request) + if self._should_follow_redirect(): + assert isinstance(self.request, _RequestProxy) + new_request = copy.copy(self.request.request) + new_request.url = urlparse.urljoin(self.request.url, + self.headers["Location"]) + new_request.max_redirects = self.request.max_redirects - 1 + del new_request.headers["Host"] + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 + # Client SHOULD make a GET request after a 303. + # According to the spec, 302 should be followed by the same + # method as the original request, but in practice browsers + # treat 302 the same as 303, and many servers use 302 for + # compatibility with pre-HTTP/1.1 user agents which don't + # understand the 303 status. + if self.code in (302, 303): + new_request.method = "GET" + new_request.body = None + for h in ["Content-Length", "Content-Type", + "Content-Encoding", "Transfer-Encoding"]: + try: + del self.request.headers[h] + except KeyError: + pass + new_request.original_request = original_request + final_callback = self.final_callback + self.final_callback = None + self._release() + self.client.fetch(new_request, final_callback) + self._on_end_request() + return + if self.request.streaming_callback: + buffer = BytesIO() + else: + buffer = BytesIO(data) # TODO: don't require one big string? + response = HTTPResponse(original_request, + self.code, reason=getattr(self, 'reason', None), + headers=self.headers, + request_time=self.io_loop.time() - self.start_time, + buffer=buffer, + effective_url=self.request.url) + self._run_callback(response) + self._on_end_request() + + def _on_end_request(self): + self.stream.close() + + def data_received(self, chunk): + if self._should_follow_redirect(): + # We're going to follow a redirect so just discard the body. + return + if self.request.streaming_callback is not None: + self.request.streaming_callback(chunk) + else: + self.chunks.append(chunk) + + +if __name__ == "__main__": + AsyncHTTPClient.configure(SimpleAsyncHTTPClient) + main() diff --git a/server/www/packages/packages-common/tornado/stack_context.py b/server/www/packages/packages-common/tornado/stack_context.py new file mode 100644 index 0000000..2c0d9ee --- /dev/null +++ b/server/www/packages/packages-common/tornado/stack_context.py @@ -0,0 +1,388 @@ +#!/usr/bin/env python +# +# Copyright 2010 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""`StackContext` allows applications to maintain threadlocal-like state +that follows execution as it moves to other execution contexts. 
+ +The motivating examples are to eliminate the need for explicit +``async_callback`` wrappers (as in `tornado.web.RequestHandler`), and to +allow some additional context to be kept for logging. + +This is slightly magic, but it's an extension of the idea that an +exception handler is a kind of stack-local state and when that stack +is suspended and resumed in a new context that state needs to be +preserved. `StackContext` shifts the burden of restoring that state +from each call site (e.g. wrapping each `.AsyncHTTPClient` callback +in ``async_callback``) to the mechanisms that transfer control from +one context to another (e.g. `.AsyncHTTPClient` itself, `.IOLoop`, +thread pools, etc). + +Example usage:: + + @contextlib.contextmanager + def die_on_error(): + try: + yield + except Exception: + logging.error("exception in asynchronous operation",exc_info=True) + sys.exit(1) + + with StackContext(die_on_error): + # Any exception thrown here *or in callback and its descendants* + # will cause the process to exit instead of spinning endlessly + # in the ioloop. + http_client.fetch(url, callback) + ioloop.start() + +Most applications shouldn't have to work with `StackContext` directly. +Here are a few rules of thumb for when it's necessary: + +* If you're writing an asynchronous library that doesn't rely on a + stack_context-aware library like `tornado.ioloop` or `tornado.iostream` + (for example, if you're writing a thread pool), use + `.stack_context.wrap()` before any asynchronous operations to capture the + stack context from where the operation was started. + +* If you're writing an asynchronous library that has some shared + resources (such as a connection pool), create those shared resources + within a ``with stack_context.NullContext():`` block. This will prevent + ``StackContexts`` from leaking from one request to another. + +* If you want to write something like an exception handler that will + persist across asynchronous calls, create a new `StackContext` (or + `ExceptionStackContext`), and make your asynchronous calls in a ``with`` + block that references your `StackContext`. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import sys +import threading + +from tornado.util import raise_exc_info + + +class StackContextInconsistentError(Exception): + pass + + +class _State(threading.local): + def __init__(self): + self.contexts = (tuple(), None) +_state = _State() + + +class StackContext(object): + """Establishes the given context as a StackContext that will be transferred. + + Note that the parameter is a callable that returns a context + manager, not the context itself. That is, where for a + non-transferable context manager you would say:: + + with my_context(): + + StackContext takes the function itself rather than its result:: + + with StackContext(my_context): + + The result of ``with StackContext() as cb:`` is a deactivation + callback. Run this callback when the StackContext is no longer + needed to ensure that it is not propagated any further (note that + deactivating a context does not affect any instances of that + context that are currently pending). This is an advanced feature + and not necessary in most applications. 
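+
+    A short sketch of deactivation (``my_context`` and ``callback``
+    are placeholders)::
+
+        with StackContext(my_context) as deactivate:
+            io_loop.add_callback(callback)
+        # later, once the context should stop propagating:
+        deactivate()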
+ """ + def __init__(self, context_factory): + self.context_factory = context_factory + self.contexts = [] + self.active = True + + def _deactivate(self): + self.active = False + + # StackContext protocol + def enter(self): + context = self.context_factory() + self.contexts.append(context) + context.__enter__() + + def exit(self, type, value, traceback): + context = self.contexts.pop() + context.__exit__(type, value, traceback) + + # Note that some of this code is duplicated in ExceptionStackContext + # below. ExceptionStackContext is more common and doesn't need + # the full generality of this class. + def __enter__(self): + self.old_contexts = _state.contexts + self.new_contexts = (self.old_contexts[0] + (self,), self) + _state.contexts = self.new_contexts + + try: + self.enter() + except: + _state.contexts = self.old_contexts + raise + + return self._deactivate + + def __exit__(self, type, value, traceback): + try: + self.exit(type, value, traceback) + finally: + final_contexts = _state.contexts + _state.contexts = self.old_contexts + + # Generator coroutines and with-statements with non-local + # effects interact badly. Check here for signs of + # the stack getting out of sync. + # Note that this check comes after restoring _state.context + # so that if it fails things are left in a (relatively) + # consistent state. + if final_contexts is not self.new_contexts: + raise StackContextInconsistentError( + 'stack_context inconsistency (may be caused by yield ' + 'within a "with StackContext" block)') + + # Break up a reference to itself to allow for faster GC on CPython. + self.new_contexts = None + + +class ExceptionStackContext(object): + """Specialization of StackContext for exception handling. + + The supplied ``exception_handler`` function will be called in the + event of an uncaught exception in this context. The semantics are + similar to a try/finally clause, and intended use cases are to log + an error, close a socket, or similar cleanup actions. The + ``exc_info`` triple ``(type, value, traceback)`` will be passed to the + exception_handler function. + + If the exception handler returns true, the exception will be + consumed and will not be propagated to other exception handlers. + """ + def __init__(self, exception_handler): + self.exception_handler = exception_handler + self.active = True + + def _deactivate(self): + self.active = False + + def exit(self, type, value, traceback): + if type is not None: + return self.exception_handler(type, value, traceback) + + def __enter__(self): + self.old_contexts = _state.contexts + self.new_contexts = (self.old_contexts[0], self) + _state.contexts = self.new_contexts + + return self._deactivate + + def __exit__(self, type, value, traceback): + try: + if type is not None: + return self.exception_handler(type, value, traceback) + finally: + final_contexts = _state.contexts + _state.contexts = self.old_contexts + + if final_contexts is not self.new_contexts: + raise StackContextInconsistentError( + 'stack_context inconsistency (may be caused by yield ' + 'within a "with StackContext" block)') + + # Break up a reference to itself to allow for faster GC on CPython. + self.new_contexts = None + + +class NullContext(object): + """Resets the `StackContext`. + + Useful when creating a shared resource on demand (e.g. an + `.AsyncHTTPClient`) where the stack that caused the creating is + not relevant to future operations. 
+ """ + def __enter__(self): + self.old_contexts = _state.contexts + _state.contexts = (tuple(), None) + + def __exit__(self, type, value, traceback): + _state.contexts = self.old_contexts + + +def _remove_deactivated(contexts): + """Remove deactivated handlers from the chain""" + # Clean ctx handlers + stack_contexts = tuple([h for h in contexts[0] if h.active]) + + # Find new head + head = contexts[1] + while head is not None and not head.active: + head = head.old_contexts[1] + + # Process chain + ctx = head + while ctx is not None: + parent = ctx.old_contexts[1] + + while parent is not None: + if parent.active: + break + ctx.old_contexts = parent.old_contexts + parent = parent.old_contexts[1] + + ctx = parent + + return (stack_contexts, head) + + +def wrap(fn): + """Returns a callable object that will restore the current `StackContext` + when executed. + + Use this whenever saving a callback to be executed later in a + different execution context (either in a different thread or + asynchronously in the same thread). + """ + # Check if function is already wrapped + if fn is None or hasattr(fn, '_wrapped'): + return fn + + # Capture current stack head + # TODO: Any other better way to store contexts and update them in wrapped function? + cap_contexts = [_state.contexts] + + if not cap_contexts[0][0] and not cap_contexts[0][1]: + # Fast path when there are no active contexts. + def null_wrapper(*args, **kwargs): + try: + current_state = _state.contexts + _state.contexts = cap_contexts[0] + return fn(*args, **kwargs) + finally: + _state.contexts = current_state + null_wrapper._wrapped = True + return null_wrapper + + def wrapped(*args, **kwargs): + ret = None + try: + # Capture old state + current_state = _state.contexts + + # Remove deactivated items + cap_contexts[0] = contexts = _remove_deactivated(cap_contexts[0]) + + # Force new state + _state.contexts = contexts + + # Current exception + exc = (None, None, None) + top = None + + # Apply stack contexts + last_ctx = 0 + stack = contexts[0] + + # Apply state + for n in stack: + try: + n.enter() + last_ctx += 1 + except: + # Exception happened. Record exception info and store top-most handler + exc = sys.exc_info() + top = n.old_contexts[1] + + # Execute callback if no exception happened while restoring state + if top is None: + try: + ret = fn(*args, **kwargs) + except: + exc = sys.exc_info() + top = contexts[1] + + # If there was exception, try to handle it by going through the exception chain + if top is not None: + exc = _handle_exception(top, exc) + else: + # Otherwise take shorter path and run stack contexts in reverse order + while last_ctx > 0: + last_ctx -= 1 + c = stack[last_ctx] + + try: + c.exit(*exc) + except: + exc = sys.exc_info() + top = c.old_contexts[1] + break + else: + top = None + + # If if exception happened while unrolling, take longer exception handler path + if top is not None: + exc = _handle_exception(top, exc) + + # If exception was not handled, raise it + if exc != (None, None, None): + raise_exc_info(exc) + finally: + _state.contexts = current_state + return ret + + wrapped._wrapped = True + return wrapped + + +def _handle_exception(tail, exc): + while tail is not None: + try: + if tail.exit(*exc): + exc = (None, None, None) + except: + exc = sys.exc_info() + + tail = tail.old_contexts[1] + + return exc + + +def run_with_stack_context(context, func): + """Run a coroutine ``func`` in the given `StackContext`. 
+ + It is not safe to have a ``yield`` statement within a ``with StackContext`` + block, so it is difficult to use stack context with `.gen.coroutine`. + This helper function runs the function in the correct context while + keeping the ``yield`` and ``with`` statements syntactically separate. + + Example:: + + @gen.coroutine + def incorrect(): + with StackContext(ctx): + # ERROR: this will raise StackContextInconsistentError + yield other_coroutine() + + @gen.coroutine + def correct(): + yield run_with_stack_context(StackContext(ctx), other_coroutine) + + .. versionadded:: 3.1 + """ + with context: + return func() diff --git a/server/www/packages/packages-common/tornado/tcpclient.py b/server/www/packages/packages-common/tornado/tcpclient.py new file mode 100644 index 0000000..f594d91 --- /dev/null +++ b/server/www/packages/packages-common/tornado/tcpclient.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking TCP connection factory. +""" +from __future__ import absolute_import, division, print_function, with_statement + +import functools +import socket + +from tornado.concurrent import Future +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream +from tornado import gen +from tornado.netutil import Resolver + +_INITIAL_CONNECT_TIMEOUT = 0.3 + + +class _Connector(object): + """A stateless implementation of the "Happy Eyeballs" algorithm. + + "Happy Eyeballs" is documented in RFC6555 as the recommended practice + for when both IPv4 and IPv6 addresses are available. + + In this implementation, we partition the addresses by family, and + make the first connection attempt to whichever address was + returned first by ``getaddrinfo``. If that connection fails or + times out, we begin a connection in parallel to the first address + of the other family. If there are additional failures we retry + with other addresses, keeping one connection attempt per family + in flight at a time. + + http://tools.ietf.org/html/rfc6555 + + """ + def __init__(self, addrinfo, io_loop, connect): + self.io_loop = io_loop + self.connect = connect + + self.future = Future() + self.timeout = None + self.last_error = None + self.remaining = len(addrinfo) + self.primary_addrs, self.secondary_addrs = self.split(addrinfo) + + @staticmethod + def split(addrinfo): + """Partition the ``addrinfo`` list by address family. + + Returns two lists. The first list contains the first entry from + ``addrinfo`` and all others with the same family, and the + second list contains all other addresses (normally one list will + be AF_INET and the other AF_INET6, although non-standard resolvers + may return additional families). 
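+
+        For illustration (made-up addresses)::
+
+            split([(AF_INET, ('10.0.0.1', 80)),
+                   (AF_INET6, ('::1', 80)),
+                   (AF_INET, ('10.0.0.2', 80))])
+            # -> ([(AF_INET, ('10.0.0.1', 80)), (AF_INET, ('10.0.0.2', 80))],
+            #     [(AF_INET6, ('::1', 80))])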
+ """ + primary = [] + secondary = [] + primary_af = addrinfo[0][0] + for af, addr in addrinfo: + if af == primary_af: + primary.append((af, addr)) + else: + secondary.append((af, addr)) + return primary, secondary + + def start(self, timeout=_INITIAL_CONNECT_TIMEOUT): + self.try_connect(iter(self.primary_addrs)) + self.set_timout(timeout) + return self.future + + def try_connect(self, addrs): + try: + af, addr = next(addrs) + except StopIteration: + # We've reached the end of our queue, but the other queue + # might still be working. Send a final error on the future + # only when both queues are finished. + if self.remaining == 0 and not self.future.done(): + self.future.set_exception(self.last_error or + IOError("connection failed")) + return + future = self.connect(af, addr) + future.add_done_callback(functools.partial(self.on_connect_done, + addrs, af, addr)) + + def on_connect_done(self, addrs, af, addr, future): + self.remaining -= 1 + try: + stream = future.result() + except Exception as e: + if self.future.done(): + return + # Error: try again (but remember what happened so we have an + # error to raise in the end) + self.last_error = e + self.try_connect(addrs) + if self.timeout is not None: + # If the first attempt failed, don't wait for the + # timeout to try an address from the secondary queue. + self.io_loop.remove_timeout(self.timeout) + self.on_timeout() + return + self.clear_timeout() + if self.future.done(): + # This is a late arrival; just drop it. + stream.close() + else: + self.future.set_result((af, addr, stream)) + + def set_timout(self, timeout): + self.timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, + self.on_timeout) + + def on_timeout(self): + self.timeout = None + self.try_connect(iter(self.secondary_addrs)) + + def clear_timeout(self): + if self.timeout is not None: + self.io_loop.remove_timeout(self.timeout) + + +class TCPClient(object): + """A non-blocking TCP connection factory. + + .. versionchanged:: 4.1 + The ``io_loop`` argument is deprecated. + """ + def __init__(self, resolver=None, io_loop=None): + self.io_loop = io_loop or IOLoop.current() + if resolver is not None: + self.resolver = resolver + self._own_resolver = False + else: + self.resolver = Resolver(io_loop=io_loop) + self._own_resolver = True + + def close(self): + if self._own_resolver: + self.resolver.close() + + @gen.coroutine + def connect(self, host, port, af=socket.AF_UNSPEC, ssl_options=None, + max_buffer_size=None): + """Connect to the given host and port. + + Asynchronously returns an `.IOStream` (or `.SSLIOStream` if + ``ssl_options`` is not None). + """ + addrinfo = yield self.resolver.resolve(host, port, af) + connector = _Connector( + addrinfo, self.io_loop, + functools.partial(self._create_stream, max_buffer_size)) + af, addr, stream = yield connector.start() + # TODO: For better performance we could cache the (af, addr) + # information here and re-use it on subsequent connections to + # the same host. (http://tools.ietf.org/html/rfc6555#section-4.2) + if ssl_options is not None: + stream = yield stream.start_tls(False, ssl_options=ssl_options, + server_hostname=host) + raise gen.Return(stream) + + def _create_stream(self, max_buffer_size, af, addr): + # Always connect in plaintext; we'll convert to ssl if necessary + # after one connection has completed. 
+ stream = IOStream(socket.socket(af), + io_loop=self.io_loop, + max_buffer_size=max_buffer_size) + return stream.connect(addr) diff --git a/server/www/packages/packages-common/tornado/tcpserver.py b/server/www/packages/packages-common/tornado/tcpserver.py new file mode 100644 index 0000000..c9d148a --- /dev/null +++ b/server/www/packages/packages-common/tornado/tcpserver.py @@ -0,0 +1,273 @@ +#!/usr/bin/env python +# +# Copyright 2011 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking, single-threaded TCP server.""" +from __future__ import absolute_import, division, print_function, with_statement + +import errno +import os +import socket + +from tornado.log import app_log +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream, SSLIOStream +from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket +from tornado import process +from tornado.util import errno_from_exception + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine. + ssl = None + + +class TCPServer(object): + r"""A non-blocking, single-threaded TCP server. + + To use `TCPServer`, define a subclass which overrides the `handle_stream` + method. + + To make this server serve SSL traffic, send the ``ssl_options`` keyword + argument with an `ssl.SSLContext` object. For compatibility with older + versions of Python ``ssl_options`` may also be a dictionary of keyword + arguments for the `ssl.wrap_socket` method.:: + + ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), + os.path.join(data_dir, "mydomain.key")) + TCPServer(ssl_options=ssl_ctx) + + `TCPServer` initialization follows one of three patterns: + + 1. `listen`: simple single-process:: + + server = TCPServer() + server.listen(8888) + IOLoop.current().start() + + 2. `bind`/`start`: simple multi-process:: + + server = TCPServer() + server.bind(8888) + server.start(0) # Forks multiple sub-processes + IOLoop.current().start() + + When using this interface, an `.IOLoop` must *not* be passed + to the `TCPServer` constructor. `start` will always start + the server on the default singleton `.IOLoop`. + + 3. `add_sockets`: advanced multi-process:: + + sockets = bind_sockets(8888) + tornado.process.fork_processes(0) + server = TCPServer() + server.add_sockets(sockets) + IOLoop.current().start() + + The `add_sockets` interface is more complicated, but it can be + used with `tornado.process.fork_processes` to give you more + flexibility in when the fork happens. `add_sockets` can + also be used in single-process servers if you want to create + your listening sockets in some way other than + `~tornado.netutil.bind_sockets`. + + .. versionadded:: 3.1 + The ``max_buffer_size`` argument. 
+ """ + def __init__(self, io_loop=None, ssl_options=None, max_buffer_size=None, + read_chunk_size=None): + self.io_loop = io_loop + self.ssl_options = ssl_options + self._sockets = {} # fd -> socket object + self._pending_sockets = [] + self._started = False + self.max_buffer_size = max_buffer_size + self.read_chunk_size = read_chunk_size + + # Verify the SSL options. Otherwise we don't get errors until clients + # connect. This doesn't verify that the keys are legitimate, but + # the SSL module doesn't do that until there is a connected socket + # which seems like too much work + if self.ssl_options is not None and isinstance(self.ssl_options, dict): + # Only certfile is required: it can contain both keys + if 'certfile' not in self.ssl_options: + raise KeyError('missing key "certfile" in ssl_options') + + if not os.path.exists(self.ssl_options['certfile']): + raise ValueError('certfile "%s" does not exist' % + self.ssl_options['certfile']) + if ('keyfile' in self.ssl_options and + not os.path.exists(self.ssl_options['keyfile'])): + raise ValueError('keyfile "%s" does not exist' % + self.ssl_options['keyfile']) + + def listen(self, port, address=""): + """Starts accepting connections on the given port. + + This method may be called more than once to listen on multiple ports. + `listen` takes effect immediately; it is not necessary to call + `TCPServer.start` afterwards. It is, however, necessary to start + the `.IOLoop`. + """ + sockets = bind_sockets(port, address=address) + self.add_sockets(sockets) + + def add_sockets(self, sockets): + """Makes this server start accepting connections on the given sockets. + + The ``sockets`` parameter is a list of socket objects such as + those returned by `~tornado.netutil.bind_sockets`. + `add_sockets` is typically used in combination with that + method and `tornado.process.fork_processes` to provide greater + control over the initialization of a multi-process server. + """ + if self.io_loop is None: + self.io_loop = IOLoop.current() + + for sock in sockets: + self._sockets[sock.fileno()] = sock + add_accept_handler(sock, self._handle_connection, + io_loop=self.io_loop) + + def add_socket(self, socket): + """Singular version of `add_sockets`. Takes a single socket object.""" + self.add_sockets([socket]) + + def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128): + """Binds this server to the given port on the given address. + + To start the server, call `start`. If you want to run this server + in a single process, you can call `listen` as a shortcut to the + sequence of `bind` and `start` calls. + + Address may be either an IP address or hostname. If it's a hostname, + the server will listen on all IP addresses associated with the + name. Address may be an empty string or None to listen on all + available interfaces. Family may be set to either `socket.AF_INET` + or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise + both will be used if available. + + The ``backlog`` argument has the same meaning as for + `socket.listen `. + + This method may be called multiple times prior to `start` to listen + on multiple ports or interfaces. + """ + sockets = bind_sockets(port, address=address, family=family, + backlog=backlog) + if self._started: + self.add_sockets(sockets) + else: + self._pending_sockets.extend(sockets) + + def start(self, num_processes=1): + """Starts this server in the `.IOLoop`. + + By default, we run the server in this process and do not fork any + additional child process. 
+ + If num_processes is ``None`` or <= 0, we detect the number of cores + available on this machine and fork that number of child + processes. If num_processes is given and > 1, we fork that + specific number of sub-processes. + + Since we use processes and not threads, there is no shared memory + between any server code. + + Note that multiple processes are not compatible with the autoreload + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). + When using multiple processes, no IOLoops can be created or + referenced until after the call to ``TCPServer.start(n)``. + """ + assert not self._started + self._started = True + if num_processes != 1: + process.fork_processes(num_processes) + sockets = self._pending_sockets + self._pending_sockets = [] + self.add_sockets(sockets) + + def stop(self): + """Stops listening for new connections. + + Requests currently in progress may still continue after the + server is stopped. + """ + for fd, sock in self._sockets.items(): + self.io_loop.remove_handler(fd) + sock.close() + + def handle_stream(self, stream, address): + """Override to handle a new `.IOStream` from an incoming connection. + + This method may be a coroutine; if so any exceptions it raises + asynchronously will be logged. Accepting of incoming connections + will not be blocked by this coroutine. + + If this `TCPServer` is configured for SSL, ``handle_stream`` + may be called before the SSL handshake has completed. Use + `.SSLIOStream.wait_for_handshake` if you need to verify the client's + certificate or use NPN/ALPN. + + .. versionchanged:: 4.2 + Added the option for this method to be a coroutine. + """ + raise NotImplementedError() + + def _handle_connection(self, connection, address): + if self.ssl_options is not None: + assert ssl, "Python 2.6+ and OpenSSL required for SSL" + try: + connection = ssl_wrap_socket(connection, + self.ssl_options, + server_side=True, + do_handshake_on_connect=False) + except ssl.SSLError as err: + if err.args[0] == ssl.SSL_ERROR_EOF: + return connection.close() + else: + raise + except socket.error as err: + # If the connection is closed immediately after it is created + # (as in a port scan), we can get one of several errors. + # wrap_socket makes an internal call to getpeername, + # which may return either EINVAL (Mac OS X) or ENOTCONN + # (Linux). If it returns ENOTCONN, this error is + # silently swallowed by the ssl module, so we need to + # catch another error later on (AttributeError in + # SSLIOStream._do_ssl_handshake). + # To test this behavior, try nmap with the -sT flag. 
+            # https://github.com/tornadoweb/tornado/pull/750
+            if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL):
+                return connection.close()
+            else:
+                raise
+        try:
+            if self.ssl_options is not None:
+                stream = SSLIOStream(connection, io_loop=self.io_loop,
+                                     max_buffer_size=self.max_buffer_size,
+                                     read_chunk_size=self.read_chunk_size)
+            else:
+                stream = IOStream(connection, io_loop=self.io_loop,
+                                  max_buffer_size=self.max_buffer_size,
+                                  read_chunk_size=self.read_chunk_size)
+            future = self.handle_stream(stream, address)
+            if future is not None:
+                self.io_loop.add_future(future, lambda f: f.result())
+        except Exception:
+            app_log.error("Error in connection callback", exc_info=True)
diff --git a/server/www/packages/packages-common/tornado/template.py b/server/www/packages/packages-common/tornado/template.py
new file mode 100644
index 0000000..fa58899
--- /dev/null
+++ b/server/www/packages/packages-common/tornado/template.py
@@ -0,0 +1,975 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""A simple template system that compiles templates to Python code.
+
+Basic usage looks like::
+
+    t = template.Template("{{ myvalue }}")
+    print t.generate(myvalue="XXX")
+
+`Loader` is a class that loads templates from a root directory and caches
+the compiled templates::
+
+    loader = template.Loader("/home/btaylor")
+    print loader.load("test.html").generate(myvalue="XXX")
+
+We compile all templates to raw Python. Error-reporting is currently... uh,
+interesting. Syntax for the templates::
+
+    ### base.html
+    <html>
+      <head>
+        <title>{% block title %}Default title{% end %}</title>
+      </head>
+      <body>
+        <ul>
+          {% for student in students %}
+            {% block student %}
+              <li>{{ escape(student.name) }}</li>
+            {% end %}
+          {% end %}
+        </ul>
+      </body>
+    </html>
+
+    ### bold.html
+    {% extends "base.html" %}
+
+    {% block title %}A bolder title{% end %}
+
+    {% block student %}
+      <li><span style="bold">{{ escape(student.name) }}</span></li>
+    {% end %}
+
+Unlike most other template systems, we do not put any restrictions on the
+expressions you can include in your statements. ``if`` and ``for`` blocks get
+translated exactly into Python, so you can do complex expressions like::
+
+   {% for student in [p for p in people if p.student and p.age > 23] %}
+     <li>{{ escape(student.name) }}</li>
+   {% end %}
+
+Translating directly to Python means you can apply functions to expressions
+easily, like the ``escape()`` function in the examples above. You can pass
+functions in to your template just like any other variable
+(In a `.RequestHandler`, override `.RequestHandler.get_template_namespace`)::
+
+    ### Python code
+    def add(x, y):
+        return x + y
+    template.execute(add=add)
+
+    ### The template
+    {{ add(1, 2) }}
+
+We provide the functions `escape() <.xhtml_escape>`, `.url_escape()`,
+`.json_encode()`, and `.squeeze()` to all templates by default.
+
+Typical applications do not create `Template` or `Loader` instances by
+hand, but instead use the `~.RequestHandler.render` and
+`~.RequestHandler.render_string` methods of
+`tornado.web.RequestHandler`, which load templates automatically based
+on the ``template_path`` `.Application` setting.
+
+Variable names beginning with ``_tt_`` are reserved by the template
+system and should not be used by application code.
+
+Syntax Reference
+----------------
+
+Template expressions are surrounded by double curly braces: ``{{ ... }}``.
+The contents may be any python expression, which will be escaped according
+to the current autoescape setting and inserted into the output. Other
+template directives use ``{% %}``. These tags may be escaped as ``{{!``
+and ``{%!`` if you need to include a literal ``{{`` or ``{%`` in the output.
+
+To comment out a section so that it is omitted from the output, surround it
+with ``{# ... #}``.
+
+``{% apply *function* %}...{% end %}``
+    Applies a function to the output of all template code between ``apply``
+    and ``end``::
+
+        {% apply linkify %}{{name}} said: {{message}}{% end %}
+
+    Note that as an implementation detail apply blocks are implemented
+    as nested functions and thus may interact strangely with variables
+    set via ``{% set %}``, or the use of ``{% break %}`` or ``{% continue %}``
+    within loops.
+
+``{% autoescape *function* %}``
+    Sets the autoescape mode for the current file. This does not affect
+    other files, even those referenced by ``{% include %}``. Note that
+    autoescaping can also be configured globally, at the `.Application`
+    or `Loader`.::
+
+        {% autoescape xhtml_escape %}
+        {% autoescape None %}
+
+``{% block *name* %}...{% end %}``
+    Indicates a named, replaceable block for use with ``{% extends %}``.
+    Blocks in the parent template will be replaced with the contents of
+    the same-named block in a child template.::
+
+        <!-- base.html -->
+        <title>{% block title %}Default title{% end %}</title>
+
+        <!-- mypage.html -->
+        {% extends "base.html" %}
+        {% block title %}My page title{% end %}
+
+``{% comment ... %}``
+    A comment which will be removed from the template output. Note that
+    there is no ``{% end %}`` tag; the comment goes from the word ``comment``
+    to the closing ``%}`` tag.
+
+``{% extends *filename* %}``
+    Inherit from another template. Templates that use ``extends`` should
+    contain one or more ``block`` tags to replace content from the parent
+    template. Anything in the child template not contained in a ``block``
+    tag will be ignored. For an example, see the ``{% block %}`` tag.
+
+``{% for *var* in *expr* %}...{% end %}``
+    Same as the python ``for`` statement. ``{% break %}`` and
+    ``{% continue %}`` may be used inside the loop.
+
+``{% from *x* import *y* %}``
+    Same as the python ``import`` statement.
+
+``{% if *condition* %}...{% elif *condition* %}...{% else %}...{% end %}``
+    Conditional statement - outputs the first section whose condition is
+    true.
(The ``elif`` and ``else`` sections are optional) + +``{% import *module* %}`` + Same as the python ``import`` statement. + +``{% include *filename* %}`` + Includes another template file. The included file can see all the local + variables as if it were copied directly to the point of the ``include`` + directive (the ``{% autoescape %}`` directive is an exception). + Alternately, ``{% module Template(filename, **kwargs) %}`` may be used + to include another template with an isolated namespace. + +``{% module *expr* %}`` + Renders a `~tornado.web.UIModule`. The output of the ``UIModule`` is + not escaped:: + + {% module Template("foo.html", arg=42) %} + + ``UIModules`` are a feature of the `tornado.web.RequestHandler` + class (and specifically its ``render`` method) and will not work + when the template system is used on its own in other contexts. + +``{% raw *expr* %}`` + Outputs the result of the given expression without autoescaping. + +``{% set *x* = *y* %}`` + Sets a local variable. + +``{% try %}...{% except %}...{% else %}...{% finally %}...{% end %}`` + Same as the python ``try`` statement. + +``{% while *condition* %}... {% end %}`` + Same as the python ``while`` statement. ``{% break %}`` and + ``{% continue %}`` may be used inside the loop. + +``{% whitespace *mode* %}`` + Sets the whitespace mode for the remainder of the current file + (or until the next ``{% whitespace %}`` directive). See + `filter_whitespace` for available options. New in Tornado 4.3. +""" + +from __future__ import absolute_import, division, print_function, with_statement + +import datetime +import linecache +import os.path +import posixpath +import re +import threading + +from tornado import escape +from tornado.log import app_log +from tornado.util import ObjectDict, exec_in, unicode_type + +try: + from cStringIO import StringIO # py2 +except ImportError: + from io import StringIO # py3 + +_DEFAULT_AUTOESCAPE = "xhtml_escape" +_UNSET = object() + + +def filter_whitespace(mode, text): + """Transform whitespace in ``text`` according to ``mode``. + + Available modes are: + + * ``all``: Return all whitespace unmodified. + * ``single``: Collapse consecutive whitespace with a single whitespace + character, preserving newlines. + * ``oneline``: Collapse all runs of whitespace into a single space + character, removing all newlines in the process. + + .. versionadded:: 4.3 + """ + if mode == 'all': + return text + elif mode == 'single': + text = re.sub(r"([\t ]+)", " ", text) + text = re.sub(r"(\s*\n\s*)", "\n", text) + return text + elif mode == 'oneline': + return re.sub(r"(\s+)", " ", text) + else: + raise Exception("invalid whitespace mode %s" % mode) + + +class Template(object): + """A compiled template. + + We compile into Python from the given template_string. You can generate + the template from variables with generate(). + """ + # note that the constructor's signature is not extracted with + # autodoc because _UNSET looks like garbage. When changing + # this signature update website/sphinx/template.rst too. + def __init__(self, template_string, name="", loader=None, + compress_whitespace=_UNSET, autoescape=_UNSET, + whitespace=None): + """Construct a Template. + + :arg str template_string: the contents of the template file. + :arg str name: the filename from which the template was loaded + (used for error message). + :arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible for this template, + used to resolve ``{% include %}`` and ``{% extend %}`` + directives. 
+ :arg bool compress_whitespace: Deprecated since Tornado 4.3. + Equivalent to ``whitespace="single"`` if true and + ``whitespace="all"`` if false. + :arg str autoescape: The name of a function in the template + namespace, or ``None`` to disable escaping by default. + :arg str whitespace: A string specifying treatment of whitespace; + see `filter_whitespace` for options. + + .. versionchanged:: 4.3 + Added ``whitespace`` parameter; deprecated ``compress_whitespace``. + """ + self.name = escape.native_str(name) + + if compress_whitespace is not _UNSET: + # Convert deprecated compress_whitespace (bool) to whitespace (str). + if whitespace is not None: + raise Exception("cannot set both whitespace and compress_whitespace") + whitespace = "single" if compress_whitespace else "all" + if whitespace is None: + if loader and loader.whitespace: + whitespace = loader.whitespace + else: + # Whitespace defaults by filename. + if name.endswith(".html") or name.endswith(".js"): + whitespace = "single" + else: + whitespace = "all" + # Validate the whitespace setting. + filter_whitespace(whitespace, '') + + if autoescape is not _UNSET: + self.autoescape = autoescape + elif loader: + self.autoescape = loader.autoescape + else: + self.autoescape = _DEFAULT_AUTOESCAPE + + self.namespace = loader.namespace if loader else {} + reader = _TemplateReader(name, escape.native_str(template_string), + whitespace) + self.file = _File(self, _parse(reader, self)) + self.code = self._generate_python(loader) + self.loader = loader + try: + # Under python2.5, the fake filename used here must match + # the module name used in __name__ below. + # The dont_inherit flag prevents template.py's future imports + # from being applied to the generated code. + self.compiled = compile( + escape.to_unicode(self.code), + "%s.generated.py" % self.name.replace('.', '_'), + "exec", dont_inherit=True) + except Exception: + formatted_code = _format_code(self.code).rstrip() + app_log.error("%s code:\n%s", self.name, formatted_code) + raise + + def generate(self, **kwargs): + """Generate this template with the given arguments.""" + namespace = { + "escape": escape.xhtml_escape, + "xhtml_escape": escape.xhtml_escape, + "url_escape": escape.url_escape, + "json_encode": escape.json_encode, + "squeeze": escape.squeeze, + "linkify": escape.linkify, + "datetime": datetime, + "_tt_utf8": escape.utf8, # for internal use + "_tt_string_types": (unicode_type, bytes), + # __name__ and __loader__ allow the traceback mechanism to find + # the generated source code. + "__name__": self.name.replace('.', '_'), + "__loader__": ObjectDict(get_source=lambda name: self.code), + } + namespace.update(self.namespace) + namespace.update(kwargs) + exec_in(self.compiled, namespace) + execute = namespace["_tt_execute"] + # Clear the traceback module's cache of source data now that + # we've generated a new template (mainly for this module's + # unittests, where different tests reuse the same name). 
+ linecache.clearcache() + return execute() + + def _generate_python(self, loader): + buffer = StringIO() + try: + # named_blocks maps from names to _NamedBlock objects + named_blocks = {} + ancestors = self._get_ancestors(loader) + ancestors.reverse() + for ancestor in ancestors: + ancestor.find_named_blocks(loader, named_blocks) + writer = _CodeWriter(buffer, named_blocks, loader, + ancestors[0].template) + ancestors[0].generate(writer) + return buffer.getvalue() + finally: + buffer.close() + + def _get_ancestors(self, loader): + ancestors = [self.file] + for chunk in self.file.body.chunks: + if isinstance(chunk, _ExtendsBlock): + if not loader: + raise ParseError("{% extends %} block found, but no " + "template loader") + template = loader.load(chunk.name, self.name) + ancestors.extend(template._get_ancestors(loader)) + return ancestors + + +class BaseLoader(object): + """Base class for template loaders. + + You must use a template loader to use template constructs like + ``{% extends %}`` and ``{% include %}``. The loader caches all + templates after they are loaded the first time. + """ + def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None, + whitespace=None): + """Construct a template loader. + + :arg str autoescape: The name of a function in the template + namespace, such as "xhtml_escape", or ``None`` to disable + autoescaping by default. + :arg dict namespace: A dictionary to be added to the default template + namespace, or ``None``. + :arg str whitespace: A string specifying default behavior for + whitespace in templates; see `filter_whitespace` for options. + Default is "single" for files ending in ".html" and ".js" and + "all" for other files. + + .. versionchanged:: 4.3 + Added ``whitespace`` parameter. + """ + self.autoescape = autoescape + self.namespace = namespace or {} + self.whitespace = whitespace + self.templates = {} + # self.lock protects self.templates. It's a reentrant lock + # because templates may load other templates via `include` or + # `extends`. Note that thanks to the GIL this code would be safe + # even without the lock, but could lead to wasted work as multiple + # threads tried to compile the same template simultaneously. + self.lock = threading.RLock() + + def reset(self): + """Resets the cache of compiled templates.""" + with self.lock: + self.templates = {} + + def resolve_path(self, name, parent_path=None): + """Converts a possibly-relative path to absolute (used internally).""" + raise NotImplementedError() + + def load(self, name, parent_path=None): + """Loads a template.""" + name = self.resolve_path(name, parent_path=parent_path) + with self.lock: + if name not in self.templates: + self.templates[name] = self._create_template(name) + return self.templates[name] + + def _create_template(self, name): + raise NotImplementedError() + + +class Loader(BaseLoader): + """A template loader that loads from a single root directory. 
+ """ + def __init__(self, root_directory, **kwargs): + super(Loader, self).__init__(**kwargs) + self.root = os.path.abspath(root_directory) + + def resolve_path(self, name, parent_path=None): + if parent_path and not parent_path.startswith("<") and \ + not parent_path.startswith("/") and \ + not name.startswith("/"): + current_path = os.path.join(self.root, parent_path) + file_dir = os.path.dirname(os.path.abspath(current_path)) + relative_path = os.path.abspath(os.path.join(file_dir, name)) + if relative_path.startswith(self.root): + name = relative_path[len(self.root) + 1:] + return name + + def _create_template(self, name): + path = os.path.join(self.root, name) + with open(path, "rb") as f: + template = Template(f.read(), name=name, loader=self) + return template + + +class DictLoader(BaseLoader): + """A template loader that loads from a dictionary.""" + def __init__(self, dict, **kwargs): + super(DictLoader, self).__init__(**kwargs) + self.dict = dict + + def resolve_path(self, name, parent_path=None): + if parent_path and not parent_path.startswith("<") and \ + not parent_path.startswith("/") and \ + not name.startswith("/"): + file_dir = posixpath.dirname(parent_path) + name = posixpath.normpath(posixpath.join(file_dir, name)) + return name + + def _create_template(self, name): + return Template(self.dict[name], name=name, loader=self) + + +class _Node(object): + def each_child(self): + return () + + def generate(self, writer): + raise NotImplementedError() + + def find_named_blocks(self, loader, named_blocks): + for child in self.each_child(): + child.find_named_blocks(loader, named_blocks) + + +class _File(_Node): + def __init__(self, template, body): + self.template = template + self.body = body + self.line = 0 + + def generate(self, writer): + writer.write_line("def _tt_execute():", self.line) + with writer.indent(): + writer.write_line("_tt_buffer = []", self.line) + writer.write_line("_tt_append = _tt_buffer.append", self.line) + self.body.generate(writer) + writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line) + + def each_child(self): + return (self.body,) + + +class _ChunkList(_Node): + def __init__(self, chunks): + self.chunks = chunks + + def generate(self, writer): + for chunk in self.chunks: + chunk.generate(writer) + + def each_child(self): + return self.chunks + + +class _NamedBlock(_Node): + def __init__(self, name, body, template, line): + self.name = name + self.body = body + self.template = template + self.line = line + + def each_child(self): + return (self.body,) + + def generate(self, writer): + block = writer.named_blocks[self.name] + with writer.include(block.template, self.line): + block.body.generate(writer) + + def find_named_blocks(self, loader, named_blocks): + named_blocks[self.name] = self + _Node.find_named_blocks(self, loader, named_blocks) + + +class _ExtendsBlock(_Node): + def __init__(self, name): + self.name = name + + +class _IncludeBlock(_Node): + def __init__(self, name, reader, line): + self.name = name + self.template_name = reader.name + self.line = line + + def find_named_blocks(self, loader, named_blocks): + included = loader.load(self.name, self.template_name) + included.file.find_named_blocks(loader, named_blocks) + + def generate(self, writer): + included = writer.loader.load(self.name, self.template_name) + with writer.include(included, self.line): + included.file.body.generate(writer) + + +class _ApplyBlock(_Node): + def __init__(self, method, line, body=None): + self.method = method + self.line = line + self.body = 
body + + def each_child(self): + return (self.body,) + + def generate(self, writer): + method_name = "_tt_apply%d" % writer.apply_counter + writer.apply_counter += 1 + writer.write_line("def %s():" % method_name, self.line) + with writer.indent(): + writer.write_line("_tt_buffer = []", self.line) + writer.write_line("_tt_append = _tt_buffer.append", self.line) + self.body.generate(writer) + writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line) + writer.write_line("_tt_append(_tt_utf8(%s(%s())))" % ( + self.method, method_name), self.line) + + +class _ControlBlock(_Node): + def __init__(self, statement, line, body=None): + self.statement = statement + self.line = line + self.body = body + + def each_child(self): + return (self.body,) + + def generate(self, writer): + writer.write_line("%s:" % self.statement, self.line) + with writer.indent(): + self.body.generate(writer) + # Just in case the body was empty + writer.write_line("pass", self.line) + + +class _IntermediateControlBlock(_Node): + def __init__(self, statement, line): + self.statement = statement + self.line = line + + def generate(self, writer): + # In case the previous block was empty + writer.write_line("pass", self.line) + writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1) + + +class _Statement(_Node): + def __init__(self, statement, line): + self.statement = statement + self.line = line + + def generate(self, writer): + writer.write_line(self.statement, self.line) + + +class _Expression(_Node): + def __init__(self, expression, line, raw=False): + self.expression = expression + self.line = line + self.raw = raw + + def generate(self, writer): + writer.write_line("_tt_tmp = %s" % self.expression, self.line) + writer.write_line("if isinstance(_tt_tmp, _tt_string_types):" + " _tt_tmp = _tt_utf8(_tt_tmp)", self.line) + writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line) + if not self.raw and writer.current_template.autoescape is not None: + # In python3 functions like xhtml_escape return unicode, + # so we have to convert to utf8 again. + writer.write_line("_tt_tmp = _tt_utf8(%s(_tt_tmp))" % + writer.current_template.autoescape, self.line) + writer.write_line("_tt_append(_tt_tmp)", self.line) + + +class _Module(_Expression): + def __init__(self, expression, line): + super(_Module, self).__init__("_tt_modules." + expression, line, + raw=True) + + +class _Text(_Node): + def __init__(self, value, line, whitespace): + self.value = value + self.line = line + self.whitespace = whitespace + + def generate(self, writer): + value = self.value + + # Compress whitespace if requested, with a crude heuristic to avoid + # altering preformatted whitespace. + if "
    " not in value:
    +            value = filter_whitespace(self.whitespace, value)
    +
    +        if value:
    +            writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
    +
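The ``<pre>`` check above is a crude guard against mangling preformatted
blocks; everything else goes through ``filter_whitespace``. A minimal sketch
of how the modes behave (all names are defined in this file; the results
follow from the regexes in ``filter_whitespace``)::

    from tornado.template import Template, filter_whitespace

    filter_whitespace("single", "a  b\n\n  c")   # -> "a b\nc"
    filter_whitespace("oneline", "a  b\n\n  c")  # -> "a b c"
    filter_whitespace("all", "a  b\n\n  c")      # unchanged

    # A template may also pick a mode explicitly:
    t = Template("{{ x }}   done\n", whitespace="oneline")
    print(t.generate(x="hi"))  # b'hi done '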
    +
    +class ParseError(Exception):
    +    """Raised for template syntax errors.
    +
    +    ``ParseError`` instances have ``filename`` and ``lineno`` attributes
    +    indicating the position of the error.
    +
    +    .. versionchanged:: 4.3
    +       Added ``filename`` and ``lineno`` attributes.
    +    """
    +    def __init__(self, message, filename, lineno):
    +        self.message = message
    +        # The names "filename" and "lineno" are chosen for consistency
    +        # with python SyntaxError.
    +        self.filename = filename
    +        self.lineno = lineno
    +
    +    def __str__(self):
    +        return '%s at %s:%d' % (self.message, self.filename, self.lineno)
    +
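A short sketch of what this error reporting looks like in practice, using
the ``DictLoader`` defined earlier in this file (the template body is made
up)::

    from tornado.template import DictLoader, ParseError

    loader = DictLoader({"broken.html": "{% if x %}never closed"})
    try:
        loader.load("broken.html")
    except ParseError as e:
        # e.filename == 'broken.html', e.lineno == 1
        print(e)  # Missing {% end %} block for if at broken.html:1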
    +
    +class _CodeWriter(object):
    +    def __init__(self, file, named_blocks, loader, current_template):
    +        self.file = file
    +        self.named_blocks = named_blocks
    +        self.loader = loader
    +        self.current_template = current_template
    +        self.apply_counter = 0
    +        self.include_stack = []
    +        self._indent = 0
    +
    +    def indent_size(self):
    +        return self._indent
    +
    +    def indent(self):
    +        class Indenter(object):
    +            def __enter__(_):
    +                self._indent += 1
    +                return self
    +
    +            def __exit__(_, *args):
    +                assert self._indent > 0
    +                self._indent -= 1
    +
    +        return Indenter()
    +
    +    def include(self, template, line):
    +        self.include_stack.append((self.current_template, line))
    +        self.current_template = template
    +
    +        class IncludeTemplate(object):
    +            def __enter__(_):
    +                return self
    +
    +            def __exit__(_, *args):
    +                self.current_template = self.include_stack.pop()[0]
    +
    +        return IncludeTemplate()
    +
    +    def write_line(self, line, line_number, indent=None):
    +        if indent is None:
    +            indent = self._indent
    +        line_comment = '  # %s:%d' % (self.current_template.name, line_number)
    +        if self.include_stack:
    +            ancestors = ["%s:%d" % (tmpl.name, lineno)
    +                         for (tmpl, lineno) in self.include_stack]
    +            line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
    +        print("    " * indent + line + line_comment, file=self.file)
    +
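To see the kind of code ``_CodeWriter`` emits, including the ``# name:line``
comments that make generated-code tracebacks readable, compile a trivial
template and inspect its ``code`` attribute (a rough sketch; the exact
output is an implementation detail)::

    from tornado.template import Template

    t = Template("{{ 1 + 1 }}", name="demo.html")
    print(t.code)
    # def _tt_execute():  # demo.html:0
    #     _tt_buffer = []  # demo.html:0
    #     ...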
    +
    +class _TemplateReader(object):
    +    def __init__(self, name, text, whitespace):
    +        self.name = name
    +        self.text = text
    +        self.whitespace = whitespace
    +        self.line = 1
    +        self.pos = 0
    +
    +    def find(self, needle, start=0, end=None):
    +        assert start >= 0, start
    +        pos = self.pos
    +        start += pos
    +        if end is None:
    +            index = self.text.find(needle, start)
    +        else:
    +            end += pos
    +            assert end >= start
    +            index = self.text.find(needle, start, end)
    +        if index != -1:
    +            index -= pos
    +        return index
    +
    +    def consume(self, count=None):
    +        if count is None:
    +            count = len(self.text) - self.pos
    +        newpos = self.pos + count
    +        self.line += self.text.count("\n", self.pos, newpos)
    +        s = self.text[self.pos:newpos]
    +        self.pos = newpos
    +        return s
    +
    +    def remaining(self):
    +        return len(self.text) - self.pos
    +
    +    def __len__(self):
    +        return self.remaining()
    +
    +    def __getitem__(self, key):
    +        if type(key) is slice:
    +            size = len(self)
    +            start, stop, step = key.indices(size)
    +            if start is None:
    +                start = self.pos
    +            else:
    +                start += self.pos
    +            if stop is not None:
    +                stop += self.pos
    +            return self.text[slice(start, stop, step)]
    +        elif key < 0:
    +            return self.text[key]
    +        else:
    +            return self.text[self.pos + key]
    +
    +    def __str__(self):
    +        return self.text[self.pos:]
    +
    +    def raise_parse_error(self, msg):
    +        raise ParseError(msg, self.name, self.line)
    +
    +
    +def _format_code(code):
    +    lines = code.splitlines()
    +    format = "%%%dd  %%s\n" % len(repr(len(lines) + 1))
    +    return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
    +
    +
    +def _parse(reader, template, in_block=None, in_loop=None):
    +    body = _ChunkList([])
    +    while True:
    +        # Find next template directive
    +        curly = 0
    +        while True:
    +            curly = reader.find("{", curly)
    +            if curly == -1 or curly + 1 == reader.remaining():
    +                # EOF
    +                if in_block:
    +                    reader.raise_parse_error(
    +                        "Missing {%% end %%} block for %s" % in_block)
    +                body.chunks.append(_Text(reader.consume(), reader.line,
    +                                         reader.whitespace))
    +                return body
    +            # If the first curly brace is not the start of a special token,
    +            # start searching from the character after it
    +            if reader[curly + 1] not in ("{", "%", "#"):
    +                curly += 1
    +                continue
    +            # When there are more than 2 curlies in a row, use the
    +            # innermost ones.  This is useful when generating languages
    +            # like latex where curlies are also meaningful
    +            if (curly + 2 < reader.remaining() and
    +                    reader[curly + 1] == '{' and reader[curly + 2] == '{'):
    +                curly += 1
    +                continue
    +            break
    +
    +        # Append any text before the special token
    +        if curly > 0:
    +            cons = reader.consume(curly)
    +            body.chunks.append(_Text(cons, reader.line,
    +                                     reader.whitespace))
    +
    +        start_brace = reader.consume(2)
    +        line = reader.line
    +
    +        # Template directives may be escaped as "{{!" or "{%!".
    +        # In this case output the braces and consume the "!".
    +        # This is especially useful in conjunction with jquery templates,
    +        # which also use double braces.
    +        if reader.remaining() and reader[0] == "!":
    +            reader.consume(1)
    +            body.chunks.append(_Text(start_brace, line,
    +                                     reader.whitespace))
    +            continue
    +
    +        # Comment
    +        if start_brace == "{#":
    +            end = reader.find("#}")
    +            if end == -1:
    +                reader.raise_parse_error("Missing end comment #}")
    +            contents = reader.consume(end).strip()
    +            reader.consume(2)
    +            continue
    +
    +        # Expression
    +        if start_brace == "{{":
    +            end = reader.find("}}")
    +            if end == -1:
    +                reader.raise_parse_error("Missing end expression }}")
    +            contents = reader.consume(end).strip()
    +            reader.consume(2)
    +            if not contents:
    +                reader.raise_parse_error("Empty expression")
    +            body.chunks.append(_Expression(contents, line))
    +            continue
    +
    +        # Block
    +        assert start_brace == "{%", start_brace
    +        end = reader.find("%}")
    +        if end == -1:
    +            reader.raise_parse_error("Missing end block %}")
    +        contents = reader.consume(end).strip()
    +        reader.consume(2)
    +        if not contents:
    +            reader.raise_parse_error("Empty block tag ({% %})")
    +
    +        operator, space, suffix = contents.partition(" ")
    +        suffix = suffix.strip()
    +
    +        # Intermediate ("else", "elif", etc) blocks
    +        intermediate_blocks = {
    +            "else": set(["if", "for", "while", "try"]),
    +            "elif": set(["if"]),
    +            "except": set(["try"]),
    +            "finally": set(["try"]),
    +        }
    +        allowed_parents = intermediate_blocks.get(operator)
    +        if allowed_parents is not None:
    +            if not in_block:
    +                reader.raise_parse_error("%s outside %s block" %
    +                                         (operator, allowed_parents))
    +            if in_block not in allowed_parents:
    +                reader.raise_parse_error(
    +                    "%s block cannot be attached to %s block" %
    +                    (operator, in_block))
    +            body.chunks.append(_IntermediateControlBlock(contents, line))
    +            continue
    +
    +        # End tag
    +        elif operator == "end":
    +            if not in_block:
    +                reader.raise_parse_error("Extra {% end %} block")
    +            return body
    +
    +        elif operator in ("extends", "include", "set", "import", "from",
    +                          "comment", "autoescape", "whitespace", "raw",
    +                          "module"):
    +            if operator == "comment":
    +                continue
    +            if operator == "extends":
    +                suffix = suffix.strip('"').strip("'")
    +                if not suffix:
    +                    reader.raise_parse_error("extends missing file path")
    +                block = _ExtendsBlock(suffix)
    +            elif operator in ("import", "from"):
    +                if not suffix:
    +                    reader.raise_parse_error("import missing statement")
    +                block = _Statement(contents, line)
    +            elif operator == "include":
    +                suffix = suffix.strip('"').strip("'")
    +                if not suffix:
    +                    reader.raise_parse_error("include missing file path")
    +                block = _IncludeBlock(suffix, reader, line)
    +            elif operator == "set":
    +                if not suffix:
    +                    reader.raise_parse_error("set missing statement")
    +                block = _Statement(suffix, line)
    +            elif operator == "autoescape":
    +                fn = suffix.strip()
    +                if fn == "None":
    +                    fn = None
    +                template.autoescape = fn
    +                continue
    +            elif operator == "whitespace":
    +                mode = suffix.strip()
    +                # Validate the selected mode
    +                filter_whitespace(mode, '')
    +                reader.whitespace = mode
    +                continue
    +            elif operator == "raw":
    +                block = _Expression(suffix, line, raw=True)
    +            elif operator == "module":
    +                block = _Module(suffix, line)
    +            body.chunks.append(block)
    +            continue
    +
    +        elif operator in ("apply", "block", "try", "if", "for", "while"):
    +            # parse inner body recursively
    +            if operator in ("for", "while"):
    +                block_body = _parse(reader, template, operator, operator)
    +            elif operator == "apply":
    +                # apply creates a nested function so syntactically it's not
    +                # in the loop.
    +                block_body = _parse(reader, template, operator, None)
    +            else:
    +                block_body = _parse(reader, template, operator, in_loop)
    +
    +            if operator == "apply":
    +                if not suffix:
    +                    reader.raise_parse_error("apply missing method name")
    +                block = _ApplyBlock(suffix, line, block_body)
    +            elif operator == "block":
    +                if not suffix:
    +                    reader.raise_parse_error("block missing name")
    +                block = _NamedBlock(suffix, block_body, template, line)
    +            else:
    +                block = _ControlBlock(contents, line, block_body)
    +            body.chunks.append(block)
    +            continue
    +
    +        elif operator in ("break", "continue"):
    +            if not in_loop:
    +                reader.raise_parse_error("%s outside %s block" %
    +                                         (operator, set(["for", "while"])))
    +            body.chunks.append(_Statement(contents, line))
    +            continue
    +
    +        else:
    +            reader.raise_parse_error("unknown operator: %r" % operator)
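Two parser behaviors from the code above are worth a quick sketch: the
``{{!``/``{%!`` escapes, and the loop check on ``{% break %}`` (template
strings here are made up)::

    from tornado.template import Template, ParseError

    t = Template("{{! x }} and {%! y %}")
    print(t.generate())  # b'{{ x }} and {% y %}'

    try:
        Template("{% break %}", name="t.html")
    except ParseError as e:
        print(e)  # "break outside ... block at t.html:1"
                  # (the set repr in the message varies by Python version)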
    diff --git a/server/www/packages/packages-common/tornado/testing.py b/server/www/packages/packages-common/tornado/testing.py
    new file mode 100644
    index 0000000..54d76fe
    --- /dev/null
    +++ b/server/www/packages/packages-common/tornado/testing.py
    @@ -0,0 +1,735 @@
    +#!/usr/bin/env python
    +"""Support classes for automated testing.
    +
    +* `AsyncTestCase` and `AsyncHTTPTestCase`:  Subclasses of unittest.TestCase
    +  with additional support for testing asynchronous (`.IOLoop` based) code.
    +
    +* `ExpectLog` and `LogTrapTestCase`: Make test logs less spammy.
    +
    +* `main()`: A simple test runner (wrapper around unittest.main()) with support
    +  for the tornado.autoreload module to rerun the tests when code changes.
    +"""
    +
    +from __future__ import absolute_import, division, print_function, with_statement
    +
    +try:
    +    from tornado import gen
    +    from tornado.httpclient import AsyncHTTPClient
    +    from tornado.httpserver import HTTPServer
    +    from tornado.simple_httpclient import SimpleAsyncHTTPClient
    +    from tornado.ioloop import IOLoop, TimeoutError
    +    from tornado import netutil
    +    from tornado.process import Subprocess
    +except ImportError:
    +    # These modules are not importable on app engine.  Parts of this module
    +    # won't work, but e.g. LogTrapTestCase and main() will.
    +    AsyncHTTPClient = None
    +    gen = None
    +    HTTPServer = None
    +    IOLoop = None
    +    netutil = None
    +    SimpleAsyncHTTPClient = None
    +    Subprocess = None
    +from tornado.log import gen_log, app_log
    +from tornado.stack_context import ExceptionStackContext
    +from tornado.util import raise_exc_info, basestring_type
    +import functools
    +import inspect
    +import logging
    +import os
    +import re
    +import signal
    +import socket
    +import sys
    +
    +try:
    +    from cStringIO import StringIO  # py2
    +except ImportError:
    +    from io import StringIO  # py3
    +
    +try:
    +    from collections.abc import Generator as GeneratorType  # py35+
    +except ImportError:
    +    from types import GeneratorType
    +
    +if sys.version_info >= (3, 5):
    +    iscoroutine = inspect.iscoroutine
    +    iscoroutinefunction = inspect.iscoroutinefunction
    +else:
    +    iscoroutine = iscoroutinefunction = lambda f: False
    +
    +# Tornado's own test suite requires the updated unittest module
    +# (either py27+ or unittest2) so tornado.test.util enforces
    +# this requirement, but for other users of tornado.testing we want
+# to allow the older version if unittest2 is not available.
    +if sys.version_info >= (3,):
    +    # On python 3, mixing unittest2 and unittest (including doctest)
    +    # doesn't seem to work, so always use unittest.
    +    import unittest
    +else:
    +    # On python 2, prefer unittest2 when available.
    +    try:
    +        import unittest2 as unittest
    +    except ImportError:
    +        import unittest
    +
    +_next_port = 10000
    +
    +
    +def get_unused_port():
    +    """Returns a (hopefully) unused port number.
    +
    +    This function does not guarantee that the port it returns is available,
    +    only that a series of get_unused_port calls in a single process return
    +    distinct ports.
    +
    +    .. deprecated::
    +       Use bind_unused_port instead, which is guaranteed to find an unused port.
    +    """
    +    global _next_port
    +    port = _next_port
    +    _next_port = _next_port + 1
    +    return port
    +
    +
    +def bind_unused_port(reuse_port=False):
    +    """Binds a server socket to an available port on localhost.
    +
    +    Returns a tuple (socket, port).
    +    """
    +    [sock] = netutil.bind_sockets(None, 'localhost', family=socket.AF_INET,
    +                                  reuse_port=reuse_port)
    +    port = sock.getsockname()[1]
    +    return sock, port
    +
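A quick sketch of the difference between the two helpers::

    from tornado.testing import bind_unused_port, get_unused_port

    sock, port = bind_unused_port()   # actually bound; kernel picks the port
    sock.close()

    port = get_unused_port()          # deprecated: just a counter from 10000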
    +
    +def get_async_test_timeout():
    +    """Get the global timeout setting for async tests.
    +
    +    Returns a float, the timeout in seconds.
    +
    +    .. versionadded:: 3.1
    +    """
    +    try:
    +        return float(os.environ.get('ASYNC_TEST_TIMEOUT'))
    +    except (ValueError, TypeError):
    +        return 5
    +
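For example (assuming ``ASYNC_TEST_TIMEOUT`` is otherwise unset)::

    import os
    from tornado.testing import get_async_test_timeout

    os.environ['ASYNC_TEST_TIMEOUT'] = '30'
    get_async_test_timeout()   # 30.0
    del os.environ['ASYNC_TEST_TIMEOUT']
    get_async_test_timeout()   # 5 (default when unset or malformed)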
    +
    +class _TestMethodWrapper(object):
    +    """Wraps a test method to raise an error if it returns a value.
    +
    +    This is mainly used to detect undecorated generators (if a test
    +    method yields it must use a decorator to consume the generator),
    +    but will also detect other kinds of return values (these are not
    +    necessarily errors, but we alert anyway since there is no good
+    reason to return a value from a test).
    +    """
    +    def __init__(self, orig_method):
    +        self.orig_method = orig_method
    +
    +    def __call__(self, *args, **kwargs):
    +        result = self.orig_method(*args, **kwargs)
    +        if isinstance(result, GeneratorType) or iscoroutine(result):
    +            raise TypeError("Generator and coroutine test methods should be"
    +                            " decorated with tornado.testing.gen_test")
    +        elif result is not None:
    +            raise ValueError("Return value from test method ignored: %r" %
    +                             result)
    +
    +    def __getattr__(self, name):
    +        """Proxy all unknown attributes to the original method.
    +
    +        This is important for some of the decorators in the `unittest`
    +        module, such as `unittest.skipIf`.
    +        """
    +        return getattr(self.orig_method, name)
    +
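This wrapper is what turns the easy-to-make mistake described below into a
loud failure; a hypothetical undecorated generator test::

    class BadTest(AsyncTestCase):
        def test_fetch(self):       # forgot @tornado.testing.gen_test
            yield gen.sleep(1)

    # Running test_fetch raises:
    # TypeError: Generator and coroutine test methods should be decorated
    # with tornado.testing.gen_test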
    +
    +class AsyncTestCase(unittest.TestCase):
    +    """`~unittest.TestCase` subclass for testing `.IOLoop`-based
    +    asynchronous code.
    +
    +    The unittest framework is synchronous, so the test must be
    +    complete by the time the test method returns.  This means that
    +    asynchronous code cannot be used in quite the same way as usual.
    +    To write test functions that use the same ``yield``-based patterns
    +    used with the `tornado.gen` module, decorate your test methods
    +    with `tornado.testing.gen_test` instead of
    +    `tornado.gen.coroutine`.  This class also provides the `stop()`
    +    and `wait()` methods for a more manual style of testing.  The test
    +    method itself must call ``self.wait()``, and asynchronous
    +    callbacks should call ``self.stop()`` to signal completion.
    +
    +    By default, a new `.IOLoop` is constructed for each test and is available
    +    as ``self.io_loop``.  This `.IOLoop` should be used in the construction of
    +    HTTP clients/servers, etc.  If the code being tested requires a
    +    global `.IOLoop`, subclasses should override `get_new_ioloop` to return it.
    +
    +    The `.IOLoop`'s ``start`` and ``stop`` methods should not be
+    called directly.  Instead, use `self.stop <AsyncTestCase.stop>` and
+    `self.wait <AsyncTestCase.wait>`.  Arguments passed to ``self.stop`` are returned from
    +    ``self.wait``.  It is possible to have multiple ``wait``/``stop``
    +    cycles in the same test.
    +
    +    Example::
    +
    +        # This test uses coroutine style.
    +        class MyTestCase(AsyncTestCase):
    +            @tornado.testing.gen_test
    +            def test_http_fetch(self):
    +                client = AsyncHTTPClient(self.io_loop)
    +                response = yield client.fetch("http://www.tornadoweb.org")
    +                # Test contents of response
    +                self.assertIn("FriendFeed", response.body)
    +
    +        # This test uses argument passing between self.stop and self.wait.
    +        class MyTestCase2(AsyncTestCase):
    +            def test_http_fetch(self):
    +                client = AsyncHTTPClient(self.io_loop)
    +                client.fetch("http://www.tornadoweb.org/", self.stop)
    +                response = self.wait()
    +                # Test contents of response
    +                self.assertIn("FriendFeed", response.body)
    +
    +        # This test uses an explicit callback-based style.
    +        class MyTestCase3(AsyncTestCase):
    +            def test_http_fetch(self):
    +                client = AsyncHTTPClient(self.io_loop)
    +                client.fetch("http://www.tornadoweb.org/", self.handle_fetch)
    +                self.wait()
    +
    +            def handle_fetch(self, response):
    +                # Test contents of response (failures and exceptions here
    +                # will cause self.wait() to throw an exception and end the
    +                # test).
    +                # Exceptions thrown here are magically propagated to
    +                # self.wait() in test_http_fetch() via stack_context.
    +                self.assertIn("FriendFeed", response.body)
    +                self.stop()
    +    """
    +    def __init__(self, methodName='runTest', **kwargs):
    +        super(AsyncTestCase, self).__init__(methodName, **kwargs)
    +        self.__stopped = False
    +        self.__running = False
    +        self.__failure = None
    +        self.__stop_args = None
    +        self.__timeout = None
    +
    +        # It's easy to forget the @gen_test decorator, but if you do
    +        # the test will silently be ignored because nothing will consume
    +        # the generator.  Replace the test method with a wrapper that will
    +        # make sure it's not an undecorated generator.
    +        setattr(self, methodName, _TestMethodWrapper(getattr(self, methodName)))
    +
    +    def setUp(self):
    +        super(AsyncTestCase, self).setUp()
    +        self.io_loop = self.get_new_ioloop()
    +        self.io_loop.make_current()
    +
    +    def tearDown(self):
    +        # Clean up Subprocess, so it can be used again with a new ioloop.
    +        Subprocess.uninitialize()
    +        self.io_loop.clear_current()
    +        if (not IOLoop.initialized() or
    +                self.io_loop is not IOLoop.instance()):
    +            # Try to clean up any file descriptors left open in the ioloop.
    +            # This avoids leaks, especially when tests are run repeatedly
    +            # in the same process with autoreload (because curl does not
    +            # set FD_CLOEXEC on its file descriptors)
    +            self.io_loop.close(all_fds=True)
    +        super(AsyncTestCase, self).tearDown()
    +        # In case an exception escaped or the StackContext caught an exception
    +        # when there wasn't a wait() to re-raise it, do so here.
    +        # This is our last chance to raise an exception in a way that the
    +        # unittest machinery understands.
    +        self.__rethrow()
    +
    +    def get_new_ioloop(self):
    +        """Creates a new `.IOLoop` for this test.  May be overridden in
    +        subclasses for tests that require a specific `.IOLoop` (usually
    +        the singleton `.IOLoop.instance()`).
    +        """
    +        return IOLoop()
    +
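A sketch of the override mentioned above, for code under test that relies
on the global singleton (the test class name is hypothetical)::

    class SingletonLoopTest(AsyncTestCase):
        def get_new_ioloop(self):
            # Run the test on the shared IOLoop instead of a fresh one.
            return IOLoop.instance()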
    +    def _handle_exception(self, typ, value, tb):
    +        if self.__failure is None:
    +            self.__failure = (typ, value, tb)
    +        else:
    +            app_log.error("multiple unhandled exceptions in test",
    +                          exc_info=(typ, value, tb))
    +        self.stop()
    +        return True
    +
    +    def __rethrow(self):
    +        if self.__failure is not None:
    +            failure = self.__failure
    +            self.__failure = None
    +            raise_exc_info(failure)
    +
    +    def run(self, result=None):
    +        with ExceptionStackContext(self._handle_exception):
    +            super(AsyncTestCase, self).run(result)
    +        # As a last resort, if an exception escaped super.run() and wasn't
    +        # re-raised in tearDown, raise it here.  This will cause the
    +        # unittest run to fail messily, but that's better than silently
    +        # ignoring an error.
    +        self.__rethrow()
    +
    +    def stop(self, _arg=None, **kwargs):
    +        """Stops the `.IOLoop`, causing one pending (or future) call to `wait()`
    +        to return.
    +
    +        Keyword arguments or a single positional argument passed to `stop()` are
    +        saved and will be returned by `wait()`.
    +        """
    +        assert _arg is None or not kwargs
    +        self.__stop_args = kwargs or _arg
    +        if self.__running:
    +            self.io_loop.stop()
    +            self.__running = False
    +        self.__stopped = True
    +
    +    def wait(self, condition=None, timeout=None):
    +        """Runs the `.IOLoop` until stop is called or timeout has passed.
    +
    +        In the event of a timeout, an exception will be thrown. The
    +        default timeout is 5 seconds; it may be overridden with a
    +        ``timeout`` keyword argument or globally with the
    +        ``ASYNC_TEST_TIMEOUT`` environment variable.
    +
    +        If ``condition`` is not None, the `.IOLoop` will be restarted
    +        after `stop()` until ``condition()`` returns true.
    +
    +        .. versionchanged:: 3.1
    +           Added the ``ASYNC_TEST_TIMEOUT`` environment variable.
    +        """
    +        if timeout is None:
    +            timeout = get_async_test_timeout()
    +
    +        if not self.__stopped:
    +            if timeout:
    +                def timeout_func():
    +                    try:
    +                        raise self.failureException(
    +                            'Async operation timed out after %s seconds' %
    +                            timeout)
    +                    except Exception:
    +                        self.__failure = sys.exc_info()
    +                    self.stop()
    +                self.__timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, timeout_func)
    +            while True:
    +                self.__running = True
    +                self.io_loop.start()
    +                if (self.__failure is not None or
    +                        condition is None or condition()):
    +                    break
    +            if self.__timeout is not None:
    +                self.io_loop.remove_timeout(self.__timeout)
    +                self.__timeout = None
    +        assert self.__stopped
    +        self.__stopped = False
    +        self.__rethrow()
    +        result = self.__stop_args
    +        self.__stop_args = None
    +        return result
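A hedged aside on the ``condition`` parameter: because ``wait()`` restarts the loop after each ``stop()`` until the condition returns true, a test can collect several callbacks before asserting. A minimal sketch, assuming an ``AsyncHTTPTestCase`` subclass whose app serves ``/a`` and ``/b`` (both hypothetical)::

    def test_two_fetches(self):
        results = []

        def collect(response):
            results.append(response)
            self.stop()

        self.http_client.fetch(self.get_url('/a'), collect)
        self.http_client.fetch(self.get_url('/b'), collect)
        # Restart the loop until both callbacks have fired.
        self.wait(condition=lambda: len(results) == 2)
        self.assertEqual(len(results), 2)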
    +
    +
    +class AsyncHTTPTestCase(AsyncTestCase):
    +    """A test case that starts up an HTTP server.
    +
    +    Subclasses must override `get_app()`, which returns the
    +    `tornado.web.Application` (or other `.HTTPServer` callback) to be tested.
    +    Tests will typically use the provided ``self.http_client`` to fetch
    +    URLs from this server.
    +
    +    Example, assuming the "Hello, world" example from the user guide is in
    +    ``hello.py``::
    +
    +        import hello
    +
    +        class TestHelloApp(AsyncHTTPTestCase):
    +            def get_app(self):
    +                return hello.make_app()
    +
    +            def test_homepage(self):
    +                response = self.fetch('/')
    +                self.assertEqual(response.code, 200)
    +                self.assertEqual(response.body, 'Hello, world')
    +
    +    That call to ``self.fetch()`` is equivalent to ::
    +
    +        self.http_client.fetch(self.get_url('/'), self.stop)
    +        response = self.wait()
    +
    +    which illustrates how AsyncTestCase can turn an asynchronous operation,
    +    like ``http_client.fetch()``, into a synchronous operation. If you need
    +    to do other asynchronous operations in tests, you'll probably need to use
    +    ``stop()`` and ``wait()`` yourself.
    +    """
    +    def setUp(self):
    +        super(AsyncHTTPTestCase, self).setUp()
    +        sock, port = bind_unused_port()
    +        self.__port = port
    +
    +        self.http_client = self.get_http_client()
    +        self._app = self.get_app()
    +        self.http_server = self.get_http_server()
    +        self.http_server.add_sockets([sock])
    +
    +    def get_http_client(self):
    +        return AsyncHTTPClient(io_loop=self.io_loop)
    +
    +    def get_http_server(self):
    +        return HTTPServer(self._app, io_loop=self.io_loop,
    +                          **self.get_httpserver_options())
    +
    +    def get_app(self):
    +        """Should be overridden by subclasses to return a
    +        `tornado.web.Application` or other `.HTTPServer` callback.
    +        """
    +        raise NotImplementedError()
    +
    +    def fetch(self, path, **kwargs):
    +        """Convenience method to synchronously fetch a url.
    +
    +        The given path will be appended to the local server's host and
    +        port.  Any additional kwargs will be passed directly to
    +        `.AsyncHTTPClient.fetch` (and so could be used to pass
    +        ``method="POST"``, ``body="..."``, etc).
    +        """
    +        self.http_client.fetch(self.get_url(path), self.stop, **kwargs)
    +        return self.wait()
    +
    +    def get_httpserver_options(self):
    +        """May be overridden by subclasses to return additional
    +        keyword arguments for the server.
    +        """
    +        return {}
    +
    +    def get_http_port(self):
    +        """Returns the port used by the server.
    +
    +        A new port is chosen for each test.
    +        """
    +        return self.__port
    +
    +    def get_protocol(self):
    +        return 'http'
    +
    +    def get_url(self, path):
    +        """Returns an absolute url for the given path on the test server."""
    +        return '%s://localhost:%s%s' % (self.get_protocol(),
    +                                        self.get_http_port(), path)
    +
    +    def tearDown(self):
    +        self.http_server.stop()
    +        self.io_loop.run_sync(self.http_server.close_all_connections,
    +                              timeout=get_async_test_timeout())
    +        if (not IOLoop.initialized() or
    +                self.http_client.io_loop is not IOLoop.instance()):
    +            self.http_client.close()
    +        super(AsyncHTTPTestCase, self).tearDown()
    +
    +
    +class AsyncHTTPSTestCase(AsyncHTTPTestCase):
    +    """A test case that starts an HTTPS server.
    +
    +    Interface is generally the same as `AsyncHTTPTestCase`.
    +    """
    +    def get_http_client(self):
    +        return AsyncHTTPClient(io_loop=self.io_loop, force_instance=True,
    +                               defaults=dict(validate_cert=False))
    +
    +    def get_httpserver_options(self):
    +        return dict(ssl_options=self.get_ssl_options())
    +
    +    def get_ssl_options(self):
    +        """May be overridden by subclasses to select SSL options.
    +
    +        By default includes a self-signed testing certificate.
    +        """
    +        # Testing keys were generated with:
    +        # openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
    +        module_dir = os.path.dirname(__file__)
    +        return dict(
    +            certfile=os.path.join(module_dir, 'test', 'test.crt'),
    +            keyfile=os.path.join(module_dir, 'test', 'test.key'))
    +
    +    def get_protocol(self):
    +        return 'https'
    +
    +
    +def gen_test(func=None, timeout=None):
    +    """Testing equivalent of ``@gen.coroutine``, to be applied to test methods.
    +
    +    ``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
    +    already running.  ``@gen_test`` should be applied to test methods
    +    on subclasses of `AsyncTestCase`.
    +
    +    Example::
    +
    +        class MyTest(AsyncHTTPTestCase):
    +            @gen_test
    +            def test_something(self):
+                response = yield self.http_client.fetch(self.get_url('/'))
    +
    +    By default, ``@gen_test`` times out after 5 seconds. The timeout may be
    +    overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
    +    or for each test with the ``timeout`` keyword argument::
    +
    +        class MyTest(AsyncHTTPTestCase):
    +            @gen_test(timeout=10)
    +            def test_something_slow(self):
+                response = yield self.http_client.fetch(self.get_url('/'))
    +
    +    .. versionadded:: 3.1
    +       The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
    +       variable.
    +
    +    .. versionchanged:: 4.0
    +       The wrapper now passes along ``*args, **kwargs`` so it can be used
    +       on functions with arguments.
    +    """
    +    if timeout is None:
    +        timeout = get_async_test_timeout()
    +
    +    def wrap(f):
    +        # Stack up several decorators to allow us to access the generator
    +        # object itself.  In the innermost wrapper, we capture the generator
    +        # and save it in an attribute of self.  Next, we run the wrapped
    +        # function through @gen.coroutine.  Finally, the coroutine is
    +        # wrapped again to make it synchronous with run_sync.
    +        #
    +        # This is a good case study arguing for either some sort of
    +        # extensibility in the gen decorators or cancellation support.
    +        @functools.wraps(f)
    +        def pre_coroutine(self, *args, **kwargs):
    +            result = f(self, *args, **kwargs)
    +            if isinstance(result, GeneratorType) or iscoroutine(result):
    +                self._test_generator = result
    +            else:
    +                self._test_generator = None
    +            return result
    +
    +        if iscoroutinefunction(f):
    +            coro = pre_coroutine
    +        else:
    +            coro = gen.coroutine(pre_coroutine)
    +
    +        @functools.wraps(coro)
    +        def post_coroutine(self, *args, **kwargs):
    +            try:
    +                return self.io_loop.run_sync(
    +                    functools.partial(coro, self, *args, **kwargs),
    +                    timeout=timeout)
    +            except TimeoutError as e:
    +                # run_sync raises an error with an unhelpful traceback.
    +                # Throw it back into the generator or coroutine so the stack
    +                # trace is replaced by the point where the test is stopped.
    +                self._test_generator.throw(e)
    +                # In case the test contains an overly broad except clause,
    +                # we may get back here.  In this case re-raise the original
    +                # exception, which is better than nothing.
    +                raise
    +        return post_coroutine
    +
    +    if func is not None:
    +        # Used like:
    +        #     @gen_test
    +        #     def f(self):
    +        #         pass
    +        return wrap(func)
    +    else:
    +        # Used like @gen_test(timeout=10)
    +        return wrap
    +
    +
    +# Without this attribute, nosetests will try to run gen_test as a test
    +# anywhere it is imported.
    +gen_test.__test__ = False
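The ``if func is not None`` branch above is the standard idiom for a decorator that works both bare (``@gen_test``) and parameterized (``@gen_test(timeout=10)``). A stripped-down sketch of the same pattern, with hypothetical names::

    import functools

    def retry(func=None, attempts=3):
        def wrap(f):
            @functools.wraps(f)
            def wrapper(*args, **kwargs):
                for i in range(attempts):
                    try:
                        return f(*args, **kwargs)
                    except Exception:
                        if i == attempts - 1:
                            raise
            return wrapper
        # Bare use passes the function directly; parameterized use
        # returns `wrap` to be applied to the function afterwards.
        return wrap(func) if func is not None else wrap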
    +
    +
    +class LogTrapTestCase(unittest.TestCase):
    +    """A test case that captures and discards all logging output
    +    if the test passes.
    +
    +    Some libraries can produce a lot of logging output even when
    +    the test succeeds, so this class can be useful to minimize the noise.
    +    Simply use it as a base class for your test case.  It is safe to combine
    +    with AsyncTestCase via multiple inheritance
    +    (``class MyTestCase(AsyncHTTPTestCase, LogTrapTestCase):``)
    +
    +    This class assumes that only one log handler is configured and
    +    that it is a `~logging.StreamHandler`.  This is true for both
    +    `logging.basicConfig` and the "pretty logging" configured by
    +    `tornado.options`.  It is not compatible with other log buffering
    +    mechanisms, such as those provided by some test runners.
    +
    +    .. deprecated:: 4.1
    +       Use the unittest module's ``--buffer`` option instead, or `.ExpectLog`.
    +    """
    +    def run(self, result=None):
    +        logger = logging.getLogger()
    +        if not logger.handlers:
    +            logging.basicConfig()
    +        handler = logger.handlers[0]
    +        if (len(logger.handlers) > 1 or
    +                not isinstance(handler, logging.StreamHandler)):
    +            # Logging has been configured in a way we don't recognize,
    +            # so just leave it alone.
    +            super(LogTrapTestCase, self).run(result)
    +            return
    +        old_stream = handler.stream
    +        try:
    +            handler.stream = StringIO()
    +            gen_log.info("RUNNING TEST: " + str(self))
    +            old_error_count = len(result.failures) + len(result.errors)
    +            super(LogTrapTestCase, self).run(result)
    +            new_error_count = len(result.failures) + len(result.errors)
    +            if new_error_count != old_error_count:
    +                old_stream.write(handler.stream.getvalue())
    +        finally:
    +            handler.stream = old_stream
    +
    +
    +class ExpectLog(logging.Filter):
    +    """Context manager to capture and suppress expected log output.
    +
    +    Useful to make tests of error conditions less noisy, while still
    +    leaving unexpected log entries visible.  *Not thread safe.*
    +
    +    The attribute ``logged_stack`` is set to true if any exception
    +    stack trace was logged.
    +
    +    Usage::
    +
    +        with ExpectLog('tornado.application', "Uncaught exception"):
    +            error_response = self.fetch("/some_page")
    +
    +    .. versionchanged:: 4.3
    +       Added the ``logged_stack`` attribute.
    +    """
    +    def __init__(self, logger, regex, required=True):
    +        """Constructs an ExpectLog context manager.
    +
    +        :param logger: Logger object (or name of logger) to watch.  Pass
    +            an empty string to watch the root logger.
    +        :param regex: Regular expression to match.  Any log entries on
    +            the specified logger that match this regex will be suppressed.
+        :param required: If true, an exception will be raised if the end of
    +            the ``with`` statement is reached without matching any log entries.
    +        """
    +        if isinstance(logger, basestring_type):
    +            logger = logging.getLogger(logger)
    +        self.logger = logger
    +        self.regex = re.compile(regex)
    +        self.required = required
    +        self.matched = False
    +        self.logged_stack = False
    +
    +    def filter(self, record):
    +        if record.exc_info:
    +            self.logged_stack = True
    +        message = record.getMessage()
    +        if self.regex.match(message):
    +            self.matched = True
    +            return False
    +        return True
    +
    +    def __enter__(self):
    +        self.logger.addFilter(self)
    +        return self
    +
    +    def __exit__(self, typ, value, tb):
    +        self.logger.removeFilter(self)
    +        if not typ and self.required and not self.matched:
    +            raise Exception("did not get expected log message")
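A hedged usage sketch beyond the docstring, inside an ``AsyncHTTPTestCase``: ``required=False`` tolerates a message that may or may not appear, and the ``matched`` and ``logged_stack`` attributes can be inspected afterwards (the ``/maybe-failing`` endpoint is hypothetical)::

    with ExpectLog('tornado.application', "Uncaught exception",
                   required=False) as expect:
        self.fetch("/maybe-failing")
    if expect.matched:
        # The handler really did blow up; a traceback was logged.
        self.assertTrue(expect.logged_stack)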
    +
    +
    +def main(**kwargs):
    +    """A simple test runner.
    +
    +    This test runner is essentially equivalent to `unittest.main` from
    +    the standard library, but adds support for tornado-style option
    +    parsing and log formatting.
    +
    +    The easiest way to run a test is via the command line::
    +
    +        python -m tornado.testing tornado.test.stack_context_test
    +
    +    See the standard library unittest module for ways in which tests can
    +    be specified.
    +
    +    Projects with many tests may wish to define a test script like
    +    ``tornado/test/runtests.py``.  This script should define a method
    +    ``all()`` which returns a test suite and then call
    +    `tornado.testing.main()`.  Note that even when a test script is
    +    used, the ``all()`` test suite may be overridden by naming a
    +    single test on the command line::
    +
    +        # Runs all tests
    +        python -m tornado.test.runtests
    +        # Runs one test
    +        python -m tornado.test.runtests tornado.test.stack_context_test
    +
+    Additional keyword arguments are passed through to ``unittest.main()``.
    +    For example, use ``tornado.testing.main(verbosity=2)``
    +    to show many test details as they are run.
    +    See http://docs.python.org/library/unittest.html#unittest.main
    +    for full argument list.
    +    """
    +    from tornado.options import define, options, parse_command_line
    +
    +    define('exception_on_interrupt', type=bool, default=True,
    +           help=("If true (default), ctrl-c raises a KeyboardInterrupt "
    +                 "exception.  This prints a stack trace but cannot interrupt "
    +                 "certain operations.  If false, the process is more reliably "
    +                 "killed, but does not print a stack trace."))
    +
    +    # support the same options as unittest's command-line interface
    +    define('verbose', type=bool)
    +    define('quiet', type=bool)
    +    define('failfast', type=bool)
    +    define('catch', type=bool)
    +    define('buffer', type=bool)
    +
    +    argv = [sys.argv[0]] + parse_command_line(sys.argv)
    +
    +    if not options.exception_on_interrupt:
    +        signal.signal(signal.SIGINT, signal.SIG_DFL)
    +
    +    if options.verbose is not None:
    +        kwargs['verbosity'] = 2
    +    if options.quiet is not None:
    +        kwargs['verbosity'] = 0
    +    if options.failfast is not None:
    +        kwargs['failfast'] = True
    +    if options.catch is not None:
    +        kwargs['catchbreak'] = True
    +    if options.buffer is not None:
    +        kwargs['buffer'] = True
    +
    +    if __name__ == '__main__' and len(argv) == 1:
    +        print("No tests specified", file=sys.stderr)
    +        sys.exit(1)
    +    try:
    +        # In order to be able to run tests by their fully-qualified name
    +        # on the command line without importing all tests here,
    +        # module must be set to None.  Python 3.2's unittest.main ignores
    +        # defaultTest if no module is given (it tries to do its own
    +        # test discovery, which is incompatible with auto2to3), so don't
    +        # set module if we're not asking for a specific test.
    +        if len(argv) > 1:
    +            unittest.main(module=None, argv=argv, **kwargs)
    +        else:
    +            unittest.main(defaultTest="all", argv=argv, **kwargs)
    +    except SystemExit as e:
    +        if e.code == 0:
    +            gen_log.info('PASS')
    +        else:
    +            gen_log.error('FAIL')
    +        raise
    +
    +if __name__ == '__main__':
    +    main()
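For reference, the kind of ``runtests.py`` script described in the ``main`` docstring might look like this minimal sketch (module names hypothetical)::

    import unittest
    import tornado.testing

    TEST_MODULES = [
        'myproject.test.handlers_test',
        'myproject.test.util_test',
    ]

    def all():
        # tornado.testing.main() looks up this suite by name.
        return unittest.defaultTestLoader.loadTestsFromNames(TEST_MODULES)

    if __name__ == '__main__':
        tornado.testing.main()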
    diff --git a/server/www/packages/packages-common/tornado/util.py b/server/www/packages/packages-common/tornado/util.py
    new file mode 100644
    index 0000000..a67ddf5
    --- /dev/null
    +++ b/server/www/packages/packages-common/tornado/util.py
    @@ -0,0 +1,387 @@
    +"""Miscellaneous utility functions and classes.
    +
    +This module is used internally by Tornado.  It is not necessarily expected
    +that the functions and classes defined here will be useful to other
    +applications, but they are documented here in case they are.
    +
    +The one public-facing part of this module is the `Configurable` class
    +and its `~Configurable.configure` method, which becomes a part of the
    +interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`,
    +and `.Resolver`.
    +"""
    +
    +from __future__ import absolute_import, division, print_function, with_statement
    +
    +import array
    +import os
    +import sys
    +import zlib
    +
    +
    +try:
    +    xrange  # py2
    +except NameError:
    +    xrange = range  # py3
    +
    +# inspect.getargspec() raises DeprecationWarnings in Python 3.5.
    +# The two functions have compatible interfaces for the parts we need.
    +try:
    +    from inspect import getfullargspec as getargspec  # py3
    +except ImportError:
    +    from inspect import getargspec  # py2
    +
    +
    +class ObjectDict(dict):
    +    """Makes a dictionary behave like an object, with attribute-style access.
    +    """
    +    def __getattr__(self, name):
    +        try:
    +            return self[name]
    +        except KeyError:
    +            raise AttributeError(name)
    +
    +    def __setattr__(self, name, value):
    +        self[name] = value
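A quick usage sketch: attribute access simply falls through to the dictionary keys::

    d = ObjectDict(host="localhost", port=8888)
    assert d.host == "localhost"
    d.port = 9999              # equivalent to d["port"] = 9999
    assert d["port"] == 9999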
    +
    +
    +class GzipDecompressor(object):
    +    """Streaming gzip decompressor.
    +
+    The interface is like that of `zlib.decompressobj` (without some of the
+    optional arguments), but it understands gzip headers and checksums.
    +    """
    +    def __init__(self):
    +        # Magic parameter makes zlib module understand gzip header
    +        # http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
    +        # This works on cpython and pypy, but not jython.
    +        self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS)
    +
    +    def decompress(self, value, max_length=None):
    +        """Decompress a chunk, returning newly-available data.
    +
    +        Some data may be buffered for later processing; `flush` must
    +        be called when there is no more input data to ensure that
    +        all data was processed.
    +
    +        If ``max_length`` is given, some input data may be left over
    +        in ``unconsumed_tail``; you must retrieve this value and pass
    +        it back to a future call to `decompress` if it is not empty.
    +        """
    +        return self.decompressobj.decompress(value, max_length)
    +
    +    @property
    +    def unconsumed_tail(self):
    +        """Returns the unconsumed portion left over
    +        """
    +        return self.decompressobj.unconsumed_tail
    +
    +    def flush(self):
    +        """Return any remaining buffered data not yet returned by decompress.
    +
    +        Also checks for errors such as truncated input.
    +        No other methods may be called on this object after `flush`.
    +        """
    +        return self.decompressobj.flush()
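A hedged sketch of the streaming protocol described above: feed input, bound the output size, and drain ``unconsumed_tail`` until it is empty (Python 3's ``gzip.compress`` is used here only to fabricate test input)::

    import gzip

    compressed = gzip.compress(b"hello world " * 1000)
    decomp = GzipDecompressor()
    out, data = [], compressed
    while data:
        # Cap each call's output; leftover input lands in unconsumed_tail.
        out.append(decomp.decompress(data, 4096))
        data = decomp.unconsumed_tail
    out.append(decomp.flush())
    assert b"".join(out) == b"hello world " * 1000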
    +
    +
    +# Fake unicode literal support:  Python 3.2 doesn't have the u'' marker for
    +# literal strings, and alternative solutions like "from __future__ import
    +# unicode_literals" have other problems (see PEP 414).  u() can be applied
    +# to ascii strings that include \u escapes (but they must not contain
    +# literal non-ascii characters).
    +if not isinstance(b'', type('')):
    +    def u(s):
    +        return s
    +    unicode_type = str
    +    basestring_type = str
    +else:
    +    def u(s):
    +        return s.decode('unicode_escape')
    +    # These names don't exist in py3, so use noqa comments to disable
    +    # warnings in flake8.
    +    unicode_type = unicode  # noqa
    +    basestring_type = basestring  # noqa
    +
    +
    +def import_object(name):
    +    """Imports an object by name.
    +
    +    import_object('x') is equivalent to 'import x'.
    +    import_object('x.y.z') is equivalent to 'from x.y import z'.
    +
    +    >>> import tornado.escape
    +    >>> import_object('tornado.escape') is tornado.escape
    +    True
    +    >>> import_object('tornado.escape.utf8') is tornado.escape.utf8
    +    True
    +    >>> import_object('tornado') is tornado
    +    True
    +    >>> import_object('tornado.missing_module')
    +    Traceback (most recent call last):
    +        ...
    +    ImportError: No module named missing_module
    +    """
    +    if isinstance(name, unicode_type) and str is not unicode_type:
    +        # On python 2 a byte string is required.
    +        name = name.encode('utf-8')
    +    if name.count('.') == 0:
    +        return __import__(name, None, None)
    +
    +    parts = name.split('.')
    +    obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
    +    try:
    +        return getattr(obj, parts[-1])
    +    except AttributeError:
    +        raise ImportError("No module named %s" % parts[-1])
    +
    +
    +# Deprecated alias that was used before we dropped py25 support.
    +# Left here in case anyone outside Tornado is using it.
    +bytes_type = bytes
    +
    +if sys.version_info > (3,):
    +    exec("""
    +def raise_exc_info(exc_info):
    +    raise exc_info[1].with_traceback(exc_info[2])
    +
    +def exec_in(code, glob, loc=None):
    +    if isinstance(code, str):
    +        code = compile(code, '', 'exec', dont_inherit=True)
    +    exec(code, glob, loc)
    +""")
    +else:
    +    exec("""
    +def raise_exc_info(exc_info):
    +    raise exc_info[0], exc_info[1], exc_info[2]
    +
    +def exec_in(code, glob, loc=None):
    +    if isinstance(code, basestring):
    +        # exec(string) inherits the caller's future imports; compile
    +        # the string first to prevent that.
    +        code = compile(code, '', 'exec', dont_inherit=True)
    +    exec code in glob, loc
    +""")
    +
    +
    +def errno_from_exception(e):
    +    """Provides the errno from an Exception object.
    +
+    In some cases the ``errno`` attribute is not set, so we pull it out
+    of ``args``; but if an exception is instantiated without any args,
+    indexing into ``args`` would itself raise an error. This function
+    abstracts all of that behavior and gives you a safe way to get the
+    errno.
    +    """
    +
    +    if hasattr(e, 'errno'):
    +        return e.errno
    +    elif e.args:
    +        return e.args[0]
    +    else:
    +        return None
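A hedged sketch of the typical use: retrying a system call that was interrupted by a signal, while re-raising everything else::

    import errno
    import socket

    def accept_retry(sock):
        while True:
            try:
                return sock.accept()
            except socket.error as e:
                # EINTR means "try again"; anything else is a real error.
                if errno_from_exception(e) == errno.EINTR:
                    continue
                raise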
    +
    +
    +class Configurable(object):
    +    """Base class for configurable interfaces.
    +
    +    A configurable interface is an (abstract) class whose constructor
    +    acts as a factory function for one of its implementation subclasses.
    +    The implementation subclass as well as optional keyword arguments to
    +    its initializer can be set globally at runtime with `configure`.
    +
    +    By using the constructor as the factory method, the interface
    +    looks like a normal class, `isinstance` works as usual, etc.  This
    +    pattern is most useful when the choice of implementation is likely
    +    to be a global decision (e.g. when `~select.epoll` is available,
    +    always use it instead of `~select.select`), or when a
    +    previously-monolithic class has been split into specialized
    +    subclasses.
    +
    +    Configurable subclasses must define the class methods
    +    `configurable_base` and `configurable_default`, and use the instance
    +    method `initialize` instead of ``__init__``.
    +    """
    +    __impl_class = None
    +    __impl_kwargs = None
    +
    +    def __new__(cls, *args, **kwargs):
    +        base = cls.configurable_base()
    +        init_kwargs = {}
    +        if cls is base:
    +            impl = cls.configured_class()
    +            if base.__impl_kwargs:
    +                init_kwargs.update(base.__impl_kwargs)
    +        else:
    +            impl = cls
    +        init_kwargs.update(kwargs)
    +        instance = super(Configurable, cls).__new__(impl)
    +        # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
    +        # singleton magic.  If we get rid of that we can switch to __init__
    +        # here too.
    +        instance.initialize(*args, **init_kwargs)
    +        return instance
    +
    +    @classmethod
    +    def configurable_base(cls):
    +        """Returns the base class of a configurable hierarchy.
    +
+        This will normally return the class in which it is defined
+        (which is *not* necessarily the same as the ``cls`` classmethod
+        parameter).
    +        """
    +        raise NotImplementedError()
    +
    +    @classmethod
    +    def configurable_default(cls):
    +        """Returns the implementation class to be used if none is configured."""
    +        raise NotImplementedError()
    +
    +    def initialize(self):
    +        """Initialize a `Configurable` subclass instance.
    +
    +        Configurable classes should use `initialize` instead of ``__init__``.
    +
    +        .. versionchanged:: 4.2
    +           Now accepts positional arguments in addition to keyword arguments.
    +        """
    +
    +    @classmethod
    +    def configure(cls, impl, **kwargs):
    +        """Sets the class to use when the base class is instantiated.
    +
    +        Keyword arguments will be saved and added to the arguments passed
    +        to the constructor.  This can be used to set global defaults for
    +        some parameters.
    +        """
    +        base = cls.configurable_base()
    +        if isinstance(impl, (unicode_type, bytes)):
    +            impl = import_object(impl)
    +        if impl is not None and not issubclass(impl, cls):
    +            raise ValueError("Invalid subclass of %s" % cls)
    +        base.__impl_class = impl
    +        base.__impl_kwargs = kwargs
    +
    +    @classmethod
    +    def configured_class(cls):
    +        """Returns the currently configured class."""
    +        base = cls.configurable_base()
    +        if cls.__impl_class is None:
    +            base.__impl_class = cls.configurable_default()
    +        return base.__impl_class
    +
    +    @classmethod
    +    def _save_configuration(cls):
    +        base = cls.configurable_base()
    +        return (base.__impl_class, base.__impl_kwargs)
    +
    +    @classmethod
    +    def _restore_configuration(cls, saved):
    +        base = cls.configurable_base()
    +        base.__impl_class = saved[0]
    +        base.__impl_kwargs = saved[1]
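A minimal sketch of the pattern (all class names hypothetical): the base class declares itself via ``configurable_base``, names a default implementation, and uses ``initialize`` instead of ``__init__``::

    class Transport(Configurable):
        @classmethod
        def configurable_base(cls):
            return Transport

        @classmethod
        def configurable_default(cls):
            return PlainTransport

        def initialize(self, timeout=10):
            self.timeout = timeout

    class PlainTransport(Transport):
        pass

    class SecureTransport(Transport):
        pass

Here ``Transport()`` constructs a ``PlainTransport`` with ``timeout=10``; after ``Transport.configure(SecureTransport, timeout=30)`` it constructs a ``SecureTransport`` initialized with ``timeout=30`` instead.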
    +
    +
    +class ArgReplacer(object):
    +    """Replaces one value in an ``args, kwargs`` pair.
    +
    +    Inspects the function signature to find an argument by name
    +    whether it is passed by position or keyword.  For use in decorators
    +    and similar wrappers.
    +    """
    +    def __init__(self, func, name):
    +        self.name = name
    +        try:
    +            self.arg_pos = self._getargnames(func).index(name)
    +        except ValueError:
    +            # Not a positional parameter
    +            self.arg_pos = None
    +
    +    def _getargnames(self, func):
    +        try:
    +            return getargspec(func).args
    +        except TypeError:
    +            if hasattr(func, 'func_code'):
    +                # Cython-generated code has all the attributes needed
    +                # by inspect.getargspec, but the inspect module only
    +                # works with ordinary functions. Inline the portion of
    +                # getargspec that we need here. Note that for static
    +                # functions the @cython.binding(True) decorator must
    +                # be used (for methods it works out of the box).
    +                code = func.func_code
    +                return code.co_varnames[:code.co_argcount]
    +            raise
    +
    +    def get_old_value(self, args, kwargs, default=None):
    +        """Returns the old value of the named argument without replacing it.
    +
    +        Returns ``default`` if the argument is not present.
    +        """
    +        if self.arg_pos is not None and len(args) > self.arg_pos:
    +            return args[self.arg_pos]
    +        else:
    +            return kwargs.get(self.name, default)
    +
    +    def replace(self, new_value, args, kwargs):
    +        """Replace the named argument in ``args, kwargs`` with ``new_value``.
    +
    +        Returns ``(old_value, args, kwargs)``.  The returned ``args`` and
    +        ``kwargs`` objects may not be the same as the input objects, or
    +        the input objects may be mutated.
    +
    +        If the named argument was not found, ``new_value`` will be added
    +        to ``kwargs`` and None will be returned as ``old_value``.
    +        """
    +        if self.arg_pos is not None and len(args) > self.arg_pos:
    +            # The arg to replace is passed positionally
    +            old_value = args[self.arg_pos]
    +            args = list(args)  # *args is normally a tuple
    +            args[self.arg_pos] = new_value
    +        else:
    +            # The arg to replace is either omitted or passed by keyword.
    +            old_value = kwargs.get(self.name)
    +            kwargs[self.name] = new_value
    +        return old_value, args, kwargs
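A hedged sketch of how a wrapper might use this class to intercept a ``callback`` argument whether it arrives positionally or by keyword (all names hypothetical)::

    def log_callback(func):
        replacer = ArgReplacer(func, 'callback')

        def wrapper(*args, **kwargs):
            old_callback = replacer.get_old_value(args, kwargs)

            def new_callback(*cb_args):
                print("callback fired")
                if old_callback is not None:
                    old_callback(*cb_args)

            # Works whether `callback` was positional, keyword, or omitted.
            _, args, kwargs = replacer.replace(new_callback, args, kwargs)
            return func(*args, **kwargs)

        return wrapper

Applied to ``def fetch(url, callback=None)``, the wrapper behaves the same for ``fetch(u, cb)`` and ``fetch(u, callback=cb)``.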
    +
    +
    +def timedelta_to_seconds(td):
    +    """Equivalent to td.total_seconds() (introduced in python 2.7)."""
    +    return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    +
    +
    +def _websocket_mask_python(mask, data):
    +    """Websocket masking function.
    +
    +    `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length.
    +    Returns a `bytes` object of the same length as `data` with the mask applied
    +    as specified in section 5.3 of RFC 6455.
    +
    +    This pure-python implementation may be replaced by an optimized version when available.
    +    """
    +    mask = array.array("B", mask)
    +    unmasked = array.array("B", data)
    +    for i in xrange(len(data)):
    +        unmasked[i] = unmasked[i] ^ mask[i % 4]
    +    if hasattr(unmasked, 'tobytes'):
    +        # tostring was deprecated in py32.  It hasn't been removed,
    +        # but since we turn on deprecation warnings in our tests
    +        # we need to use the right one.
    +        return unmasked.tobytes()
    +    else:
    +        return unmasked.tostring()
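Since the mask is applied by XOR, masking twice is the identity, which gives a quick sanity check::

    masked = _websocket_mask_python(b"\x01\x02\x03\x04", b"payload")
    assert _websocket_mask_python(b"\x01\x02\x03\x04", masked) == b"payload"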
    +
    +if (os.environ.get('TORNADO_NO_EXTENSION') or
    +        os.environ.get('TORNADO_EXTENSION') == '0'):
    +    # These environment variables exist to make it easier to do performance
    +    # comparisons; they are not guaranteed to remain supported in the future.
    +    _websocket_mask = _websocket_mask_python
    +else:
    +    try:
    +        from tornado.speedups import websocket_mask as _websocket_mask
    +    except ImportError:
    +        if os.environ.get('TORNADO_EXTENSION') == '1':
    +            raise
    +        _websocket_mask = _websocket_mask_python
    +
    +
    +def doctests():
    +    import doctest
    +    return doctest.DocTestSuite()
    diff --git a/server/www/packages/packages-common/tornado/web.py b/server/www/packages/packages-common/tornado/web.py
    new file mode 100644
    index 0000000..1c2ac8c
    --- /dev/null
    +++ b/server/www/packages/packages-common/tornado/web.py
    @@ -0,0 +1,3279 @@
    +#!/usr/bin/env python
    +#
    +# Copyright 2009 Facebook
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License"); you may
    +# not use this file except in compliance with the License. You may obtain
    +# a copy of the License at
    +#
    +#     http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    +# License for the specific language governing permissions and limitations
    +# under the License.
    +
    +"""``tornado.web`` provides a simple web framework with asynchronous
    +features that allow it to scale to large numbers of open connections,
+making it ideal for `long polling
+<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
    +
    +Here is a simple "Hello, world" example app:
    +
    +.. testcode::
    +
    +    import tornado.ioloop
    +    import tornado.web
    +
    +    class MainHandler(tornado.web.RequestHandler):
    +        def get(self):
    +            self.write("Hello, world")
    +
    +    if __name__ == "__main__":
    +        application = tornado.web.Application([
    +            (r"/", MainHandler),
    +        ])
    +        application.listen(8888)
    +        tornado.ioloop.IOLoop.current().start()
    +
    +.. testoutput::
    +   :hide:
    +
    +
    +See the :doc:`guide` for additional information.
    +
    +Thread-safety notes
    +-------------------
    +
    +In general, methods on `RequestHandler` and elsewhere in Tornado are
    +not thread-safe.  In particular, methods such as
    +`~RequestHandler.write()`, `~RequestHandler.finish()`, and
    +`~RequestHandler.flush()` must only be called from the main thread.  If
    +you use multiple threads it is important to use `.IOLoop.add_callback`
    +to transfer control back to the main thread before finishing the
    +request.
    +
    +"""
    +
    +from __future__ import absolute_import, division, print_function, with_statement
    +
    +import base64
    +import binascii
    +import datetime
    +import email.utils
    +import functools
    +import gzip
    +import hashlib
    +import hmac
    +import mimetypes
    +import numbers
    +import os.path
    +import re
    +import stat
    +import sys
    +import threading
    +import time
    +import tornado
    +import traceback
    +import types
    +from io import BytesIO
    +
    +from tornado.concurrent import Future
    +from tornado import escape
    +from tornado import gen
    +from tornado import httputil
    +from tornado import iostream
    +from tornado import locale
    +from tornado.log import access_log, app_log, gen_log
    +from tornado import stack_context
    +from tornado import template
    +from tornado.escape import utf8, _unicode
    +from tornado.util import (import_object, ObjectDict, raise_exc_info,
    +                          unicode_type, _websocket_mask)
    +from tornado.httputil import split_host_and_port
    +
    +
    +try:
    +    import Cookie  # py2
    +except ImportError:
    +    import http.cookies as Cookie  # py3
    +
    +try:
    +    import urlparse  # py2
    +except ImportError:
    +    import urllib.parse as urlparse  # py3
    +
    +try:
    +    from urllib import urlencode  # py2
    +except ImportError:
    +    from urllib.parse import urlencode  # py3
    +
    +
    +MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
    +"""The oldest signed value version supported by this version of Tornado.
    +
    +Signed values older than this version cannot be decoded.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
    +"""The newest signed value version supported by this version of Tornado.
    +
    +Signed values newer than this version cannot be decoded.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +DEFAULT_SIGNED_VALUE_VERSION = 2
    +"""The signed value version produced by `.RequestHandler.create_signed_value`.
    +
    +May be overridden by passing a ``version`` keyword argument.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
    +"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
    +
    +May be overridden by passing a ``min_version`` keyword argument.
    +
    +.. versionadded:: 3.2.1
    +"""
    +
    +
    +class RequestHandler(object):
    +    """Base class for HTTP request handlers.
    +
    +    Subclasses must define at least one of the methods defined in the
    +    "Entry points" section below.
    +    """
    +    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
    +                         "OPTIONS")
    +
    +    _template_loaders = {}  # {path: template.BaseLoader}
    +    _template_loader_lock = threading.Lock()
    +    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
    +
    +    def __init__(self, application, request, **kwargs):
    +        super(RequestHandler, self).__init__()
    +
    +        self.application = application
    +        self.request = request
    +        self._headers_written = False
    +        self._finished = False
    +        self._auto_finish = True
    +        self._transforms = None  # will be set in _execute
    +        self._prepared_future = None
    +        self.path_args = None
    +        self.path_kwargs = None
    +        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
    +                             application.ui_methods.items())
    +        # UIModules are available as both `modules` and `_tt_modules` in the
    +        # template namespace.  Historically only `modules` was available
    +        # but could be clobbered by user additions to the namespace.
    +        # The template {% module %} directive looks in `_tt_modules` to avoid
    +        # possible conflicts.
    +        self.ui["_tt_modules"] = _UIModuleNamespace(self,
    +                                                    application.ui_modules)
    +        self.ui["modules"] = self.ui["_tt_modules"]
    +        self.clear()
    +        self.request.connection.set_close_callback(self.on_connection_close)
    +        self.initialize(**kwargs)
    +
    +    def initialize(self):
    +        """Hook for subclass initialization.
    +
    +        A dictionary passed as the third argument of a url spec will be
    +        supplied as keyword arguments to initialize().
    +
    +        Example::
    +
    +            class ProfileHandler(RequestHandler):
    +                def initialize(self, database):
    +                    self.database = database
    +
    +                def get(self, username):
    +                    ...
    +
    +            app = Application([
    +                (r'/user/(.*)', ProfileHandler, dict(database=database)),
    +                ])
    +        """
    +        pass
    +
    +    @property
    +    def settings(self):
    +        """An alias for `self.application.settings `."""
    +        return self.application.settings
    +
    +    def head(self, *args, **kwargs):
    +        raise HTTPError(405)
    +
    +    def get(self, *args, **kwargs):
    +        raise HTTPError(405)
    +
    +    def post(self, *args, **kwargs):
    +        raise HTTPError(405)
    +
    +    def delete(self, *args, **kwargs):
    +        raise HTTPError(405)
    +
    +    def patch(self, *args, **kwargs):
    +        raise HTTPError(405)
    +
    +    def put(self, *args, **kwargs):
    +        raise HTTPError(405)
    +
    +    def options(self, *args, **kwargs):
    +        raise HTTPError(405)
    +
    +    def prepare(self):
    +        """Called at the beginning of a request before  `get`/`post`/etc.
    +
    +        Override this method to perform common initialization regardless
    +        of the request method.
    +
    +        Asynchronous support: Decorate this method with `.gen.coroutine`
    +        or `.return_future` to make it asynchronous (the
    +        `asynchronous` decorator cannot be used on `prepare`).
    +        If this method returns a `.Future` execution will not proceed
    +        until the `.Future` is done.
    +
    +        .. versionadded:: 3.1
    +           Asynchronous support.
    +        """
    +        pass
    +
    +    def on_finish(self):
    +        """Called after the end of a request.
    +
    +        Override this method to perform cleanup, logging, etc.
    +        This method is a counterpart to `prepare`.  ``on_finish`` may
    +        not produce any output, as it is called after the response
    +        has been sent to the client.
    +        """
    +        pass
    +
    +    def on_connection_close(self):
    +        """Called in async handlers if the client closed the connection.
    +
    +        Override this to clean up resources associated with
    +        long-lived connections.  Note that this method is called only if
    +        the connection was closed during asynchronous processing; if you
    +        need to do cleanup after every request override `on_finish`
    +        instead.
    +
    +        Proxies may keep a connection open for a time (perhaps
    +        indefinitely) after the client has gone away, so this method
    +        may not be called promptly after the end user closes their
    +        connection.
    +        """
    +        if _has_stream_request_body(self.__class__):
    +            if not self.request.body.done():
    +                self.request.body.set_exception(iostream.StreamClosedError())
    +                self.request.body.exception()
    +
    +    def clear(self):
    +        """Resets all headers and content for this response."""
    +        self._headers = httputil.HTTPHeaders({
    +            "Server": "TornadoServer/%s" % tornado.version,
    +            "Content-Type": "text/html; charset=UTF-8",
    +            "Date": httputil.format_timestamp(time.time()),
    +        })
    +        self.set_default_headers()
    +        self._write_buffer = []
    +        self._status_code = 200
    +        self._reason = httputil.responses[200]
    +
    +    def set_default_headers(self):
    +        """Override this to set HTTP headers at the beginning of the request.
    +
    +        For example, this is the place to set a custom ``Server`` header.
    +        Note that setting such headers in the normal flow of request
    +        processing may not do what you want, since headers may be reset
    +        during error handling.
    +        """
    +        pass
    +
    +    def set_status(self, status_code, reason=None):
    +        """Sets the status code for our response.
    +
    +        :arg int status_code: Response status code. If ``reason`` is ``None``,
+            it must be present in `httplib.responses <http.client.responses>`.
    +        :arg string reason: Human-readable reason phrase describing the status
    +            code. If ``None``, it will be filled in from
+            `httplib.responses <http.client.responses>`.
    +        """
    +        self._status_code = status_code
    +        if reason is not None:
    +            self._reason = escape.native_str(reason)
    +        else:
    +            try:
    +                self._reason = httputil.responses[status_code]
    +            except KeyError:
    +                raise ValueError("unknown status code %d", status_code)
    +
    +    def get_status(self):
    +        """Returns the status code for our response."""
    +        return self._status_code
    +
    +    def set_header(self, name, value):
    +        """Sets the given response header name and value.
    +
    +        If a datetime is given, we automatically format it according to the
    +        HTTP specification. If the value is not a string, we convert it to
    +        a string. All header values are then encoded as UTF-8.
    +        """
    +        self._headers[name] = self._convert_header_value(value)
    +
    +    def add_header(self, name, value):
    +        """Adds the given response header and value.
    +
    +        Unlike `set_header`, `add_header` may be called multiple times
    +        to return multiple values for the same header.
    +        """
    +        self._headers.add(name, self._convert_header_value(value))
    +
    +    def clear_header(self, name):
    +        """Clears an outgoing header, undoing a previous `set_header` call.
    +
    +        Note that this method does not apply to multi-valued headers
    +        set by `add_header`.
    +        """
    +        if name in self._headers:
    +            del self._headers[name]
    +
    +    _INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
    +
    +    def _convert_header_value(self, value):
    +        if isinstance(value, bytes):
    +            pass
    +        elif isinstance(value, unicode_type):
    +            value = value.encode('utf-8')
    +        elif isinstance(value, numbers.Integral):
    +            # return immediately since we know the converted value will be safe
    +            return str(value)
    +        elif isinstance(value, datetime.datetime):
    +            return httputil.format_timestamp(value)
    +        else:
    +            raise TypeError("Unsupported header value %r" % value)
    +        # If \n is allowed into the header, it is possible to inject
    +        # additional headers or split the request.
    +        if RequestHandler._INVALID_HEADER_CHAR_RE.search(value):
    +            raise ValueError("Unsafe header value %r", value)
    +        return value
    +
    +    _ARG_DEFAULT = []
    +
    +    def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
    +        """Returns the value of the argument with the given name.
    +
    +        If default is not provided, the argument is considered to be
    +        required, and we raise a `MissingArgumentError` if it is missing.
    +
    +        If the argument appears in the url more than once, we return the
    +        last value.
    +
    +        The returned value is always unicode.
    +        """
    +        return self._get_argument(name, default, self.request.arguments, strip)
    +
    +    def get_arguments(self, name, strip=True):
    +        """Returns a list of the arguments with the given name.
    +
    +        If the argument is not present, returns an empty list.
    +
    +        The returned values are always unicode.
    +        """
    +
    +        # Make sure `get_arguments` isn't accidentally being called with a
    +        # positional argument that's assumed to be a default (like in
    +        # `get_argument`.)
    +        assert isinstance(strip, bool)
    +
    +        return self._get_arguments(name, self.request.arguments, strip)
    +
    +    def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True):
    +        """Returns the value of the argument with the given name
    +        from the request body.
    +
    +        If default is not provided, the argument is considered to be
    +        required, and we raise a `MissingArgumentError` if it is missing.
    +
    +        If the argument appears in the url more than once, we return the
    +        last value.
    +
    +        The returned value is always unicode.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_argument(name, default, self.request.body_arguments,
    +                                  strip)
    +
    +    def get_body_arguments(self, name, strip=True):
    +        """Returns a list of the body arguments with the given name.
    +
    +        If the argument is not present, returns an empty list.
    +
    +        The returned values are always unicode.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_arguments(name, self.request.body_arguments, strip)
    +
    +    def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True):
    +        """Returns the value of the argument with the given name
    +        from the request query string.
    +
    +        If default is not provided, the argument is considered to be
    +        required, and we raise a `MissingArgumentError` if it is missing.
    +
    +        If the argument appears in the url more than once, we return the
    +        last value.
    +
    +        The returned value is always unicode.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_argument(name, default,
    +                                  self.request.query_arguments, strip)
    +
    +    def get_query_arguments(self, name, strip=True):
    +        """Returns a list of the query arguments with the given name.
    +
    +        If the argument is not present, returns an empty list.
    +
    +        The returned values are always unicode.
    +
    +        .. versionadded:: 3.2
    +        """
    +        return self._get_arguments(name, self.request.query_arguments, strip)
    +
    +    def _get_argument(self, name, default, source, strip=True):
    +        args = self._get_arguments(name, source, strip=strip)
    +        if not args:
    +            if default is self._ARG_DEFAULT:
    +                raise MissingArgumentError(name)
    +            return default
    +        return args[-1]
    +
    +    def _get_arguments(self, name, source, strip=True):
    +        values = []
    +        for v in source.get(name, []):
    +            v = self.decode_argument(v, name=name)
    +            if isinstance(v, unicode_type):
    +                # Get rid of any weird control chars (unless decoding gave
    +                # us bytes, in which case leave it alone)
    +                v = RequestHandler._remove_control_chars_regex.sub(" ", v)
    +            if strip:
    +                v = v.strip()
    +            values.append(v)
    +        return values
    +
    +    def decode_argument(self, value, name=None):
    +        """Decodes an argument from the request.
    +
    +        The argument has been percent-decoded and is now a byte string.
    +        By default, this method decodes the argument as utf-8 and returns
    +        a unicode string, but this may be overridden in subclasses.
    +
    +        This method is used as a filter for both `get_argument()` and for
    +        values extracted from the url and passed to `get()`/`post()`/etc.
    +
    +        The name of the argument is provided if known, but may be None
    +        (e.g. for unnamed groups in the url regex).
    +        """
    +        try:
    +            return _unicode(value)
    +        except UnicodeDecodeError:
    +            raise HTTPError(400, "Invalid unicode in %s: %r" %
    +                            (name or "url", value[:40]))
    +
    +    @property
    +    def cookies(self):
    +        """An alias for
    +        `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
    +        return self.request.cookies
    +
    +    def get_cookie(self, name, default=None):
    +        """Gets the value of the cookie with the given name, else default."""
    +        if self.request.cookies is not None and name in self.request.cookies:
    +            return self.request.cookies[name].value
    +        return default
    +
    +    def set_cookie(self, name, value, domain=None, expires=None, path="/",
    +                   expires_days=None, **kwargs):
    +        """Sets the given cookie name/value with the given options.
    +
    +        Additional keyword arguments are set on the Cookie.Morsel
    +        directly.
    +        See http://docs.python.org/library/cookie.html#morsel-objects
    +        for available attributes.
    +        """
    +        # The cookie library only accepts type str, in both python 2 and 3
    +        name = escape.native_str(name)
    +        value = escape.native_str(value)
    +        if re.search(r"[\x00-\x20]", name + value):
    +            # Don't let us accidentally inject bad stuff
    +            raise ValueError("Invalid cookie %r: %r" % (name, value))
    +        if not hasattr(self, "_new_cookie"):
    +            self._new_cookie = Cookie.SimpleCookie()
    +        if name in self._new_cookie:
    +            del self._new_cookie[name]
    +        self._new_cookie[name] = value
    +        morsel = self._new_cookie[name]
    +        if domain:
    +            morsel["domain"] = domain
    +        if expires_days is not None and not expires:
    +            expires = datetime.datetime.utcnow() + datetime.timedelta(
    +                days=expires_days)
    +        if expires:
    +            morsel["expires"] = httputil.format_timestamp(expires)
    +        if path:
    +            morsel["path"] = path
    +        for k, v in kwargs.items():
    +            if k == 'max_age':
    +                k = 'max-age'
    +
    +            # skip falsy values for httponly and secure flags because
    +            # SimpleCookie sets them regardless
    +            if k in ['httponly', 'secure'] and not v:
    +                continue
    +
    +            morsel[k] = v
    +
    +    def clear_cookie(self, name, path="/", domain=None):
    +        """Deletes the cookie with the given name.
    +
    +        Due to limitations of the cookie protocol, you must pass the same
    +        path and domain to clear a cookie as were used when that cookie
    +        was set (but there is no way to find out on the server side
    +        which values were used for a given cookie).
    +        """
    +        expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
    +        self.set_cookie(name, value="", path=path, expires=expires,
    +                        domain=domain)
    +
    +    def clear_all_cookies(self, path="/", domain=None):
    +        """Deletes all the cookies the user sent with this request.
    +
    +        See `clear_cookie` for more information on the path and domain
    +        parameters.
    +
    +        .. versionchanged:: 3.2
    +
    +           Added the ``path`` and ``domain`` parameters.
    +        """
    +        for name in self.request.cookies:
    +            self.clear_cookie(name, path=path, domain=domain)
    +
    +    def set_secure_cookie(self, name, value, expires_days=30, version=None,
    +                          **kwargs):
    +        """Signs and timestamps a cookie so it cannot be forged.
    +
    +        You must specify the ``cookie_secret`` setting in your Application
    +        to use this method. It should be a long, random sequence of bytes
    +        to be used as the HMAC secret for the signature.
    +
    +        To read a cookie set with this method, use `get_secure_cookie()`.
    +
    +        Note that the ``expires_days`` parameter sets the lifetime of the
    +        cookie in the browser, but is independent of the ``max_age_days``
    +        parameter to `get_secure_cookie`.
    +
    +        Secure cookies may contain arbitrary byte values, not just unicode
    +        strings (unlike regular cookies).
    +
    +        .. versionchanged:: 3.2.1
    +
    +           Added the ``version`` argument.  Introduced cookie version 2
    +           and made it the default.
    +        """
    +        self.set_cookie(name, self.create_signed_value(name, value,
    +                                                       version=version),
    +                        expires_days=expires_days, **kwargs)
    +
    +    def create_signed_value(self, name, value, version=None):
    +        """Signs and timestamps a string so it cannot be forged.
    +
    +        Normally used via set_secure_cookie, but provided as a separate
    +        method for non-cookie uses.  To decode a value not stored
    +        as a cookie use the optional value argument to get_secure_cookie.
    +
    +        .. versionchanged:: 3.2.1
    +
    +           Added the ``version`` argument.  Introduced cookie version 2
    +           and made it the default.
    +        """
    +        self.require_setting("cookie_secret", "secure cookies")
    +        secret = self.application.settings["cookie_secret"]
    +        key_version = None
    +        if isinstance(secret, dict):
    +            if self.application.settings.get("key_version") is None:
    +                raise Exception("key_version setting must be used for secret_key dicts")
    +            key_version = self.application.settings["key_version"]
    +
    +        return create_signed_value(secret, name, value, version=version,
    +                                   key_version=key_version)
    +
    +    def get_secure_cookie(self, name, value=None, max_age_days=31,
    +                          min_version=None):
    +        """Returns the given signed cookie if it validates, or None.
    +
    +        The decoded cookie value is returned as a byte string (unlike
    +        `get_cookie`).
    +
    +        .. versionchanged:: 3.2.1
    +
    +           Added the ``min_version`` argument.  Introduced cookie version 2;
    +           both versions 1 and 2 are accepted by default.
    +        """
    +        self.require_setting("cookie_secret", "secure cookies")
    +        if value is None:
    +            value = self.get_cookie(name)
    +        return decode_signed_value(self.application.settings["cookie_secret"],
    +                                   name, value, max_age_days=max_age_days,
    +                                   min_version=min_version)
    +
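    +    # NOTE (editor's sketch, not part of Tornado): a signed-cookie
    +    # round-trip; requires Application(..., cookie_secret="..."):
    +    #
    +    #     self.set_secure_cookie("user", "alice")    # on login
    +    #     user = self.get_secure_cookie("user")      # b"alice", or None if
    +    #                                                # missing/expired/forged
    +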
    +    def get_secure_cookie_key_version(self, name, value=None):
    +        """Returns the signing key version of the secure cookie.
    +
    +        The version is returned as int.
    +        """
    +        self.require_setting("cookie_secret", "secure cookies")
    +        if value is None:
    +            value = self.get_cookie(name)
    +        return get_signature_key_version(value)
    +
    +    def redirect(self, url, permanent=False, status=None):
    +        """Sends a redirect to the given (optionally relative) URL.
    +
    +        If the ``status`` argument is specified, that value is used as the
    +        HTTP status code; otherwise either 301 (permanent) or 302
    +        (temporary) is chosen based on the ``permanent`` argument.
    +        The default is 302 (temporary).
    +        """
    +        if self._headers_written:
    +            raise Exception("Cannot redirect after headers have been written")
    +        if status is None:
    +            status = 301 if permanent else 302
    +        else:
    +            assert isinstance(status, int) and 300 <= status <= 399
    +        self.set_status(status)
    +        self.set_header("Location", utf8(url))
    +        self.finish()
    +
    +    def write(self, chunk):
    +        """Writes the given chunk to the output buffer.
    +
    +        To write the output to the network, use the flush() method below.
    +
    +        If the given chunk is a dictionary, we write it as JSON and set
    +        the Content-Type of the response to be ``application/json``.
    +        (if you want to send JSON as a different ``Content-Type``, call
    +        set_header *after* calling write()).
    +
    +        Note that lists are not converted to JSON because of a potential
    +        cross-site security vulnerability.  All JSON output should be
    +        wrapped in a dictionary.  More details at
    +        http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
    +        https://github.com/facebook/tornado/issues/1009
    +        """
    +        if self._finished:
    +            raise RuntimeError("Cannot write() after finish()")
    +        if not isinstance(chunk, (bytes, unicode_type, dict)):
    +            message = "write() only accepts bytes, unicode, and dict objects"
    +            if isinstance(chunk, list):
    +                message += ". Lists not accepted for security reasons; see http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.write"
    +            raise TypeError(message)
    +        if isinstance(chunk, dict):
    +            chunk = escape.json_encode(chunk)
    +            self.set_header("Content-Type", "application/json; charset=UTF-8")
    +        chunk = utf8(chunk)
    +        self._write_buffer.append(chunk)
    +
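    +    # NOTE (editor's sketch, not part of Tornado): write() examples.
    +    # A dict is serialized as JSON and sets the Content-Type header;
    +    # a list raises TypeError as explained in the docstring:
    +    #
    +    #     self.write({"status": "ok", "items": [1, 2, 3]})  # JSON
    +    #     self.write("plain text")                          # utf-8 text
    +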
    +    def render(self, template_name, **kwargs):
    +        """Renders the template with the given arguments as the response."""
    +        html = self.render_string(template_name, **kwargs)
    +
    +        # Insert the additional JS and CSS added by the modules on the page
    +        js_embed = []
    +        js_files = []
    +        css_embed = []
    +        css_files = []
    +        html_heads = []
    +        html_bodies = []
    +        for module in getattr(self, "_active_modules", {}).values():
    +            embed_part = module.embedded_javascript()
    +            if embed_part:
    +                js_embed.append(utf8(embed_part))
    +            file_part = module.javascript_files()
    +            if file_part:
    +                if isinstance(file_part, (unicode_type, bytes)):
    +                    js_files.append(file_part)
    +                else:
    +                    js_files.extend(file_part)
    +            embed_part = module.embedded_css()
    +            if embed_part:
    +                css_embed.append(utf8(embed_part))
    +            file_part = module.css_files()
    +            if file_part:
    +                if isinstance(file_part, (unicode_type, bytes)):
    +                    css_files.append(file_part)
    +                else:
    +                    css_files.extend(file_part)
    +            head_part = module.html_head()
    +            if head_part:
    +                html_heads.append(utf8(head_part))
    +            body_part = module.html_body()
    +            if body_part:
    +                html_bodies.append(utf8(body_part))
    +
    +        def is_absolute(path):
    +            return any(path.startswith(x) for x in ["/", "http:", "https:"])
    +        if js_files:
    +            # Maintain order of JavaScript files given by modules
    +            paths = []
    +            unique_paths = set()
    +            for path in js_files:
    +                if not is_absolute(path):
    +                    path = self.static_url(path)
    +                if path not in unique_paths:
    +                    paths.append(path)
    +                    unique_paths.add(path)
    +            js = ''.join('<script src="' + escape.xhtml_escape(p) +
    +                         '" type="text/javascript"></script>'
    +                         for p in paths)
    +            sloc = html.rindex(b'</body>')
    +            html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
    +        if js_embed:
    +            js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
    +                b'\n'.join(js_embed) + b'\n//]]>\n</script>'
    +            sloc = html.rindex(b'</body>')
    +            html = html[:sloc] + js + b'\n' + html[sloc:]
    +        if css_files:
    +            paths = []
    +            unique_paths = set()
    +            for path in css_files:
    +                if not is_absolute(path):
    +                    path = self.static_url(path)
    +                if path not in unique_paths:
    +                    paths.append(path)
    +                    unique_paths.add(path)
    +            css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
    +                          'type="text/css" rel="stylesheet"/>'
    +                          for p in paths)
    +            hloc = html.index(b'</head>')
    +            html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
    +        if css_embed:
    +            css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
    +                b'\n</style>'
    +            hloc = html.index(b'</head>')
    +            html = html[:hloc] + css + b'\n' + html[hloc:]
    +        if html_heads:
    +            hloc = html.index(b'</head>')
    +            html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
    +        if html_bodies:
    +            hloc = html.index(b'</body>')
    +            html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
    +        self.finish(html)
    +
    +    def render_string(self, template_name, **kwargs):
    +        """Generate the given template with the given arguments.
    +
    +        We return the generated byte string (in utf8). To generate and
    +        write a template as a response, use render() above.
    +        """
    +        # If no template_path is specified, use the path of the calling file
    +        template_path = self.get_template_path()
    +        if not template_path:
    +            frame = sys._getframe(0)
    +            web_file = frame.f_code.co_filename
    +            while frame.f_code.co_filename == web_file:
    +                frame = frame.f_back
    +            template_path = os.path.dirname(frame.f_code.co_filename)
    +        with RequestHandler._template_loader_lock:
    +            if template_path not in RequestHandler._template_loaders:
    +                loader = self.create_template_loader(template_path)
    +                RequestHandler._template_loaders[template_path] = loader
    +            else:
    +                loader = RequestHandler._template_loaders[template_path]
    +        t = loader.load(template_name)
    +        namespace = self.get_template_namespace()
    +        namespace.update(kwargs)
    +        return t.generate(**namespace)
    +
    +    def get_template_namespace(self):
    +        """Returns a dictionary to be used as the default template namespace.
    +
    +        May be overridden by subclasses to add or modify values.
    +
    +        The results of this method will be combined with additional
    +        defaults in the `tornado.template` module and keyword arguments
    +        to `render` or `render_string`.
    +        """
    +        namespace = dict(
    +            handler=self,
    +            request=self.request,
    +            current_user=self.current_user,
    +            locale=self.locale,
    +            _=self.locale.translate,
    +            pgettext=self.locale.pgettext,
    +            static_url=self.static_url,
    +            xsrf_form_html=self.xsrf_form_html,
    +            reverse_url=self.reverse_url
    +        )
    +        namespace.update(self.ui)
    +        return namespace
    +
    +    def create_template_loader(self, template_path):
    +        """Returns a new template loader for the given path.
    +
    +        May be overridden by subclasses.  By default returns a
    +        directory-based loader on the given path, using the
    +        ``autoescape`` and ``template_whitespace`` application
    +        settings.  If a ``template_loader`` application setting is
    +        supplied, uses that instead.
    +        """
    +        settings = self.application.settings
    +        if "template_loader" in settings:
    +            return settings["template_loader"]
    +        kwargs = {}
    +        if "autoescape" in settings:
    +            # autoescape=None means "no escaping", so we have to be sure
    +            # to only pass this kwarg if the user asked for it.
    +            kwargs["autoescape"] = settings["autoescape"]
    +        if "template_whitespace" in settings:
    +            kwargs["whitespace"] = settings["template_whitespace"]
    +        return template.Loader(template_path, **kwargs)
    +
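    +    # NOTE (editor's sketch, not part of Tornado): the settings consulted
    +    # above come from the Application constructor; "templates" is an
    +    # arbitrary example path:
    +    #
    +    #     app = Application(handlers,
    +    #                       template_path="templates",
    +    #                       autoescape="xhtml_escape",
    +    #                       template_whitespace="single")
    +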
    +    def flush(self, include_footers=False, callback=None):
    +        """Flushes the current output buffer to the network.
    +
    +        The ``callback`` argument, if given, can be used for flow control:
    +        it will be run when all flushed data has been written to the socket.
    +        Note that only one flush callback can be outstanding at a time;
    +        if another flush occurs before the previous flush's callback
    +        has been run, the previous callback will be discarded.
    +
    +        .. versionchanged:: 4.0
    +           Now returns a `.Future` if no callback is given.
    +        """
    +        chunk = b"".join(self._write_buffer)
    +        self._write_buffer = []
    +        if not self._headers_written:
    +            self._headers_written = True
    +            for transform in self._transforms:
    +                self._status_code, self._headers, chunk = \
    +                    transform.transform_first_chunk(
    +                        self._status_code, self._headers,
    +                        chunk, include_footers)
    +            # Ignore the chunk and only write the headers for HEAD requests
    +            if self.request.method == "HEAD":
    +                chunk = None
    +
    +            # Finalize the cookie headers (which have been stored in a side
    +            # object so an outgoing cookie could be overwritten before it
    +            # is sent).
    +            if hasattr(self, "_new_cookie"):
    +                for cookie in self._new_cookie.values():
    +                    self.add_header("Set-Cookie", cookie.OutputString(None))
    +
    +            start_line = httputil.ResponseStartLine('',
    +                                                    self._status_code,
    +                                                    self._reason)
    +            return self.request.connection.write_headers(
    +                start_line, self._headers, chunk, callback=callback)
    +        else:
    +            for transform in self._transforms:
    +                chunk = transform.transform_chunk(chunk, include_footers)
    +            # Ignore the chunk and only write the headers for HEAD requests
    +            if self.request.method != "HEAD":
    +                return self.request.connection.write(chunk, callback=callback)
    +            else:
    +                future = Future()
    +                future.set_result(None)
    +                return future
    +
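    +    # NOTE (editor's sketch, not part of Tornado): the Future returned by
    +    # flush() can be yielded for flow control in a coroutine handler;
    +    # generate_chunks() is a hypothetical data source:
    +    #
    +    #     @gen.coroutine
    +    #     def get(self):
    +    #         for chunk in generate_chunks():
    +    #             self.write(chunk)
    +    #             yield self.flush()   # wait until the socket accepts it
    +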
    +    def finish(self, chunk=None):
    +        """Finishes this response, ending the HTTP request."""
    +        if self._finished:
    +            raise RuntimeError("finish() called twice")
    +
    +        if chunk is not None:
    +            self.write(chunk)
    +
    +        # Automatically support ETags and add the Content-Length header if
    +        # we have not flushed any content yet.
    +        if not self._headers_written:
    +            if (self._status_code == 200 and
    +                self.request.method in ("GET", "HEAD") and
    +                    "Etag" not in self._headers):
    +                self.set_etag_header()
    +                if self.check_etag_header():
    +                    self._write_buffer = []
    +                    self.set_status(304)
    +            if self._status_code == 304:
    +                assert not self._write_buffer, "Cannot send body with 304"
    +                self._clear_headers_for_304()
    +            elif "Content-Length" not in self._headers:
    +                content_length = sum(len(part) for part in self._write_buffer)
    +                self.set_header("Content-Length", content_length)
    +
    +        if hasattr(self.request, "connection"):
    +            # Now that the request is finished, clear the callback we
    +            # set on the HTTPConnection (which would otherwise prevent the
    +            # garbage collection of the RequestHandler when there
    +            # are keepalive connections)
    +            self.request.connection.set_close_callback(None)
    +
    +        self.flush(include_footers=True)
    +        self.request.finish()
    +        self._log()
    +        self._finished = True
    +        self.on_finish()
    +        # Break up a reference cycle between this handler and the
    +        # _ui_module closures to allow for faster GC on CPython.
    +        self.ui = None
    +
    +    def send_error(self, status_code=500, **kwargs):
    +        """Sends the given HTTP error code to the browser.
    +
    +        If `flush()` has already been called, it is not possible to send
    +        an error, so this method will simply terminate the response.
    +        If output has been written but not yet flushed, it will be discarded
    +        and replaced with the error page.
    +
    +        Override `write_error()` to customize the error page that is returned.
    +        Additional keyword arguments are passed through to `write_error`.
    +        """
    +        if self._headers_written:
    +            gen_log.error("Cannot send error response after headers written")
    +            if not self._finished:
    +                # If we get an error between writing headers and finishing,
    +                # we are unlikely to be able to finish due to a
    +                # Content-Length mismatch. Try anyway to release the
    +                # socket.
    +                try:
    +                    self.finish()
    +                except Exception:
    +                    gen_log.error("Failed to flush partial response",
    +                                  exc_info=True)
    +            return
    +        self.clear()
    +
    +        reason = kwargs.get('reason')
    +        if 'exc_info' in kwargs:
    +            exception = kwargs['exc_info'][1]
    +            if isinstance(exception, HTTPError) and exception.reason:
    +                reason = exception.reason
    +        self.set_status(status_code, reason=reason)
    +        try:
    +            self.write_error(status_code, **kwargs)
    +        except Exception:
    +            app_log.error("Uncaught exception in write_error", exc_info=True)
    +        if not self._finished:
    +            self.finish()
    +
    +    def write_error(self, status_code, **kwargs):
    +        """Override to implement custom error pages.
    +
    +        ``write_error`` may call `write`, `render`, `set_header`, etc
    +        to produce output as usual.
    +
    +        If this error was caused by an uncaught exception (including
    +        HTTPError), an ``exc_info`` triple will be available as
    +        ``kwargs["exc_info"]``.  Note that this exception may not be
    +        the "current" exception for purposes of methods like
    +        ``sys.exc_info()`` or ``traceback.format_exc``.
    +        """
    +        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
    +            # in debug mode, try to send a traceback
    +            self.set_header('Content-Type', 'text/plain')
    +            for line in traceback.format_exception(*kwargs["exc_info"]):
    +                self.write(line)
    +            self.finish()
    +        else:
    +            self.finish("%(code)d: %(message)s"
    +                        "%(code)d: %(message)s" % {
    +                            "code": status_code,
    +                            "message": self._reason,
    +                        })
    +
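    +    # NOTE (editor's sketch, not part of Tornado): a custom error page via
    +    # write_error(); "error.html" is a hypothetical template:
    +    #
    +    #     def write_error(self, status_code, **kwargs):
    +    #         self.render("error.html", code=status_code,
    +    #                     reason=self._reason)
    +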
    +    @property
    +    def locale(self):
    +        """The locale for the current session.
    +
    +        Determined by either `get_user_locale`, which you can override to
    +        set the locale based on, e.g., a user preference stored in a
    +        database, or `get_browser_locale`, which uses the ``Accept-Language``
    +        header.
    +
    +        .. versionchanged:: 4.1
    +           Added a property setter.
    +        """
    +        if not hasattr(self, "_locale"):
    +            self._locale = self.get_user_locale()
    +            if not self._locale:
    +                self._locale = self.get_browser_locale()
    +                assert self._locale
    +        return self._locale
    +
    +    @locale.setter
    +    def locale(self, value):
    +        self._locale = value
    +
    +    def get_user_locale(self):
    +        """Override to determine the locale from the authenticated user.
    +
    +        If None is returned, we fall back to `get_browser_locale()`.
    +
    +        This method should return a `tornado.locale.Locale` object,
    +        most likely obtained via a call like ``tornado.locale.get("en")``
    +        """
    +        return None
    +
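    +    # NOTE (editor's sketch, not part of Tornado): a get_user_locale()
    +    # override that prefers a stored preference; the "locale" key on
    +    # current_user is hypothetical:
    +    #
    +    #     def get_user_locale(self):
    +    #         if self.current_user and "locale" in self.current_user:
    +    #             return locale.get(self.current_user["locale"])
    +    #         return None   # fall back to get_browser_locale()
    +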
    +    def get_browser_locale(self, default="en_US"):
    +        """Determines the user's locale from ``Accept-Language`` header.
    +
    +        See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
    +        """
    +        if "Accept-Language" in self.request.headers:
    +            languages = self.request.headers["Accept-Language"].split(",")
    +            locales = []
    +            for language in languages:
    +                parts = language.strip().split(";")
    +                if len(parts) > 1 and parts[1].startswith("q="):
    +                    try:
    +                        score = float(parts[1][2:])
    +                    except (ValueError, TypeError):
    +                        score = 0.0
    +                else:
    +                    score = 1.0
    +                locales.append((parts[0], score))
    +            if locales:
    +                locales.sort(key=lambda pair: pair[1], reverse=True)
    +                codes = [l[0] for l in locales]
    +                return locale.get(*codes)
    +        return locale.get(default)
    +
    +    @property
    +    def current_user(self):
    +        """The authenticated user for this request.
    +
    +        This is set in one of two ways:
    +
    +        * A subclass may override `get_current_user()`, which will be called
    +          automatically the first time ``self.current_user`` is accessed.
    +          `get_current_user()` will only be called once per request,
    +          and is cached for future access::
    +
    +              def get_current_user(self):
    +                  user_cookie = self.get_secure_cookie("user")
    +                  if user_cookie:
    +                      return json.loads(user_cookie)
    +                  return None
    +
    +        * It may be set as a normal variable, typically from an overridden
    +          `prepare()`::
    +
    +              @gen.coroutine
    +              def prepare(self):
    +                  user_id_cookie = self.get_secure_cookie("user_id")
    +                  if user_id_cookie:
    +                      self.current_user = yield load_user(user_id_cookie)
    +
    +        Note that `prepare()` may be a coroutine while `get_current_user()`
    +        may not, so the latter form is necessary if loading the user requires
    +        asynchronous operations.
    +
    +        The user object may be any type of the application's choosing.
    +        """
    +        if not hasattr(self, "_current_user"):
    +            self._current_user = self.get_current_user()
    +        return self._current_user
    +
    +    @current_user.setter
    +    def current_user(self, value):
    +        self._current_user = value
    +
    +    def get_current_user(self):
    +        """Override to determine the current user from, e.g., a cookie.
    +
    +        This method may not be a coroutine.
    +        """
    +        return None
    +
    +    def get_login_url(self):
    +        """Override to customize the login URL based on the request.
    +
    +        By default, we use the ``login_url`` application setting.
    +        """
    +        self.require_setting("login_url", "@tornado.web.authenticated")
    +        return self.application.settings["login_url"]
    +
    +    def get_template_path(self):
    +        """Override to customize template path for each handler.
    +
    +        By default, we use the ``template_path`` application setting.
    +        Return None to load templates relative to the calling file.
    +        """
    +        return self.application.settings.get("template_path")
    +
    +    @property
    +    def xsrf_token(self):
    +        """The XSRF-prevention token for the current user/session.
    +
    +        To prevent cross-site request forgery, we set an '_xsrf' cookie
    +        and include the same '_xsrf' value as an argument with all POST
    +        requests. If the two do not match, we reject the form submission
    +        as a potential forgery.
    +
    +        See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    +
    +        .. versionchanged:: 3.2.2
    +           The xsrf token will now have a random mask applied in every
    +           request, which makes it safe to include the token in pages
    +           that are compressed.  See http://breachattack.com for more
    +           information on the issue fixed by this change.  Old (version 1)
    +           cookies will be converted to version 2 when this method is called
    +           unless the ``xsrf_cookie_version`` `Application` setting is
    +           set to 1.
    +
    +        .. versionchanged:: 4.3
    +           The ``xsrf_cookie_kwargs`` `Application` setting may be
    +           used to supply additional cookie options (which will be
    +           passed directly to `set_cookie`). For example,
    +           ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
    +           will set the ``secure`` and ``httponly`` flags on the
    +           ``_xsrf`` cookie.
    +        """
    +        if not hasattr(self, "_xsrf_token"):
    +            version, token, timestamp = self._get_raw_xsrf_token()
    +            output_version = self.settings.get("xsrf_cookie_version", 2)
    +            cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
    +            if output_version == 1:
    +                self._xsrf_token = binascii.b2a_hex(token)
    +            elif output_version == 2:
    +                mask = os.urandom(4)
    +                self._xsrf_token = b"|".join([
    +                    b"2",
    +                    binascii.b2a_hex(mask),
    +                    binascii.b2a_hex(_websocket_mask(mask, token)),
    +                    utf8(str(int(timestamp)))])
    +            else:
    +                raise ValueError("unknown xsrf cookie version %d" %
    +                                 output_version)
    +            if version is None:
    +                expires_days = 30 if self.current_user else None
    +                self.set_cookie("_xsrf", self._xsrf_token,
    +                                expires_days=expires_days,
    +                                **cookie_kwargs)
    +        return self._xsrf_token
    +
    +    def _get_raw_xsrf_token(self):
    +        """Read or generate the xsrf token in its raw form.
    +
    +        The raw_xsrf_token is a tuple containing:
    +
    +        * version: the version of the cookie from which this token was read,
    +          or None if we generated a new token in this request.
    +        * token: the raw token data; random (non-ascii) bytes.
    +        * timestamp: the time this token was generated (will not be accurate
    +          for version 1 cookies)
    +        """
    +        if not hasattr(self, '_raw_xsrf_token'):
    +            cookie = self.get_cookie("_xsrf")
    +            if cookie:
    +                version, token, timestamp = self._decode_xsrf_token(cookie)
    +            else:
    +                version, token, timestamp = None, None, None
    +            if token is None:
    +                version = None
    +                token = os.urandom(16)
    +                timestamp = time.time()
    +            self._raw_xsrf_token = (version, token, timestamp)
    +        return self._raw_xsrf_token
    +
    +    def _decode_xsrf_token(self, cookie):
    +        """Convert a cookie string into a the tuple form returned by
    +        _get_raw_xsrf_token.
    +        """
    +
    +        try:
    +            m = _signed_value_version_re.match(utf8(cookie))
    +
    +            if m:
    +                version = int(m.group(1))
    +                if version == 2:
    +                    _, mask, masked_token, timestamp = cookie.split("|")
    +
    +                    mask = binascii.a2b_hex(utf8(mask))
    +                    token = _websocket_mask(
    +                        mask, binascii.a2b_hex(utf8(masked_token)))
    +                    timestamp = int(timestamp)
    +                    return version, token, timestamp
    +                else:
    +                    # Treat unknown versions as not present instead of failing.
    +                    raise Exception("Unknown xsrf cookie version")
    +            else:
    +                version = 1
    +                try:
    +                    token = binascii.a2b_hex(utf8(cookie))
    +                except (binascii.Error, TypeError):
    +                    token = utf8(cookie)
    +                # We don't have a usable timestamp in older versions.
    +                timestamp = int(time.time())
    +                return (version, token, timestamp)
    +        except Exception:
    +            # Catch exceptions and return nothing instead of failing.
    +            gen_log.debug("Uncaught exception in _decode_xsrf_token",
    +                          exc_info=True)
    +            return None, None, None
    +
    +    def check_xsrf_cookie(self):
    +        """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
    +
    +        To prevent cross-site request forgery, we set an ``_xsrf``
    +        cookie and include the same value as a non-cookie
    +        field with all ``POST`` requests. If the two do not match, we
    +        reject the form submission as a potential forgery.
    +
    +        The ``_xsrf`` value may be set as either a form field named ``_xsrf``
    +        or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
    +        (the latter is accepted for compatibility with Django).
    +
    +        See http://en.wikipedia.org/wiki/Cross-site_request_forgery
    +
    +        Prior to release 1.1.1, this check was ignored if the HTTP header
    +        ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
    +        has been shown to be insecure and has been removed.  For more
    +        information please see
    +        http://www.djangoproject.com/weblog/2011/feb/08/security/
    +        http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
    +
    +        .. versionchanged:: 3.2.2
    +           Added support for cookie version 2.  Both versions 1 and 2 are
    +           supported.
    +        """
    +        token = (self.get_argument("_xsrf", None) or
    +                 self.request.headers.get("X-Xsrftoken") or
    +                 self.request.headers.get("X-Csrftoken"))
    +        if not token:
    +            raise HTTPError(403, "'_xsrf' argument missing from POST")
    +        _, token, _ = self._decode_xsrf_token(token)
    +        _, expected_token, _ = self._get_raw_xsrf_token()
    +        if not _time_independent_equals(utf8(token), utf8(expected_token)):
    +            raise HTTPError(403, "XSRF cookie does not match POST argument")
    +
    +    def xsrf_form_html(self):
    +        """An HTML ```` element to be included with all POST forms.
    +
    +        It defines the ``_xsrf`` input value, which we check on all POST
    +        requests to prevent cross-site request forgery. If you have set
    +        the ``xsrf_cookies`` application setting, you must include this
    +        HTML within all of your HTML forms.
    +
    +        In a template, this method should be called with ``{% module
    +        xsrf_form_html() %}``
    +
    +        See `check_xsrf_cookie()` above for more information.
    +        """
    +        return '<input type="hidden" name="_xsrf" value="' + \
    +            escape.xhtml_escape(self.xsrf_token) + '"/>'
    +
    +    def static_url(self, path, include_host=None, **kwargs):
    +        """Returns a static URL for the given relative static file path.
    +
    +        This method requires you set the ``static_path`` setting in your
    +        application (which specifies the root directory of your static
    +        files).
    +
    +        This method returns a versioned url (by default appending
    +        ``?v=<signature>``), which allows the static files to be
    +        cached indefinitely.  This can be disabled by passing
    +        ``include_version=False`` (in the default implementation;
    +        other static file implementations are not required to support
    +        this, but they may support other options).
    +
    +        By default this method returns URLs relative to the current
    +        host, but if ``include_host`` is true the URL returned will be
    +        absolute.  If this handler has an ``include_host`` attribute,
    +        that value will be used as the default for all `static_url`
    +        calls that do not pass ``include_host`` as a keyword argument.
    +
    +        """
    +        self.require_setting("static_path", "static_url")
    +        get_url = self.settings.get("static_handler_class",
    +                                    StaticFileHandler).make_static_url
    +
    +        if include_host is None:
    +            include_host = getattr(self, "include_host", False)
    +
    +        if include_host:
    +            base = self.request.protocol + "://" + self.request.host
    +        else:
    +            base = ""
    +
    +        return base + get_url(self.settings, path, **kwargs)
    +
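    +    # NOTE (editor's sketch, not part of Tornado): with
    +    # Application(..., static_path="static"), a call such as
    +    #
    +    #     self.static_url("css/site.css")
    +    #
    +    # returns something like "/static/css/site.css?v=<hash>", where the
    +    # version tag is computed by the static handler class.
    +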
    +    def require_setting(self, name, feature="this feature"):
    +        """Raises an exception if the given app setting is not defined."""
    +        if not self.application.settings.get(name):
    +            raise Exception("You must define the '%s' setting in your "
    +                            "application to use %s" % (name, feature))
    +
    +    def reverse_url(self, name, *args):
    +        """Alias for `Application.reverse_url`."""
    +        return self.application.reverse_url(name, *args)
    +
    +    def compute_etag(self):
    +        """Computes the etag header to be used for this request.
    +
    +        By default uses a hash of the content written so far.
    +
    +        May be overridden to provide custom etag implementations,
    +        or may return None to disable tornado's default etag support.
    +        """
    +        hasher = hashlib.sha1()
    +        for part in self._write_buffer:
    +            hasher.update(part)
    +        return '"%s"' % hasher.hexdigest()
    +
    +    def set_etag_header(self):
    +        """Sets the response's Etag header using ``self.compute_etag()``.
    +
    +        Note: no header will be set if ``compute_etag()`` returns ``None``.
    +
    +        This method is called automatically when the request is finished.
    +        """
    +        etag = self.compute_etag()
    +        if etag is not None:
    +            self.set_header("Etag", etag)
    +
    +    def check_etag_header(self):
    +        """Checks the ``Etag`` header against requests's ``If-None-Match``.
    +
    +        Returns ``True`` if the request's Etag matches and a 304 should be
    +        returned. For example::
    +
    +            self.set_etag_header()
    +            if self.check_etag_header():
    +                self.set_status(304)
    +                return
    +
    +        This method is called automatically when the request is finished,
    +        but may be called earlier for applications that override
    +        `compute_etag` and want to do an early check for ``If-None-Match``
    +        before completing the request.  The ``Etag`` header should be set
    +        (perhaps with `set_etag_header`) before calling this method.
    +        """
    +        computed_etag = utf8(self._headers.get("Etag", ""))
    +        # Find all weak and strong etag values from If-None-Match header
    +        # because RFC 7232 allows multiple etag values in a single header.
    +        etags = re.findall(
    +            br'\*|(?:W/)?"[^"]*"',
    +            utf8(self.request.headers.get("If-None-Match", ""))
    +        )
    +        if not computed_etag or not etags:
    +            return False
    +
    +        match = False
    +        if etags[0] == b'*':
    +            match = True
    +        else:
    +            # Use a weak comparison when comparing entity-tags.
    +            val = lambda x: x[2:] if x.startswith(b'W/') else x
    +            for etag in etags:
    +                if val(etag) == val(computed_etag):
    +                    match = True
    +                    break
    +        return match
    +
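    +    # NOTE (editor's sketch, not part of Tornado): the early-check pattern
    +    # from the docstring above; self.db_revision is a hypothetical
    +    # attribute standing in for a cheap version identifier:
    +    #
    +    #     def compute_etag(self):
    +    #         return '"%s"' % self.db_revision
    +    #
    +    #     def get(self):
    +    #         self.set_etag_header()
    +    #         if self.check_etag_header():
    +    #             self.set_status(304)
    +    #             return
    +    #         # ... produce the full response ...
    +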
    +    def _stack_context_handle_exception(self, type, value, traceback):
    +        try:
    +            # For historical reasons _handle_request_exception only takes
    +            # the exception value instead of the full triple,
    +            # so re-raise the exception to ensure that it's in
    +            # sys.exc_info()
    +            raise_exc_info((type, value, traceback))
    +        except Exception:
    +            self._handle_request_exception(value)
    +        return True
    +
    +    @gen.coroutine
    +    def _execute(self, transforms, *args, **kwargs):
    +        """Executes this request with the given output transforms."""
    +        self._transforms = transforms
    +        try:
    +            if self.request.method not in self.SUPPORTED_METHODS:
    +                raise HTTPError(405)
    +            self.path_args = [self.decode_argument(arg) for arg in args]
    +            self.path_kwargs = dict((k, self.decode_argument(v, name=k))
    +                                    for (k, v) in kwargs.items())
    +            # If XSRF cookies are turned on, reject form submissions without
    +            # the proper cookie
    +            if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
    +                    self.application.settings.get("xsrf_cookies"):
    +                self.check_xsrf_cookie()
    +
    +            result = self.prepare()
    +            if result is not None:
    +                result = yield result
    +            if self._prepared_future is not None:
    +                # Tell the Application we've finished with prepare()
    +                # and are ready for the body to arrive.
    +                self._prepared_future.set_result(None)
    +            if self._finished:
    +                return
    +
    +            if _has_stream_request_body(self.__class__):
    +                # In streaming mode request.body is a Future that signals
    +                # the body has been completely received.  The Future has no
    +                # result; the data has been passed to self.data_received
    +                # instead.
    +                try:
    +                    yield self.request.body
    +                except iostream.StreamClosedError:
    +                    return
    +
    +            method = getattr(self, self.request.method.lower())
    +            result = method(*self.path_args, **self.path_kwargs)
    +            if result is not None:
    +                result = yield result
    +            if self._auto_finish and not self._finished:
    +                self.finish()
    +        except Exception as e:
    +            try:
    +                self._handle_request_exception(e)
    +            except Exception:
    +                app_log.error("Exception in exception handler", exc_info=True)
    +            if (self._prepared_future is not None and
    +                    not self._prepared_future.done()):
    +                # In case we failed before setting _prepared_future, do it
    +                # now (to unblock the HTTP server).  Note that this is not
    +                # in a finally block to avoid GC issues prior to Python 3.4.
    +                self._prepared_future.set_result(None)
    +
    +    def data_received(self, chunk):
    +        """Implement this method to handle streamed request data.
    +
    +        Requires the `.stream_request_body` decorator.
    +        """
    +        raise NotImplementedError()
    +
    +    def _log(self):
    +        """Logs the current request.
    +
    +        Sort of deprecated since this functionality was moved to the
    +        Application, but left in place for the benefit of existing apps
    +        that have overridden this method.
    +        """
    +        self.application.log_request(self)
    +
    +    def _request_summary(self):
    +        return "%s %s (%s)" % (self.request.method, self.request.uri,
    +                               self.request.remote_ip)
    +
    +    def _handle_request_exception(self, e):
    +        if isinstance(e, Finish):
    +            # Not an error; just finish the request without logging.
    +            if not self._finished:
    +                self.finish(*e.args)
    +            return
    +        try:
    +            self.log_exception(*sys.exc_info())
    +        except Exception:
    +            # An error here should still get a best-effort send_error()
    +            # to avoid leaking the connection.
    +            app_log.error("Error in exception logger", exc_info=True)
    +        if self._finished:
    +            # Extra errors after the request has been finished should
    +            # be logged, but there is no reason to continue to try and
    +            # send a response.
    +            return
    +        if isinstance(e, HTTPError):
    +            if e.status_code not in httputil.responses and not e.reason:
    +                gen_log.error("Bad HTTP status code: %d", e.status_code)
    +                self.send_error(500, exc_info=sys.exc_info())
    +            else:
    +                self.send_error(e.status_code, exc_info=sys.exc_info())
    +        else:
    +            self.send_error(500, exc_info=sys.exc_info())
    +
    +    def log_exception(self, typ, value, tb):
    +        """Override to customize logging of uncaught exceptions.
    +
    +        By default logs instances of `HTTPError` as warnings without
    +        stack traces (on the ``tornado.general`` logger), and all
    +        other exceptions as errors with stack traces (on the
    +        ``tornado.application`` logger).
    +
    +        .. versionadded:: 3.1
    +        """
    +        if isinstance(value, HTTPError):
    +            if value.log_message:
    +                format = "%d %s: " + value.log_message
    +                args = ([value.status_code, self._request_summary()] +
    +                        list(value.args))
    +                gen_log.warning(format, *args)
    +        else:
    +            app_log.error("Uncaught exception %s\n%r", self._request_summary(),
    +                          self.request, exc_info=(typ, value, tb))
    +
    +    def _ui_module(self, name, module):
    +        def render(*args, **kwargs):
    +            if not hasattr(self, "_active_modules"):
    +                self._active_modules = {}
    +            if name not in self._active_modules:
    +                self._active_modules[name] = module(self)
    +            rendered = self._active_modules[name].render(*args, **kwargs)
    +            return rendered
    +        return render
    +
    +    def _ui_method(self, method):
    +        return lambda *args, **kwargs: method(self, *args, **kwargs)
    +
    +    def _clear_headers_for_304(self):
    +        # 304 responses should not contain entity headers (defined in
    +        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
    +        # not explicitly allowed by
    +        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
    +        headers = ["Allow", "Content-Encoding", "Content-Language",
    +                   "Content-Length", "Content-MD5", "Content-Range",
    +                   "Content-Type", "Last-Modified"]
    +        for h in headers:
    +            self.clear_header(h)
    +
    +
    +def asynchronous(method):
    +    """Wrap request handler methods with this if they are asynchronous.
    +
    +    This decorator is for callback-style asynchronous methods; for
    +    coroutines, use the ``@gen.coroutine`` decorator without
    +    ``@asynchronous``. (It is legal for legacy reasons to use the two
    +    decorators together provided ``@asynchronous`` is first, but
    +    ``@asynchronous`` will be ignored in this case)
    +
    +    This decorator should only be applied to the :ref:`HTTP verb
    +    methods <verbs>`; its behavior is undefined for any other method.
    +    This decorator does not *make* a method asynchronous; it tells
    +    the framework that the method *is* asynchronous.  For this decorator
    +    to be useful the method must (at least sometimes) do something
    +    asynchronous.
    +
    +    If this decorator is given, the response is not finished when the
    +    method returns. It is up to the request handler to call
    +    `self.finish() <RequestHandler.finish>` to finish the HTTP
    +    request. Without this decorator, the request is automatically
    +    finished when the ``get()`` or ``post()`` method returns. Example:
    +
    +    .. testcode::
    +
    +       class MyRequestHandler(RequestHandler):
    +           @asynchronous
    +           def get(self):
    +              http = httpclient.AsyncHTTPClient()
    +              http.fetch("http://friendfeed.com/", self._on_download)
    +
    +           def _on_download(self, response):
    +              self.write("Downloaded!")
    +              self.finish()
    +
    +    .. testoutput::
    +       :hide:
    +
    +    .. versionchanged:: 3.1
    +       The ability to use ``@gen.coroutine`` without ``@asynchronous``.
    +
    +    .. versionchanged:: 4.3 Returning anything but ``None`` or a
    +       yieldable object from a method decorated with ``@asynchronous``
    +       is an error. Such return values were previously ignored silently.
    +    """
    +    # Delay the IOLoop import because it's not available on app engine.
    +    from tornado.ioloop import IOLoop
    +
    +    @functools.wraps(method)
    +    def wrapper(self, *args, **kwargs):
    +        self._auto_finish = False
    +        with stack_context.ExceptionStackContext(
    +                self._stack_context_handle_exception):
    +            result = method(self, *args, **kwargs)
    +            if result is not None:
    +                result = gen.convert_yielded(result)
    +                # If @asynchronous is used with @gen.coroutine, (but
    +                # not @gen.engine), we can automatically finish the
    +                # request when the future resolves.  Additionally,
    +                # the Future will swallow any exceptions so we need
    +                # to throw them back out to the stack context to finish
    +                # the request.
    +                def future_complete(f):
    +                    f.result()
    +                    if not self._finished:
    +                        self.finish()
    +                IOLoop.current().add_future(result, future_complete)
    +                # Once we have done this, hide the Future from our
    +                # caller (i.e. RequestHandler._when_complete), which
    +                # would otherwise set up its own callback and
    +                # exception handler (resulting in exceptions being
    +                # logged twice).
    +                return None
    +            return result
    +    return wrapper
    +
    +
    +def stream_request_body(cls):
    +    """Apply to `RequestHandler` subclasses to enable streaming body support.
    +
    +    This decorator implies the following changes:
    +
    +    * `.HTTPServerRequest.body` is undefined, and body arguments will not
    +      be included in `RequestHandler.get_argument`.
    +    * `RequestHandler.prepare` is called when the request headers have been
    +      read instead of after the entire body has been read.
    +    * The subclass must define a method ``data_received(self, data):``, which
    +      will be called zero or more times as data is available.  Note that
    +      if the request has an empty body, ``data_received`` may not be called.
    +    * ``prepare`` and ``data_received`` may return Futures (such as via
    +      ``@gen.coroutine``), in which case the next method will not be called
    +      until those futures have completed.
    +    * The regular HTTP method (``post``, ``put``, etc) will be called after
    +      the entire body has been read.
    +
    +    There is a subtle interaction between ``data_received`` and asynchronous
    +    ``prepare``: The first call to ``data_received`` may occur at any point
    +    after the call to ``prepare`` has returned *or yielded*.
    +    """
    +    if not issubclass(cls, RequestHandler):
    +        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    +    cls._stream_request_body = True
    +    return cls
    +
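    +# NOTE (editor's sketch, not part of Tornado): a minimal streaming upload
    +# handler using the decorator above; UploadHandler and the target path are
    +# hypothetical:
    +#
    +#     @stream_request_body
    +#     class UploadHandler(RequestHandler):
    +#         def prepare(self):
    +#             self._file = open("/tmp/upload.bin", "wb")
    +#
    +#         def data_received(self, chunk):
    +#             self._file.write(chunk)
    +#
    +#         def put(self):
    +#             self._file.close()
    +#             self.finish("uploaded")
    +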
    +
    +def _has_stream_request_body(cls):
    +    if not issubclass(cls, RequestHandler):
    +        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    +    return getattr(cls, '_stream_request_body', False)
    +
    +
    +def removeslash(method):
    +    """Use this decorator to remove trailing slashes from the request path.
    +
    +    For example, a request to ``/foo/`` would redirect to ``/foo`` with this
    +    decorator. Your request handler mapping should use a regular expression
    +    like ``r'/foo/*'`` in conjunction with using the decorator.
    +    """
    +    @functools.wraps(method)
    +    def wrapper(self, *args, **kwargs):
    +        if self.request.path.endswith("/"):
    +            if self.request.method in ("GET", "HEAD"):
    +                uri = self.request.path.rstrip("/")
    +                if uri:  # don't try to redirect '/' to ''
    +                    if self.request.query:
    +                        uri += "?" + self.request.query
    +                    self.redirect(uri, permanent=True)
    +                    return
    +            else:
    +                raise HTTPError(404)
    +        return method(self, *args, **kwargs)
    +    return wrapper
    +
    +
    +def addslash(method):
    +    """Use this decorator to add a missing trailing slash to the request path.
    +
    +    For example, a request to ``/foo`` would redirect to ``/foo/`` with this
    +    decorator. Your request handler mapping should use a regular expression
    +    like ``r'/foo/?'`` in conjunction with using the decorator.
    +    """
    +    @functools.wraps(method)
    +    def wrapper(self, *args, **kwargs):
    +        if not self.request.path.endswith("/"):
    +            if self.request.method in ("GET", "HEAD"):
    +                uri = self.request.path + "/"
    +                if self.request.query:
    +                    uri += "?" + self.request.query
    +                self.redirect(uri, permanent=True)
    +                return
    +            raise HTTPError(404)
    +        return method(self, *args, **kwargs)
    +    return wrapper
    +
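    +# NOTE (editor's sketch, not part of Tornado): pairing addslash with the
    +# URL pattern suggested in its docstring; WikiHandler is hypothetical:
    +#
    +#     class WikiHandler(RequestHandler):
    +#         @addslash
    +#         def get(self):
    +#             self.write("wiki index")
    +#
    +#     app = Application([(r"/wiki/?", WikiHandler)])
    +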
    +
    +class Application(httputil.HTTPServerConnectionDelegate):
    +    """A collection of request handlers that make up a web application.
    +
    +    Instances of this class are callable and can be passed directly to
    +    HTTPServer to serve the application::
    +
    +        application = web.Application([
    +            (r"/", MainPageHandler),
    +        ])
    +        http_server = httpserver.HTTPServer(application)
    +        http_server.listen(8080)
    +        ioloop.IOLoop.current().start()
    +
    +    The constructor for this class takes in a list of `URLSpec` objects
    +    or (regexp, request_class) tuples. When we receive requests, we
    +    iterate over the list in order and instantiate an instance of the
    +    first request class whose regexp matches the request path.
    +    The request class can be specified as either a class object or a
    +    (fully-qualified) name.
    +
    +    Each tuple can contain additional elements, which correspond to the
    +    arguments to the `URLSpec` constructor.  (Prior to Tornado 3.2,
    +    only tuples of two or three elements were allowed).
    +
    +    A dictionary may be passed as the third element of the tuple,
    +    which will be used as keyword arguments to the handler's
    +    constructor and `~RequestHandler.initialize` method.  This pattern
    +    is used for the `StaticFileHandler` in this example (note that a
    +    `StaticFileHandler` can be installed automatically with the
    +    static_path setting described below)::
    +
    +        application = web.Application([
    +            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
    +        ])
    +
    +    We support virtual hosts with the `add_handlers` method, which takes in
    +    a host regular expression as the first argument::
    +
    +        application.add_handlers(r"www\.myhost\.com", [
    +            (r"/article/([0-9]+)", ArticleHandler),
    +        ])
    +
    +    You can serve static files by sending the ``static_path`` setting
    +    as a keyword argument. We will serve those files from the
    +    ``/static/`` URI (this is configurable with the
    +    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    +    and ``/robots.txt`` from the same directory.  A custom subclass of
    +    `StaticFileHandler` can be specified with the
    +    ``static_handler_class`` setting.
    +
    +    """
    +    def __init__(self, handlers=None, default_host="", transforms=None,
    +                 **settings):
    +        if transforms is None:
    +            self.transforms = []
    +            if settings.get("compress_response") or settings.get("gzip"):
    +                self.transforms.append(GZipContentEncoding)
    +        else:
    +            self.transforms = transforms
    +        self.handlers = []
    +        self.named_handlers = {}
    +        self.default_host = default_host
    +        self.settings = settings
    +        self.ui_modules = {'linkify': _linkify,
    +                           'xsrf_form_html': _xsrf_form_html,
    +                           'Template': TemplateModule,
    +                           }
    +        self.ui_methods = {}
    +        self._load_ui_modules(settings.get("ui_modules", {}))
    +        self._load_ui_methods(settings.get("ui_methods", {}))
    +        if self.settings.get("static_path"):
    +            path = self.settings["static_path"]
    +            handlers = list(handlers or [])
    +            static_url_prefix = settings.get("static_url_prefix",
    +                                             "/static/")
    +            static_handler_class = settings.get("static_handler_class",
    +                                                StaticFileHandler)
    +            static_handler_args = settings.get("static_handler_args", {})
    +            static_handler_args['path'] = path
    +            for pattern in [re.escape(static_url_prefix) + r"(.*)",
    +                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
    +                handlers.insert(0, (pattern, static_handler_class,
    +                                    static_handler_args))
    +        if handlers:
    +            self.add_handlers(".*$", handlers)
    +
    +        if self.settings.get('debug'):
    +            self.settings.setdefault('autoreload', True)
    +            self.settings.setdefault('compiled_template_cache', False)
    +            self.settings.setdefault('static_hash_cache', False)
    +            self.settings.setdefault('serve_traceback', True)
    +
    +        # Automatically reload modified modules
    +        if self.settings.get('autoreload'):
    +            from tornado import autoreload
    +            autoreload.start()
    +
    +    def listen(self, port, address="", **kwargs):
    +        """Starts an HTTP server for this application on the given port.
    +
    +        This is a convenience alias for creating an `.HTTPServer`
    +        object and calling its listen method.  Keyword arguments not
    +        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
    +        `.HTTPServer` constructor.  For advanced uses
    +        (e.g. multi-process mode), do not use this method; create an
    +        `.HTTPServer` and call its
    +        `.TCPServer.bind`/`.TCPServer.start` methods directly.
    +
    +        Note that after calling this method you still need to call
    +        ``IOLoop.current().start()`` to start the server.
    +
    +        Returns the `.HTTPServer` object.
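+
+        A minimal sketch::
+
+            application = web.Application([(r"/", MainPageHandler)])
+            server = application.listen(8080)
+            ioloop.IOLoop.current().start()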
    +
    +        .. versionchanged:: 4.3
    +           Now returns the `.HTTPServer` object.
    +        """
    +        # import is here rather than top level because HTTPServer
    +        # is not importable on appengine
    +        from tornado.httpserver import HTTPServer
    +        server = HTTPServer(self, **kwargs)
    +        server.listen(port, address)
    +        return server
    +
    +    def add_handlers(self, host_pattern, host_handlers):
    +        """Appends the given handlers to our handler list.
    +
    +        Host patterns are processed sequentially in the order they were
    +        added. All matching patterns will be considered.
    +        """
    +        if not host_pattern.endswith("$"):
    +            host_pattern += "$"
    +        handlers = []
    +        # The handlers with the wildcard host_pattern are a special
    +        # case - they're added in the constructor but should have lower
    +        # precedence than the more-precise handlers added later.
    +        # If a wildcard handler group exists, it should always be last
    +        # in the list, so insert new groups just before it.
    +        if self.handlers and self.handlers[-1][0].pattern == '.*$':
    +            self.handlers.insert(-1, (re.compile(host_pattern), handlers))
    +        else:
    +            self.handlers.append((re.compile(host_pattern), handlers))
    +
    +        for spec in host_handlers:
    +            if isinstance(spec, (tuple, list)):
    +                assert len(spec) in (2, 3, 4)
    +                spec = URLSpec(*spec)
    +            handlers.append(spec)
    +            if spec.name:
    +                if spec.name in self.named_handlers:
    +                    app_log.warning(
    +                        "Multiple handlers named %s; replacing previous value",
    +                        spec.name)
    +                self.named_handlers[spec.name] = spec
    +
    +    def add_transform(self, transform_class):
    +        self.transforms.append(transform_class)
    +
    +    def _get_host_handlers(self, request):
    +        host = split_host_and_port(request.host.lower())[0]
    +        matches = []
    +        for pattern, handlers in self.handlers:
    +            if pattern.match(host):
    +                matches.extend(handlers)
    +        # Look for default host if not behind load balancer (for debugging)
    +        if not matches and "X-Real-Ip" not in request.headers:
    +            for pattern, handlers in self.handlers:
    +                if pattern.match(self.default_host):
    +                    matches.extend(handlers)
    +        return matches or None
    +
    +    def _load_ui_methods(self, methods):
    +        if isinstance(methods, types.ModuleType):
    +            self._load_ui_methods(dict((n, getattr(methods, n))
    +                                       for n in dir(methods)))
    +        elif isinstance(methods, list):
    +            for m in methods:
    +                self._load_ui_methods(m)
    +        else:
    +            for name, fn in methods.items():
    +                if not name.startswith("_") and hasattr(fn, "__call__") \
    +                        and name[0].lower() == name[0]:
    +                    self.ui_methods[name] = fn
    +
    +    def _load_ui_modules(self, modules):
    +        if isinstance(modules, types.ModuleType):
    +            self._load_ui_modules(dict((n, getattr(modules, n))
    +                                       for n in dir(modules)))
    +        elif isinstance(modules, list):
    +            for m in modules:
    +                self._load_ui_modules(m)
    +        else:
    +            assert isinstance(modules, dict)
    +            for name, cls in modules.items():
    +                try:
    +                    if issubclass(cls, UIModule):
    +                        self.ui_modules[name] = cls
    +                except TypeError:
    +                    pass
    +
    +    def start_request(self, server_conn, request_conn):
    +        # Modern HTTPServer interface
    +        return _RequestDispatcher(self, request_conn)
    +
    +    def __call__(self, request):
    +        # Legacy HTTPServer interface
    +        dispatcher = _RequestDispatcher(self, None)
    +        dispatcher.set_request(request)
    +        return dispatcher.execute()
    +
    +    def reverse_url(self, name, *args):
    +        """Returns a URL path for handler named ``name``
    +
    +        The handler must be added to the application as a named `URLSpec`.
    +
    +        Args will be substituted for capturing groups in the `URLSpec` regex.
    +        They will be converted to strings if necessary, encoded as utf8,
    +        and url-escaped.
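+
+        A minimal sketch (``ArticleHandler`` is hypothetical)::
+
+            application = web.Application([
+                url(r"/article/([0-9]+)", ArticleHandler, name="article"),
+            ])
+            application.reverse_url("article", 42)  # returns "/article/42"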
    +        """
    +        if name in self.named_handlers:
    +            return self.named_handlers[name].reverse(*args)
    +        raise KeyError("%s not found in named urls" % name)
    +
    +    def log_request(self, handler):
    +        """Writes a completed HTTP request to the logs.
    +
    +        By default writes to the python root logger.  To change
    +        this behavior either subclass Application and override this method,
    +        or pass a function in the application settings dictionary as
    +        ``log_function``.
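+
+        A minimal sketch of a custom ``log_function`` (the name ``my_log``
+        is hypothetical)::
+
+            def my_log(handler):
+                print(handler.get_status(), handler.request.uri)
+
+            application = web.Application(handlers, log_function=my_log)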
    +        """
    +        if "log_function" in self.settings:
    +            self.settings["log_function"](handler)
    +            return
    +        if handler.get_status() < 400:
    +            log_method = access_log.info
    +        elif handler.get_status() < 500:
    +            log_method = access_log.warning
    +        else:
    +            log_method = access_log.error
    +        request_time = 1000.0 * handler.request.request_time()
    +        log_method("%d %s %.2fms", handler.get_status(),
    +                   handler._request_summary(), request_time)
    +
    +
    +class _RequestDispatcher(httputil.HTTPMessageDelegate):
    +    def __init__(self, application, connection):
    +        self.application = application
    +        self.connection = connection
    +        self.request = None
    +        self.chunks = []
    +        self.handler_class = None
    +        self.handler_kwargs = None
    +        self.path_args = []
    +        self.path_kwargs = {}
    +
    +    def headers_received(self, start_line, headers):
    +        self.set_request(httputil.HTTPServerRequest(
    +            connection=self.connection, start_line=start_line,
    +            headers=headers))
    +        if self.stream_request_body:
    +            self.request.body = Future()
    +            return self.execute()
    +
    +    def set_request(self, request):
    +        self.request = request
    +        self._find_handler()
    +        self.stream_request_body = _has_stream_request_body(self.handler_class)
    +
    +    def _find_handler(self):
    +        # Identify the handler to use as soon as we have the request.
    +        # Save url path arguments for later.
    +        app = self.application
    +        handlers = app._get_host_handlers(self.request)
    +        if not handlers:
    +            self.handler_class = RedirectHandler
    +            self.handler_kwargs = dict(url="%s://%s/"
    +                                       % (self.request.protocol,
    +                                          app.default_host))
    +            return
    +        for spec in handlers:
    +            match = spec.regex.match(self.request.path)
    +            if match:
    +                self.handler_class = spec.handler_class
    +                self.handler_kwargs = spec.kwargs
    +                if spec.regex.groups:
    +                    # Pass matched groups to the handler.  Since
    +                    # match.groups() includes both named and
    +                    # unnamed groups, we want to use either groups
    +                    # or groupdict but not both.
    +                    if spec.regex.groupindex:
    +                        self.path_kwargs = dict(
    +                            (str(k), _unquote_or_none(v))
    +                            for (k, v) in match.groupdict().items())
    +                    else:
    +                        self.path_args = [_unquote_or_none(s)
    +                                          for s in match.groups()]
    +                return
    +        if app.settings.get('default_handler_class'):
    +            self.handler_class = app.settings['default_handler_class']
    +            self.handler_kwargs = app.settings.get(
    +                'default_handler_args', {})
    +        else:
    +            self.handler_class = ErrorHandler
    +            self.handler_kwargs = dict(status_code=404)
    +
    +    def data_received(self, data):
    +        if self.stream_request_body:
    +            return self.handler.data_received(data)
    +        else:
    +            self.chunks.append(data)
    +
    +    def finish(self):
    +        if self.stream_request_body:
    +            self.request.body.set_result(None)
    +        else:
    +            self.request.body = b''.join(self.chunks)
    +            self.request._parse_body()
    +            self.execute()
    +
    +    def on_connection_close(self):
    +        if self.stream_request_body:
    +            self.handler.on_connection_close()
    +        else:
    +            self.chunks = None
    +
    +    def execute(self):
    +        # If template cache is disabled (usually in the debug mode),
    +        # re-compile templates and reload static files on every
    +        # request so you don't need to restart to see changes
    +        if not self.application.settings.get("compiled_template_cache", True):
    +            with RequestHandler._template_loader_lock:
    +                for loader in RequestHandler._template_loaders.values():
    +                    loader.reset()
    +        if not self.application.settings.get('static_hash_cache', True):
    +            StaticFileHandler.reset()
    +
    +        self.handler = self.handler_class(self.application, self.request,
    +                                          **self.handler_kwargs)
    +        transforms = [t(self.request) for t in self.application.transforms]
    +
    +        if self.stream_request_body:
    +            self.handler._prepared_future = Future()
    +        # Note that if an exception escapes handler._execute it will be
    +        # trapped in the Future it returns (which we are ignoring here,
    +        # leaving it to be logged when the Future is GC'd).
    +        # However, that shouldn't happen because _execute has a blanket
    +        # except handler, and we cannot easily access the IOLoop here to
    +        # call add_future (because of the requirement to remain compatible
    +        # with WSGI)
    +        self.handler._execute(transforms, *self.path_args,
    +                              **self.path_kwargs)
    +        # If we are streaming the request body, then execute() is finished
    +        # when the handler has prepared to receive the body.  If not,
    +        # it doesn't matter when execute() finishes (so we return None)
    +        return self.handler._prepared_future
    +
    +
    +class HTTPError(Exception):
    +    """An exception that will turn into an HTTP error response.
    +
    +    Raising an `HTTPError` is a convenient alternative to calling
    +    `RequestHandler.send_error` since it automatically ends the
    +    current function.
    +
    +    To customize the response sent with an `HTTPError`, override
    +    `RequestHandler.write_error`.
    +
    +    :arg int status_code: HTTP status code.  Must be listed in
+        `httplib.responses <http.client.responses>` unless the ``reason``
    +        keyword argument is given.
    +    :arg string log_message: Message to be written to the log for this error
    +        (will not be shown to the user unless the `Application` is in debug
    +        mode).  May contain ``%s``-style placeholders, which will be filled
    +        in with remaining positional parameters.
    +    :arg string reason: Keyword-only argument.  The HTTP "reason" phrase
    +        to pass in the status line along with ``status_code``.  Normally
    +        determined automatically from ``status_code``, but can be used
    +        to use a non-standard numeric code.
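+
+    A minimal sketch (``user`` and ``user_id`` are hypothetical)::
+
+        if user is None:
+            raise HTTPError(404, "no such user %s", user_id)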
    +    """
    +    def __init__(self, status_code=500, log_message=None, *args, **kwargs):
    +        self.status_code = status_code
    +        self.log_message = log_message
    +        self.args = args
    +        self.reason = kwargs.get('reason', None)
    +        if log_message and not args:
    +            self.log_message = log_message.replace('%', '%%')
    +
    +    def __str__(self):
    +        message = "HTTP %d: %s" % (
    +            self.status_code,
    +            self.reason or httputil.responses.get(self.status_code, 'Unknown'))
    +        if self.log_message:
    +            return message + " (" + (self.log_message % self.args) + ")"
    +        else:
    +            return message
    +
    +
    +class Finish(Exception):
    +    """An exception that ends the request without producing an error response.
    +
    +    When `Finish` is raised in a `RequestHandler`, the request will
    +    end (calling `RequestHandler.finish` if it hasn't already been
    +    called), but the error-handling methods (including
    +    `RequestHandler.write_error`) will not be called.
    +
    +    If `Finish()` was created with no arguments, the pending response
    +    will be sent as-is. If `Finish()` was given an argument, that
    +    argument will be passed to `RequestHandler.finish()`.
    +
    +    This can be a more convenient way to implement custom error pages
    +    than overriding ``write_error`` (especially in library code)::
    +
    +        if self.current_user is None:
    +            self.set_status(401)
    +            self.set_header('WWW-Authenticate', 'Basic realm="something"')
    +            raise Finish()
    +
    +    .. versionchanged:: 4.3
    +       Arguments passed to ``Finish()`` will be passed on to
    +       `RequestHandler.finish`.
    +    """
    +    pass
    +
    +
    +class MissingArgumentError(HTTPError):
    +    """Exception raised by `RequestHandler.get_argument`.
    +
    +    This is a subclass of `HTTPError`, so if it is uncaught a 400 response
    +    code will be used instead of 500 (and a stack trace will not be logged).
    +
    +    .. versionadded:: 3.1
    +    """
    +    def __init__(self, arg_name):
    +        super(MissingArgumentError, self).__init__(
    +            400, 'Missing argument %s' % arg_name)
    +        self.arg_name = arg_name
    +
    +
    +class ErrorHandler(RequestHandler):
    +    """Generates an error response with ``status_code`` for all requests."""
    +    def initialize(self, status_code):
    +        self.set_status(status_code)
    +
    +    def prepare(self):
    +        raise HTTPError(self._status_code)
    +
    +    def check_xsrf_cookie(self):
    +        # POSTs to an ErrorHandler don't actually have side effects,
    +        # so we don't need to check the xsrf token.  This allows POSTs
    +        # to the wrong url to return a 404 instead of 403.
    +        pass
    +
    +
    +class RedirectHandler(RequestHandler):
    +    """Redirects the client to the given URL for all GET requests.
    +
    +    You should provide the keyword argument ``url`` to the handler, e.g.::
    +
    +        application = web.Application([
    +            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
    +        ])
    +    """
    +    def initialize(self, url, permanent=True):
    +        self._url = url
    +        self._permanent = permanent
    +
    +    def get(self):
    +        self.redirect(self._url, permanent=self._permanent)
    +
    +
    +class StaticFileHandler(RequestHandler):
    +    """A simple handler that can serve static content from a directory.
    +
    +    A `StaticFileHandler` is configured automatically if you pass the
    +    ``static_path`` keyword argument to `Application`.  This handler
    +    can be customized with the ``static_url_prefix``, ``static_handler_class``,
    +    and ``static_handler_args`` settings.
    +
    +    To map an additional path to this handler for a static data directory
    +    you would add a line to your application like::
    +
    +        application = web.Application([
    +            (r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
    +        ])
    +
    +    The handler constructor requires a ``path`` argument, which specifies the
    +    local root directory of the content to be served.
    +
    +    Note that a capture group in the regex is required to parse the value for
+    the ``path`` argument to the get() method (different from the constructor
    +    argument above); see `URLSpec` for details.
    +
    +    To serve a file like ``admin_index.mako`` automatically when a directory is
    +    requested, set ``static_handler_args=dict(default_filename="admin_index.mako")``
    +    in your application settings, or add ``default_filename`` as an initializer
    +    argument for your ``StaticFileHandler``.
    +
    +    To maximize the effectiveness of browser caching, this class supports
    +    versioned urls (by default using the argument ``?v=``).  If a version
    +    is given, we instruct the browser to cache this file indefinitely.
    +    `make_static_url` (also available as `RequestHandler.static_url`) can
    +    be used to construct a versioned url.
    +
    +    This handler is intended primarily for use in development and light-duty
    +    file serving; for heavy traffic it will be more efficient to use
    +    a dedicated static file server (such as nginx or Apache).  We support
    +    the HTTP ``Accept-Ranges`` mechanism to return partial content (because
    +    some browsers require this functionality to be present to seek in
    +    HTML5 audio or video).
    +
    +    **Subclassing notes**
    +
    +    This class is designed to be extensible by subclassing, but because
    +    of the way static urls are generated with class methods rather than
    +    instance methods, the inheritance patterns are somewhat unusual.
    +    Be sure to use the ``@classmethod`` decorator when overriding a
+    class method.  Instance methods may use the attributes ``self.path``,
    +    ``self.absolute_path``, and ``self.modified``.
    +
    +    Subclasses should only override methods discussed in this section;
    +    overriding other methods is error-prone.  Overriding
    +    ``StaticFileHandler.get`` is particularly problematic due to the
    +    tight coupling with ``compute_etag`` and other methods.
    +
    +    To change the way static urls are generated (e.g. to match the behavior
    +    of another server or CDN), override `make_static_url`, `parse_url_path`,
    +    `get_cache_time`, and/or `get_version`.
    +
    +    To replace all interaction with the filesystem (e.g. to serve
    +    static content from a database), override `get_content`,
    +    `get_content_size`, `get_modified_time`, `get_absolute_path`, and
    +    `validate_absolute_path`.
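+
+    A minimal sketch of a `make_static_url` override (``CDN_PREFIX`` is a
+    hypothetical constant)::
+
+        class CDNStaticFileHandler(StaticFileHandler):
+            @classmethod
+            def make_static_url(cls, settings, path, include_version=True):
+                return CDN_PREFIX + path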
    +
    +    .. versionchanged:: 3.1
    +       Many of the methods for subclasses were added in Tornado 3.1.
    +    """
    +    CACHE_MAX_AGE = 86400 * 365 * 10  # 10 years
    +
    +    _static_hashes = {}
    +    _lock = threading.Lock()  # protects _static_hashes
    +
    +    def initialize(self, path, default_filename=None):
    +        self.root = path
    +        self.default_filename = default_filename
    +
    +    @classmethod
    +    def reset(cls):
    +        with cls._lock:
    +            cls._static_hashes = {}
    +
    +    def head(self, path):
    +        return self.get(path, include_body=False)
    +
    +    @gen.coroutine
    +    def get(self, path, include_body=True):
    +        # Set up our path instance variables.
    +        self.path = self.parse_url_path(path)
    +        del path  # make sure we don't refer to path instead of self.path again
    +        absolute_path = self.get_absolute_path(self.root, self.path)
    +        self.absolute_path = self.validate_absolute_path(
    +            self.root, absolute_path)
    +        if self.absolute_path is None:
    +            return
    +
    +        self.modified = self.get_modified_time()
    +        self.set_headers()
    +
    +        if self.should_return_304():
    +            self.set_status(304)
    +            return
    +
    +        request_range = None
    +        range_header = self.request.headers.get("Range")
    +        if range_header:
    +            # As per RFC 2616 14.16, if an invalid Range header is specified,
    +            # the request will be treated as if the header didn't exist.
    +            request_range = httputil._parse_request_range(range_header)
    +
    +        size = self.get_content_size()
    +        if request_range:
    +            start, end = request_range
    +            if (start is not None and start >= size) or end == 0:
    +                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
    +                # the first requested byte is equal to or greater than the
    +                # content, or when a suffix with length 0 is specified
    +                self.set_status(416)  # Range Not Satisfiable
    +                self.set_header("Content-Type", "text/plain")
    +                self.set_header("Content-Range", "bytes */%s" % (size, ))
    +                return
    +            if start is not None and start < 0:
    +                start += size
    +            if end is not None and end > size:
    +                # Clients sometimes blindly use a large range to limit their
    +                # download size; cap the endpoint at the actual file size.
    +                end = size
    +            # Note: only return HTTP 206 if less than the entire range has been
    +            # requested. Not only is this semantically correct, but Chrome
    +            # refuses to play audio if it gets an HTTP 206 in response to
    +            # ``Range: bytes=0-``.
    +            if size != (end or size) - (start or 0):
    +                self.set_status(206)  # Partial Content
    +                self.set_header("Content-Range",
    +                                httputil._get_content_range(start, end, size))
    +        else:
    +            start = end = None
    +
    +        if start is not None and end is not None:
    +            content_length = end - start
    +        elif end is not None:
    +            content_length = end
    +        elif start is not None:
    +            content_length = size - start
    +        else:
    +            content_length = size
    +        self.set_header("Content-Length", content_length)
    +
    +        if include_body:
    +            content = self.get_content(self.absolute_path, start, end)
    +            if isinstance(content, bytes):
    +                content = [content]
    +            for chunk in content:
    +                try:
    +                    self.write(chunk)
    +                    yield self.flush()
    +                except iostream.StreamClosedError:
    +                    return
    +        else:
    +            assert self.request.method == "HEAD"
    +
    +    def compute_etag(self):
    +        """Sets the ``Etag`` header based on static url version.
    +
    +        This allows efficient ``If-None-Match`` checks against cached
    +        versions, and sends the correct ``Etag`` for a partial response
    +        (i.e. the same ``Etag`` as the full file).
    +
    +        .. versionadded:: 3.1
    +        """
    +        version_hash = self._get_cached_version(self.absolute_path)
    +        if not version_hash:
    +            return None
    +        return '"%s"' % (version_hash, )
    +
    +    def set_headers(self):
    +        """Sets the content and caching headers on the response.
    +
    +        .. versionadded:: 3.1
    +        """
    +        self.set_header("Accept-Ranges", "bytes")
    +        self.set_etag_header()
    +
    +        if self.modified is not None:
    +            self.set_header("Last-Modified", self.modified)
    +
    +        content_type = self.get_content_type()
    +        if content_type:
    +            self.set_header("Content-Type", content_type)
    +
    +        cache_time = self.get_cache_time(self.path, self.modified,
    +                                         content_type)
    +        if cache_time > 0:
    +            self.set_header("Expires", datetime.datetime.utcnow() +
    +                            datetime.timedelta(seconds=cache_time))
    +            self.set_header("Cache-Control", "max-age=" + str(cache_time))
    +
    +        self.set_extra_headers(self.path)
    +
    +    def should_return_304(self):
    +        """Returns True if the headers indicate that we should return 304.
    +
    +        .. versionadded:: 3.1
    +        """
    +        if self.check_etag_header():
    +            return True
    +
    +        # Check the If-Modified-Since, and don't send the result if the
    +        # content has not been modified
    +        ims_value = self.request.headers.get("If-Modified-Since")
    +        if ims_value is not None:
    +            date_tuple = email.utils.parsedate(ims_value)
    +            if date_tuple is not None:
    +                if_since = datetime.datetime(*date_tuple[:6])
    +                if if_since >= self.modified:
    +                    return True
    +
    +        return False
    +
    +    @classmethod
    +    def get_absolute_path(cls, root, path):
    +        """Returns the absolute location of ``path`` relative to ``root``.
    +
    +        ``root`` is the path configured for this `StaticFileHandler`
    +        (in most cases the ``static_path`` `Application` setting).
    +
    +        This class method may be overridden in subclasses.  By default
    +        it returns a filesystem path, but other strings may be used
    +        as long as they are unique and understood by the subclass's
    +        overridden `get_content`.
    +
    +        .. versionadded:: 3.1
    +        """
    +        abspath = os.path.abspath(os.path.join(root, path))
    +        return abspath
    +
    +    def validate_absolute_path(self, root, absolute_path):
    +        """Validate and return the absolute path.
    +
    +        ``root`` is the configured path for the `StaticFileHandler`,
+        and ``path`` is the result of `get_absolute_path`.
    +
    +        This is an instance method called during request processing,
    +        so it may raise `HTTPError` or use methods like
    +        `RequestHandler.redirect` (return None after redirecting to
    +        halt further processing).  This is where 404 errors for missing files
    +        are generated.
    +
    +        This method may modify the path before returning it, but note that
    +        any such modifications will not be understood by `make_static_url`.
    +
    +        In instance methods, this method's result is available as
    +        ``self.absolute_path``.
    +
    +        .. versionadded:: 3.1
    +        """
    +        # os.path.abspath strips a trailing /.
    +        # We must add it back to `root` so that we only match files
    +        # in a directory named `root` instead of files starting with
    +        # that prefix.
    +        root = os.path.abspath(root)
    +        if not root.endswith(os.path.sep):
    +            # abspath always removes a trailing slash, except when
    +            # root is '/'. This is an unusual case, but several projects
    +            # have independently discovered this technique to disable
    +            # Tornado's path validation and (hopefully) do their own,
    +            # so we need to support it.
    +            root += os.path.sep
    +        # The trailing slash also needs to be temporarily added back
    +        # the requested path so a request to root/ will match.
    +        if not (absolute_path + os.path.sep).startswith(root):
    +            raise HTTPError(403, "%s is not in root static directory",
    +                            self.path)
    +        if (os.path.isdir(absolute_path) and
    +                self.default_filename is not None):
    +            # need to look at the request.path here for when path is empty
    +            # but there is some prefix to the path that was already
    +            # trimmed by the routing
    +            if not self.request.path.endswith("/"):
    +                self.redirect(self.request.path + "/", permanent=True)
    +                return
    +            absolute_path = os.path.join(absolute_path, self.default_filename)
    +        if not os.path.exists(absolute_path):
    +            raise HTTPError(404)
    +        if not os.path.isfile(absolute_path):
    +            raise HTTPError(403, "%s is not a file", self.path)
    +        return absolute_path
    +
    +    @classmethod
    +    def get_content(cls, abspath, start=None, end=None):
    +        """Retrieve the content of the requested resource which is located
    +        at the given absolute path.
    +
    +        This class method may be overridden by subclasses.  Note that its
    +        signature is different from other overridable class methods
    +        (no ``settings`` argument); this is deliberate to ensure that
    +        ``abspath`` is able to stand on its own as a cache key.
    +
    +        This method should either return a byte string or an iterator
    +        of byte strings.  The latter is preferred for large files
    +        as it helps reduce memory fragmentation.
    +
    +        .. versionadded:: 3.1
    +        """
    +        with open(abspath, "rb") as file:
    +            if start is not None:
    +                file.seek(start)
    +            if end is not None:
    +                remaining = end - (start or 0)
    +            else:
    +                remaining = None
    +            while True:
    +                chunk_size = 64 * 1024
    +                if remaining is not None and remaining < chunk_size:
    +                    chunk_size = remaining
    +                chunk = file.read(chunk_size)
    +                if chunk:
    +                    if remaining is not None:
    +                        remaining -= len(chunk)
    +                    yield chunk
    +                else:
    +                    if remaining is not None:
    +                        assert remaining == 0
    +                    return
    +
    +    @classmethod
    +    def get_content_version(cls, abspath):
    +        """Returns a version string for the resource at the given path.
    +
    +        This class method may be overridden by subclasses.  The
    +        default implementation is a hash of the file's contents.
    +
    +        .. versionadded:: 3.1
    +        """
    +        data = cls.get_content(abspath)
    +        hasher = hashlib.md5()
    +        if isinstance(data, bytes):
    +            hasher.update(data)
    +        else:
    +            for chunk in data:
    +                hasher.update(chunk)
    +        return hasher.hexdigest()
    +
    +    def _stat(self):
    +        if not hasattr(self, '_stat_result'):
    +            self._stat_result = os.stat(self.absolute_path)
    +        return self._stat_result
    +
    +    def get_content_size(self):
    +        """Retrieve the total size of the resource at the given path.
    +
    +        This method may be overridden by subclasses.
    +
    +        .. versionadded:: 3.1
    +
    +        .. versionchanged:: 4.0
    +           This method is now always called, instead of only when
    +           partial results are requested.
    +        """
    +        stat_result = self._stat()
    +        return stat_result[stat.ST_SIZE]
    +
    +    def get_modified_time(self):
    +        """Returns the time that ``self.absolute_path`` was last modified.
    +
    +        May be overridden in subclasses.  Should return a `~datetime.datetime`
    +        object or None.
    +
    +        .. versionadded:: 3.1
    +        """
    +        stat_result = self._stat()
    +        modified = datetime.datetime.utcfromtimestamp(
    +            stat_result[stat.ST_MTIME])
    +        return modified
    +
    +    def get_content_type(self):
    +        """Returns the ``Content-Type`` header to be used for this request.
    +
    +        .. versionadded:: 3.1
    +        """
    +        mime_type, encoding = mimetypes.guess_type(self.absolute_path)
    +        # per RFC 6713, use the appropriate type for a gzip compressed file
    +        if encoding == "gzip":
    +            return "application/gzip"
    +        # As of 2015-07-21 there is no bzip2 encoding defined at
    +        # http://www.iana.org/assignments/media-types/media-types.xhtml
    +        # So for that (and any other encoding), use octet-stream.
    +        elif encoding is not None:
    +            return "application/octet-stream"
    +        elif mime_type is not None:
    +            return mime_type
    +        # if mime_type not detected, use application/octet-stream
    +        else:
    +            return "application/octet-stream"
    +
    +    def set_extra_headers(self, path):
    +        """For subclass to add extra headers to the response"""
    +        pass
    +
    +    def get_cache_time(self, path, modified, mime_type):
    +        """Override to customize cache control behavior.
    +
    +        Return a positive number of seconds to make the result
    +        cacheable for that amount of time or 0 to mark resource as
    +        cacheable for an unspecified amount of time (subject to
    +        browser heuristics).
    +
    +        By default returns cache expiry of 10 years for resources requested
    +        with ``v`` argument.
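+
+        A minimal override sketch (the one-hour figure is arbitrary)::
+
+            def get_cache_time(self, path, modified, mime_type):
+                return 3600 if mime_type == "text/css" else 0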
    +        """
    +        return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
    +
    +    @classmethod
    +    def make_static_url(cls, settings, path, include_version=True):
    +        """Constructs a versioned url for the given path.
    +
    +        This method may be overridden in subclasses (but note that it
    +        is a class method rather than an instance method).  Subclasses
    +        are only required to implement the signature
    +        ``make_static_url(cls, settings, path)``; other keyword
    +        arguments may be passed through `~RequestHandler.static_url`
    +        but are not standard.
    +
    +        ``settings`` is the `Application.settings` dictionary.  ``path``
    +        is the static path being requested.  The url returned should be
    +        relative to the current host.
    +
    +        ``include_version`` determines whether the generated URL should
    +        include the query string containing the version hash of the
    +        file corresponding to the given ``path``.
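+
+        A minimal usage sketch (the hash value shown is illustrative)::
+
+            StaticFileHandler.make_static_url(
+                {"static_path": "/var/www"}, "style.css")
+            # e.g. "/static/style.css?v=b9ec8c"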
    +
    +        """
    +        url = settings.get('static_url_prefix', '/static/') + path
    +        if not include_version:
    +            return url
    +
    +        version_hash = cls.get_version(settings, path)
    +        if not version_hash:
    +            return url
    +
    +        return '%s?v=%s' % (url, version_hash)
    +
    +    def parse_url_path(self, url_path):
    +        """Converts a static URL path into a filesystem path.
    +
    +        ``url_path`` is the path component of the URL with
    +        ``static_url_prefix`` removed.  The return value should be
    +        filesystem path relative to ``static_path``.
    +
    +        This is the inverse of `make_static_url`.
    +        """
    +        if os.path.sep != "/":
    +            url_path = url_path.replace("/", os.path.sep)
    +        return url_path
    +
    +    @classmethod
    +    def get_version(cls, settings, path):
    +        """Generate the version string to be used in static URLs.
    +
    +        ``settings`` is the `Application.settings` dictionary and ``path``
    +        is the relative location of the requested asset on the filesystem.
    +        The returned value should be a string, or ``None`` if no version
    +        could be determined.
    +
    +        .. versionchanged:: 3.1
    +           This method was previously recommended for subclasses to override;
    +           `get_content_version` is now preferred as it allows the base
    +           class to handle caching of the result.
    +        """
    +        abs_path = cls.get_absolute_path(settings['static_path'], path)
    +        return cls._get_cached_version(abs_path)
    +
    +    @classmethod
    +    def _get_cached_version(cls, abs_path):
    +        with cls._lock:
    +            hashes = cls._static_hashes
    +            if abs_path not in hashes:
    +                try:
    +                    hashes[abs_path] = cls.get_content_version(abs_path)
    +                except Exception:
    +                    gen_log.error("Could not open static file %r", abs_path)
    +                    hashes[abs_path] = None
    +            hsh = hashes.get(abs_path)
    +            if hsh:
    +                return hsh
    +        return None
    +
    +
    +class FallbackHandler(RequestHandler):
    +    """A `RequestHandler` that wraps another HTTP server callback.
    +
    +    The fallback is a callable object that accepts an
    +    `~.httputil.HTTPServerRequest`, such as an `Application` or
    +    `tornado.wsgi.WSGIContainer`.  This is most useful to use both
    +    Tornado ``RequestHandlers`` and WSGI in the same server.  Typical
    +    usage::
    +
    +        wsgi_app = tornado.wsgi.WSGIContainer(
    +            django.core.handlers.wsgi.WSGIHandler())
    +        application = tornado.web.Application([
    +            (r"/foo", FooHandler),
    +            (r".*", FallbackHandler, dict(fallback=wsgi_app),
    +        ])
    +    """
    +    def initialize(self, fallback):
    +        self.fallback = fallback
    +
    +    def prepare(self):
    +        self.fallback(self.request)
    +        self._finished = True
    +
    +
    +class OutputTransform(object):
    +    """A transform modifies the result of an HTTP request (e.g., GZip encoding)
    +
    +    Applications are not expected to create their own OutputTransforms
    +    or interact with them directly; the framework chooses which transforms
    +    (if any) to apply.
    +    """
    +    def __init__(self, request):
    +        pass
    +
    +    def transform_first_chunk(self, status_code, headers, chunk, finishing):
    +        return status_code, headers, chunk
    +
    +    def transform_chunk(self, chunk, finishing):
    +        return chunk
    +
    +
    +class GZipContentEncoding(OutputTransform):
    +    """Applies the gzip content encoding to the response.
    +
    +    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    +
    +    .. versionchanged:: 4.0
    +        Now compresses all mime types beginning with ``text/``, instead
    +        of just a whitelist. (the whitelist is still used for certain
    +        non-text mime types).
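+
+    Enabled via the ``compress_response`` (or legacy ``gzip``) application
+    setting; a minimal sketch::
+
+        application = web.Application(handlers, compress_response=True)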
    +    """
    +    # Whitelist of compressible mime types (in addition to any types
    +    # beginning with "text/").
    +    CONTENT_TYPES = set(["application/javascript", "application/x-javascript",
    +                         "application/xml", "application/atom+xml",
    +                         "application/json", "application/xhtml+xml"])
    +    # Python's GzipFile defaults to level 9, while most other gzip
    +    # tools (including gzip itself) default to 6, which is probably a
    +    # better CPU/size tradeoff.
    +    GZIP_LEVEL = 6
    +    # Responses that are too short are unlikely to benefit from gzipping
    +    # after considering the "Content-Encoding: gzip" header and the header
    +    # inside the gzip encoding.
    +    # Note that responses written in multiple chunks will be compressed
    +    # regardless of size.
    +    MIN_LENGTH = 1024
    +
    +    def __init__(self, request):
    +        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")
    +
    +    def _compressible_type(self, ctype):
    +        return ctype.startswith('text/') or ctype in self.CONTENT_TYPES
    +
    +    def transform_first_chunk(self, status_code, headers, chunk, finishing):
    +        if 'Vary' in headers:
    +            headers['Vary'] += b', Accept-Encoding'
    +        else:
    +            headers['Vary'] = b'Accept-Encoding'
    +        if self._gzipping:
    +            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
    +            self._gzipping = self._compressible_type(ctype) and \
    +                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
    +                ("Content-Encoding" not in headers)
    +        if self._gzipping:
    +            headers["Content-Encoding"] = "gzip"
    +            self._gzip_value = BytesIO()
    +            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value,
    +                                            compresslevel=self.GZIP_LEVEL)
    +            chunk = self.transform_chunk(chunk, finishing)
    +            if "Content-Length" in headers:
    +                # The original content length is no longer correct.
    +                # If this is the last (and only) chunk, we can set the new
    +                # content-length; otherwise we remove it and fall back to
    +                # chunked encoding.
    +                if finishing:
    +                    headers["Content-Length"] = str(len(chunk))
    +                else:
    +                    del headers["Content-Length"]
    +        return status_code, headers, chunk
    +
    +    def transform_chunk(self, chunk, finishing):
    +        if self._gzipping:
    +            self._gzip_file.write(chunk)
    +            if finishing:
    +                self._gzip_file.close()
    +            else:
    +                self._gzip_file.flush()
    +            chunk = self._gzip_value.getvalue()
    +            self._gzip_value.truncate(0)
    +            self._gzip_value.seek(0)
    +        return chunk
    +
    +
    +def authenticated(method):
    +    """Decorate methods with this to require that the user be logged in.
    +
    +    If the user is not logged in, they will be redirected to the configured
+    `login url <RequestHandler.get_login_url>`.
    +
    +    If you configure a login url with a query parameter, Tornado will
    +    assume you know what you're doing and use it as-is.  If not, it
    +    will add a `next` parameter so the login page knows where to send
    +    you once you're logged in.
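+
+    A minimal sketch (``ProfileHandler`` is hypothetical)::
+
+        class ProfileHandler(RequestHandler):
+            @authenticated
+            def get(self):
+                self.write("hello %s" % self.current_user)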
    +    """
    +    @functools.wraps(method)
    +    def wrapper(self, *args, **kwargs):
    +        if not self.current_user:
    +            if self.request.method in ("GET", "HEAD"):
    +                url = self.get_login_url()
    +                if "?" not in url:
    +                    if urlparse.urlsplit(url).scheme:
    +                        # if login url is absolute, make next absolute too
    +                        next_url = self.request.full_url()
    +                    else:
    +                        next_url = self.request.uri
    +                    url += "?" + urlencode(dict(next=next_url))
    +                self.redirect(url)
    +                return
    +            raise HTTPError(403)
    +        return method(self, *args, **kwargs)
    +    return wrapper
    +
    +
    +class UIModule(object):
    +    """A re-usable, modular UI unit on a page.
    +
    +    UI modules often execute additional queries, and they can include
    +    additional CSS and JavaScript that will be included in the output
    +    page, which is automatically inserted on page render.
    +
    +    Subclasses of UIModule must override the `render` method.
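+
+    A minimal sketch (the ``module-entry.html`` template is hypothetical)::
+
+        class Entry(UIModule):
+            def render(self, entry):
+                return self.render_string("module-entry.html", entry=entry)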
    +    """
    +    def __init__(self, handler):
    +        self.handler = handler
    +        self.request = handler.request
    +        self.ui = handler.ui
    +        self.locale = handler.locale
    +
    +    @property
    +    def current_user(self):
    +        return self.handler.current_user
    +
    +    def render(self, *args, **kwargs):
    +        """Override in subclasses to return this module's output."""
    +        raise NotImplementedError()
    +
    +    def embedded_javascript(self):
    +        """Override to return a JavaScript string
    +        to be embedded in the page."""
    +        return None
    +
    +    def javascript_files(self):
    +        """Override to return a list of JavaScript files needed by this module.
    +
    +        If the return values are relative paths, they will be passed to
    +        `RequestHandler.static_url`; otherwise they will be used as-is.
    +        """
    +        return None
    +
    +    def embedded_css(self):
    +        """Override to return a CSS string
    +        that will be embedded in the page."""
    +        return None
    +
    +    def css_files(self):
    +        """Override to returns a list of CSS files required by this module.
    +
    +        If the return values are relative paths, they will be passed to
    +        `RequestHandler.static_url`; otherwise they will be used as-is.
    +        """
    +        return None
    +
    +    def html_head(self):
    +        """Override to return an HTML string that will be put in the 
    +        element.
    +        """
    +        return None
    +
    +    def html_body(self):
    +        """Override to return an HTML string that will be put at the end of
+        the <body/> element.
    +        """
    +        return None
    +
    +    def render_string(self, path, **kwargs):
    +        """Renders a template and returns it as a string."""
    +        return self.handler.render_string(path, **kwargs)
    +
    +
    +class _linkify(UIModule):
    +    def render(self, text, **kwargs):
    +        return escape.linkify(text, **kwargs)
    +
    +
    +class _xsrf_form_html(UIModule):
    +    def render(self):
    +        return self.handler.xsrf_form_html()
    +
    +
    +class TemplateModule(UIModule):
    +    """UIModule that simply renders the given template.
    +
    +    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    +    but the module version gets its own namespace (with kwargs passed to
    +    Template()) instead of inheriting the outer template's namespace.
    +
    +    Templates rendered through this module also get access to UIModule's
    +    automatic javascript/css features.  Simply call set_resources
    +    inside the template and give it keyword arguments corresponding to
    +    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    +    Note that these resources are output once per template file, not once
    +    per instantiation of the template, so they must not depend on
    +    any arguments to the template.
    +    """
    +    def __init__(self, handler):
    +        super(TemplateModule, self).__init__(handler)
    +        # keep resources in both a list and a dict to preserve order
    +        self._resource_list = []
    +        self._resource_dict = {}
    +
    +    def render(self, path, **kwargs):
    +        def set_resources(**kwargs):
    +            if path not in self._resource_dict:
    +                self._resource_list.append(kwargs)
    +                self._resource_dict[path] = kwargs
    +            else:
    +                if self._resource_dict[path] != kwargs:
    +                    raise ValueError("set_resources called with different "
    +                                     "resources for the same template")
    +            return ""
    +        return self.render_string(path, set_resources=set_resources,
    +                                  **kwargs)
    +
    +    def _get_resources(self, key):
    +        return (r[key] for r in self._resource_list if key in r)
    +
    +    def embedded_javascript(self):
    +        return "\n".join(self._get_resources("embedded_javascript"))
    +
    +    def javascript_files(self):
    +        result = []
    +        for f in self._get_resources("javascript_files"):
    +            if isinstance(f, (unicode_type, bytes)):
    +                result.append(f)
    +            else:
    +                result.extend(f)
    +        return result
    +
    +    def embedded_css(self):
    +        return "\n".join(self._get_resources("embedded_css"))
    +
    +    def css_files(self):
    +        result = []
    +        for f in self._get_resources("css_files"):
    +            if isinstance(f, (unicode_type, bytes)):
    +                result.append(f)
    +            else:
    +                result.extend(f)
    +        return result
    +
    +    def html_head(self):
    +        return "".join(self._get_resources("html_head"))
    +
    +    def html_body(self):
    +        return "".join(self._get_resources("html_body"))
    +
    +
    +class _UIModuleNamespace(object):
    +    """Lazy namespace which creates UIModule proxies bound to a handler."""
    +    def __init__(self, handler, ui_modules):
    +        self.handler = handler
    +        self.ui_modules = ui_modules
    +
    +    def __getitem__(self, key):
    +        return self.handler._ui_module(key, self.ui_modules[key])
    +
    +    def __getattr__(self, key):
    +        try:
    +            return self[key]
    +        except KeyError as e:
    +            raise AttributeError(str(e))
    +
    +
    +class URLSpec(object):
    +    """Specifies mappings between URLs and handlers."""
    +    def __init__(self, pattern, handler, kwargs=None, name=None):
    +        """Parameters:
    +
    +        * ``pattern``: Regular expression to be matched.  Any groups
    +          in the regex will be passed in to the handler's get/post/etc
    +          methods as arguments.
    +
    +        * ``handler``: `RequestHandler` subclass to be invoked.
    +
    +        * ``kwargs`` (optional): A dictionary of additional arguments
    +          to be passed to the handler's constructor.
    +
    +        * ``name`` (optional): A name for this handler.  Used by
    +          `Application.reverse_url`.
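+
+        A minimal sketch (``UserHandler`` is hypothetical)::
+
+            url(r"/user/([0-9]+)", UserHandler, name="user")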
    +        """
    +        if not pattern.endswith('$'):
    +            pattern += '$'
    +        self.regex = re.compile(pattern)
    +        assert len(self.regex.groupindex) in (0, self.regex.groups), \
    +            ("groups in url regexes must either be all named or all "
    +             "positional: %r" % self.regex.pattern)
    +
    +        if isinstance(handler, str):
    +            # import the Module and instantiate the class
    +            # Must be a fully qualified name (module.ClassName)
    +            handler = import_object(handler)
    +
    +        self.handler_class = handler
    +        self.kwargs = kwargs or {}
    +        self.name = name
    +        self._path, self._group_count = self._find_groups()
    +
    +    def __repr__(self):
    +        return '%s(%r, %s, kwargs=%r, name=%r)' % \
    +            (self.__class__.__name__, self.regex.pattern,
    +             self.handler_class, self.kwargs, self.name)
    +
    +    def _find_groups(self):
    +        """Returns a tuple (reverse string, group count) for a url.
    +
    +        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
    +        would return ('/%s/%s/', 2).
    +        """
    +        pattern = self.regex.pattern
    +        if pattern.startswith('^'):
    +            pattern = pattern[1:]
    +        if pattern.endswith('$'):
    +            pattern = pattern[:-1]
    +
    +        if self.regex.groups != pattern.count('('):
    +            # The pattern is too complicated for our simplistic matching,
    +            # so we can't support reversing it.
    +            return (None, None)
    +
    +        pieces = []
    +        for fragment in pattern.split('('):
    +            if ')' in fragment:
    +                paren_loc = fragment.index(')')
    +                if paren_loc >= 0:
    +                    pieces.append('%s' + fragment[paren_loc + 1:])
    +            else:
    +                pieces.append(fragment)
    +
    +        return (''.join(pieces), self.regex.groups)
    +
    +    def reverse(self, *args):
    +        assert self._path is not None, \
    +            "Cannot reverse url regex " + self.regex.pattern
    +        assert len(args) == self._group_count, "required number of arguments "\
    +            "not found"
    +        if not len(args):
    +            return self._path
    +        converted_args = []
    +        for a in args:
    +            if not isinstance(a, (unicode_type, bytes)):
    +                a = str(a)
    +            converted_args.append(escape.url_escape(utf8(a), plus=False))
    +        return self._path % tuple(converted_args)
    +
    +url = URLSpec
    +
    +
    +if hasattr(hmac, 'compare_digest'):  # python 3.3
    +    _time_independent_equals = hmac.compare_digest
    +else:
    +    def _time_independent_equals(a, b):
    +        if len(a) != len(b):
    +            return False
    +        result = 0
    +        if isinstance(a[0], int):  # python3 byte strings
    +            for x, y in zip(a, b):
    +                result |= x ^ y
    +        else:  # python2
    +            for x, y in zip(a, b):
    +                result |= ord(x) ^ ord(y)
    +        return result == 0
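    +# Note: the fallback deliberately compares every byte, folding all
    +# differences into ``result`` rather than returning at the first
    +# mismatch, so its running time does not leak how long a prefix of the
    +# signature an attacker has guessed correctly.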
    +
    +
    +def create_signed_value(secret, name, value, version=None, clock=None,
    +                        key_version=None):
    +    if version is None:
    +        version = DEFAULT_SIGNED_VALUE_VERSION
    +    if clock is None:
    +        clock = time.time
    +
    +    timestamp = utf8(str(int(clock())))
    +    value = base64.b64encode(utf8(value))
    +    if version == 1:
    +        signature = _create_signature_v1(secret, name, value, timestamp)
    +        value = b"|".join([value, timestamp, signature])
    +        return value
    +    elif version == 2:
    +        # The v2 format consists of a version number and a series of
    +        # length-prefixed fields "%d:%s", the last of which is a
    +        # signature, all separated by pipes.  All numbers are in
    +        # decimal format with no leading zeros.  The signature is an
    +        # HMAC-SHA256 of the whole string up to that point, including
    +        # the final pipe.
    +        #
    +        # The fields are:
    +        # - format version (i.e. 2; no length prefix)
    +        # - key version (integer, default is 0)
    +        # - timestamp (integer seconds since epoch)
    +        # - name (not encoded; assumed to be ~alphanumeric)
    +        # - value (base64-encoded)
    +        # - signature (hex-encoded; no length prefix)
    +        def format_field(s):
    +            return utf8("%d:" % len(s)) + utf8(s)
    +        to_sign = b"|".join([
    +            b"2",
    +            format_field(str(key_version or 0)),
    +            format_field(timestamp),
    +            format_field(name),
    +            format_field(value),
    +            b''])
    +
    +        if isinstance(secret, dict):
    +            assert key_version is not None, 'Key version must be set when sign key dict is used'
    +            assert version >= 2, 'Version must be at least 2 for key version support'
    +            secret = secret[key_version]
    +
    +        signature = _create_signature_v2(secret, to_sign)
    +        return to_sign + signature
    +    else:
    +        raise ValueError("Unsupported version %d" % version)
    +
    +# A leading version number in decimal
    +# with no leading zeros, followed by a pipe.
    +_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
    +
    +
    +def _get_version(value):
    +    # Figures out what version value is.  Version 1 did not include an
    +    # explicit version field and started with arbitrary base64 data,
    +    # which makes this tricky.
    +    m = _signed_value_version_re.match(value)
    +    if m is None:
    +        version = 1
    +    else:
    +        try:
    +            version = int(m.group(1))
    +            if version > 999:
    +                # Certain payloads from the version-less v1 format may
    +                # be parsed as valid integers.  Due to base64 padding
    +                # restrictions, this can only happen for numbers whose
    +                # length is a multiple of 4, so we can treat all
    +                # numbers up to 999 as versions, and for the rest we
    +                # fall back to v1 format.
    +                version = 1
    +        except ValueError:
    +            version = 1
    +    return version
    +
    +
    +def decode_signed_value(secret, name, value, max_age_days=31,
    +                        clock=None, min_version=None):
    +    if clock is None:
    +        clock = time.time
    +    if min_version is None:
    +        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    +    if min_version > 2:
    +        raise ValueError("Unsupported min_version %d" % min_version)
    +    if not value:
    +        return None
    +
    +    value = utf8(value)
    +    version = _get_version(value)
    +
    +    if version < min_version:
    +        return None
    +    if version == 1:
    +        return _decode_signed_value_v1(secret, name, value,
    +                                       max_age_days, clock)
    +    elif version == 2:
    +        return _decode_signed_value_v2(secret, name, value,
    +                                       max_age_days, clock)
    +    else:
    +        return None
    +
    +
    +def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    +    parts = utf8(value).split(b"|")
    +    if len(parts) != 3:
    +        return None
    +    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    +    if not _time_independent_equals(parts[2], signature):
    +        gen_log.warning("Invalid cookie signature %r", value)
    +        return None
    +    timestamp = int(parts[1])
    +    if timestamp < clock() - max_age_days * 86400:
    +        gen_log.warning("Expired cookie %r", value)
    +        return None
    +    if timestamp > clock() + 31 * 86400:
    +        # _cookie_signature does not hash a delimiter between the
    +        # parts of the cookie, so an attacker could transfer trailing
    +        # digits from the payload to the timestamp without altering the
    +        # signature.  For backwards compatibility, sanity-check timestamp
    +        # here instead of modifying _cookie_signature.
    +        gen_log.warning("Cookie timestamp in future; possible tampering %r",
    +                        value)
    +        return None
    +    if parts[1].startswith(b"0"):
    +        gen_log.warning("Tampered cookie %r", value)
    +        return None
    +    try:
    +        return base64.b64decode(parts[0])
    +    except Exception:
    +        return None
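    +# For reference, the v1 wire format is three pipe-separated fields,
    +# base64(value)|timestamp|signature, where the signature is the hex
    +# HMAC-SHA1 over name, value and timestamp concatenated with no
    +# delimiter; hence the extra timestamp sanity checks above.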
    +
    +
    +def _decode_fields_v2(value):
    +    def _consume_field(s):
    +        length, _, rest = s.partition(b':')
    +        n = int(length)
    +        field_value = rest[:n]
    +        # In python 3, indexing bytes returns small integers; we must
    +        # use a slice to get a byte string as in python 2.
    +        if rest[n:n + 1] != b'|':
    +            raise ValueError("malformed v2 signed value field")
    +        rest = rest[n + 1:]
    +        return field_value, rest
    +
    +    rest = value[2:]  # remove version number
    +    key_version, rest = _consume_field(rest)
    +    timestamp, rest = _consume_field(rest)
    +    name_field, rest = _consume_field(rest)
    +    value_field, passed_sig = _consume_field(rest)
    +    return int(key_version), timestamp, name_field, value_field, passed_sig
    +
    +
    +def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    +    try:
    +        key_version, timestamp, name_field, value_field, passed_sig = _decode_fields_v2(value)
    +    except ValueError:
    +        return None
    +    signed_string = value[:-len(passed_sig)]
    +
    +    if isinstance(secret, dict):
    +        try:
    +            secret = secret[key_version]
    +        except KeyError:
    +            return None
    +
    +    expected_sig = _create_signature_v2(secret, signed_string)
    +    if not _time_independent_equals(passed_sig, expected_sig):
    +        return None
    +    if name_field != utf8(name):
    +        return None
    +    timestamp = int(timestamp)
    +    if timestamp < clock() - max_age_days * 86400:
    +        # The signature has expired.
    +        return None
    +    try:
    +        return base64.b64decode(value_field)
    +    except Exception:
    +        return None
    +
    +
    +def get_signature_key_version(value):
    +    value = utf8(value)
    +    version = _get_version(value)
    +    if version < 2:
    +        return None
    +    try:
    +        key_version, _, _, _, _ = _decode_fields_v2(value)
    +    except ValueError:
    +        return None
    +
    +    return key_version
    +
    +
    +def _create_signature_v1(secret, *parts):
    +    hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    +    for part in parts:
    +        hash.update(utf8(part))
    +    return utf8(hash.hexdigest())
    +
    +
    +def _create_signature_v2(secret, s):
    +    hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    +    hash.update(utf8(s))
    +    return utf8(hash.hexdigest())
    +
    +
    +def _unquote_or_none(s):
    +    """None-safe wrapper around url_unescape to handle unamteched optional
    +    groups correctly.
    +
    +    Note that args are passed as bytes so the handler can decide what
    +    encoding to use.
    +    """
    +    if s is None:
    +        return s
    +    return escape.url_unescape(s, encoding=None, plus=False)
    diff --git a/server/www/packages/packages-common/tornado/websocket.py b/server/www/packages/packages-common/tornado/websocket.py
    new file mode 100644
    index 0000000..11e5266
    --- /dev/null
    +++ b/server/www/packages/packages-common/tornado/websocket.py
    @@ -0,0 +1,1063 @@
    +"""Implementation of the WebSocket protocol.
    +
    +`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
    +communication between the browser and server.
    +
    +WebSockets are supported in the current versions of all major browsers,
    +although older versions that do not support WebSockets are still in use
    +(refer to http://caniuse.com/websockets for details).
    +
    +This module implements the final version of the WebSocket protocol as
    +defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_.  Certain
    +browser versions (notably Safari 5.x) implemented an earlier draft of
    +the protocol (known as "draft 76") and are not compatible with this module.
    +
    +.. versionchanged:: 4.0
    +   Removed support for the draft 76 protocol version.
    +"""
    +
    +from __future__ import absolute_import, division, print_function, with_statement
    +# Author: Jacob Kristhammar, 2010
    +
    +import base64
    +import collections
    +import hashlib
    +import os
    +import struct
    +import tornado.escape
    +import tornado.web
    +import zlib
    +
    +from tornado.concurrent import TracebackFuture
    +from tornado.escape import utf8, native_str, to_unicode
    +from tornado import httpclient, httputil
    +from tornado.ioloop import IOLoop
    +from tornado.iostream import StreamClosedError
    +from tornado.log import gen_log, app_log
    +from tornado import simple_httpclient
    +from tornado.tcpclient import TCPClient
    +from tornado.util import _websocket_mask
    +
    +try:
    +    from urllib.parse import urlparse  # py3
    +except ImportError:
    +    from urlparse import urlparse  # py2
    +
    +try:
    +    xrange  # py2
    +except NameError:
    +    xrange = range  # py3
    +
    +
    +class WebSocketError(Exception):
    +    pass
    +
    +
    +class WebSocketClosedError(WebSocketError):
    +    """Raised by operations on a closed connection.
    +
    +    .. versionadded:: 3.2
    +    """
    +    pass
    +
    +
    +class WebSocketHandler(tornado.web.RequestHandler):
    +    """Subclass this class to create a basic WebSocket handler.
    +
    +    Override `on_message` to handle incoming messages, and use
    +    `write_message` to send messages to the client. You can also
    +    override `open` and `on_close` to handle opened and closed
    +    connections.
    +
    +    See http://dev.w3.org/html5/websockets/ for details on the
    +    JavaScript interface.  The protocol is specified at
    +    http://tools.ietf.org/html/rfc6455.
    +
    +    Here is an example WebSocket handler that echoes all received messages
    +    back to the client:
    +
    +    .. testcode::
    +
    +      class EchoWebSocket(tornado.websocket.WebSocketHandler):
    +          def open(self):
    +              print("WebSocket opened")
    +
    +          def on_message(self, message):
    +              self.write_message(u"You said: " + message)
    +
    +          def on_close(self):
    +              print("WebSocket closed")
    +
    +    .. testoutput::
    +       :hide:
    +
    +    WebSockets are not standard HTTP connections. The "handshake" is
    +    HTTP, but after the handshake, the protocol is
    +    message-based. Consequently, most of the Tornado HTTP facilities
    +    are not available in handlers of this type. The only communication
    +    methods available to you are `write_message()`, `ping()`, and
    +    `close()`. Likewise, your request handler class should implement the
    +    `open()` method rather than ``get()`` or ``post()``.
    +
    +    If you map the handler above to ``/websocket`` in your application, you can
    +    invoke it in JavaScript with::
    +
    +      var ws = new WebSocket("ws://localhost:8888/websocket");
    +      ws.onopen = function() {
    +         ws.send("Hello, world");
    +      };
    +      ws.onmessage = function (evt) {
    +         alert(evt.data);
    +      };
    +
    +    This script pops up an alert box that says "You said: Hello, world".
    +
    +    Web browsers allow any site to open a websocket connection to any other,
    +    instead of using the same-origin policy that governs other network
    +    access from javascript.  This can be surprising and is a potential
    +    security hole, so since Tornado 4.0 `WebSocketHandler` requires
    +    applications that wish to receive cross-origin websockets to opt in
    +    by overriding the `~WebSocketHandler.check_origin` method (see that
    +    method's docs for details).  Failure to do so is the most likely
    +    cause of 403 errors when making a websocket connection.
    +
    +    When using a secure websocket connection (``wss://``) with a self-signed
    +    certificate, the connection from a browser may fail because it wants
    +    to show the "accept this certificate" dialog but has nowhere to show it.
    +    You must first visit a regular HTML page using the same certificate
    +    to accept it before the websocket connection will succeed.
    +    """
    +    def __init__(self, application, request, **kwargs):
    +        tornado.web.RequestHandler.__init__(self, application, request,
    +                                            **kwargs)
    +        self.ws_connection = None
    +        self.close_code = None
    +        self.close_reason = None
    +        self.stream = None
    +        self._on_close_called = False
    +
    +    @tornado.web.asynchronous
    +    def get(self, *args, **kwargs):
    +        self.open_args = args
    +        self.open_kwargs = kwargs
    +
    +        # Upgrade header should be present and should be equal to WebSocket
    +        if self.request.headers.get("Upgrade", "").lower() != 'websocket':
    +            self.set_status(400)
    +            log_msg = "Can \"Upgrade\" only to \"WebSocket\"."
    +            self.finish(log_msg)
    +            gen_log.debug(log_msg)
    +            return
    +
    +        # Connection header should be upgrade.
    +        # Some proxy servers/load balancers
    +        # might mess with it.
    +        headers = self.request.headers
    +        connection = map(lambda s: s.strip().lower(),
    +                         headers.get("Connection", "").split(","))
    +        if 'upgrade' not in connection:
    +            self.set_status(400)
    +            log_msg = "\"Connection\" must be \"Upgrade\"."
    +            self.finish(log_msg)
    +            gen_log.debug(log_msg)
    +            return
    +
    +        # Handle WebSocket Origin naming convention differences
    +        # The difference between version 8 and 13 is that in 8 the
    +        # client sends a "Sec-Websocket-Origin" header and in 13 it's
    +        # simply "Origin".
    +        if "Origin" in self.request.headers:
    +            origin = self.request.headers.get("Origin")
    +        else:
    +            origin = self.request.headers.get("Sec-Websocket-Origin", None)
    +
    +        # If there was an origin header, check to make sure it matches
    +        # according to check_origin. When the origin is None, we assume it
    +        # did not come from a browser and that it can be passed on.
    +        if origin is not None and not self.check_origin(origin):
    +            self.set_status(403)
    +            log_msg = "Cross origin websockets not allowed"
    +            self.finish(log_msg)
    +            gen_log.debug(log_msg)
    +            return
    +
    +        self.stream = self.request.connection.detach()
    +        self.stream.set_close_callback(self.on_connection_close)
    +
    +        self.ws_connection = self.get_websocket_protocol()
    +        if self.ws_connection:
    +            self.ws_connection.accept_connection()
    +        else:
    +            if not self.stream.closed():
    +                self.stream.write(tornado.escape.utf8(
    +                    "HTTP/1.1 426 Upgrade Required\r\n"
    +                    "Sec-WebSocket-Version: 7, 8, 13\r\n\r\n"))
    +                self.stream.close()
    +
    +    def write_message(self, message, binary=False):
    +        """Sends the given message to the client of this Web Socket.
    +
    +        The message may be either a string or a dict (which will be
    +        encoded as json).  If the ``binary`` argument is false, the
    +        message will be sent as utf8; in binary mode any byte string
    +        is allowed.
    +
    +        If the connection is already closed, raises `WebSocketClosedError`.
    +
    +        .. versionchanged:: 3.2
    +           `WebSocketClosedError` was added (previously a closed connection
    +           would raise an `AttributeError`)
    +
    +        .. versionchanged:: 4.3
    +           Returns a `.Future` which can be used for flow control.
    +        """
    +        if self.ws_connection is None:
    +            raise WebSocketClosedError()
    +        if isinstance(message, dict):
    +            message = tornado.escape.json_encode(message)
    +        return self.ws_connection.write_message(message, binary=binary)
    +
    +    def select_subprotocol(self, subprotocols):
    +        """Invoked when a new WebSocket requests specific subprotocols.
    +
    +        ``subprotocols`` is a list of strings identifying the
    +        subprotocols proposed by the client.  This method may be
    +        overridden to return one of those strings to select it, or
    +        ``None`` to not select a subprotocol.  Failure to select a
    +        subprotocol does not automatically abort the connection,
    +        although clients may close the connection if none of their
    +        proposed subprotocols was selected.
    +        """
    +        return None
    +
    +    def get_compression_options(self):
    +        """Override to return compression options for the connection.
    +
    +        If this method returns None (the default), compression will
    +        be disabled.  If it returns a dict (even an empty one), it
    +        will be enabled.  The contents of the dict may be used to
    +        control the memory and CPU usage of the compression,
    +        but no such options are currently implemented.
    +
    +        .. versionadded:: 4.1
    +        """
    +        return None
    +
    +    def open(self, *args, **kwargs):
    +        """Invoked when a new WebSocket is opened.
    +
    +        The arguments to `open` are extracted from the `tornado.web.URLSpec`
    +        regular expression, just like the arguments to
    +        `tornado.web.RequestHandler.get`.
    +        """
    +        pass
    +
    +    def on_message(self, message):
    +        """Handle incoming messages on the WebSocket
    +
    +        This method must be overridden.
    +        """
    +        raise NotImplementedError
    +
    +    def ping(self, data):
    +        """Send ping frame to the remote end."""
    +        if self.ws_connection is None:
    +            raise WebSocketClosedError()
    +        self.ws_connection.write_ping(data)
    +
    +    def on_pong(self, data):
    +        """Invoked when the response to a ping frame is received."""
    +        pass
    +
    +    def on_close(self):
    +        """Invoked when the WebSocket is closed.
    +
    +        If the connection was closed cleanly and a status code or reason
    +        phrase was supplied, these values will be available as the attributes
    +        ``self.close_code`` and ``self.close_reason``.
    +
    +        .. versionchanged:: 4.0
    +
    +           Added ``close_code`` and ``close_reason`` attributes.
    +        """
    +        pass
    +
    +    def close(self, code=None, reason=None):
    +        """Closes this Web Socket.
    +
    +        Once the close handshake is successful the socket will be closed.
    +
    +        ``code`` may be a numeric status code, taken from the values
    +        defined in `RFC 6455 section 7.4.1
    +        <https://tools.ietf.org/html/rfc6455#section-7.4.1>`_.
    +        ``reason`` may be a textual message about why the connection is
    +        closing.  These values are made available to the client, but are
    +        not otherwise interpreted by the websocket protocol.
    +
    +        .. versionchanged:: 4.0
    +
    +           Added the ``code`` and ``reason`` arguments.
    +        """
    +        if self.ws_connection:
    +            self.ws_connection.close(code, reason)
    +            self.ws_connection = None
    +
    +    def check_origin(self, origin):
    +        """Override to enable support for allowing alternate origins.
    +
    +        The ``origin`` argument is the value of the ``Origin`` HTTP
    +        header, the url responsible for initiating this request.  This
    +        method is not called for clients that do not send this header;
    +        such requests are always allowed (because all browsers that
    +        implement WebSockets support this header, and non-browser
    +        clients do not have the same cross-site security concerns).
    +
    +        Should return True to accept the request or False to reject it.
    +        By default, rejects all requests with an origin on a host other
    +        than this one.
    +
    +        This is a security protection against cross site scripting attacks on
    +        browsers, since WebSockets are allowed to bypass the usual same-origin
    +        policies and don't use CORS headers.
    +
    +        To accept all cross-origin traffic (which was the default prior to
    +        Tornado 4.0), simply override this method to always return true::
    +
    +            def check_origin(self, origin):
    +                return True
    +
    +        To allow connections from any subdomain of your site, you might
    +        do something like::
    +
    +            def check_origin(self, origin):
    +                parsed_origin = urllib.parse.urlparse(origin)
    +                return parsed_origin.netloc.endswith(".mydomain.com")
    +
    +        .. versionadded:: 4.0
    +        """
    +        parsed_origin = urlparse(origin)
    +        origin = parsed_origin.netloc
    +        origin = origin.lower()
    +
    +        host = self.request.headers.get("Host")
    +
    +        # Check to see that origin matches host directly, including ports
    +        return origin == host
    +
    +    def set_nodelay(self, value):
    +        """Set the no-delay flag for this stream.
    +
    +        By default, small messages may be delayed and/or combined to minimize
    +        the number of packets sent.  This can sometimes cause 200-500ms delays
    +        due to the interaction between Nagle's algorithm and TCP delayed
    +        ACKs.  To reduce this delay (at the expense of possibly increasing
    +        bandwidth usage), call ``self.set_nodelay(True)`` once the websocket
    +        connection is established.
    +
    +        See `.BaseIOStream.set_nodelay` for additional details.
    +
    +        .. versionadded:: 3.1
    +        """
    +        self.stream.set_nodelay(value)
    +
    +    def on_connection_close(self):
    +        if self.ws_connection:
    +            self.ws_connection.on_connection_close()
    +            self.ws_connection = None
    +        if not self._on_close_called:
    +            self._on_close_called = True
    +            self.on_close()
    +
    +    def send_error(self, *args, **kwargs):
    +        if self.stream is None:
    +            super(WebSocketHandler, self).send_error(*args, **kwargs)
    +        else:
    +            # If we get an uncaught exception during the handshake,
    +            # we have no choice but to abruptly close the connection.
    +            # TODO: for uncaught exceptions after the handshake,
    +            # we can close the connection more gracefully.
    +            self.stream.close()
    +
    +    def get_websocket_protocol(self):
    +        websocket_version = self.request.headers.get("Sec-WebSocket-Version")
    +        if websocket_version in ("7", "8", "13"):
    +            return WebSocketProtocol13(
    +                self, compression_options=self.get_compression_options())
    +
    +
    +def _wrap_method(method):
    +    def _disallow_for_websocket(self, *args, **kwargs):
    +        if self.stream is None:
    +            method(self, *args, **kwargs)
    +        else:
    +            raise RuntimeError("Method not supported for Web Sockets")
    +    return _disallow_for_websocket
    +for method in ["write", "redirect", "set_header", "set_cookie",
    +               "set_status", "flush", "finish"]:
    +    setattr(WebSocketHandler, method,
    +            _wrap_method(getattr(WebSocketHandler, method)))
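    +# The loop above replaces RequestHandler's HTTP output methods: once the
    +# handshake has detached the stream (self.stream is not None), calling
    +# write(), set_header(), finish() etc. would corrupt the framed protocol,
    +# so they raise RuntimeError instead; before detaching they still work,
    +# which lets get() finish plain HTTP error responses.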
    +
    +
    +class WebSocketProtocol(object):
    +    """Base class for WebSocket protocol versions.
    +    """
    +    def __init__(self, handler):
    +        self.handler = handler
    +        self.request = handler.request
    +        self.stream = handler.stream
    +        self.client_terminated = False
    +        self.server_terminated = False
    +
    +    def _run_callback(self, callback, *args, **kwargs):
    +        """Runs the given callback with exception handling.
    +
    +        On error, aborts the websocket connection and returns False.
    +        """
    +        try:
    +            callback(*args, **kwargs)
    +        except Exception:
    +            app_log.error("Uncaught exception in %s",
    +                          self.request.path, exc_info=True)
    +            self._abort()
    +
    +    def on_connection_close(self):
    +        self._abort()
    +
    +    def _abort(self):
    +        """Instantly aborts the WebSocket connection by closing the socket"""
    +        self.client_terminated = True
    +        self.server_terminated = True
    +        self.stream.close()  # forcibly tear down the connection
    +        self.close()  # let the subclass cleanup
    +
    +
    +class _PerMessageDeflateCompressor(object):
    +    def __init__(self, persistent, max_wbits):
    +        if max_wbits is None:
    +            max_wbits = zlib.MAX_WBITS
    +        # There is no symbolic constant for the minimum wbits value.
    +        if not (8 <= max_wbits <= zlib.MAX_WBITS):
    +            raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
    +                             max_wbits, zlib.MAX_WBITS)
    +        self._max_wbits = max_wbits
    +        if persistent:
    +            self._compressor = self._create_compressor()
    +        else:
    +            self._compressor = None
    +
    +    def _create_compressor(self):
    +        return zlib.compressobj(tornado.web.GZipContentEncoding.GZIP_LEVEL,
    +                                zlib.DEFLATED, -self._max_wbits)
    +
    +    def compress(self, data):
    +        compressor = self._compressor or self._create_compressor()
    +        data = (compressor.compress(data) +
    +                compressor.flush(zlib.Z_SYNC_FLUSH))
    +        assert data.endswith(b'\x00\x00\xff\xff')
    +        return data[:-4]
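    +    # Per the permessage-deflate extension (RFC 7692), every compressed
    +    # message ends with an empty deflate block whose final four bytes
    +    # are 00 00 ff ff; compress() strips them here, and the decompressor
    +    # below re-appends them before inflating.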
    +
    +
    +class _PerMessageDeflateDecompressor(object):
    +    def __init__(self, persistent, max_wbits):
    +        if max_wbits is None:
    +            max_wbits = zlib.MAX_WBITS
    +        if not (8 <= max_wbits <= zlib.MAX_WBITS):
    +            raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
    +                             max_wbits, zlib.MAX_WBITS)
    +        self._max_wbits = max_wbits
    +        if persistent:
    +            self._decompressor = self._create_decompressor()
    +        else:
    +            self._decompressor = None
    +
    +    def _create_decompressor(self):
    +        return zlib.decompressobj(-self._max_wbits)
    +
    +    def decompress(self, data):
    +        decompressor = self._decompressor or self._create_decompressor()
    +        return decompressor.decompress(data + b'\x00\x00\xff\xff')
    +
    +
    +class WebSocketProtocol13(WebSocketProtocol):
    +    """Implementation of the WebSocket protocol from RFC 6455.
    +
    +    This class supports versions 7 and 8 of the protocol in addition to the
    +    final version 13.
    +    """
    +    # Bit masks for the first byte of a frame.
    +    FIN = 0x80
    +    RSV1 = 0x40
    +    RSV2 = 0x20
    +    RSV3 = 0x10
    +    RSV_MASK = RSV1 | RSV2 | RSV3
    +    OPCODE_MASK = 0x0f
    +
    +    def __init__(self, handler, mask_outgoing=False,
    +                 compression_options=None):
    +        WebSocketProtocol.__init__(self, handler)
    +        self.mask_outgoing = mask_outgoing
    +        self._final_frame = False
    +        self._frame_opcode = None
    +        self._masked_frame = None
    +        self._frame_mask = None
    +        self._frame_length = None
    +        self._fragmented_message_buffer = None
    +        self._fragmented_message_opcode = None
    +        self._waiting = None
    +        self._compression_options = compression_options
    +        self._decompressor = None
    +        self._compressor = None
    +        self._frame_compressed = None
    +        # The total uncompressed size of all messages received or sent.
    +        # Unicode messages are encoded to utf8.
    +        # Only for testing; subject to change.
    +        self._message_bytes_in = 0
    +        self._message_bytes_out = 0
    +        # The total size of all packets received or sent.  Includes
    +        # the effect of compression, frame overhead, and control frames.
    +        self._wire_bytes_in = 0
    +        self._wire_bytes_out = 0
    +
    +    def accept_connection(self):
    +        try:
    +            self._handle_websocket_headers()
    +            self._accept_connection()
    +        except ValueError:
    +            gen_log.debug("Malformed WebSocket request received",
    +                          exc_info=True)
    +            self._abort()
    +            return
    +
    +    def _handle_websocket_headers(self):
    +        """Verifies all invariant- and required headers
    +
    +        If a header is missing or have an incorrect value ValueError will be
    +        raised
    +        """
    +        fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
    +        if not all(map(lambda f: self.request.headers.get(f), fields)):
    +            raise ValueError("Missing/Invalid WebSocket headers")
    +
    +    @staticmethod
    +    def compute_accept_value(key):
    +        """Computes the value for the Sec-WebSocket-Accept header,
    +        given the value for Sec-WebSocket-Key.
    +        """
    +        sha1 = hashlib.sha1()
    +        sha1.update(utf8(key))
    +        sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
    +        return native_str(base64.b64encode(sha1.digest()))
    +
    +    def _challenge_response(self):
    +        return WebSocketProtocol13.compute_accept_value(
    +            self.request.headers.get("Sec-Websocket-Key"))
    +
    +    def _accept_connection(self):
    +        subprotocol_header = ''
    +        subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '')
    +        subprotocols = [s.strip() for s in subprotocols.split(',')]
    +        if subprotocols:
    +            selected = self.handler.select_subprotocol(subprotocols)
    +            if selected:
    +                assert selected in subprotocols
    +                subprotocol_header = ("Sec-WebSocket-Protocol: %s\r\n"
    +                                      % selected)
    +
    +        extension_header = ''
    +        extensions = self._parse_extensions_header(self.request.headers)
    +        for ext in extensions:
    +            if (ext[0] == 'permessage-deflate' and
    +                    self._compression_options is not None):
    +                # TODO: negotiate parameters if compression_options
    +                # specifies limits.
    +                self._create_compressors('server', ext[1])
    +                if ('client_max_window_bits' in ext[1] and
    +                        ext[1]['client_max_window_bits'] is None):
    +                    # Don't echo an offered client_max_window_bits
    +                    # parameter with no value.
    +                    del ext[1]['client_max_window_bits']
    +                extension_header = ('Sec-WebSocket-Extensions: %s\r\n' %
    +                                    httputil._encode_header(
    +                                        'permessage-deflate', ext[1]))
    +                break
    +
    +        if self.stream.closed():
    +            self._abort()
    +            return
    +        self.stream.write(tornado.escape.utf8(
    +            "HTTP/1.1 101 Switching Protocols\r\n"
    +            "Upgrade: websocket\r\n"
    +            "Connection: Upgrade\r\n"
    +            "Sec-WebSocket-Accept: %s\r\n"
    +            "%s%s"
    +            "\r\n" % (self._challenge_response(),
    +                      subprotocol_header, extension_header)))
    +
    +        self._run_callback(self.handler.open, *self.handler.open_args,
    +                           **self.handler.open_kwargs)
    +        self._receive_frame()
    +
    +    def _parse_extensions_header(self, headers):
    +        extensions = headers.get("Sec-WebSocket-Extensions", '')
    +        if extensions:
    +            return [httputil._parse_header(e.strip())
    +                    for e in extensions.split(',')]
    +        return []
    +
    +    def _process_server_headers(self, key, headers):
    +        """Process the headers sent by the server to this client connection.
    +
    +        'key' is the websocket handshake challenge/response key.
    +        """
    +        assert headers['Upgrade'].lower() == 'websocket'
    +        assert headers['Connection'].lower() == 'upgrade'
    +        accept = self.compute_accept_value(key)
    +        assert headers['Sec-Websocket-Accept'] == accept
    +
    +        extensions = self._parse_extensions_header(headers)
    +        for ext in extensions:
    +            if (ext[0] == 'permessage-deflate' and
    +                    self._compression_options is not None):
    +                self._create_compressors('client', ext[1])
    +            else:
    +                raise ValueError("unsupported extension %r", ext)
    +
    +    def _get_compressor_options(self, side, agreed_parameters):
    +        """Converts a websocket agreed_parameters set to keyword arguments
    +        for our compressor objects.
    +        """
    +        options = dict(
    +            persistent=(side + '_no_context_takeover') not in agreed_parameters)
    +        wbits_header = agreed_parameters.get(side + '_max_window_bits', None)
    +        if wbits_header is None:
    +            options['max_wbits'] = zlib.MAX_WBITS
    +        else:
    +            options['max_wbits'] = int(wbits_header)
    +        return options
    +
    +    def _create_compressors(self, side, agreed_parameters):
    +        # TODO: handle invalid parameters gracefully
    +        allowed_keys = set(['server_no_context_takeover',
    +                            'client_no_context_takeover',
    +                            'server_max_window_bits',
    +                            'client_max_window_bits'])
    +        for key in agreed_parameters:
    +            if key not in allowed_keys:
    +                raise ValueError("unsupported compression parameter %r" % key)
    +        other_side = 'client' if (side == 'server') else 'server'
    +        self._compressor = _PerMessageDeflateCompressor(
    +            **self._get_compressor_options(side, agreed_parameters))
    +        self._decompressor = _PerMessageDeflateDecompressor(
    +            **self._get_compressor_options(other_side, agreed_parameters))
    +
    +    def _write_frame(self, fin, opcode, data, flags=0):
    +        if fin:
    +            finbit = self.FIN
    +        else:
    +            finbit = 0
    +        frame = struct.pack("B", finbit | opcode | flags)
    +        l = len(data)
    +        if self.mask_outgoing:
    +            mask_bit = 0x80
    +        else:
    +            mask_bit = 0
    +        if l < 126:
    +            frame += struct.pack("B", l | mask_bit)
    +        elif l <= 0xFFFF:
    +            frame += struct.pack("!BH", 126 | mask_bit, l)
    +        else:
    +            frame += struct.pack("!BQ", 127 | mask_bit, l)
    +        if self.mask_outgoing:
    +            mask = os.urandom(4)
    +            data = mask + _websocket_mask(mask, data)
    +        frame += data
    +        self._wire_bytes_out += len(frame)
    +        try:
    +            return self.stream.write(frame)
    +        except StreamClosedError:
    +            self._abort()
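    +    # Illustrative wire layout for the header built above: a final,
    +    # unmasked text frame carrying b"Hi" is encoded as
    +    #
    +    #     \x81\x02Hi    (0x81 = FIN | opcode 0x1; 0x02 = length, mask bit clear)
    +    #
    +    # Payloads of 126..0xFFFF bytes use the 126 marker plus a 16-bit
    +    # length; larger ones use the 127 marker plus a 64-bit length.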
    +
    +    def write_message(self, message, binary=False):
    +        """Sends the given message to the client of this Web Socket."""
    +        if binary:
    +            opcode = 0x2
    +        else:
    +            opcode = 0x1
    +        message = tornado.escape.utf8(message)
    +        assert isinstance(message, bytes)
    +        self._message_bytes_out += len(message)
    +        flags = 0
    +        if self._compressor:
    +            message = self._compressor.compress(message)
    +            flags |= self.RSV1
    +        return self._write_frame(True, opcode, message, flags=flags)
    +
    +    def write_ping(self, data):
    +        """Send ping frame."""
    +        assert isinstance(data, bytes)
    +        self._write_frame(True, 0x9, data)
    +
    +    def _receive_frame(self):
    +        try:
    +            self.stream.read_bytes(2, self._on_frame_start)
    +        except StreamClosedError:
    +            self._abort()
    +
    +    def _on_frame_start(self, data):
    +        self._wire_bytes_in += len(data)
    +        header, payloadlen = struct.unpack("BB", data)
    +        self._final_frame = header & self.FIN
    +        reserved_bits = header & self.RSV_MASK
    +        self._frame_opcode = header & self.OPCODE_MASK
    +        self._frame_opcode_is_control = self._frame_opcode & 0x8
    +        if self._decompressor is not None and self._frame_opcode != 0:
    +            self._frame_compressed = bool(reserved_bits & self.RSV1)
    +            reserved_bits &= ~self.RSV1
    +        if reserved_bits:
    +            # client is using as-yet-undefined extensions; abort
    +            self._abort()
    +            return
    +        self._masked_frame = bool(payloadlen & 0x80)
    +        payloadlen = payloadlen & 0x7f
    +        if self._frame_opcode_is_control and payloadlen >= 126:
    +            # control frames must have payload < 126
    +            self._abort()
    +            return
    +        try:
    +            if payloadlen < 126:
    +                self._frame_length = payloadlen
    +                if self._masked_frame:
    +                    self.stream.read_bytes(4, self._on_masking_key)
    +                else:
    +                    self.stream.read_bytes(self._frame_length,
    +                                           self._on_frame_data)
    +            elif payloadlen == 126:
    +                self.stream.read_bytes(2, self._on_frame_length_16)
    +            elif payloadlen == 127:
    +                self.stream.read_bytes(8, self._on_frame_length_64)
    +        except StreamClosedError:
    +            self._abort()
    +
    +    def _on_frame_length_16(self, data):
    +        self._wire_bytes_in += len(data)
    +        self._frame_length = struct.unpack("!H", data)[0]
    +        try:
    +            if self._masked_frame:
    +                self.stream.read_bytes(4, self._on_masking_key)
    +            else:
    +                self.stream.read_bytes(self._frame_length, self._on_frame_data)
    +        except StreamClosedError:
    +            self._abort()
    +
    +    def _on_frame_length_64(self, data):
    +        self._wire_bytes_in += len(data)
    +        self._frame_length = struct.unpack("!Q", data)[0]
    +        try:
    +            if self._masked_frame:
    +                self.stream.read_bytes(4, self._on_masking_key)
    +            else:
    +                self.stream.read_bytes(self._frame_length, self._on_frame_data)
    +        except StreamClosedError:
    +            self._abort()
    +
    +    def _on_masking_key(self, data):
    +        self._wire_bytes_in += len(data)
    +        self._frame_mask = data
    +        try:
    +            self.stream.read_bytes(self._frame_length,
    +                                   self._on_masked_frame_data)
    +        except StreamClosedError:
    +            self._abort()
    +
    +    def _on_masked_frame_data(self, data):
    +        # Don't touch _wire_bytes_in; we'll do it in _on_frame_data.
    +        self._on_frame_data(_websocket_mask(self._frame_mask, data))
    +
    +    def _on_frame_data(self, data):
    +        self._wire_bytes_in += len(data)
    +        if self._frame_opcode_is_control:
    +            # control frames may be interleaved with a series of fragmented
    +            # data frames, so control frames must not interact with
    +            # self._fragmented_*
    +            if not self._final_frame:
    +                # control frames must not be fragmented
    +                self._abort()
    +                return
    +            opcode = self._frame_opcode
    +        elif self._frame_opcode == 0:  # continuation frame
    +            if self._fragmented_message_buffer is None:
    +                # nothing to continue
    +                self._abort()
    +                return
    +            self._fragmented_message_buffer += data
    +            if self._final_frame:
    +                opcode = self._fragmented_message_opcode
    +                data = self._fragmented_message_buffer
    +                self._fragmented_message_buffer = None
    +        else:  # start of new data message
    +            if self._fragmented_message_buffer is not None:
    +                # can't start new message until the old one is finished
    +                self._abort()
    +                return
    +            if self._final_frame:
    +                opcode = self._frame_opcode
    +            else:
    +                self._fragmented_message_opcode = self._frame_opcode
    +                self._fragmented_message_buffer = data
    +
    +        if self._final_frame:
    +            self._handle_message(opcode, data)
    +
    +        if not self.client_terminated:
    +            self._receive_frame()
    +
    +    def _handle_message(self, opcode, data):
    +        if self.client_terminated:
    +            return
    +
    +        if self._frame_compressed:
    +            data = self._decompressor.decompress(data)
    +
    +        if opcode == 0x1:
    +            # UTF-8 data
    +            self._message_bytes_in += len(data)
    +            try:
    +                decoded = data.decode("utf-8")
    +            except UnicodeDecodeError:
    +                self._abort()
    +                return
    +            self._run_callback(self.handler.on_message, decoded)
    +        elif opcode == 0x2:
    +            # Binary data
    +            self._message_bytes_in += len(data)
    +            self._run_callback(self.handler.on_message, data)
    +        elif opcode == 0x8:
    +            # Close
    +            self.client_terminated = True
    +            if len(data) >= 2:
    +                self.handler.close_code = struct.unpack('>H', data[:2])[0]
    +            if len(data) > 2:
    +                self.handler.close_reason = to_unicode(data[2:])
    +            # Echo the received close code, if any (RFC 6455 section 5.5.1).
    +            self.close(self.handler.close_code)
    +        elif opcode == 0x9:
    +            # Ping
    +            self._write_frame(True, 0xA, data)
    +        elif opcode == 0xA:
    +            # Pong
    +            self._run_callback(self.handler.on_pong, data)
    +        else:
    +            self._abort()
    +
    +    def close(self, code=None, reason=None):
    +        """Closes the WebSocket connection."""
    +        if not self.server_terminated:
    +            if not self.stream.closed():
    +                if code is None and reason is not None:
    +                    code = 1000  # "normal closure" status code
    +                if code is None:
    +                    close_data = b''
    +                else:
    +                    close_data = struct.pack('>H', code)
    +                if reason is not None:
    +                    close_data += utf8(reason)
    +                self._write_frame(True, 0x8, close_data)
    +            self.server_terminated = True
    +        if self.client_terminated:
    +            if self._waiting is not None:
    +                self.stream.io_loop.remove_timeout(self._waiting)
    +                self._waiting = None
    +            self.stream.close()
    +        elif self._waiting is None:
    +            # Give the client a few seconds to complete a clean shutdown,
    +            # otherwise just close the connection.
    +            self._waiting = self.stream.io_loop.add_timeout(
    +                self.stream.io_loop.time() + 5, self._abort)
    +
    +
    +class WebSocketClientConnection(simple_httpclient._HTTPConnection):
    +    """WebSocket client connection.
    +
    +    This class should not be instantiated directly; use the
    +    `websocket_connect` function instead.
    +    """
    +    def __init__(self, io_loop, request, on_message_callback=None,
    +                 compression_options=None):
    +        self.compression_options = compression_options
    +        self.connect_future = TracebackFuture()
    +        self.protocol = None
    +        self.read_future = None
    +        self.read_queue = collections.deque()
    +        self.key = base64.b64encode(os.urandom(16))
    +        self._on_message_callback = on_message_callback
    +        self.close_code = self.close_reason = None
    +
    +        scheme, sep, rest = request.url.partition(':')
    +        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
    +        request.url = scheme + sep + rest
    +        request.headers.update({
    +            'Upgrade': 'websocket',
    +            'Connection': 'Upgrade',
    +            'Sec-WebSocket-Key': self.key,
    +            'Sec-WebSocket-Version': '13',
    +        })
    +        if self.compression_options is not None:
    +            # Always offer to let the server set our max_wbits (and even though
    +            # we don't offer it, we will accept a client_no_context_takeover
    +            # from the server).
    +            # TODO: set server parameters for deflate extension
    +            # if requested in self.compression_options.
    +            request.headers['Sec-WebSocket-Extensions'] = (
    +                'permessage-deflate; client_max_window_bits')
    +
    +        self.tcp_client = TCPClient(io_loop=io_loop)
    +        super(WebSocketClientConnection, self).__init__(
    +            io_loop, None, request, lambda: None, self._on_http_response,
    +            104857600, self.tcp_client, 65536, 104857600)
    +
    +    def close(self, code=None, reason=None):
    +        """Closes the websocket connection.
    +
    +        ``code`` and ``reason`` are documented under
    +        `WebSocketHandler.close`.
    +
    +        .. versionadded:: 3.2
    +
    +        .. versionchanged:: 4.0
    +
    +           Added the ``code`` and ``reason`` arguments.
    +        """
    +        if self.protocol is not None:
    +            self.protocol.close(code, reason)
    +            self.protocol = None
    +
    +    def on_connection_close(self):
    +        if not self.connect_future.done():
    +            self.connect_future.set_exception(StreamClosedError())
    +        self.on_message(None)
    +        self.tcp_client.close()
    +        super(WebSocketClientConnection, self).on_connection_close()
    +
    +    def _on_http_response(self, response):
    +        if not self.connect_future.done():
    +            if response.error:
    +                self.connect_future.set_exception(response.error)
    +            else:
    +                self.connect_future.set_exception(WebSocketError(
    +                    "Non-websocket response"))
    +
    +    def headers_received(self, start_line, headers):
    +        if start_line.code != 101:
    +            return super(WebSocketClientConnection, self).headers_received(
    +                start_line, headers)
    +
    +        self.headers = headers
    +        self.protocol = self.get_websocket_protocol()
    +        self.protocol._process_server_headers(self.key, self.headers)
    +        self.protocol._receive_frame()
    +
    +        if self._timeout is not None:
    +            self.io_loop.remove_timeout(self._timeout)
    +            self._timeout = None
    +
    +        self.stream = self.connection.detach()
    +        self.stream.set_close_callback(self.on_connection_close)
    +        # Once we've taken over the connection, clear the final callback
    +        # we set on the http request.  This deactivates the error handling
    +        # in simple_httpclient that would otherwise interfere with our
    +        # ability to see exceptions.
    +        self.final_callback = None
    +
    +        self.connect_future.set_result(self)
    +
    +    def write_message(self, message, binary=False):
    +        """Sends a message to the WebSocket server."""
    +        return self.protocol.write_message(message, binary)
    +
    +    def read_message(self, callback=None):
    +        """Reads a message from the WebSocket server.
    +
    +        If on_message_callback was specified at WebSocket
    +        initialization, this function will never return messages.
    +
    +        Returns a future whose result is the message, or None
    +        if the connection is closed.  If a callback argument
    +        is given it will be called with the future when it is
    +        ready.
    +        """
    +        assert self.read_future is None
    +        future = TracebackFuture()
    +        if self.read_queue:
    +            future.set_result(self.read_queue.popleft())
    +        else:
    +            self.read_future = future
    +        if callback is not None:
    +            self.io_loop.add_future(future, callback)
    +        return future
    +
    +    def on_message(self, message):
    +        if self._on_message_callback:
    +            self._on_message_callback(message)
    +        elif self.read_future is not None:
    +            self.read_future.set_result(message)
    +            self.read_future = None
    +        else:
    +            self.read_queue.append(message)
    +
    +    def on_pong(self, data):
    +        pass
    +
    +    def get_websocket_protocol(self):
    +        return WebSocketProtocol13(self, mask_outgoing=True,
    +                                   compression_options=self.compression_options)
    +
    +
    +def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
    +                      on_message_callback=None, compression_options=None):
    +    """Client-side websocket support.
    +
    +    Takes a url and returns a Future whose result is a
    +    `WebSocketClientConnection`.
    +
    +    ``compression_options`` is interpreted in the same way as the
    +    return value of `.WebSocketHandler.get_compression_options`.
    +
    +    The connection supports two styles of operation. In the coroutine
    +    style, the application typically calls
    +    `~.WebSocketClientConnection.read_message` in a loop::
    +
    +        conn = yield websocket_connect(url)
    +        while True:
    +            msg = yield conn.read_message()
    +            if msg is None: break
    +            # Do something with msg
    +
    +    In the callback style, pass an ``on_message_callback`` to
    +    ``websocket_connect``. In both styles, a message of ``None``
    +    indicates that the connection has been closed.
    +
    +    .. versionchanged:: 3.2
    +       Also accepts ``HTTPRequest`` objects in place of urls.
    +
    +    .. versionchanged:: 4.1
    +       Added ``compression_options`` and ``on_message_callback``.
    +       The ``io_loop`` argument is deprecated.
    +    """
    +    if io_loop is None:
    +        io_loop = IOLoop.current()
    +    if isinstance(url, httpclient.HTTPRequest):
    +        assert connect_timeout is None
    +        request = url
    +        # Copy and convert the headers dict/object (see comments in
    +        # AsyncHTTPClient.fetch)
    +        request.headers = httputil.HTTPHeaders(request.headers)
    +    else:
    +        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    +    request = httpclient._RequestProxy(
    +        request, httpclient.HTTPRequest._DEFAULTS)
    +    conn = WebSocketClientConnection(io_loop, request,
    +                                     on_message_callback=on_message_callback,
    +                                     compression_options=compression_options)
    +    if callback is not None:
    +        io_loop.add_future(conn.connect_future, callback)
    +    return conn.connect_future
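    +# A sketch of the callback style described above (illustrative only;
    +# ``handle_message`` is a hypothetical application function):
    +#
    +#     def handle_message(msg):
    +#         if msg is None:
    +#             print("connection closed")
    +#         else:
    +#             print("got:", msg)
    +#
    +#     websocket_connect("ws://localhost:8888/websocket",
    +#                       on_message_callback=handle_message)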
    diff --git a/server/www/packages/packages-common/tornado/wsgi.py b/server/www/packages/packages-common/tornado/wsgi.py
    new file mode 100644
    index 0000000..59e6c55
    --- /dev/null
    +++ b/server/www/packages/packages-common/tornado/wsgi.py
    @@ -0,0 +1,358 @@
    +#!/usr/bin/env python
    +#
    +# Copyright 2009 Facebook
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License"); you may
    +# not use this file except in compliance with the License. You may obtain
    +# a copy of the License at
    +#
    +#     http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    +# License for the specific language governing permissions and limitations
    +# under the License.
    +
    +"""WSGI support for the Tornado web framework.
    +
    +WSGI is the Python standard for web servers, and allows for interoperability
    +between Tornado and other Python web frameworks and servers.  This module
    +provides WSGI support in two ways:
    +
    +* `WSGIAdapter` converts a `tornado.web.Application` to the WSGI application
    +  interface.  This is useful for running a Tornado app on another
    +  HTTP server, such as Google App Engine.  See the `WSGIAdapter` class
    +  documentation for limitations that apply.
    +* `WSGIContainer` lets you run other WSGI applications and frameworks on the
    +  Tornado HTTP server.  For example, with this class you can mix Django
    +  and Tornado handlers in a single server.
    +"""
    +
    +from __future__ import absolute_import, division, print_function, with_statement
    +
    +import sys
    +from io import BytesIO
    +import tornado
    +
    +from tornado.concurrent import Future
    +from tornado import escape
    +from tornado import httputil
    +from tornado.log import access_log
    +from tornado import web
    +from tornado.escape import native_str
    +from tornado.util import unicode_type
    +
    +
    +try:
    +    import urllib.parse as urllib_parse  # py3
    +except ImportError:
    +    import urllib as urllib_parse
    +
    +# PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
    +# that are smuggled inside objects of type unicode (via the latin1 encoding).
    +# These functions are like those in the tornado.escape module, but defined
    +# here to minimize the temptation to use them in non-wsgi contexts.
    +if str is unicode_type:
    +    def to_wsgi_str(s):
    +        assert isinstance(s, bytes)
    +        return s.decode('latin1')
    +
    +    def from_wsgi_str(s):
    +        assert isinstance(s, str)
    +        return s.encode('latin1')
    +else:
    +    def to_wsgi_str(s):
    +        assert isinstance(s, bytes)
    +        return s
    +
    +    def from_wsgi_str(s):
    +        assert isinstance(s, str)
    +        return s
    +
    +
    +class WSGIApplication(web.Application):
    +    """A WSGI equivalent of `tornado.web.Application`.
    +
    +    .. deprecated:: 4.0
    +
    +       Use a regular `.Application` and wrap it in `WSGIAdapter` instead.
    +    """
    +    def __call__(self, environ, start_response):
    +        return WSGIAdapter(self)(environ, start_response)
    +
    +
    +# WSGI has no facilities for flow control, so just return an already-done
    +# Future when the interface requires it.
    +_dummy_future = Future()
    +_dummy_future.set_result(None)
    +
    +
    +class _WSGIConnection(httputil.HTTPConnection):
    +    def __init__(self, method, start_response, context):
    +        self.method = method
    +        self.start_response = start_response
    +        self.context = context
    +        self._write_buffer = []
    +        self._finished = False
    +        self._expected_content_remaining = None
    +        self._error = None
    +
    +    def set_close_callback(self, callback):
    +        # WSGI has no facility for detecting a closed connection mid-request,
    +        # so we can simply ignore the callback.
    +        pass
    +
    +    def write_headers(self, start_line, headers, chunk=None, callback=None):
    +        if self.method == 'HEAD':
    +            self._expected_content_remaining = 0
    +        elif 'Content-Length' in headers:
    +            self._expected_content_remaining = int(headers['Content-Length'])
    +        else:
    +            self._expected_content_remaining = None
    +        self.start_response(
    +            '%s %s' % (start_line.code, start_line.reason),
    +            [(native_str(k), native_str(v)) for (k, v) in headers.get_all()])
    +        if chunk is not None:
    +            self.write(chunk, callback)
    +        elif callback is not None:
    +            callback()
    +        return _dummy_future
    +
    +    def write(self, chunk, callback=None):
    +        if self._expected_content_remaining is not None:
    +            self._expected_content_remaining -= len(chunk)
    +            if self._expected_content_remaining < 0:
    +                self._error = httputil.HTTPOutputError(
    +                    "Tried to write more data than Content-Length")
    +                raise self._error
    +        self._write_buffer.append(chunk)
    +        if callback is not None:
    +            callback()
    +        return _dummy_future
    +
    +    def finish(self):
    +        if (self._expected_content_remaining is not None and
    +                self._expected_content_remaining != 0):
    +            self._error = httputil.HTTPOutputError(
    +                "Tried to write %d bytes less than Content-Length" %
    +                self._expected_content_remaining)
    +            raise self._error
    +        self._finished = True
    +
    +
    +class _WSGIRequestContext(object):
    +    def __init__(self, remote_ip, protocol):
    +        self.remote_ip = remote_ip
    +        self.protocol = protocol
    +
    +    def __str__(self):
    +        return self.remote_ip
    +
    +
    +class WSGIAdapter(object):
    +    """Converts a `tornado.web.Application` instance into a WSGI application.
    +
    +    Example usage::
    +
    +        import tornado.web
    +        import tornado.wsgi
    +        import wsgiref.simple_server
    +
    +        class MainHandler(tornado.web.RequestHandler):
    +            def get(self):
    +                self.write("Hello, world")
    +
    +        if __name__ == "__main__":
    +            application = tornado.web.Application([
    +                (r"/", MainHandler),
    +            ])
    +            wsgi_app = tornado.wsgi.WSGIAdapter(application)
    +            server = wsgiref.simple_server.make_server('', 8888, wsgi_app)
    +            server.serve_forever()
    +
    +    See the `appengine demo
+    <https://github.com/tornadoweb/tornado/tree/master/demos/appengine>`_
    +    for an example of using this module to run a Tornado app on Google
    +    App Engine.
    +
    +    In WSGI mode asynchronous methods are not supported.  This means
    +    that it is not possible to use `.AsyncHTTPClient`, or the
    +    `tornado.auth` or `tornado.websocket` modules.
    +
    +    .. versionadded:: 4.0
    +    """
    +    def __init__(self, application):
    +        if isinstance(application, WSGIApplication):
    +            self.application = lambda request: web.Application.__call__(
    +                application, request)
    +        else:
    +            self.application = application
    +
    +    def __call__(self, environ, start_response):
    +        method = environ["REQUEST_METHOD"]
    +        uri = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
    +        uri += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
    +        if environ.get("QUERY_STRING"):
    +            uri += "?" + environ["QUERY_STRING"]
    +        headers = httputil.HTTPHeaders()
    +        if environ.get("CONTENT_TYPE"):
    +            headers["Content-Type"] = environ["CONTENT_TYPE"]
    +        if environ.get("CONTENT_LENGTH"):
    +            headers["Content-Length"] = environ["CONTENT_LENGTH"]
    +        for key in environ:
    +            if key.startswith("HTTP_"):
    +                headers[key[5:].replace("_", "-")] = environ[key]
    +        if headers.get("Content-Length"):
    +            body = environ["wsgi.input"].read(
    +                int(headers["Content-Length"]))
    +        else:
    +            body = b""
    +        protocol = environ["wsgi.url_scheme"]
    +        remote_ip = environ.get("REMOTE_ADDR", "")
    +        if environ.get("HTTP_HOST"):
    +            host = environ["HTTP_HOST"]
    +        else:
    +            host = environ["SERVER_NAME"]
    +        connection = _WSGIConnection(method, start_response,
    +                                     _WSGIRequestContext(remote_ip, protocol))
    +        request = httputil.HTTPServerRequest(
    +            method, uri, "HTTP/1.1", headers=headers, body=body,
    +            host=host, connection=connection)
    +        request._parse_body()
    +        self.application(request)
    +        if connection._error:
    +            raise connection._error
    +        if not connection._finished:
    +            raise Exception("request did not finish synchronously")
    +        return connection._write_buffer
    +
    +
    +class WSGIContainer(object):
    +    r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server.
    +
    +    .. warning::
    +
    +       WSGI is a *synchronous* interface, while Tornado's concurrency model
    +       is based on single-threaded asynchronous execution.  This means that
    +       running a WSGI app with Tornado's `WSGIContainer` is *less scalable*
    +       than running the same app in a multi-threaded WSGI server like
    +       ``gunicorn`` or ``uwsgi``.  Use `WSGIContainer` only when there are
    +       benefits to combining Tornado and WSGI in the same process that
    +       outweigh the reduced scalability.
    +
    +    Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to
    +    run it. For example::
    +
    +        def simple_app(environ, start_response):
    +            status = "200 OK"
    +            response_headers = [("Content-type", "text/plain")]
    +            start_response(status, response_headers)
    +            return ["Hello world!\n"]
    +
    +        container = tornado.wsgi.WSGIContainer(simple_app)
    +        http_server = tornado.httpserver.HTTPServer(container)
    +        http_server.listen(8888)
    +        tornado.ioloop.IOLoop.current().start()
    +
    +    This class is intended to let other frameworks (Django, web.py, etc)
    +    run on the Tornado HTTP server and I/O loop.
    +
    +    The `tornado.web.FallbackHandler` class is often useful for mixing
    +    Tornado and WSGI apps in the same server.  See
    +    https://github.com/bdarnell/django-tornado-demo for a complete example.
    +    """
    +    def __init__(self, wsgi_application):
    +        self.wsgi_application = wsgi_application
    +
    +    def __call__(self, request):
    +        data = {}
    +        response = []
    +
    +        def start_response(status, response_headers, exc_info=None):
    +            data["status"] = status
    +            data["headers"] = response_headers
    +            return response.append
    +        app_response = self.wsgi_application(
    +            WSGIContainer.environ(request), start_response)
    +        try:
    +            response.extend(app_response)
    +            body = b"".join(response)
    +        finally:
    +            if hasattr(app_response, "close"):
    +                app_response.close()
    +        if not data:
    +            raise Exception("WSGI app did not call start_response")
    +
    +        status_code, reason = data["status"].split(' ', 1)
    +        status_code = int(status_code)
    +        headers = data["headers"]
    +        header_set = set(k.lower() for (k, v) in headers)
    +        body = escape.utf8(body)
    +        if status_code != 304:
    +            if "content-length" not in header_set:
    +                headers.append(("Content-Length", str(len(body))))
    +            if "content-type" not in header_set:
    +                headers.append(("Content-Type", "text/html; charset=UTF-8"))
    +        if "server" not in header_set:
    +            headers.append(("Server", "TornadoServer/%s" % tornado.version))
    +
    +        start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason)
    +        header_obj = httputil.HTTPHeaders()
    +        for key, value in headers:
    +            header_obj.add(key, value)
    +        request.connection.write_headers(start_line, header_obj, chunk=body)
    +        request.connection.finish()
    +        self._log(status_code, request)
    +
    +    @staticmethod
    +    def environ(request):
    +        """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
    +        """
    +        hostport = request.host.split(":")
    +        if len(hostport) == 2:
    +            host = hostport[0]
    +            port = int(hostport[1])
    +        else:
    +            host = request.host
    +            port = 443 if request.protocol == "https" else 80
    +        environ = {
    +            "REQUEST_METHOD": request.method,
    +            "SCRIPT_NAME": "",
    +            "PATH_INFO": to_wsgi_str(escape.url_unescape(
    +                request.path, encoding=None, plus=False)),
    +            "QUERY_STRING": request.query,
    +            "REMOTE_ADDR": request.remote_ip,
    +            "SERVER_NAME": host,
    +            "SERVER_PORT": str(port),
    +            "SERVER_PROTOCOL": request.version,
    +            "wsgi.version": (1, 0),
    +            "wsgi.url_scheme": request.protocol,
    +            "wsgi.input": BytesIO(escape.utf8(request.body)),
    +            "wsgi.errors": sys.stderr,
    +            "wsgi.multithread": False,
    +            "wsgi.multiprocess": True,
    +            "wsgi.run_once": False,
    +        }
    +        if "Content-Type" in request.headers:
    +            environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
    +        if "Content-Length" in request.headers:
    +            environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
    +        for key, value in request.headers.items():
    +            environ["HTTP_" + key.replace("-", "_").upper()] = value
    +        return environ
    +
    +    def _log(self, status_code, request):
    +        if status_code < 400:
    +            log_method = access_log.info
    +        elif status_code < 500:
    +            log_method = access_log.warning
    +        else:
    +            log_method = access_log.error
    +        request_time = 1000.0 * request.request_time()
    +        summary = request.method + " " + request.uri + " (" + \
    +            request.remote_ip + ")"
    +        log_method("%d %s %.2fms", status_code, summary, request_time)
    +
    +
    +HTTPRequest = httputil.HTTPServerRequest
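
As the WSGIContainer docstring notes, `tornado.web.FallbackHandler` is the usual way to mix native Tornado handlers with a wrapped WSGI app in one server. A minimal sketch (the routes and the trivial WSGI app are illustrative only):

    import tornado.ioloop
    import tornado.web
    import tornado.wsgi

    def wsgi_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"handled by the WSGI app\n"]

    class NativeHandler(tornado.web.RequestHandler):
        def get(self):
            self.write("handled by Tornado")

    container = tornado.wsgi.WSGIContainer(wsgi_app)
    app = tornado.web.Application([
        (r"/native", NativeHandler),
        # anything unmatched falls through to the wrapped WSGI app
        (r".*", tornado.web.FallbackHandler, dict(fallback=container)),
    ])
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
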
    diff --git a/server/www/packages/packages-common/wheezy/captcha/__init__.py b/server/www/packages/packages-common/wheezy/captcha/__init__.py
    new file mode 100644
    index 0000000..033e12e
    --- /dev/null
    +++ b/server/www/packages/packages-common/wheezy/captcha/__init__.py
    @@ -0,0 +1,5 @@
    +
    +"""
    +"""
    +
    +__version__ = '0.1.44'
    diff --git a/server/www/packages/packages-common/wheezy/captcha/bezier.py b/server/www/packages/packages-common/wheezy/captcha/bezier.py
    new file mode 100644
    index 0000000..48fd137
    --- /dev/null
    +++ b/server/www/packages/packages-common/wheezy/captcha/bezier.py
    @@ -0,0 +1,43 @@
    +
    +"""
    +"""
    +
    +
    +tsequence = tuple([t / 20.0 for t in range(21)])
    +beziers = {}
    +
    +
    +def pascal_row(n):
    +    """ Returns n-th row of Pascal's triangle
    +    """
    +    result = [1]
    +    x, numerator = 1, n
    +    for denominator in range(1, n // 2 + 1):
    +        x *= numerator
    +        x /= denominator
    +        result.append(x)
    +        numerator -= 1
    +    if n & 1 == 0:
    +        result.extend(reversed(result[:-1]))
    +    else:
    +        result.extend(reversed(result))
    +    return result
    +
    +
    +def make_bezier(n):
    +    """ Bezier curves:
    +        http://en.wikipedia.org/wiki/B%C3%A9zier_curve#Generalization
    +    """
    +    try:
    +        return beziers[n]
    +    except KeyError:
    +        combinations = pascal_row(n - 1)
    +        result = []
    +        for t in tsequence:
    +            tpowers = (t ** i for i in range(n))
    +            upowers = ((1 - t) ** i for i in range(n - 1, -1, -1))
    +            coefs = [c * a * b for c, a, b in zip(combinations,
    +                                                  tpowers, upowers)]
    +            result.append(coefs)
    +        beziers[n] = result
    +        return result
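
A quick sketch of what these helpers produce (values come out as floats on Python 3 because of the true division in pascal_row):

    from wheezy.captcha.bezier import make_bezier, pascal_row

    print(pascal_row(4))        # [1, 4.0, 6.0, 4.0, 1]

    coefs = make_bezier(3)      # Bernstein coefficients of a quadratic curve
    print(len(coefs), len(coefs[0]))          # 21 3 -- one row per sample of t
    assert abs(sum(coefs[10]) - 1.0) < 1e-9   # the basis sums to 1 at every t
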
    diff --git a/server/www/packages/packages-common/wheezy/captcha/comp.py b/server/www/packages/packages-common/wheezy/captcha/comp.py
    new file mode 100644
    index 0000000..28ccd97
    --- /dev/null
    +++ b/server/www/packages/packages-common/wheezy/captcha/comp.py
    @@ -0,0 +1,26 @@
    +""" ``comp`` module.
    +"""
    +
    +import sys
    +
    +
    +PY3 = sys.version_info[0] >= 3
    +
    +
    +if PY3:  # pragma: nocover
    +    xrange = range
    +else:  # pragma: nocover
    +    xrange = xrange
    +
    +try:  # pragma: nocover
    +    from PIL import Image
    +    from PIL import ImageFilter
    +    from PIL.ImageColor import getrgb
    +    from PIL.ImageDraw import Draw
    +    from PIL.ImageFont import truetype
    +except ImportError:  # pragma: nocover
    +    import Image  # noqa
    +    import ImageFilter  # noqa
    +    from ImageColor import getrgb  # noqa
    +    from ImageDraw import Draw  # noqa
    +    from ImageFont import truetype  # noqa
    diff --git a/server/www/packages/packages-common/wheezy/captcha/http.py b/server/www/packages/packages-common/wheezy/captcha/http.py
    new file mode 100644
    index 0000000..ab56937
    --- /dev/null
    +++ b/server/www/packages/packages-common/wheezy/captcha/http.py
    @@ -0,0 +1,126 @@
    +
    +"""
    +"""
    +
    +import random
    +
    +from datetime import timedelta
    +from time import time
    +from uuid import uuid4
    +
    +from wheezy.core.collections import last_item_adapter
    +from wheezy.core.uuid import shrink_uuid
    +from wheezy.http import CacheProfile
    +from wheezy.http import HTTPResponse
    +from wheezy.http import accept_method
    +from wheezy.http import bad_request
    +from wheezy.http import response_cache
    +
    +
    +class FileAdapter(object):
    +
    +    def __init__(self, response):
    +        self.write = response.write_bytes
    +
    +
    +class CaptchaContext(object):
    +
    +    def __init__(self, image,
    +                 cache, prefix='captcha:', namespace=None,
    +                 timeout=5 * 60, profile=None,
    +                 chars='ABCDEFGHJKLMNPQRSTUVWXYZ23456789',
    +                 max_chars=4, wait_timeout=2,
    +                 challenge_key='c', turing_key='turing_number',
    +                 enabled=True):
    +        self.image = image
    +        self.cache = cache
    +        self.prefix = prefix
    +        self.namespace = namespace
    +        self.timeout = timeout
    +        self.chars = chars
    +        self.wait_timeout = wait_timeout
    +        self.max_chars = max_chars
    +        self.challenge_key = challenge_key
    +        self.turing_key = turing_key
    +        self.enabled = enabled
    +        if profile:
    +            self.profile = profile
    +        else:
    +            self.profile = CacheProfile(
    +                'server',
    +                vary_query=[challenge_key],
    +                duration=timedelta(seconds=wait_timeout),
    +                no_store=True,
    +                namespace=namespace)
    +
    +    def create_handler(self, content_type='image/jpg', format='JPEG',
    +                       **options):
    +        @accept_method('GET')
    +        @response_cache(self.profile)
    +        def handler(request):
    +            if self.challenge_key not in request.query:
    +                return bad_request()
    +            challenge_code = last_item_adapter(
    +                request.query)[self.challenge_key]
    +            turing_number = ''.join(random.sample(self.chars, self.max_chars))
    +            if not self.cache.set(self.prefix + challenge_code,
    +                                  (int(time()), turing_number),
    +                                  self.timeout, self.namespace):
    +                return bad_request()
    +            response = HTTPResponse(content_type)
    +            self.image(turing_number).save(
    +                FileAdapter(response), format, **options)
    +            return response
    +        return handler
    +
    +    def get_challenge_code(self, request):
    +        if self.challenge_key not in request.query:
    +            return shrink_uuid(uuid4())
    +        else:
    +            return request.query[self.challenge_key][0]
    +
    +    def validate(self, request, errors, gettext):
    +        if not self.enabled:
    +            return True
    +        if self.challenge_key not in request.form:
    +            self.append_error(errors, gettext(
    +                'The challenge code is not available.'))
    +            return False
    +        if self.turing_key not in request.form:
    +            self.append_error(errors, gettext(
    +                'The turing number is not available.'))
    +            return False
    +        form = last_item_adapter(request.form)
    +        challenge_code = form[self.challenge_key]
    +        if len(challenge_code) != 22:
    +            self.append_error(errors, gettext(
    +                'The challenge code is invalid.'))
    +            return False
    +        entered_turing_number = form[self.turing_key]
    +        if len(entered_turing_number) != self.max_chars:
    +            self.append_error(errors, gettext(
    +                'The turing number is invalid.'))
    +            return False
    +
    +        key = self.prefix + challenge_code
    +        data = self.cache.get(key, self.namespace)
    +        if not data:
    +            self.append_error(errors, gettext(
    +                'The code you typed has expired after %d seconds.')
    +                % self.timeout)
    +            return False
    +        self.cache.delete(key, 0, self.namespace)
    +        issued, turing_number = data
    +        if issued + self.wait_timeout > int(time()):
    +            self.append_error(errors, gettext(
    +                'The code was typed too quickly. Wait at least %d seconds.')
    +                % self.wait_timeout)
    +            return False
    +        if turing_number != entered_turing_number.upper():
    +            self.append_error(
    +                errors, gettext('The code you typed has no match.'))
    +            return False
    +        return True
    +
    +    def append_error(self, errors, message):
    +        errors.setdefault(self.turing_key, []).append(message)
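
A hedged sketch of the validate() flow above, assuming wheezy.core and wheezy.http are importable; DictCache and StubRequest are illustrative stand-ins that implement only the calls CaptchaContext actually makes:

    from time import time
    from wheezy.captcha.http import CaptchaContext

    class DictCache(object):
        def __init__(self):
            self.data = {}
        def set(self, key, value, timeout, namespace):
            self.data[key] = value
            return True
        def get(self, key, namespace):
            return self.data.get(key)
        def delete(self, key, seconds, namespace):
            self.data.pop(key, None)

    class StubRequest(object):
        def __init__(self, form):
            self.form = form

    ctx = CaptchaContext(image=None, cache=DictCache())
    challenge = 'x' * 22   # challenge codes are 22-char shrunken UUIDs
    # pretend the image handler issued 'AB34' ten seconds ago
    ctx.cache.set(ctx.prefix + challenge, (int(time()) - 10, 'AB34'),
                  ctx.timeout, ctx.namespace)

    errors = {}
    request = StubRequest({ctx.challenge_key: [challenge],
                           ctx.turing_key: ['ab34']})
    print(ctx.validate(request, errors, lambda s: s))  # True (case-folded)
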
    diff --git a/server/www/packages/packages-common/wheezy/captcha/image.py b/server/www/packages/packages-common/wheezy/captcha/image.py
    new file mode 100644
    index 0000000..626dd72
    --- /dev/null
    +++ b/server/www/packages/packages-common/wheezy/captcha/image.py
    @@ -0,0 +1,186 @@
    +"""
    +"""
    +
    +import random
    +
    +from wheezy.captcha.comp import Draw
    +from wheezy.captcha.comp import Image
    +from wheezy.captcha.comp import ImageFilter
    +from wheezy.captcha.comp import getrgb
    +from wheezy.captcha.comp import truetype
    +from wheezy.captcha.comp import xrange
    +
    +
    +def captcha(drawings, width=200, height=75):
    +    def render(text):
    +        image = Image.new('RGB', (width, height), (255, 255, 255))
    +        for drawing in drawings:
    +            image = drawing(image, text)
    +            assert image
    +        return image
    +    return render
    +
    +
    +# region: captcha drawers
    +
    +def background(color='#EEEECC'):
    +    color = getrgb(color)
    +
    +    def drawer(image, text):
    +        Draw(image).rectangle([(0, 0), image.size], fill=color)
    +        return image
    +    return drawer
    +
    +
    +def smooth():
    +    def drawer(image, text):
    +        return image.filter(ImageFilter.SMOOTH)
    +    return drawer
    +
    +
    +def curve(color='#5C87B2', width=4, number=6):
    +    from wheezy.captcha.bezier import make_bezier
    +    if not callable(color):
    +        c = getrgb(color)
    +
    +        def color():
    +            return c
    +
    +    def drawer(image, text):
    +        dx, height = image.size
    +        dx = dx / number
    +        path = [(dx * i, random.randint(0, height))
    +                for i in range(1, number)]
    +        bcoefs = make_bezier(number - 1)
    +        points = []
    +        for coefs in bcoefs:
    +            points.append(tuple(sum([coef * p for coef, p in zip(coefs, ps)])
    +                          for ps in zip(*path)))
    +        draw = Draw(image)
    +        draw.line(points, fill=color(), width=width)
    +        return image
    +    return drawer
    +
    +
    +def noise(number=50, color='#EEEECC', level=2):
    +    if not callable(color):
    +        c = getrgb(color)
    +
    +        def color():
    +            return c
    +
    +    def drawer(image, text):
    +        width, height = image.size
    +        dx = width / 10
    +        width = width - dx
    +        dy = height / 10
    +        height = height - dy
    +        draw = Draw(image)
    +        for i in xrange(number):
    +            x = int(random.uniform(dx, width))
    +            y = int(random.uniform(dy, height))
    +            draw.line(((x, y), (x + level, y)), fill=color(), width=level)
    +        return image
    +    return drawer
    +
    +
    +def text(fonts, font_sizes=None, drawings=None, color='#5C87B2',
    +         squeeze_factor=0.8):
    +    fonts = tuple([truetype(name, size)
    +                   for name in fonts
    +                   for size in font_sizes or (65, 70, 75)])
    +    if not callable(color):
    +        c = getrgb(color)
    +
    +        def color():
    +            return c
    +
    +    def drawer(image, text):
    +        draw = Draw(image)
    +        char_images = []
    +        for c in text:
    +            font = random.choice(fonts)
    +            c_width, c_height = draw.textsize(c, font=font)
    +            char_image = Image.new('RGB', (c_width, c_height), (0, 0, 0))
    +            char_draw = Draw(char_image)
    +            char_draw.text((0, 0), c, font=font, fill=color())
    +            char_image = char_image.crop(char_image.getbbox())
    +            for drawing in drawings:
    +                char_image = drawing(char_image)
    +            char_images.append(char_image)
    +        width, height = image.size
    +        offset = int((width - sum(int(i.size[0] * squeeze_factor)
    +                                  for i in char_images[:-1]) -
    +                      char_images[-1].size[0]) / 2)
    +        for char_image in char_images:
    +            c_width, c_height = char_image.size
    +            mask = char_image.convert('L').point(lambda i: i * 1.97)
    +            image.paste(char_image,
    +                        (offset, int((height - c_height) / 2)),
    +                        mask)
    +            offset += int(c_width * squeeze_factor)
    +        return image
    +    return drawer
    +
    +
    +# region: text drawers
    +
    +def warp(dx_factor=0.27, dy_factor=0.21):
    +    def drawer(image):
    +        width, height = image.size
    +        dx = width * dx_factor
    +        dy = height * dy_factor
    +        x1 = int(random.uniform(-dx, dx))
    +        y1 = int(random.uniform(-dy, dy))
    +        x2 = int(random.uniform(-dx, dx))
    +        y2 = int(random.uniform(-dy, dy))
    +        image2 = Image.new('RGB',
    +                           (width + abs(x1) + abs(x2),
    +                            height + abs(y1) + abs(y2)))
    +        image2.paste(image, (abs(x1), abs(y1)))
    +        width2, height2 = image2.size
    +        return image2.transform(
    +            (width, height), Image.QUAD,
    +            (x1, y1,
    +             -x1, height2 - y2,
    +             width2 + x2, height2 + y2,
    +             width2 - x2, -y1))
    +    return drawer
    +
    +
    +def offset(dx_factor=0.1, dy_factor=0.2):
    +    def drawer(image):
    +        width, height = image.size
    +        dx = int(random.random() * width * dx_factor)
    +        dy = int(random.random() * height * dy_factor)
    +        image2 = Image.new('RGB', (width + dx, height + dy))
    +        image2.paste(image, (dx, dy))
    +        return image2
    +    return drawer
    +
    +
    +def rotate(angle=25):
    +    def drawer(image):
    +        return image.rotate(
    +            random.uniform(-angle, angle), Image.BILINEAR, expand=1)
    +    return drawer
    +
    +
    +if __name__ == '__main__':
    +    import string
    +    captcha_image = captcha(drawings=[
    +        background(),
    +        text(fonts=[
    +            'fonts/CourierNew-Bold.ttf',
    +            'fonts/LiberationMono-Bold.ttf'],
    +            drawings=[
    +                warp(),
    +                rotate(),
    +                offset()
    +            ]),
    +        curve(),
    +        noise(),
    +        smooth()
    +    ])
+    image = captcha_image(random.sample(string.ascii_uppercase + string.digits, 4))
    +    image.save('sample.jpg', 'JPEG', quality=75)
    diff --git a/server/www/packages/packages-common/wheezy/captcha/mixin.py b/server/www/packages/packages-common/wheezy/captcha/mixin.py
    new file mode 100644
    index 0000000..1c95d42
    --- /dev/null
    +++ b/server/www/packages/packages-common/wheezy/captcha/mixin.py
    @@ -0,0 +1,24 @@
    +
    +"""
    +"""
    +
    +from wheezy.core.descriptors import attribute
    +
    +
    +class CaptchaMixin(object):
    +
    +    @attribute
    +    def challenge_code(self):
    +        return self.captcha_context.get_challenge_code(self.request)
    +
    +    def validate_captcha(self):
    +        return self.captcha_context.validate(self.request,
    +                                             self.errors, self._)
    +
    +    def captcha_widget(self, path):
    +        ctx = self.captcha_context
+        return ('<img src="%s?%s=%s" title="%s" />'
+                '<input name="%s" type="hidden" value="%s" />' %
    +                (path, ctx.challenge_key, self.challenge_code,
    +                 self._('If you cannot read, click to generate a new one.'),
    +                 ctx.challenge_key, self.challenge_code))
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libfreetype-726730e0.so.6.12.3 b/server/www/packages/packages-linux/x64/PIL/.libs/libfreetype-726730e0.so.6.12.3
    new file mode 100644
    index 0000000..a39dcac
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libfreetype-726730e0.so.6.12.3 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libjpeg-bcb94a84.so.9.2.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libjpeg-bcb94a84.so.9.2.0
    new file mode 100644
    index 0000000..0b0c97f
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libjpeg-bcb94a84.so.9.2.0 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/liblcms2-1391ca29.so.2.0.7 b/server/www/packages/packages-linux/x64/PIL/.libs/liblcms2-1391ca29.so.2.0.7
    new file mode 100644
    index 0000000..0943c4b
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/liblcms2-1391ca29.so.2.0.7 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/liblzma-f444c404.so.5.2.2 b/server/www/packages/packages-linux/x64/PIL/.libs/liblzma-f444c404.so.5.2.2
    new file mode 100644
    index 0000000..705e9cd
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/liblzma-f444c404.so.5.2.2 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libopenjp2-59185378.so.2.1.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libopenjp2-59185378.so.2.1.0
    new file mode 100644
    index 0000000..b6094c1
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libopenjp2-59185378.so.2.1.0 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libpng16-3e9b18e2.so.16.21.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libpng16-3e9b18e2.so.16.21.0
    new file mode 100644
    index 0000000..1e3c77c
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libpng16-3e9b18e2.so.16.21.0 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libtiff-a8dfd9d2.so.5.2.4 b/server/www/packages/packages-linux/x64/PIL/.libs/libtiff-a8dfd9d2.so.5.2.4
    new file mode 100644
    index 0000000..dfa17f3
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libtiff-a8dfd9d2.so.5.2.4 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libwebp-72f31b92.so.6.0.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libwebp-72f31b92.so.6.0.0
    new file mode 100644
    index 0000000..7d9130d
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libwebp-72f31b92.so.6.0.0 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libwebpdemux-1ad3d184.so.2.0.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libwebpdemux-1ad3d184.so.2.0.0
    new file mode 100644
    index 0000000..0313509
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libwebpdemux-1ad3d184.so.2.0.0 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libwebpmux-1736b6b3.so.2.0.0 b/server/www/packages/packages-linux/x64/PIL/.libs/libwebpmux-1736b6b3.so.2.0.0
    new file mode 100644
    index 0000000..0d11c21
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libwebpmux-1736b6b3.so.2.0.0 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/.libs/libz-a147dcb0.so.1.2.3 b/server/www/packages/packages-linux/x64/PIL/.libs/libz-a147dcb0.so.1.2.3
    new file mode 100644
    index 0000000..82951d4
    Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/.libs/libz-a147dcb0.so.1.2.3 differ
    diff --git a/server/www/packages/packages-linux/x64/PIL/BdfFontFile.py b/server/www/packages/packages-linux/x64/PIL/BdfFontFile.py
    new file mode 100644
    index 0000000..e6cc22f
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/BdfFontFile.py
    @@ -0,0 +1,132 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# bitmap distribution font (bdf) file parser
    +#
    +# history:
    +# 1996-05-16 fl   created (as bdf2pil)
    +# 1997-08-25 fl   converted to FontFile driver
    +# 2001-05-25 fl   removed bogus __init__ call
    +# 2002-11-20 fl   robustification (from Kevin Cazabon, Dmitry Vasiliev)
    +# 2003-04-22 fl   more robustification (from Graham Dumpleton)
    +#
    +# Copyright (c) 1997-2003 by Secret Labs AB.
    +# Copyright (c) 1997-2003 by Fredrik Lundh.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image
    +from PIL import FontFile
    +
    +
    +# --------------------------------------------------------------------
    +# parse X Bitmap Distribution Format (BDF)
    +# --------------------------------------------------------------------
    +
    +bdf_slant = {
    +    "R": "Roman",
    +    "I": "Italic",
    +    "O": "Oblique",
    +    "RI": "Reverse Italic",
    +    "RO": "Reverse Oblique",
    +    "OT": "Other"
    +}
    +
    +bdf_spacing = {
    +    "P": "Proportional",
    +    "M": "Monospaced",
    +    "C": "Cell"
    +}
    +
    +
    +def bdf_char(f):
    +    # skip to STARTCHAR
    +    while True:
    +        s = f.readline()
    +        if not s:
    +            return None
    +        if s[:9] == b"STARTCHAR":
    +            break
    +    id = s[9:].strip().decode('ascii')
    +
    +    # load symbol properties
    +    props = {}
    +    while True:
    +        s = f.readline()
    +        if not s or s[:6] == b"BITMAP":
    +            break
    +        i = s.find(b" ")
    +        props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii')
    +
    +    # load bitmap
    +    bitmap = []
    +    while True:
    +        s = f.readline()
    +        if not s or s[:7] == b"ENDCHAR":
    +            break
    +        bitmap.append(s[:-1])
    +    bitmap = b"".join(bitmap)
    +
    +    [x, y, l, d] = [int(p) for p in props["BBX"].split()]
    +    [dx, dy] = [int(p) for p in props["DWIDTH"].split()]
    +
    +    bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y)
    +
    +    try:
    +        im = Image.frombytes("1", (x, y), bitmap, "hex", "1")
    +    except ValueError:
    +        # deal with zero-width characters
    +        im = Image.new("1", (x, y))
    +
    +    return id, int(props["ENCODING"]), bbox, im
    +
    +
    +##
    +# Font file plugin for the X11 BDF format.
    +
    +class BdfFontFile(FontFile.FontFile):
    +
    +    def __init__(self, fp):
    +
    +        FontFile.FontFile.__init__(self)
    +
    +        s = fp.readline()
    +        if s[:13] != b"STARTFONT 2.1":
    +            raise SyntaxError("not a valid BDF file")
    +
    +        props = {}
    +        comments = []
    +
    +        while True:
    +            s = fp.readline()
    +            if not s or s[:13] == b"ENDPROPERTIES":
    +                break
    +            i = s.find(b" ")
    +            props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii')
    +            if s[:i] in [b"COMMENT", b"COPYRIGHT"]:
    +                if s.find(b"LogicalFontDescription") < 0:
    +                    comments.append(s[i+1:-1].decode('ascii'))
    +
    +        # font = props["FONT"].split("-")
    +
    +        # font[4] = bdf_slant[font[4].upper()]
    +        # font[11] = bdf_spacing[font[11].upper()]
    +
    +        # ascent = int(props["FONT_ASCENT"])
    +        # descent = int(props["FONT_DESCENT"])
    +
    +        # fontname = ";".join(font[1:])
    +
    +        # print "#", fontname
    +        # for i in comments:
    +        #       print "#", i
    +
    +        while True:
    +            c = bdf_char(fp)
    +            if not c:
    +                break
    +            id, ch, (xy, dst, src), im = c
    +            if 0 <= ch < len(self.glyph):
    +                self.glyph[ch] = xy, dst, src, im
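
The parser above is typically driven through FontFile.save(), which compiles the collected glyphs into PIL's .pil/.pbm font format. A minimal sketch ("courR12.bdf" is a placeholder path):

    from PIL import BdfFontFile, ImageFont

    with open("courR12.bdf", "rb") as fp:
        font_file = BdfFontFile.BdfFontFile(fp)
    font_file.save("courR12")             # writes courR12.pil + courR12.pbm

    font = ImageFont.load("courR12.pil")  # usable with ImageDraw.Draw.text
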
    diff --git a/server/www/packages/packages-linux/x64/PIL/BmpImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/BmpImagePlugin.py
    new file mode 100644
    index 0000000..eccd299
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/BmpImagePlugin.py
    @@ -0,0 +1,294 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# BMP file handler
    +#
    +# Windows (and OS/2) native bitmap storage format.
    +#
    +# history:
    +# 1995-09-01 fl   Created
    +# 1996-04-30 fl   Added save
    +# 1997-08-27 fl   Fixed save of 1-bit images
    +# 1998-03-06 fl   Load P images as L where possible
    +# 1998-07-03 fl   Load P images as 1 where possible
    +# 1998-12-29 fl   Handle small palettes
    +# 2002-12-30 fl   Fixed load of 1-bit palette images
    +# 2003-04-21 fl   Fixed load of 1-bit monochrome images
    +# 2003-04-23 fl   Added limited support for BI_BITFIELDS compression
    +#
    +# Copyright (c) 1997-2003 by Secret Labs AB
    +# Copyright (c) 1995-2003 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +
    +from PIL import Image, ImageFile, ImagePalette, _binary
    +import math
    +
    +__version__ = "0.7"
    +
    +i8 = _binary.i8
    +i16 = _binary.i16le
    +i32 = _binary.i32le
    +o8 = _binary.o8
    +o16 = _binary.o16le
    +o32 = _binary.o32le
    +
    +#
    +# --------------------------------------------------------------------
    +# Read BMP file
    +
    +BIT2MODE = {
    +    # bits => mode, rawmode
    +    1: ("P", "P;1"),
    +    4: ("P", "P;4"),
    +    8: ("P", "P"),
    +    16: ("RGB", "BGR;15"),
    +    24: ("RGB", "BGR"),
    +    32: ("RGB", "BGRX"),
    +}
    +
    +
    +def _accept(prefix):
    +    return prefix[:2] == b"BM"
    +
    +
    +# ==============================================================================
    +# Image plugin for the Windows BMP format.
    +# ==============================================================================
    +class BmpImageFile(ImageFile.ImageFile):
    +    """ Image plugin for the Windows Bitmap format (BMP) """
    +
    +    # -------------------------------------------------------------- Description
    +    format_description = "Windows Bitmap"
    +    format = "BMP"
    +    # --------------------------------------------------- BMP Compression values
    +    COMPRESSIONS = {'RAW': 0, 'RLE8': 1, 'RLE4': 2, 'BITFIELDS': 3, 'JPEG': 4, 'PNG': 5}
    +    RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5
    +
    +    def _bitmap(self, header=0, offset=0):
    +        """ Read relevant info about the BMP """
    +        read, seek = self.fp.read, self.fp.seek
    +        if header:
    +            seek(header)
    +        file_info = dict()
    +        file_info['header_size'] = i32(read(4))  # read bmp header size @offset 14 (this is part of the header size)
    +        file_info['direction'] = -1
    +        # --------------------- If requested, read header at a specific position
    +        header_data = ImageFile._safe_read(self.fp, file_info['header_size'] - 4)  # read the rest of the bmp header, without its size
    +        # --------------------------------------------------- IBM OS/2 Bitmap v1
    +        # ------ This format has different offsets because of width/height types
    +        if file_info['header_size'] == 12:
    +            file_info['width'] = i16(header_data[0:2])
    +            file_info['height'] = i16(header_data[2:4])
    +            file_info['planes'] = i16(header_data[4:6])
    +            file_info['bits'] = i16(header_data[6:8])
    +            file_info['compression'] = self.RAW
    +            file_info['palette_padding'] = 3
    +        # ---------------------------------------------- Windows Bitmap v2 to v5
    +        elif file_info['header_size'] in (40, 64, 108, 124):  # v3, OS/2 v2, v4, v5
    +            if file_info['header_size'] >= 40:  # v3 and OS/2
    +                file_info['y_flip'] = i8(header_data[7]) == 0xff
    +                file_info['direction'] = 1 if file_info['y_flip'] else -1
    +                file_info['width'] = i32(header_data[0:4])
    +                file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8])
    +                file_info['planes'] = i16(header_data[8:10])
    +                file_info['bits'] = i16(header_data[10:12])
    +                file_info['compression'] = i32(header_data[12:16])
    +                file_info['data_size'] = i32(header_data[16:20])  # byte size of pixel data
    +                file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28]))
    +                file_info['colors'] = i32(header_data[28:32])
    +                file_info['palette_padding'] = 4
    +                self.info["dpi"] = tuple(
    +                    map(lambda x: int(math.ceil(x / 39.3701)),
    +                        file_info['pixels_per_meter']))
    +                if file_info['compression'] == self.BITFIELDS:
    +                    if len(header_data) >= 52:
    +                        for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']):
    +                            file_info[mask] = i32(header_data[36+idx*4:40+idx*4])
    +                    else:
    +                        # 40 byte headers only have the three components in the bitfields masks,
    +                        # ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
    +                        # See also https://github.com/python-pillow/Pillow/issues/1293
    +                        # There is a 4th component in the RGBQuad, in the alpha location, but it
    +                        # is listed as a reserved component, and it is not generally an alpha channel
    +                        file_info['a_mask'] = 0x0
    +                        for mask in ['r_mask', 'g_mask', 'b_mask']:
    +                            file_info[mask] = i32(read(4))
    +                    file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'])
    +                    file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask'])
    +        else:
    +            raise IOError("Unsupported BMP header type (%d)" % file_info['header_size'])
    +        # ------------------ Special case : header is reported 40, which
    +        # ---------------------- is shorter than real size for bpp >= 16
    +        self.size = file_info['width'], file_info['height']
    +        # -------- If color count was not found in the header, compute from bits
    +        file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits'])
    +        # -------------------------------- Check abnormal values for DOS attacks
    +        if file_info['width'] * file_info['height'] > 2**31:
    +            raise IOError("Unsupported BMP Size: (%dx%d)" % self.size)
    +        # ----------------------- Check bit depth for unusual unsupported values
    +        self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None))
    +        if self.mode is None:
    +            raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits'])
    +        # ----------------- Process BMP with Bitfields compression (not palette)
    +        if file_info['compression'] == self.BITFIELDS:
    +            SUPPORTED = {
    +                32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0)],
    +                24: [(0xff0000, 0xff00, 0xff)],
    +                16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)]
    +            }
    +            MASK_MODES = {
    +                (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX",
    +                (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA",
    +                (32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
    +                (24, (0xff0000, 0xff00, 0xff)): "BGR",
    +                (16, (0xf800, 0x7e0, 0x1f)): "BGR;16",
    +                (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15"
    +            }
    +            if file_info['bits'] in SUPPORTED:
    +                if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]:
    +                    raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])]
    +                    self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode
    +                elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]:
    +                    raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])]
    +                else:
    +                    raise IOError("Unsupported BMP bitfields layout")
    +            else:
    +                raise IOError("Unsupported BMP bitfields layout")
    +        elif file_info['compression'] == self.RAW:
    +            if file_info['bits'] == 32 and header == 22:  # 32-bit .cur offset
    +                raw_mode, self.mode = "BGRA", "RGBA"
    +        else:
    +            raise IOError("Unsupported BMP compression (%d)" % file_info['compression'])
    +        # ---------------- Once the header is processed, process the palette/LUT
    +        if self.mode == "P":  # Paletted for 1, 4 and 8 bit images
    +            # ----------------------------------------------------- 1-bit images
    +            if not (0 < file_info['colors'] <= 65536):
    +                raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors'])
    +            else:
    +                padding = file_info['palette_padding']
    +                palette = read(padding * file_info['colors'])
    +                greyscale = True
    +                indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors']))
    +                # ------------------ Check if greyscale and ignore palette if so
    +                for ind, val in enumerate(indices):
    +                    rgb = palette[ind*padding:ind*padding + 3]
    +                    if rgb != o8(val) * 3:
    +                        greyscale = False
    +                # -------- If all colors are grey, white or black, ditch palette
    +                if greyscale:
    +                    self.mode = "1" if file_info['colors'] == 2 else "L"
    +                    raw_mode = self.mode
    +                else:
    +                    self.mode = "P"
    +                    self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette)
    +
    +        # ----------------------------- Finally set the tile data for the plugin
    +        self.info['compression'] = file_info['compression']
    +        self.tile = [('raw', (0, 0, file_info['width'], file_info['height']), offset or self.fp.tell(),
    +                      (raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction'])
    +                      )]
    +
    +    def _open(self):
    +        """ Open file, check magic number and read header """
    +        # read 14 bytes: magic number, filesize, reserved, header final offset
    +        head_data = self.fp.read(14)
    +        # choke if the file does not have the required magic bytes
    +        if head_data[0:2] != b"BM":
    +            raise SyntaxError("Not a BMP file")
    +        # read the start position of the BMP image data (u32)
    +        offset = i32(head_data[10:14])
    +        # load bitmap information (offset=raster info)
    +        self._bitmap(offset=offset)
    +
    +
    +# ==============================================================================
    +# Image plugin for the DIB format (BMP alias)
    +# ==============================================================================
    +class DibImageFile(BmpImageFile):
    +
    +    format = "DIB"
    +    format_description = "Windows Bitmap"
    +
    +    def _open(self):
    +        self._bitmap()
    +
    +#
    +# --------------------------------------------------------------------
    +# Write BMP file
    +
    +SAVE = {
    +    "1": ("1", 1, 2),
    +    "L": ("L", 8, 256),
    +    "P": ("P", 8, 256),
    +    "RGB": ("BGR", 24, 0),
    +    "RGBA": ("BGRA", 32, 0),
    +}
    +
    +
    +def _save(im, fp, filename, check=0):
    +    try:
    +        rawmode, bits, colors = SAVE[im.mode]
    +    except KeyError:
    +        raise IOError("cannot write mode %s as BMP" % im.mode)
    +
    +    if check:
    +        return check
    +
    +    info = im.encoderinfo
    +
    +    dpi = info.get("dpi", (96, 96))
    +
    +    # 1 meter == 39.3701 inches
    +    ppm = tuple(map(lambda x: int(x * 39.3701), dpi))
    +
    +    stride = ((im.size[0]*bits+7)//8+3) & (~3)
    +    header = 40  # or 64 for OS/2 version 2
    +    offset = 14 + header + colors * 4
    +    image = stride * im.size[1]
    +
    +    # bitmap header
    +    fp.write(b"BM" +                      # file type (magic)
    +             o32(offset+image) +          # file size
    +             o32(0) +                     # reserved
    +             o32(offset))                 # image data offset
    +
    +    # bitmap info header
    +    fp.write(o32(header) +                # info header size
    +             o32(im.size[0]) +            # width
    +             o32(im.size[1]) +            # height
    +             o16(1) +                     # planes
    +             o16(bits) +                  # depth
    +             o32(0) +                     # compression (0=uncompressed)
    +             o32(image) +                 # size of bitmap
    +             o32(ppm[0]) + o32(ppm[1]) +  # resolution
    +             o32(colors) +                # colors used
    +             o32(colors))                 # colors important
    +
    +    fp.write(b"\0" * (header - 40))       # padding (for OS/2 format)
    +
    +    if im.mode == "1":
    +        for i in (0, 255):
    +            fp.write(o8(i) * 4)
    +    elif im.mode == "L":
    +        for i in range(256):
    +            fp.write(o8(i) * 4)
    +    elif im.mode == "P":
    +        fp.write(im.im.getpalette("RGB", "BGRX"))
    +
    +    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0,
    +                    (rawmode, stride, -1))])
    +
    +#
    +# --------------------------------------------------------------------
    +# Registry
    +
    +Image.register_open(BmpImageFile.format, BmpImageFile, _accept)
    +Image.register_save(BmpImageFile.format, _save)
    +
    +Image.register_extension(BmpImageFile.format, ".bmp")
    +
    +Image.register_mime(BmpImageFile.format, "image/bmp")
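
Round-trip sketch for the plugin above; Image dispatches on the registered ".bmp" extension when saving and on the b"BM" magic when opening:

    from PIL import Image

    im = Image.new("RGB", (16, 16), "#5C87B2")
    im.save("sample.bmp", dpi=(96, 96))   # _save emits a 40-byte v3 header

    reloaded = Image.open("sample.bmp")
    print(reloaded.format, reloaded.mode, reloaded.size)  # BMP RGB (16, 16)
    print(reloaded.info["dpi"])   # (96, 96), recovered from pixels-per-meter
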
    diff --git a/server/www/packages/packages-linux/x64/PIL/BufrStubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/BufrStubImagePlugin.py
    new file mode 100644
    index 0000000..45ee547
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/BufrStubImagePlugin.py
    @@ -0,0 +1,72 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# BUFR stub adapter
    +#
    +# Copyright (c) 1996-2003 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image, ImageFile
    +
    +_handler = None
    +
    +
    +##
    +# Install application-specific BUFR image handler.
    +#
    +# @param handler Handler object.
    +
    +def register_handler(handler):
    +    global _handler
    +    _handler = handler
    +
    +
    +# --------------------------------------------------------------------
    +# Image adapter
    +
    +def _accept(prefix):
    +    return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC"
    +
    +
    +class BufrStubImageFile(ImageFile.StubImageFile):
    +
    +    format = "BUFR"
    +    format_description = "BUFR"
    +
    +    def _open(self):
    +
    +        offset = self.fp.tell()
    +
    +        if not _accept(self.fp.read(8)):
    +            raise SyntaxError("Not a BUFR file")
    +
    +        self.fp.seek(offset)
    +
    +        # make something up
    +        self.mode = "F"
    +        self.size = 1, 1
    +
    +        loader = self._load()
    +        if loader:
    +            loader.open(self)
    +
    +    def _load(self):
    +        return _handler
    +
    +
    +def _save(im, fp, filename):
+    if _handler is None or not hasattr(_handler, "save"):
    +        raise IOError("BUFR save handler not installed")
    +    _handler.save(im, fp, filename)
    +
    +
    +# --------------------------------------------------------------------
    +# Registry
    +
    +Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept)
    +Image.register_save(BufrStubImageFile.format, _save)
    +
    +Image.register_extension(BufrStubImageFile.format, ".bufr")
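
A hedged sketch of installing an application handler; the open/load interface follows ImageFile.StubImageFile (load must return a real Image object, which the stub then morphs into):

    from PIL import Image, BufrStubImagePlugin

    class MyBufrHandler(object):
        def open(self, im):
            pass  # may refine im.mode / im.size here
        def load(self, im):
            # a real implementation would decode the BUFR payload;
            # returning a placeholder image keeps the sketch runnable
            return Image.new("F", im.size)

    BufrStubImagePlugin.register_handler(MyBufrHandler())
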
    diff --git a/server/www/packages/packages-linux/x64/PIL/ContainerIO.py b/server/www/packages/packages-linux/x64/PIL/ContainerIO.py
    new file mode 100644
    index 0000000..262f2af
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/ContainerIO.py
    @@ -0,0 +1,117 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# a class to read from a container file
    +#
    +# History:
    +# 1995-06-18 fl     Created
    +# 1995-09-07 fl     Added readline(), readlines()
    +#
    +# Copyright (c) 1997-2001 by Secret Labs AB
    +# Copyright (c) 1995 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +##
    +# A file object that provides read access to a part of an existing
    +# file (for example a TAR file).
    +
    +
    +class ContainerIO(object):
    +
    +    ##
    +    # Create file object.
    +    #
    +    # @param file Existing file.
    +    # @param offset Start of region, in bytes.
    +    # @param length Size of region, in bytes.
    +
    +    def __init__(self, file, offset, length):
    +        self.fh = file
    +        self.pos = 0
    +        self.offset = offset
    +        self.length = length
    +        self.fh.seek(offset)
    +
    +    ##
    +    # Always false.
    +
    +    def isatty(self):
    +        return 0
    +
    +    ##
    +    # Move file pointer.
    +    #
    +    # @param offset Offset in bytes.
    +    # @param mode Starting position. Use 0 for beginning of region, 1
    +    #    for current offset, and 2 for end of region.  You cannot move
    +    #    the pointer outside the defined region.
    +
    +    def seek(self, offset, mode=0):
    +        if mode == 1:
    +            self.pos = self.pos + offset
    +        elif mode == 2:
    +            self.pos = self.length + offset
    +        else:
    +            self.pos = offset
    +        # clamp
    +        self.pos = max(0, min(self.pos, self.length))
    +        self.fh.seek(self.offset + self.pos)
    +
    +    ##
    +    # Get current file pointer.
    +    #
    +    # @return Offset from start of region, in bytes.
    +
    +    def tell(self):
    +        return self.pos
    +
    +    ##
    +    # Read data.
    +    #
    +    # @def read(bytes=0)
    +    # @param bytes Number of bytes to read.  If omitted or zero,
    +    #     read until end of region.
    +    # @return An 8-bit string.
    +
    +    def read(self, n=0):
    +        if n:
    +            n = min(n, self.length - self.pos)
    +        else:
    +            n = self.length - self.pos
    +        if not n:  # EOF
    +            return ""
    +        self.pos = self.pos + n
    +        return self.fh.read(n)
    +
    +    ##
    +    # Read a line of text.
    +    #
    +    # @return An 8-bit string.
    +
    +    def readline(self):
    +        s = ""
    +        while True:
    +            c = self.read(1)
    +            if not c:
    +                break
    +            s = s + c
    +            if c == "\n":
    +                break
    +        return s
    +
    +    ##
    +    # Read multiple lines of text.
    +    #
    +    # @return A list of 8-bit strings.
    +
    +    def readlines(self):
    +        l = []
    +        while True:
    +            s = self.readline()
    +            if not s:
    +                break
    +            l.append(s)
    +        return l
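ContainerIO is useful when one physical file embeds several logical ones. A short sketch of reading a region, with the file name, offset, and length made up:

    from PIL import ContainerIO

    fh = open("archive.bin", "rb")                   # hypothetical container file
    member = ContainerIO.ContainerIO(fh, 1024, 512)  # 512-byte region at offset 1024
    member.seek(0, 2)                                # mode 2 = relative to end of region
    assert member.tell() == 512                      # position is clamped to the region
    member.seek(0)
    data = member.read()                             # reads at most 512 bytes

Note that readline() and readlines() build str objects, so under Python 3 they only behave with files opened in text mode.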
    diff --git a/server/www/packages/packages-linux/x64/PIL/CurImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/CurImagePlugin.py
    new file mode 100644
    index 0000000..4db4c40
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/CurImagePlugin.py
    @@ -0,0 +1,88 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# Windows Cursor support for PIL
    +#
    +# notes:
    +#       uses BmpImagePlugin.py to read the bitmap data.
    +#
    +# history:
    +#       96-05-27 fl     Created
    +#
    +# Copyright (c) Secret Labs AB 1997.
    +# Copyright (c) Fredrik Lundh 1996.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +
    +from PIL import Image, BmpImagePlugin, _binary
    +
    +__version__ = "0.1"
    +
    +#
    +# --------------------------------------------------------------------
    +
    +i8 = _binary.i8
    +i16 = _binary.i16le
    +i32 = _binary.i32le
    +
    +
    +def _accept(prefix):
    +    return prefix[:4] == b"\0\0\2\0"
    +
    +
    +##
    +# Image plugin for Windows Cursor files.
    +
    +class CurImageFile(BmpImagePlugin.BmpImageFile):
    +
    +    format = "CUR"
    +    format_description = "Windows Cursor"
    +
    +    def _open(self):
    +
    +        offset = self.fp.tell()
    +
    +        # check magic
    +        s = self.fp.read(6)
    +        if not _accept(s):
    +            raise SyntaxError("not a CUR file")
    +
    +        # pick the largest cursor in the file
    +        m = b""
    +        for i in range(i16(s[4:])):
    +            s = self.fp.read(16)
    +            if not m:
    +                m = s
    +            elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]):
    +                m = s
    +            # print "width", i8(s[0])
    +            # print "height", i8(s[1])
    +            # print "colors", i8(s[2])
    +            # print "reserved", i8(s[3])
    +            # print "hotspot x", i16(s[4:])
    +            # print "hotspot y", i16(s[6:])
    +            # print "bytes", i32(s[8:])
    +            # print "offset", i32(s[12:])
    +        if not m:
    +            raise TypeError("No cursors were found")
    +
    +        # load as bitmap
    +        self._bitmap(i32(m[12:]) + offset)
    +
    +        # patch up the bitmap height
    +        self.size = self.size[0], self.size[1]//2
    +        d, e, o, a = self.tile[0]
    +        self.tile[0] = d, (0, 0)+self.size, o, a
    +
    +        return
    +
    +
    +#
    +# --------------------------------------------------------------------
    +
    +Image.register_open(CurImageFile.format, CurImageFile, _accept)
    +
    +Image.register_extension(CurImageFile.format, ".cur")
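With the plugin registered, a cursor opens like any other format; note the plugin keeps only the largest cursor found in the file. A quick usage sketch (file names are made up):

    from PIL import Image

    cur = Image.open("pointer.cur")   # hypothetical Windows cursor file
    print(cur.format, cur.size)       # "CUR" and the chosen cursor's size
    cur.save("pointer.png")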
    diff --git a/server/www/packages/packages-linux/x64/PIL/DcxImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/DcxImagePlugin.py
    new file mode 100644
    index 0000000..f9034d1
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/DcxImagePlugin.py
    @@ -0,0 +1,86 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# DCX file handling
    +#
    +# DCX is a container file format defined by Intel, commonly used
    +# for fax applications.  Each DCX file consists of a directory
    +# (a list of file offsets) followed by a set of (usually 1-bit)
    +# PCX files.
    +#
    +# History:
    +# 1995-09-09 fl   Created
    +# 1996-03-20 fl   Properly derived from PcxImageFile.
    +# 1998-07-15 fl   Renamed offset attribute to avoid name clash
    +# 2002-07-30 fl   Fixed file handling
    +#
    +# Copyright (c) 1997-98 by Secret Labs AB.
    +# Copyright (c) 1995-96 by Fredrik Lundh.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image, _binary
    +from PIL.PcxImagePlugin import PcxImageFile
    +
    +__version__ = "0.2"
    +
    +MAGIC = 0x3ADE68B1  # QUIZ: what's this value, then?
    +
    +i32 = _binary.i32le
    +
    +
    +def _accept(prefix):
    +    return len(prefix) >= 4 and i32(prefix) == MAGIC
    +
    +
    +##
    +# Image plugin for the Intel DCX format.
    +
    +class DcxImageFile(PcxImageFile):
    +
    +    format = "DCX"
    +    format_description = "Intel DCX"
    +
    +    def _open(self):
    +
    +        # Header
    +        s = self.fp.read(4)
    +        if i32(s) != MAGIC:
    +            raise SyntaxError("not a DCX file")
    +
    +        # Component directory
    +        self._offset = []
    +        for i in range(1024):
    +            offset = i32(self.fp.read(4))
    +            if not offset:
    +                break
    +            self._offset.append(offset)
    +
    +        self.__fp = self.fp
    +        self.seek(0)
    +
    +    @property
    +    def n_frames(self):
    +        return len(self._offset)
    +
    +    @property
    +    def is_animated(self):
    +        return len(self._offset) > 1
    +
    +    def seek(self, frame):
    +        if frame >= len(self._offset):
    +            raise EOFError("attempt to seek outside DCX directory")
    +        self.frame = frame
    +        self.fp = self.__fp
    +        self.fp.seek(self._offset[frame])
    +        PcxImageFile._open(self)
    +
    +    def tell(self):
    +        return self.frame
    +
    +
    +Image.register_open(DcxImageFile.format, DcxImageFile, _accept)
    +
    +Image.register_extension(DcxImageFile.format, ".dcx")
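Because DcxImageFile exposes n_frames, is_animated, seek() and tell(), a multi-page fax can be walked page by page (the file name is made up):

    from PIL import Image

    im = Image.open("fax.dcx")        # hypothetical multi-page DCX fax
    for page in range(im.n_frames):
        im.seek(page)
        im.save("fax_page_%03d.png" % page)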
    diff --git a/server/www/packages/packages-linux/x64/PIL/DdsImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/DdsImagePlugin.py
    new file mode 100644
    index 0000000..2ebfdf0
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/DdsImagePlugin.py
    @@ -0,0 +1,268 @@
    +"""
    +A Pillow loader for .dds files (S3TC-compressed aka DXTC)
+Jerome Leclanche <jerome@leclan.ch>
    +
    +Documentation:
    +  http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt
    +
    +The contents of this file are hereby released in the public domain (CC0)
    +Full text of the CC0 license:
    +  https://creativecommons.org/publicdomain/zero/1.0/
    +"""
    +
    +import struct
    +from io import BytesIO
    +from PIL import Image, ImageFile
    +
    +
    +# Magic ("DDS ")
    +DDS_MAGIC = 0x20534444
    +
    +# DDS flags
    +DDSD_CAPS = 0x1
    +DDSD_HEIGHT = 0x2
    +DDSD_WIDTH = 0x4
    +DDSD_PITCH = 0x8
    +DDSD_PIXELFORMAT = 0x1000
    +DDSD_MIPMAPCOUNT = 0x20000
    +DDSD_LINEARSIZE = 0x80000
    +DDSD_DEPTH = 0x800000
    +
    +# DDS caps
    +DDSCAPS_COMPLEX = 0x8
    +DDSCAPS_TEXTURE = 0x1000
    +DDSCAPS_MIPMAP = 0x400000
    +
    +DDSCAPS2_CUBEMAP = 0x200
    +DDSCAPS2_CUBEMAP_POSITIVEX = 0x400
    +DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800
    +DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000
    +DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000
    +DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000
    +DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000
    +DDSCAPS2_VOLUME = 0x200000
    +
    +# Pixel Format
    +DDPF_ALPHAPIXELS = 0x1
    +DDPF_ALPHA = 0x2
    +DDPF_FOURCC = 0x4
    +DDPF_PALETTEINDEXED8 = 0x20
    +DDPF_RGB = 0x40
    +DDPF_LUMINANCE = 0x20000
    +
    +
    +# dds.h
    +
    +DDS_FOURCC = DDPF_FOURCC
    +DDS_RGB = DDPF_RGB
    +DDS_RGBA = DDPF_RGB | DDPF_ALPHAPIXELS
    +DDS_LUMINANCE = DDPF_LUMINANCE
    +DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS
    +DDS_ALPHA = DDPF_ALPHA
    +DDS_PAL8 = DDPF_PALETTEINDEXED8
    +
    +DDS_HEADER_FLAGS_TEXTURE = (DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH |
    +                            DDSD_PIXELFORMAT)
    +DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT
    +DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH
    +DDS_HEADER_FLAGS_PITCH = DDSD_PITCH
    +DDS_HEADER_FLAGS_LINEARSIZE = DDSD_LINEARSIZE
    +
    +DDS_HEIGHT = DDSD_HEIGHT
    +DDS_WIDTH = DDSD_WIDTH
    +
    +DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS_TEXTURE
    +DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS_COMPLEX | DDSCAPS_MIPMAP
    +DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS_COMPLEX
    +
    +DDS_CUBEMAP_POSITIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX
    +DDS_CUBEMAP_NEGATIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEX
    +DDS_CUBEMAP_POSITIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEY
    +DDS_CUBEMAP_NEGATIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEY
    +DDS_CUBEMAP_POSITIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEZ
    +DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEZ
    +
    +
    +# DXT1
    +DXT1_FOURCC = 0x31545844
    +
    +# DXT3
    +DXT3_FOURCC = 0x33545844
    +
    +# DXT5
    +DXT5_FOURCC = 0x35545844
    +
    +
    +def _decode565(bits):
    +    a = ((bits >> 11) & 0x1f) << 3
    +    b = ((bits >> 5) & 0x3f) << 2
    +    c = (bits & 0x1f) << 3
    +    return a, b, c
    +
    +
    +def _c2a(a, b):
    +    return (2 * a + b) // 3
    +
    +
    +def _c2b(a, b):
    +    return (a + b) // 2
    +
    +
    +def _c3(a, b):
    +    return (2 * b + a) // 3
    +
    +
    +def _dxt1(data, width, height):
    +    # TODO implement this function as pixel format in decode.c
    +    ret = bytearray(4 * width * height)
    +
    +    for y in range(0, height, 4):
    +        for x in range(0, width, 4):
+            color0, color1, bits = struct.unpack("<HHI", data.read(8))
+
+            r0, g0, b0 = _decode565(color0)
+            r1, g1, b1 = _decode565(color1)
+
+            # Decode this block into 4x4 pixels
+            for j in range(4):
+                for i in range(4):
+                    # get next control op and generate a pixel
+                    control = bits & 3
+                    bits = bits >> 2
    +                    if control == 0:
    +                        r, g, b = r0, g0, b0
    +                    elif control == 1:
    +                        r, g, b = r1, g1, b1
    +                    elif control == 2:
    +                        if color0 > color1:
    +                            r, g, b = _c2a(r0, r1), _c2a(g0, g1), _c2a(b0, b1)
    +                        else:
    +                            r, g, b = _c2b(r0, r1), _c2b(g0, g1), _c2b(b0, b1)
    +                    elif control == 3:
    +                        if color0 > color1:
    +                            r, g, b = _c3(r0, r1), _c3(g0, g1), _c3(b0, b1)
    +                        else:
    +                            r, g, b = 0, 0, 0
    +
    +                    idx = 4 * ((y + j) * width + (x + i))
    +                    ret[idx:idx+4] = struct.pack('4B', r, g, b, 255)
    +
    +    return bytes(ret)
    +
    +
    +def _dxtc_alpha(a0, a1, ac0, ac1, ai):
    +    if ai <= 12:
    +        ac = (ac0 >> ai) & 7
    +    elif ai == 15:
    +        ac = (ac0 >> 15) | ((ac1 << 1) & 6)
    +    else:
    +        ac = (ac1 >> (ai - 16)) & 7
    +
    +    if ac == 0:
    +        alpha = a0
    +    elif ac == 1:
    +        alpha = a1
    +    elif a0 > a1:
    +        alpha = ((8 - ac) * a0 + (ac - 1) * a1) // 7
    +    elif ac == 6:
    +        alpha = 0
    +    elif ac == 7:
    +        alpha = 0xff
    +    else:
    +        alpha = ((6 - ac) * a0 + (ac - 1) * a1) // 5
    +
    +    return alpha
    +
    +
    +def _dxt5(data, width, height):
    +    # TODO implement this function as pixel format in decode.c
    +    ret = bytearray(4 * width * height)
    +
    +    for y in range(0, height, 4):
    +        for x in range(0, width, 4):
    +            a0, a1, ac0, ac1, c0, c1, code = struct.unpack("<2BHI2HI",
    +                                                           data.read(16))
    +
    +            r0, g0, b0 = _decode565(c0)
    +            r1, g1, b1 = _decode565(c1)
    +
    +            for j in range(4):
    +                for i in range(4):
    +                    ai = 3 * (4 * j + i)
    +                    alpha = _dxtc_alpha(a0, a1, ac0, ac1, ai)
    +
    +                    cc = (code >> 2 * (4 * j + i)) & 3
    +                    if cc == 0:
    +                        r, g, b = r0, g0, b0
    +                    elif cc == 1:
    +                        r, g, b = r1, g1, b1
    +                    elif cc == 2:
    +                        r, g, b = _c2a(r0, r1), _c2a(g0, g1), _c2a(b0, b1)
    +                    elif cc == 3:
    +                        r, g, b = _c3(r0, r1), _c3(g0, g1), _c3(b0, b1)
    +
    +                    idx = 4 * ((y + j) * width + (x + i))
    +                    ret[idx:idx+4] = struct.pack('4B', r, g, b, alpha)
    +
    +    return bytes(ret)
    +
    +
    +class DdsImageFile(ImageFile.ImageFile):
    +    format = "DDS"
    +    format_description = "DirectDraw Surface"
    +
    +    def _open(self):
+        magic, header_size = struct.unpack("<II", self.fp.read(8))
+        if magic != DDS_MAGIC or header_size != 124:
+            raise SyntaxError("not a DDS file")
+        header = BytesIO(self.fp.read(header_size - 4))
+
+        flags, height, width = struct.unpack("<3I", header.read(12))
+        self.size = (width, height)
+        self.mode = "RGBA"
+
+        pitch, depth, mipmaps = struct.unpack("<3I", header.read(12))
+        reserved = struct.unpack("<11I", header.read(44))
+
+        # pixel format
+        pfsize, pfflags = struct.unpack("<2I", header.read(8))
+        fourcc = header.read(4)
+        bitcount, rmask, gmask, bmask, amask = struct.unpack("<5I",
+                                                             header.read(20))
+
+        if fourcc == b"DXT1":
+            self._decoder = _dxt1
+        elif fourcc == b"DXT5":
+            self._decoder = _dxt5
+        else:
+            raise NotImplementedError("Unimplemented pixel format %r" %
+                                      fourcc)
+
+    def load_seek(self, pos):
+        pass
+
+    def load(self):
+        if self.im is None:
+            try:
+                # run the pure-python S3TC decoders defined above
+                self.im = Image.core.new(self.mode, self.size)
+                self.frombytes(self._decoder(self.fp, self.size[0],
+                                             self.size[1]))
+            except struct.error:
+                raise IOError("Truncated DDS file")
+
+
+def _accept(prefix):
+    return prefix[:4] == b"DDS "
+
+
+Image.register_open(DdsImageFile.format, DdsImageFile, _accept)
+Image.register_extension(DdsImageFile.format, ".dds")
diff --git a/server/www/packages/packages-linux/x64/PIL/EpsImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/EpsImagePlugin.py
new file mode 100644
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/EpsImagePlugin.py
+#
+# The Python Imaging Library.
+# $Id$
+#
+# EPS file handling
+#
+# History:
+# 1995-09-01 fl   Created (0.1)
+# 1996-05-18 fl   Don't choke on "atend" fields, Ghostscript interface (0.2)
+# 1996-08-22 fl   Don't choke on floating point BoundingBox values
+# 1996-08-23 fl   Handle files from Macintosh (0.3)
+# 2001-02-17 fl   Use 're' instead of 'regex' (Python 2.1) (0.4)
+# 2003-09-07 fl   Check gs.close status (from Federico Di Gregorio) (0.5)
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import re
+import io
+import os
+import sys
+import subprocess
+import tempfile
+
+from PIL import Image, ImageFile, _binary
+
+__version__ = "0.5"
+
+#
+# --------------------------------------------------------------------
+
+i32 = _binary.i32le
+o32 = _binary.o32le
+
+split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$")
+field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$")
+
+gs_windows_binary = None
+if sys.platform.startswith('win'):
+    import shutil
+    if hasattr(shutil, 'which'):
+        which = shutil.which
+    else:
+        # Python 2
+        import distutils.spawn
+        which = distutils.spawn.find_executable
+    for binary in ('gswin32c', 'gswin64c', 'gs'):
+        if which(binary) is not None:
+            gs_windows_binary = binary
+            break
+    else:
+        gs_windows_binary = False
+
+
+def has_ghostscript():
+    if gs_windows_binary:
+        return True
+    if not sys.platform.startswith('win'):
+        try:
+            with open(os.devnull, 'wb') as devnull:
+                subprocess.check_call(['gs', '--version'], stdout=devnull)
+            return True
+        except OSError:
+            # no ghostscript
+            pass
+    return False
+
+
+def Ghostscript(tile, size, fp, scale=1):
+    """Render an image using Ghostscript"""
+
+    # Unpack decoder tile
+    decoder, tile, offset, data = tile[0]
+    length, bbox = data
+
+    # Hack to support hi-res rendering
+    scale = int(scale) or 1
+    size = (size[0] * scale, size[1] * scale)
+    # resolution is dependent on bbox and size
+    res = (float((72.0 * size[0]) / (bbox[2] - bbox[0])),
+           float((72.0 * size[1]) / (bbox[3] - bbox[1])))
+
+    out_fd, outfile = tempfile.mkstemp()
+    os.close(out_fd)
+
+    infile_temp = None
+    if hasattr(fp, 'name') and os.path.exists(fp.name):
+        infile = fp.name
+    else:
+        in_fd, infile_temp = tempfile.mkstemp()
+        os.close(in_fd)
+        infile = infile_temp
+
+        # Ghostscript reads the EPS straight from a file, so copy the
+        # whole stream out; length and offset are ignored here.
+        with open(infile_temp, 'wb') as f:
+            # fetch length of fp
+            fp.seek(0, 2)
+            fsize = fp.tell()
+            # go back to the start
+            fp.seek(0)
+            lengthfile = fsize
+            while lengthfile > 0:
    +                s = fp.read(min(lengthfile, 100*1024))
    +                if not s:
    +                    break
    +                lengthfile -= len(s)
    +                f.write(s)
    +
    +    # Build ghostscript command
    +    command = ["gs",
    +               "-q",                         # quiet mode
    +               "-g%dx%d" % size,             # set output geometry (pixels)
    +               "-r%fx%f" % res,              # set input DPI (dots per inch)
    +               "-dNOPAUSE",                  # don't pause between pages,
    +               "-dSAFER",                    # safe mode
    +               "-sDEVICE=ppmraw",            # ppm driver
    +               "-sOutputFile=%s" % outfile,  # output file
    +               "-c", "%d %d translate" % (-bbox[0], -bbox[1]),
    +                                             # adjust for image origin
    +               "-f", infile,                 # input file
    +               ]
    +
    +    if gs_windows_binary is not None:
    +        if not gs_windows_binary:
    +            raise WindowsError('Unable to locate Ghostscript on paths')
    +        command[0] = gs_windows_binary
    +
    +    # push data through ghostscript
    +    try:
    +        gs = subprocess.Popen(command, stdin=subprocess.PIPE,
    +                              stdout=subprocess.PIPE)
    +        gs.stdin.close()
    +        status = gs.wait()
    +        if status:
    +            raise IOError("gs failed (status %d)" % status)
    +        im = Image.core.open_ppm(outfile)
    +    finally:
    +        try:
    +            os.unlink(outfile)
    +            if infile_temp:
    +                os.unlink(infile_temp)
    +        except OSError:
    +            pass
    +
    +    return im
    +
    +
    +class PSFile(object):
    +    """
+    Wrapper for a BytesIO object that treats either CR or LF as end of line.
    +    """
    +    def __init__(self, fp):
    +        self.fp = fp
    +        self.char = None
    +
    +    def seek(self, offset, whence=0):
    +        self.char = None
    +        self.fp.seek(offset, whence)
    +
    +    def readline(self):
    +        s = self.char or b""
    +        self.char = None
    +
    +        c = self.fp.read(1)
    +        while c not in b"\r\n":
    +            s = s + c
    +            c = self.fp.read(1)
    +
    +        self.char = self.fp.read(1)
    +        # line endings can be 1 or 2 of \r \n, in either order
    +        if self.char in b"\r\n":
    +            self.char = None
    +
    +        return s.decode('latin-1')
    +
    +
    +def _accept(prefix):
    +    return prefix[:4] == b"%!PS" or \
    +           (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5)
    +
    +##
    +# Image plugin for Encapsulated Postscript.  This plugin supports only
    +# a few variants of this format.
    +
    +
    +class EpsImageFile(ImageFile.ImageFile):
    +    """EPS File Parser for the Python Imaging Library"""
    +
    +    format = "EPS"
    +    format_description = "Encapsulated Postscript"
    +
    +    mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"}
    +
    +    def _open(self):
    +        (length, offset) = self._find_offset(self.fp)
    +
    +        # Rewrap the open file pointer in something that will
    +        # convert line endings and decode to latin-1.
    +        try:
    +            if bytes is str:
    +                # Python2, no encoding conversion necessary
    +                fp = open(self.fp.name, "Ur")
    +            else:
    +                # Python3, can use bare open command.
    +                fp = open(self.fp.name, "Ur", encoding='latin-1')
    +        except:
    +            # Expect this for bytesio/stringio
    +            fp = PSFile(self.fp)
    +
    +        # go to offset - start of "%!PS"
    +        fp.seek(offset)
    +
    +        box = None
    +
    +        self.mode = "RGB"
    +        self.size = 1, 1  # FIXME: huh?
    +
    +        #
    +        # Load EPS header
    +
    +        s = fp.readline().strip('\r\n')
    +
    +        while s:
    +            if len(s) > 255:
    +                raise SyntaxError("not an EPS file")
    +
    +            try:
    +                m = split.match(s)
    +            except re.error as v:
    +                raise SyntaxError("not an EPS file")
    +
    +            if m:
    +                k, v = m.group(1, 2)
    +                self.info[k] = v
    +                if k == "BoundingBox":
    +                    try:
    +                        # Note: The DSC spec says that BoundingBox
    +                        # fields should be integers, but some drivers
    +                        # put floating point values there anyway.
    +                        box = [int(float(i)) for i in v.split()]
    +                        self.size = box[2] - box[0], box[3] - box[1]
    +                        self.tile = [("eps", (0, 0) + self.size, offset,
    +                                      (length, box))]
    +                    except:
    +                        pass
    +
    +            else:
    +                m = field.match(s)
    +                if m:
    +                    k = m.group(1)
    +
    +                    if k == "EndComments":
    +                        break
    +                    if k[:8] == "PS-Adobe":
    +                        self.info[k[:8]] = k[9:]
    +                    else:
    +                        self.info[k] = ""
    +                elif s[0] == '%':
    +                    # handle non-DSC Postscript comments that some
    +                    # tools mistakenly put in the Comments section
    +                    pass
    +                else:
    +                    raise IOError("bad EPS header")
    +
    +            s = fp.readline().strip('\r\n')
    +
    +            if s[:1] != "%":
    +                break
    +
    +        #
    +        # Scan for an "ImageData" descriptor
    +
    +        while s[:1] == "%":
    +
    +            if len(s) > 255:
    +                raise SyntaxError("not an EPS file")
    +
    +            if s[:11] == "%ImageData:":
    +                # Encoded bitmapped image.
    +                x, y, bi, mo = s[11:].split(None, 7)[:4]
    +
    +                if int(bi) != 8:
    +                    break
    +                try:
    +                    self.mode = self.mode_map[int(mo)]
+                except (KeyError, ValueError):
    +                    break
    +
    +                self.size = int(x), int(y)
    +                return
    +
    +            s = fp.readline().strip('\r\n')
    +            if not s:
    +                break
    +
    +        if not box:
    +            raise IOError("cannot determine EPS bounding box")
    +
    +    def _find_offset(self, fp):
    +
    +        s = fp.read(160)
    +
    +        if s[:4] == b"%!PS":
    +            # for HEAD without binary preview
    +            fp.seek(0, 2)
    +            length = fp.tell()
    +            offset = 0
    +        elif i32(s[0:4]) == 0xC6D3D0C5:
    +            # FIX for: Some EPS file not handled correctly / issue #302
    +            # EPS can contain binary data
    +            # or start directly with latin coding
    +            # more info see:
    +            # http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf
    +            offset = i32(s[4:8])
    +            length = i32(s[8:12])
    +        else:
    +            raise SyntaxError("not an EPS file")
    +
    +        return (length, offset)
    +
    +    def load(self, scale=1):
    +        # Load EPS via Ghostscript
    +        if not self.tile:
    +            return
    +        self.im = Ghostscript(self.tile, self.size, self.fp, scale)
    +        self.mode = self.im.mode
    +        self.size = self.im.size
    +        self.tile = []
    +
    +    def load_seek(self, *args, **kwargs):
    +        # we can't incrementally load, so force ImageFile.parser to
    +        # use our custom load method by defining this method.
    +        pass
    +
    +
    +#
    +# --------------------------------------------------------------------
    +
    +def _save(im, fp, filename, eps=1):
    +    """EPS Writer for the Python Imaging Library."""
    +
    +    #
    +    # make sure image data is available
    +    im.load()
    +
    +    #
    +    # determine postscript image mode
    +    if im.mode == "L":
    +        operator = (8, 1, "image")
    +    elif im.mode == "RGB":
    +        operator = (8, 3, "false 3 colorimage")
    +    elif im.mode == "CMYK":
    +        operator = (8, 4, "false 4 colorimage")
    +    else:
    +        raise ValueError("image mode is not supported")
    +
    +    class NoCloseStream(object):
    +        def __init__(self, fp):
    +            self.fp = fp
    +
    +        def __getattr__(self, name):
    +            return getattr(self.fp, name)
    +
    +        def close(self):
    +            pass
    +
    +    base_fp = fp
    +    if fp != sys.stdout:
    +        fp = NoCloseStream(fp)
    +        if sys.version_info[0] > 2:
    +            fp = io.TextIOWrapper(fp, encoding='latin-1')
    +
    +    if eps:
    +        #
    +        # write EPS header
    +        fp.write("%!PS-Adobe-3.0 EPSF-3.0\n")
    +        fp.write("%%Creator: PIL 0.1 EpsEncode\n")
    +        # fp.write("%%CreationDate: %s"...)
    +        fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size)
    +        fp.write("%%Pages: 1\n")
    +        fp.write("%%EndComments\n")
    +        fp.write("%%Page: 1 1\n")
    +        fp.write("%%ImageData: %d %d " % im.size)
    +        fp.write("%d %d 0 1 1 \"%s\"\n" % operator)
    +
    +    #
    +    # image header
    +    fp.write("gsave\n")
    +    fp.write("10 dict begin\n")
    +    fp.write("/buf %d string def\n" % (im.size[0] * operator[1]))
    +    fp.write("%d %d scale\n" % im.size)
    +    fp.write("%d %d 8\n" % im.size)  # <= bits
    +    fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
    +    fp.write("{ currentfile buf readhexstring pop } bind\n")
    +    fp.write(operator[2] + "\n")
    +    if hasattr(fp, "flush"):
    +        fp.flush()
    +
    +    ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)])
    +
    +    fp.write("\n%%%%EndBinary\n")
    +    fp.write("grestore end\n")
    +    if hasattr(fp, "flush"):
    +        fp.flush()
    +
    +#
    +# --------------------------------------------------------------------
    +
    +Image.register_open(EpsImageFile.format, EpsImageFile, _accept)
    +
    +Image.register_save(EpsImageFile.format, _save)
    +
    +Image.register_extension(EpsImageFile.format, ".ps")
    +Image.register_extension(EpsImageFile.format, ".eps")
    +
    +Image.register_mime(EpsImageFile.format, "application/postscript")
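Since load() shells out to Ghostscript, the gs binary must be on PATH (or discoverable via gs_windows_binary on Windows). A usage sketch, with file names made up:

    from PIL import Image

    im = Image.open("figure.eps")   # hypothetical EPS with a BoundingBox
    im.load(scale=2)                # rasterize at 2x the BoundingBox size
    im.save("figure.png")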
    diff --git a/server/www/packages/packages-linux/x64/PIL/ExifTags.py b/server/www/packages/packages-linux/x64/PIL/ExifTags.py
    new file mode 100644
    index 0000000..a8ad26b
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/ExifTags.py
    @@ -0,0 +1,315 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# EXIF tags
    +#
    +# Copyright (c) 2003 by Secret Labs AB
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +##
    +# This module provides constants and clear-text names for various
    +# well-known EXIF tags.
    +##
    +
    +##
    +# Maps EXIF tags to tag names.
    +
    +TAGS = {
    +
    +    # possibly incomplete
    +    0x000b: "ProcessingSoftware",
    +    0x00fe: "NewSubfileType",
    +    0x00ff: "SubfileType",
    +    0x0100: "ImageWidth",
    +    0x0101: "ImageLength",
    +    0x0102: "BitsPerSample",
    +    0x0103: "Compression",
    +    0x0106: "PhotometricInterpretation",
    +    0x0107: "Thresholding",
    +    0x0108: "CellWidth",
    +    0x0109: "CellLength",
    +    0x010a: "FillOrder",
    +    0x010d: "DocumentName",
    +    0x010e: "ImageDescription",
    +    0x010f: "Make",
    +    0x0110: "Model",
    +    0x0111: "StripOffsets",
    +    0x0112: "Orientation",
    +    0x0115: "SamplesPerPixel",
    +    0x0116: "RowsPerStrip",
    +    0x0117: "StripByteCounts",
    +    0x0118: "MinSampleValue",
    +    0x0119: "MaxSampleValue",
    +    0x011a: "XResolution",
    +    0x011b: "YResolution",
    +    0x011c: "PlanarConfiguration",
    +    0x011d: "PageName",
    +    0x0120: "FreeOffsets",
    +    0x0121: "FreeByteCounts",
    +    0x0122: "GrayResponseUnit",
    +    0x0123: "GrayResponseCurve",
    +    0x0124: "T4Options",
    +    0x0125: "T6Options",
    +    0x0128: "ResolutionUnit",
    +    0x0129: "PageNumber",
    +    0x012d: "TransferFunction",
    +    0x0131: "Software",
    +    0x0132: "DateTime",
    +    0x013b: "Artist",
    +    0x013c: "HostComputer",
    +    0x013d: "Predictor",
    +    0x013e: "WhitePoint",
    +    0x013f: "PrimaryChromaticities",
    +    0x0140: "ColorMap",
    +    0x0141: "HalftoneHints",
    +    0x0142: "TileWidth",
    +    0x0143: "TileLength",
    +    0x0144: "TileOffsets",
    +    0x0145: "TileByteCounts",
    +    0x014a: "SubIFDs",
    +    0x014c: "InkSet",
    +    0x014d: "InkNames",
    +    0x014e: "NumberOfInks",
    +    0x0150: "DotRange",
    +    0x0151: "TargetPrinter",
    +    0x0152: "ExtraSamples",
    +    0x0153: "SampleFormat",
    +    0x0154: "SMinSampleValue",
    +    0x0155: "SMaxSampleValue",
    +    0x0156: "TransferRange",
    +    0x0157: "ClipPath",
    +    0x0158: "XClipPathUnits",
    +    0x0159: "YClipPathUnits",
    +    0x015a: "Indexed",
    +    0x015b: "JPEGTables",
    +    0x015f: "OPIProxy",
    +    0x0200: "JPEGProc",
    +    0x0201: "JpegIFOffset",
    +    0x0202: "JpegIFByteCount",
    +    0x0203: "JpegRestartInterval",
    +    0x0205: "JpegLosslessPredictors",
    +    0x0206: "JpegPointTransforms",
    +    0x0207: "JpegQTables",
    +    0x0208: "JpegDCTables",
    +    0x0209: "JpegACTables",
    +    0x0211: "YCbCrCoefficients",
    +    0x0212: "YCbCrSubSampling",
    +    0x0213: "YCbCrPositioning",
    +    0x0214: "ReferenceBlackWhite",
    +    0x02bc: "XMLPacket",
    +    0x1000: "RelatedImageFileFormat",
    +    0x1001: "RelatedImageWidth",
    +    0x1002: "RelatedImageLength",
    +    0x4746: "Rating",
    +    0x4749: "RatingPercent",
    +    0x800d: "ImageID",
    +    0x828d: "CFARepeatPatternDim",
    +    0x828e: "CFAPattern",
    +    0x828f: "BatteryLevel",
    +    0x8298: "Copyright",
    +    0x829a: "ExposureTime",
    +    0x829d: "FNumber",
    +    0x83bb: "IPTCNAA",
    +    0x8649: "ImageResources",
    +    0x8769: "ExifOffset",
    +    0x8773: "InterColorProfile",
    +    0x8822: "ExposureProgram",
    +    0x8824: "SpectralSensitivity",
    +    0x8825: "GPSInfo",
    +    0x8827: "ISOSpeedRatings",
    +    0x8828: "OECF",
    +    0x8829: "Interlace",
    +    0x882a: "TimeZoneOffset",
    +    0x882b: "SelfTimerMode",
    +    0x9000: "ExifVersion",
    +    0x9003: "DateTimeOriginal",
    +    0x9004: "DateTimeDigitized",
    +    0x9101: "ComponentsConfiguration",
    +    0x9102: "CompressedBitsPerPixel",
    +    0x9201: "ShutterSpeedValue",
    +    0x9202: "ApertureValue",
    +    0x9203: "BrightnessValue",
    +    0x9204: "ExposureBiasValue",
    +    0x9205: "MaxApertureValue",
    +    0x9206: "SubjectDistance",
    +    0x9207: "MeteringMode",
    +    0x9208: "LightSource",
    +    0x9209: "Flash",
    +    0x920a: "FocalLength",
    +    0x920b: "FlashEnergy",
    +    0x920c: "SpatialFrequencyResponse",
    +    0x920d: "Noise",
    +    0x9211: "ImageNumber",
    +    0x9212: "SecurityClassification",
    +    0x9213: "ImageHistory",
    +    0x9214: "SubjectLocation",
    +    0x9215: "ExposureIndex",
    +    0x9216: "TIFF/EPStandardID",
    +    0x927c: "MakerNote",
    +    0x9286: "UserComment",
    +    0x9290: "SubsecTime",
    +    0x9291: "SubsecTimeOriginal",
    +    0x9292: "SubsecTimeDigitized",
    +    0x9c9b: "XPTitle",
    +    0x9c9c: "XPComment",
    +    0x9c9d: "XPAuthor",
    +    0x9c9e: "XPKeywords",
    +    0x9c9f: "XPSubject",
    +    0xa000: "FlashPixVersion",
    +    0xa001: "ColorSpace",
    +    0xa002: "ExifImageWidth",
    +    0xa003: "ExifImageHeight",
    +    0xa004: "RelatedSoundFile",
    +    0xa005: "ExifInteroperabilityOffset",
    +    0xa20b: "FlashEnergy",
    +    0xa20c: "SpatialFrequencyResponse",
    +    0xa20e: "FocalPlaneXResolution",
    +    0xa20f: "FocalPlaneYResolution",
    +    0xa210: "FocalPlaneResolutionUnit",
    +    0xa214: "SubjectLocation",
    +    0xa215: "ExposureIndex",
    +    0xa217: "SensingMethod",
    +    0xa300: "FileSource",
    +    0xa301: "SceneType",
    +    0xa302: "CFAPattern",
    +    0xa401: "CustomRendered",
    +    0xa402: "ExposureMode",
    +    0xa403: "WhiteBalance",
    +    0xa404: "DigitalZoomRatio",
    +    0xa405: "FocalLengthIn35mmFilm",
    +    0xa406: "SceneCaptureType",
    +    0xa407: "GainControl",
    +    0xa408: "Contrast",
    +    0xa409: "Saturation",
    +    0xa40a: "Sharpness",
    +    0xa40b: "DeviceSettingDescription",
    +    0xa40c: "SubjectDistanceRange",
    +    0xa420: "ImageUniqueID",
    +    0xa430: "CameraOwnerName",
    +    0xa431: "BodySerialNumber",
    +    0xa432: "LensSpecification",
    +    0xa433: "LensMake",
    +    0xa434: "LensModel",
    +    0xa435: "LensSerialNumber",
    +    0xa500: "Gamma",
    +    0xc4a5: "PrintImageMatching",
    +    0xc612: "DNGVersion",
    +    0xc613: "DNGBackwardVersion",
    +    0xc614: "UniqueCameraModel",
    +    0xc615: "LocalizedCameraModel",
    +    0xc616: "CFAPlaneColor",
    +    0xc617: "CFALayout",
    +    0xc618: "LinearizationTable",
    +    0xc619: "BlackLevelRepeatDim",
    +    0xc61a: "BlackLevel",
    +    0xc61b: "BlackLevelDeltaH",
    +    0xc61c: "BlackLevelDeltaV",
    +    0xc61d: "WhiteLevel",
    +    0xc61e: "DefaultScale",
    +    0xc61f: "DefaultCropOrigin",
    +    0xc620: "DefaultCropSize",
    +    0xc621: "ColorMatrix1",
    +    0xc622: "ColorMatrix2",
    +    0xc623: "CameraCalibration1",
    +    0xc624: "CameraCalibration2",
    +    0xc625: "ReductionMatrix1",
    +    0xc626: "ReductionMatrix2",
    +    0xc627: "AnalogBalance",
    +    0xc628: "AsShotNeutral",
    +    0xc629: "AsShotWhiteXY",
    +    0xc62a: "BaselineExposure",
    +    0xc62b: "BaselineNoise",
    +    0xc62c: "BaselineSharpness",
    +    0xc62d: "BayerGreenSplit",
    +    0xc62e: "LinearResponseLimit",
    +    0xc62f: "CameraSerialNumber",
    +    0xc630: "LensInfo",
    +    0xc631: "ChromaBlurRadius",
    +    0xc632: "AntiAliasStrength",
    +    0xc633: "ShadowScale",
    +    0xc634: "DNGPrivateData",
    +    0xc635: "MakerNoteSafety",
    +    0xc65a: "CalibrationIlluminant1",
    +    0xc65b: "CalibrationIlluminant2",
    +    0xc65c: "BestQualityScale",
    +    0xc65d: "RawDataUniqueID",
    +    0xc68b: "OriginalRawFileName",
    +    0xc68c: "OriginalRawFileData",
    +    0xc68d: "ActiveArea",
    +    0xc68e: "MaskedAreas",
    +    0xc68f: "AsShotICCProfile",
    +    0xc690: "AsShotPreProfileMatrix",
    +    0xc691: "CurrentICCProfile",
    +    0xc692: "CurrentPreProfileMatrix",
    +    0xc6bf: "ColorimetricReference",
    +    0xc6f3: "CameraCalibrationSignature",
    +    0xc6f4: "ProfileCalibrationSignature",
    +    0xc6f6: "AsShotProfileName",
    +    0xc6f7: "NoiseReductionApplied",
    +    0xc6f8: "ProfileName",
    +    0xc6f9: "ProfileHueSatMapDims",
    +    0xc6fa: "ProfileHueSatMapData1",
    +    0xc6fb: "ProfileHueSatMapData2",
    +    0xc6fc: "ProfileToneCurve",
    +    0xc6fd: "ProfileEmbedPolicy",
    +    0xc6fe: "ProfileCopyright",
    +    0xc714: "ForwardMatrix1",
    +    0xc715: "ForwardMatrix2",
    +    0xc716: "PreviewApplicationName",
    +    0xc717: "PreviewApplicationVersion",
    +    0xc718: "PreviewSettingsName",
    +    0xc719: "PreviewSettingsDigest",
    +    0xc71a: "PreviewColorSpace",
    +    0xc71b: "PreviewDateTime",
    +    0xc71c: "RawImageDigest",
    +    0xc71d: "OriginalRawFileDigest",
    +    0xc71e: "SubTileBlockSize",
    +    0xc71f: "RowInterleaveFactor",
    +    0xc725: "ProfileLookTableDims",
    +    0xc726: "ProfileLookTableData",
    +    0xc740: "OpcodeList1",
    +    0xc741: "OpcodeList2",
    +    0xc74e: "OpcodeList3",
    +    0xc761: "NoiseProfile"
    +}
    +
    +##
    +# Maps EXIF GPS tags to tag names.
    +
    +GPSTAGS = {
    +    0: "GPSVersionID",
    +    1: "GPSLatitudeRef",
    +    2: "GPSLatitude",
    +    3: "GPSLongitudeRef",
    +    4: "GPSLongitude",
    +    5: "GPSAltitudeRef",
    +    6: "GPSAltitude",
    +    7: "GPSTimeStamp",
    +    8: "GPSSatellites",
    +    9: "GPSStatus",
    +    10: "GPSMeasureMode",
    +    11: "GPSDOP",
    +    12: "GPSSpeedRef",
    +    13: "GPSSpeed",
    +    14: "GPSTrackRef",
    +    15: "GPSTrack",
    +    16: "GPSImgDirectionRef",
    +    17: "GPSImgDirection",
    +    18: "GPSMapDatum",
    +    19: "GPSDestLatitudeRef",
    +    20: "GPSDestLatitude",
    +    21: "GPSDestLongitudeRef",
    +    22: "GPSDestLongitude",
    +    23: "GPSDestBearingRef",
    +    24: "GPSDestBearing",
    +    25: "GPSDestDistanceRef",
    +    26: "GPSDestDistance",
    +    27: "GPSProcessingMethod",
    +    28: "GPSAreaInformation",
    +    29: "GPSDateStamp",
    +    30: "GPSDifferential",
    +    31: "GPSHPositioningError",
    +}
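TAGS and GPSTAGS only map numeric ids to names; pairing them with the JPEG plugin's _getexif() gives readable metadata. A sketch (the file name is made up, and _getexif is a private detail of JpegImagePlugin, not of this module):

    from PIL import Image
    from PIL.ExifTags import TAGS, GPSTAGS

    im = Image.open("photo.jpg")                      # hypothetical JPEG with EXIF
    raw = im._getexif() or {}
    exif = {TAGS.get(k, k): v for k, v in raw.items()}
    gps = {GPSTAGS.get(k, k): v
           for k, v in exif.get("GPSInfo", {}).items()}
    print(exif.get("DateTimeOriginal"), gps.get("GPSLatitude"))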
    diff --git a/server/www/packages/packages-linux/x64/PIL/FitsStubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FitsStubImagePlugin.py
    new file mode 100644
    index 0000000..7aefff2
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/FitsStubImagePlugin.py
    @@ -0,0 +1,76 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# FITS stub adapter
    +#
    +# Copyright (c) 1998-2003 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image, ImageFile
    +
    +_handler = None
    +
    +##
    +# Install application-specific FITS image handler.
    +#
    +# @param handler Handler object.
    +
    +
    +def register_handler(handler):
    +    global _handler
    +    _handler = handler
    +
    +# --------------------------------------------------------------------
    +# Image adapter
    +
    +
    +def _accept(prefix):
    +    return prefix[:6] == b"SIMPLE"
    +
    +
    +class FITSStubImageFile(ImageFile.StubImageFile):
    +
    +    format = "FITS"
    +    format_description = "FITS"
    +
    +    def _open(self):
    +
    +        offset = self.fp.tell()
    +
    +        if not _accept(self.fp.read(6)):
    +            raise SyntaxError("Not a FITS file")
    +
    +        # FIXME: add more sanity checks here; mandatory header items
    +        # include SIMPLE, BITPIX, NAXIS, etc.
    +
    +        self.fp.seek(offset)
    +
    +        # make something up
    +        self.mode = "F"
    +        self.size = 1, 1
    +
    +        loader = self._load()
    +        if loader:
    +            loader.open(self)
    +
    +    def _load(self):
    +        return _handler
    +
    +
    +def _save(im, fp, filename):
+    if _handler is None or not hasattr(_handler, "save"):
    +        raise IOError("FITS save handler not installed")
    +    _handler.save(im, fp, filename)
    +
    +
    +# --------------------------------------------------------------------
    +# Registry
    +
    +Image.register_open(FITSStubImageFile.format, FITSStubImageFile, _accept)
    +Image.register_save(FITSStubImageFile.format, _save)
    +
    +Image.register_extension(FITSStubImageFile.format, ".fit")
    +Image.register_extension(FITSStubImageFile.format, ".fits")
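The save path mirrors the open path: _save() refuses to run until a handler exposing a save() method is registered. A sketch of wiring one up (the delegation target is an assumption; Pillow itself ships no FITS codec):

    from PIL import Image, FitsStubImagePlugin

    class FitsSaveHandler(object):
        def save(self, im, fp, filename):
            # hypothetical: hand the pixels to a real FITS writer here
            raise NotImplementedError("plug in astropy or similar")

    FitsStubImagePlugin.register_handler(FitsSaveHandler())
    Image.new("F", (1, 1)).save("out.fits")   # dispatches to FitsSaveHandler.save()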
    diff --git a/server/www/packages/packages-linux/x64/PIL/FliImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FliImagePlugin.py
    new file mode 100644
    index 0000000..a07dc29
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/FliImagePlugin.py
    @@ -0,0 +1,188 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# FLI/FLC file handling.
    +#
    +# History:
    +#       95-09-01 fl     Created
    +#       97-01-03 fl     Fixed parser, setup decoder tile
    +#       98-07-15 fl     Renamed offset attribute to avoid name clash
    +#
    +# Copyright (c) Secret Labs AB 1997-98.
    +# Copyright (c) Fredrik Lundh 1995-97.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +
    +from PIL import Image, ImageFile, ImagePalette, _binary
    +
    +__version__ = "0.2"
    +
    +i8 = _binary.i8
    +i16 = _binary.i16le
    +i32 = _binary.i32le
    +o8 = _binary.o8
    +
    +
    +#
    +# decoder
    +
    +def _accept(prefix):
    +    return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12]
    +
    +
    +##
    +# Image plugin for the FLI/FLC animation format.  Use the seek
    +# method to load individual frames.
    +
    +class FliImageFile(ImageFile.ImageFile):
    +
    +    format = "FLI"
    +    format_description = "Autodesk FLI/FLC Animation"
    +
    +    def _open(self):
    +
    +        # HEAD
    +        s = self.fp.read(128)
    +        magic = i16(s[4:6])
    +        if not (magic in [0xAF11, 0xAF12] and
    +                i16(s[14:16]) in [0, 3] and  # flags
    +                s[20:22] == b"\x00\x00"):  # reserved
    +            raise SyntaxError("not an FLI/FLC file")
    +
    +        # image characteristics
    +        self.mode = "P"
    +        self.size = i16(s[8:10]), i16(s[10:12])
    +
    +        # animation speed
    +        duration = i32(s[16:20])
    +        if magic == 0xAF11:
    +            duration = (duration * 1000) / 70
    +        self.info["duration"] = duration
    +
    +        # look for palette
    +        palette = [(a, a, a) for a in range(256)]
    +
    +        s = self.fp.read(16)
    +
    +        self.__offset = 128
    +
    +        if i16(s[4:6]) == 0xF100:
    +            # prefix chunk; ignore it
    +            self.__offset = self.__offset + i32(s)
    +            s = self.fp.read(16)
    +
    +        if i16(s[4:6]) == 0xF1FA:
    +            # look for palette chunk
    +            s = self.fp.read(6)
    +            if i16(s[4:6]) == 11:
    +                self._palette(palette, 2)
    +            elif i16(s[4:6]) == 4:
    +                self._palette(palette, 0)
    +
    +        palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette]
    +        self.palette = ImagePalette.raw("RGB", b"".join(palette))
    +
    +        # set things up to decode first frame
    +        self.__frame = -1
    +        self.__fp = self.fp
    +        self.__rewind = self.fp.tell()
    +        self._n_frames = None
    +        self._is_animated = None
    +        self.seek(0)
    +
    +    def _palette(self, palette, shift):
    +        # load palette
    +
    +        i = 0
    +        for e in range(i16(self.fp.read(2))):
    +            s = self.fp.read(2)
    +            i = i + i8(s[0])
    +            n = i8(s[1])
    +            if n == 0:
    +                n = 256
    +            s = self.fp.read(n * 3)
    +            for n in range(0, len(s), 3):
    +                r = i8(s[n]) << shift
    +                g = i8(s[n+1]) << shift
    +                b = i8(s[n+2]) << shift
    +                palette[i] = (r, g, b)
    +                i += 1
    +
    +    @property
    +    def n_frames(self):
    +        if self._n_frames is None:
    +            current = self.tell()
    +            try:
    +                while True:
    +                    self.seek(self.tell() + 1)
    +            except EOFError:
    +                self._n_frames = self.tell() + 1
    +            self.seek(current)
    +        return self._n_frames
    +
    +    @property
    +    def is_animated(self):
    +        if self._is_animated is None:
    +            current = self.tell()
    +
    +            try:
    +                self.seek(1)
    +                self._is_animated = True
    +            except EOFError:
    +                self._is_animated = False
    +
    +            self.seek(current)
    +        return self._is_animated
    +
    +    def seek(self, frame):
    +        if frame == self.__frame:
    +            return
    +        if frame < self.__frame:
    +            self._seek(0)
    +
    +        last_frame = self.__frame
    +        for f in range(self.__frame + 1, frame + 1):
    +            try:
    +                self._seek(f)
    +            except EOFError:
    +                self.seek(last_frame)
    +                raise EOFError("no more images in FLI file")
    +
    +    def _seek(self, frame):
    +        if frame == 0:
    +            self.__frame = -1
    +            self.__fp.seek(self.__rewind)
    +            self.__offset = 128
    +
    +        if frame != self.__frame + 1:
    +            raise ValueError("cannot seek to frame %d" % frame)
    +        self.__frame = frame
    +
    +        # move to next frame
    +        self.fp = self.__fp
    +        self.fp.seek(self.__offset)
    +
    +        s = self.fp.read(4)
    +        if not s:
    +            raise EOFError
    +
    +        framesize = i32(s)
    +
    +        self.decodermaxblock = framesize
    +        self.tile = [("fli", (0, 0)+self.size, self.__offset, None)]
    +
    +        self.__offset += framesize
    +
    +    def tell(self):
    +        return self.__frame
    +
    +#
    +# registry
    +
    +Image.register_open(FliImageFile.format, FliImageFile, _accept)
    +
    +Image.register_extension(FliImageFile.format, ".fli")
    +Image.register_extension(FliImageFile.format, ".flc")
    diff --git a/server/www/packages/packages-linux/x64/PIL/FontFile.py b/server/www/packages/packages-linux/x64/PIL/FontFile.py
    new file mode 100644
    index 0000000..db8e6be
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/FontFile.py
    @@ -0,0 +1,115 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# base class for raster font file parsers
    +#
    +# history:
    +# 1997-06-05 fl   created
    +# 1997-08-19 fl   restrict image width
    +#
    +# Copyright (c) 1997-1998 by Secret Labs AB
    +# Copyright (c) 1997-1998 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +import os
    +from PIL import Image, _binary
    +
    +WIDTH = 800
    +
    +
    +def puti16(fp, values):
    +    # write network order (big-endian) 16-bit sequence
    +    for v in values:
    +        if v < 0:
    +            v += 65536
    +        fp.write(_binary.o16be(v))
    +
    +
    +##
    +# Base class for raster font file handlers.
    +
    +class FontFile(object):
    +
    +    bitmap = None
    +
    +    def __init__(self):
    +
    +        self.info = {}
    +        self.glyph = [None] * 256
    +
    +    def __getitem__(self, ix):
    +        return self.glyph[ix]
    +
    +    def compile(self):
    +        "Create metrics and bitmap"
    +
    +        if self.bitmap:
    +            return
    +
    +        # create bitmap large enough to hold all data
    +        h = w = maxwidth = 0
    +        lines = 1
    +        for glyph in self:
    +            if glyph:
    +                d, dst, src, im = glyph
    +                h = max(h, src[3] - src[1])
    +                w = w + (src[2] - src[0])
    +                if w > WIDTH:
    +                    lines += 1
    +                    w = (src[2] - src[0])
    +                maxwidth = max(maxwidth, w)
    +
    +        xsize = maxwidth
    +        ysize = lines * h
    +
    +        if xsize == 0 and ysize == 0:
    +            return ""
    +
    +        self.ysize = h
    +
    +        # paste glyphs into bitmap
    +        self.bitmap = Image.new("1", (xsize, ysize))
    +        self.metrics = [None] * 256
    +        x = y = 0
    +        for i in range(256):
    +            glyph = self[i]
    +            if glyph:
    +                d, dst, src, im = glyph
    +                xx = src[2] - src[0]
    +                # yy = src[3] - src[1]
    +                x0, y0 = x, y
    +                x = x + xx
    +                if x > WIDTH:
    +                    x, y = 0, y + h
    +                    x0, y0 = x, y
    +                    x = xx
    +                s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0
    +                self.bitmap.paste(im.crop(src), s)
    +                # print chr(i), dst, s
    +                self.metrics[i] = d, dst, s
    +
    +    def save(self, filename):
    +        "Save font"
    +
    +        self.compile()
    +
    +        # font data
    +        self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG")
    +
    +        # font metrics
    +        fp = open(os.path.splitext(filename)[0] + ".pil", "wb")
    +        fp.write(b"PILfont\n")
    +        fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii'))  # HACK!!!
    +        fp.write(b"DATA\n")
    +        for id in range(256):
    +            m = self.metrics[id]
    +            if not m:
    +                puti16(fp, [0] * 10)
    +            else:
    +                puti16(fp, m[0] + m[1] + m[2])
    +        fp.close()
    +
    +# End of file
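FontFile is only a base class: a parser fills self.glyph[] and then compile()/save() produce the .pil/.pbm pair. A toy subclass illustrates the glyph record layout this code expects, i.e. (advance, destination box, source box, image) with 2+4+4 metric values; the concrete numbers are invented:

    from PIL import Image, FontFile

    class OneGlyphFont(FontFile.FontFile):
        def __init__(self):
            FontFile.FontFile.__init__(self)
            im = Image.new("1", (8, 8), 1)            # 8x8 solid block
            self.glyph[ord("A")] = ((8, 0),           # advance (dx, dy)
                                    (0, -8, 8, 0),    # destination box
                                    (0, 0, 8, 8),     # source box in `im`
                                    im)

    OneGlyphFont().save("oneglyph")   # writes oneglyph.pil and oneglyph.pbm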
    diff --git a/server/www/packages/packages-linux/x64/PIL/FpxImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FpxImagePlugin.py
    new file mode 100644
    index 0000000..aefc574
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/FpxImagePlugin.py
    @@ -0,0 +1,226 @@
    +#
    +# THIS IS WORK IN PROGRESS
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# FlashPix support for PIL
    +#
    +# History:
    +# 97-01-25 fl   Created (reads uncompressed RGB images only)
    +#
    +# Copyright (c) Secret Labs AB 1997.
    +# Copyright (c) Fredrik Lundh 1997.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +
    +from PIL import Image, ImageFile
    +from PIL.OleFileIO import i8, i32, MAGIC, OleFileIO
    +
    +__version__ = "0.1"
    +
    +
    +# we map from colour field tuples to (mode, rawmode) descriptors
    +MODES = {
    +    # opacity
+    (0x00007ffe,): ("A", "L"),
    +    # monochrome
    +    (0x00010000,): ("L", "L"),
    +    (0x00018000, 0x00017ffe): ("RGBA", "LA"),
    +    # photo YCC
    +    (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
    +    (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"),
    +    # standard RGB (NIFRGB)
    +    (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
    +    (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"),
    +}
    +
    +
    +#
    +# --------------------------------------------------------------------
    +
    +def _accept(prefix):
    +    return prefix[:8] == MAGIC
    +
    +
    +##
    +# Image plugin for the FlashPix images.
    +
    +class FpxImageFile(ImageFile.ImageFile):
    +
    +    format = "FPX"
    +    format_description = "FlashPix"
    +
    +    def _open(self):
    +        #
    +        # read the OLE directory and see if this is a likely
    +        # to be a FlashPix file
    +
    +        try:
    +            self.ole = OleFileIO(self.fp)
    +        except IOError:
    +            raise SyntaxError("not an FPX file; invalid OLE file")
    +
    +        if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
    +            raise SyntaxError("not an FPX file; bad root CLSID")
    +
    +        self._open_index(1)
    +
    +    def _open_index(self, index=1):
    +        #
    +        # get the Image Contents Property Set
    +
    +        prop = self.ole.getproperties([
    +            "Data Object Store %06d" % index,
    +            "\005Image Contents"
    +        ])
    +
    +        # size (highest resolution)
    +
    +        self.size = prop[0x1000002], prop[0x1000003]
    +
    +        size = max(self.size)
    +        i = 1
    +        while size > 64:
    +            size = size / 2
    +            i += 1
    +        self.maxid = i - 1
    +
    +        # mode.  instead of using a single field for this, flashpix
    +        # requires you to specify the mode for each channel in each
    +        # resolution subimage, and leaves it to the decoder to make
    +        # sure that they all match.  for now, we'll cheat and assume
    +        # that this is always the case.
    +
    +        id = self.maxid << 16
    +
    +        s = prop[0x2000002 | id]
    +
    +        colors = []
    +        for i in range(i32(s, 4)):
    +            # note: for now, we ignore the "uncalibrated" flag
    +            colors.append(i32(s, 8+i*4) & 0x7fffffff)
    +
    +        self.mode, self.rawmode = MODES[tuple(colors)]
    +
    +        # load JPEG tables, if any
    +        self.jpeg = {}
    +        for i in range(256):
    +            id = 0x3000001 | (i << 16)
    +            if id in prop:
    +                self.jpeg[i] = prop[id]
    +
    +        # print len(self.jpeg), "tables loaded"
    +
    +        self._open_subimage(1, self.maxid)
    +
    +    def _open_subimage(self, index=1, subimage=0):
    +        #
    +        # setup tile descriptors for a given subimage
    +
    +        stream = [
    +            "Data Object Store %06d" % index,
    +            "Resolution %04d" % subimage,
    +            "Subimage 0000 Header"
    +        ]
    +
    +        fp = self.ole.openstream(stream)
    +
    +        # skip prefix
    +        fp.read(28)
    +
    +        # header stream
    +        s = fp.read(36)
    +
    +        size = i32(s, 4), i32(s, 8)
    +        # tilecount = i32(s, 12)
    +        tilesize = i32(s, 16), i32(s, 20)
    +        # channels = i32(s, 24)
    +        offset = i32(s, 28)
    +        length = i32(s, 32)
    +
    +        # print size, self.mode, self.rawmode
    +
    +        if size != self.size:
    +            raise IOError("subimage mismatch")
    +
    +        # get tile descriptors
    +        fp.seek(28 + offset)
    +        s = fp.read(i32(s, 12) * length)
    +
    +        x = y = 0
    +        xsize, ysize = size
    +        xtile, ytile = tilesize
    +        self.tile = []
    +
    +        for i in range(0, len(s), length):
    +
    +            compression = i32(s, i+8)
    +
    +            if compression == 0:
    +                self.tile.append(("raw", (x, y, x+xtile, y+ytile),
    +                                 i32(s, i) + 28, (self.rawmode)))
    +
    +            elif compression == 1:
    +
    +                # FIXME: the fill decoder is not implemented
    +                self.tile.append(("fill", (x, y, x+xtile, y+ytile),
    +                                 i32(s, i) + 28, (self.rawmode, s[12:16])))
    +
    +            elif compression == 2:
    +
    +                internal_color_conversion = i8(s[14])
    +                jpeg_tables = i8(s[15])
    +                rawmode = self.rawmode
    +
    +                if internal_color_conversion:
    +                    # The image is stored as usual (usually YCbCr).
    +                    if rawmode == "RGBA":
    +                        # For "RGBA", data is stored as YCbCrA based on
    +                        # negative RGB. The following trick works around
    +                        # this problem :
    +                        jpegmode, rawmode = "YCbCrK", "CMYK"
    +                    else:
    +                        jpegmode = None  # let the decoder decide
    +
    +                else:
    +                    # The image is stored as defined by rawmode
    +                    jpegmode = rawmode
    +
    +                self.tile.append(("jpeg", (x, y, x+xtile, y+ytile),
    +                                 i32(s, i) + 28, (rawmode, jpegmode)))
    +
    +                # FIXME: jpeg tables are tile dependent; the prefix
    +                # data must be placed in the tile descriptor itself!
    +
    +                if jpeg_tables:
    +                    self.tile_prefix = self.jpeg[jpeg_tables]
    +
    +            else:
    +                raise IOError("unknown/invalid compression")
    +
    +            x = x + xtile
    +            if x >= xsize:
    +                x, y = 0, y + ytile
    +                if y >= ysize:
    +                    break  # isn't really required
    +
    +        self.stream = stream
    +        self.fp = None
    +
    +    def load(self):
    +
    +        if not self.fp:
    +            self.fp = self.ole.openstream(self.stream[:2] +
    +                                          ["Subimage 0000 Data"])
    +
    +        ImageFile.ImageFile.load(self)
    +
    +#
    +# --------------------------------------------------------------------
    +
    +Image.register_open(FpxImageFile.format, FpxImageFile, _accept)
    +
    +Image.register_extension(FpxImageFile.format, ".fpx")
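Since everything is pulled out of the OLE compound document, opening works like any other format as long as OleFileIO is importable (the file name is made up; note the header marks this reader as work in progress):

    from PIL import Image

    im = Image.open("scan.fpx")   # hypothetical FlashPix file
    im.load()                     # reads "Subimage 0000 Data" from the OLE store
    print(im.mode, im.size)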
    diff --git a/server/www/packages/packages-linux/x64/PIL/FtexImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/FtexImagePlugin.py
    new file mode 100644
    index 0000000..f3a2d7f
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/FtexImagePlugin.py
    @@ -0,0 +1,96 @@
    +"""
    +A Pillow loader for .ftc and .ftu files (FTEX)
+Jerome Leclanche <jerome@leclan.ch>
    +
    +The contents of this file are hereby released in the public domain (CC0)
    +Full text of the CC0 license:
    +  https://creativecommons.org/publicdomain/zero/1.0/
    +
    +Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001
    +
    +The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
    +packed custom format called FTEX. This file format uses file extensions FTC and FTU.
    +* FTC files are compressed textures (using standard texture compression).
    +* FTU files are not compressed.
    +Texture File Format
+The FTC and FTU texture files both use the same format, called FTEX. This
    +has the following structure:
    +{header}
    +{format_directory}
    +{data}
    +Where:
    +{header} = { u32:magic, u32:version, u32:width, u32:height, u32:mipmap_count, u32:format_count }
    +
    +* The "magic" number is "FTEX".
    +* "width" and "height" are the dimensions of the texture.
    +* "mipmap_count" is the number of mipmaps in the texture.
    +* "format_count" is the number of texture formats (different versions of the same texture) in this file.
    +
    +{format_directory} = format_count * { u32:format, u32:where }
    +
    +The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB uncompressed textures.
    +The texture data for a format starts at the position "where" in the file.
    +
    +Each set of texture data in the file has the following structure:
    +{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
    +* "mipmap_size" is the number of bytes in that mip level. For compressed textures this is the
    +size of the texture data compressed with DXT1. For 24 bit uncompressed textures, this is 3 * width * height.
    +Following this are the image bytes for that mipmap level.
    +
    +Note: All data is stored in little-Endian (Intel) byte order.
    +"""
    +
    +import struct
    +from io import BytesIO
    +from PIL import Image, ImageFile
    +from PIL.DdsImagePlugin import _dxt1
    +
    +
    +MAGIC = b"FTEX"
    +FORMAT_DXT1 = 0
    +FORMAT_UNCOMPRESSED = 1
    +
    +
    +class FtexImageFile(ImageFile.ImageFile):
    +    format = "FTEX"
    +    format_description = "Texture File Format (IW2:EOC)"
    +
    +    def _open(self):
+        magic = struct.unpack("<I", self.fp.read(4))
+        version = struct.unpack("<i", self.fp.read(4))
+        self.size = struct.unpack("<2i", self.fp.read(8))
+        mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))
+
+        self.mode = "RGB"
+
+        # Only support single-format files. I don't know of any multi-format file.
+        assert format_count == 1
+
+        format, where = struct.unpack("<2i", self.fp.read(8))
+        self.fp.seek(where)
+        mipmap_size, = struct.unpack("<i", self.fp.read(4))
+
+        data = self.fp.read(mipmap_size)
+
+        if format == FORMAT_DXT1:
+            self.mode = "RGBA"
+            self.tile = [("raw", (0, 0) + self.size, 0, ('RGBA', 0, 1))]
+            data = _dxt1(BytesIO(data), self.size[0], self.size[1])
+        elif format == FORMAT_UNCOMPRESSED:
+            self.tile = [("raw", (0, 0) + self.size, 0, ("RGB", 0, 1))]
+        else:
+            raise ValueError("Invalid texture compression format: %r" % (format))
+
+        self.fp.close()
+        self.fp = BytesIO(data)
+
+    def load_seek(self, pos):
+        pass
+
+
+def _accept(prefix):
+    return prefix[:4] == MAGIC
+
+
+Image.register_open(FtexImageFile.format, FtexImageFile, _accept)
+Image.register_extension(FtexImageFile.format, ".ftc")
+Image.register_extension(FtexImageFile.format, ".ftu")
diff --git a/server/www/packages/packages-linux/x64/PIL/GbrImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/GbrImagePlugin.py
new file mode 100644
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/GbrImagePlugin.py
@@ -0,0 +1,95 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# load a GIMP brush file
+#
+# History:
+#       96-03-14 fl     Created
+#       16-01-08 es     Version 2
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+# Copyright (c) Eric Soroos 2016.
+#
+# See the README file for information on usage and redistribution.
+#
+#
+# See https://github.com/GNOME/gimp/blob/master/devel-docs/gbr.txt for
+# format documentation.
+#
+# This code Interprets version 1 and 2 gbr files.
+# Version 1 files are obsolete, and should not be used for new
+#   brushes.
+# Version 2 files are saved by GIMP v2.8 (at least)
+# Version 3 files have a format specifier of 18 for 16bit floats in
+#   the color depth field. This is currently unsupported by Pillow.
+
+from PIL import Image, ImageFile, _binary
+
+i32 = _binary.i32be
+
+
+def _accept(prefix):
+    return len(prefix) >= 8 and i32(prefix[:4]) >= 20 and i32(prefix[4:8]) in (1, 2)
    +
    +
    +##
    +# Image plugin for the GIMP brush format.
    +
    +class GbrImageFile(ImageFile.ImageFile):
    +
    +    format = "GBR"
    +    format_description = "GIMP brush file"
    +
    +    def _open(self):
    +        header_size = i32(self.fp.read(4))
    +        version = i32(self.fp.read(4))
    +        if header_size < 20:
    +            raise SyntaxError("not a GIMP brush")
    +        if version not in (1, 2):
    +            raise SyntaxError("Unsupported GIMP brush version: %s" % version)
    +
    +        width = i32(self.fp.read(4))
    +        height = i32(self.fp.read(4))
    +        color_depth = i32(self.fp.read(4))
    +        if width <= 0 or height <= 0:
    +            raise SyntaxError("not a GIMP brush")
    +        if color_depth not in (1, 4):
    +            raise SyntaxError("Unsupported GIMP brush color depth: %s" % color_depth)
    +
    +        if version == 1:
    +            comment_length = header_size-20
    +        else:
    +            comment_length = header_size-28
    +            magic_number = self.fp.read(4)
    +            if magic_number != b'GIMP':
    +                raise SyntaxError("not a GIMP brush, bad magic number")
    +            self.info['spacing'] = i32(self.fp.read(4))
    +
    +        comment = self.fp.read(comment_length)[:-1]
    +
    +        if color_depth == 1:
    +            self.mode = "L"
    +        else:
    +            self.mode = 'RGBA'
    +
    +        self.size = width, height
    +
    +        self.info["comment"] = comment
    +
    +        # Image might not be small
    +        Image._decompression_bomb_check(self.size)
    +
    +        # Data is an uncompressed block of w * h * bytes/pixel
    +        self._data_size = width * height * color_depth
    +
    +    def load(self):
    +        self.im = Image.core.new(self.mode, self.size)
    +        self.frombytes(self.fp.read(self._data_size))
    +
    +#
    +# registry
    +
    +Image.register_open(GbrImageFile.format, GbrImageFile, _accept)
    +Image.register_extension(GbrImageFile.format, ".gbr")
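
With the registration calls above in place, brushes load through the normal Image.open path once PIL initialises its plugins. A usage sketch (the .gbr file name is illustrative):

from PIL import Image

im = Image.open("paintbrush.gbr")   # hypothetical GIMP brush file
print(im.mode, im.size)             # "L" for depth 1, "RGBA" for depth 4
print(im.info["comment"])           # always present
print(im.info.get("spacing"))       # version 2 files only
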
    diff --git a/server/www/packages/packages-linux/x64/PIL/GdImageFile.py b/server/www/packages/packages-linux/x64/PIL/GdImageFile.py
    new file mode 100644
    index 0000000..ae3500f
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/GdImageFile.py
    @@ -0,0 +1,92 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# GD file handling
    +#
    +# History:
    +# 1996-04-12 fl   Created
    +#
    +# Copyright (c) 1997 by Secret Labs AB.
    +# Copyright (c) 1996 by Fredrik Lundh.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +
    +# NOTE: This format cannot be automatically recognized, so the
    +# class is not registered for use with Image.open().  To open a
    +# gd file, use the GdImageFile.open() function instead.
    +
    +# THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE.  This
    +# implementation is provided for convenience and demonstrational
    +# purposes only.
    +
    +
    +from PIL import ImageFile, ImagePalette, _binary
    +from PIL._util import isPath
    +
    +__version__ = "0.1"
    +
    +try:
    +    import builtins
    +except ImportError:
    +    import __builtin__
    +    builtins = __builtin__
    +
    +i16 = _binary.i16be
    +
    +
    +##
    +# Image plugin for the GD uncompressed format.  Note that this format
    +# is not supported by the standard Image.open function.  To use
    +# this plugin, you have to import the GdImageFile module and
    +# use the GdImageFile.open function.
    +
    +class GdImageFile(ImageFile.ImageFile):
    +
    +    format = "GD"
    +    format_description = "GD uncompressed images"
    +
    +    def _open(self):
    +
    +        # Header
    +        s = self.fp.read(775)
    +
    +        self.mode = "L"  # FIXME: "P"
    +        self.size = i16(s[0:2]), i16(s[2:4])
    +
    +        # transparency index
    +        tindex = i16(s[5:7])
    +        if tindex < 256:
    +            self.info["transparent"] = tindex
    +
    +        self.palette = ImagePalette.raw("RGB", s[7:])
    +
    +        self.tile = [("raw", (0, 0)+self.size, 775, ("L", 0, -1))]
    +
    +
    +##
    +# Load texture from a GD image file.
    +#
    +# @param filename GD file name, or an opened file handle.
    +# @param mode Optional mode.  In this version, if the mode argument
    +#     is given, it must be "r".
    +# @return An image instance.
    +# @exception IOError If the image could not be read.
    +
    +def open(fp, mode="r"):
    +
    +    if mode != "r":
    +        raise ValueError("bad mode")
    +
    +    if isPath(fp):
    +        filename = fp
    +        fp = builtins.open(fp, "rb")
    +    else:
    +        filename = ""
    +
    +    try:
    +        return GdImageFile(fp, filename)
    +    except SyntaxError:
    +        raise IOError("cannot identify this image file")
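
Because GD files carry no magic number, the module-level open() above is the only entry point; Image.open will never dispatch here. A usage sketch (file name illustrative):

from PIL import GdImageFile

im = GdImageFile.open("chart.gd")   # hypothetical uncompressed GD file
print(im.size, im.info.get("transparent"))
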
    diff --git a/server/www/packages/packages-linux/x64/PIL/GifImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/GifImagePlugin.py
    new file mode 100644
    index 0000000..6bca4dd
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/GifImagePlugin.py
    @@ -0,0 +1,698 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# GIF file handling
    +#
    +# History:
    +# 1995-09-01 fl   Created
    +# 1996-12-14 fl   Added interlace support
    +# 1996-12-30 fl   Added animation support
    +# 1997-01-05 fl   Added write support, fixed local colour map bug
    +# 1997-02-23 fl   Make sure to load raster data in getdata()
    +# 1997-07-05 fl   Support external decoder (0.4)
    +# 1998-07-09 fl   Handle all modes when saving (0.5)
    +# 1998-07-15 fl   Renamed offset attribute to avoid name clash
    +# 2001-04-16 fl   Added rewind support (seek to frame 0) (0.6)
    +# 2001-04-17 fl   Added palette optimization (0.7)
    +# 2002-06-06 fl   Added transparency support for save (0.8)
    +# 2004-02-24 fl   Disable interlacing for small images
    +#
    +# Copyright (c) 1997-2004 by Secret Labs AB
    +# Copyright (c) 1995-2004 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image, ImageFile, ImagePalette, \
    +                ImageChops, ImageSequence, _binary
    +
    +__version__ = "0.9"
    +
    +
    +# --------------------------------------------------------------------
    +# Helpers
    +
    +i8 = _binary.i8
    +i16 = _binary.i16le
    +o8 = _binary.o8
    +o16 = _binary.o16le
    +
    +
    +# --------------------------------------------------------------------
    +# Identify/read GIF files
    +
    +def _accept(prefix):
    +    return prefix[:6] in [b"GIF87a", b"GIF89a"]
    +
    +
    +##
    +# Image plugin for GIF images.  This plugin supports both GIF87 and
    +# GIF89 images.
    +
    +class GifImageFile(ImageFile.ImageFile):
    +
    +    format = "GIF"
    +    format_description = "Compuserve GIF"
    +    global_palette = None
    +
    +    def data(self):
    +        s = self.fp.read(1)
    +        if s and i8(s):
    +            return self.fp.read(i8(s))
    +        return None
    +
    +    def _open(self):
    +
    +        # Screen
    +        s = self.fp.read(13)
    +        if s[:6] not in [b"GIF87a", b"GIF89a"]:
    +            raise SyntaxError("not a GIF file")
    +
    +        self.info["version"] = s[:6]
    +        self.size = i16(s[6:]), i16(s[8:])
    +        self.tile = []
    +        flags = i8(s[10])
    +        bits = (flags & 7) + 1
    +
    +        if flags & 128:
    +            # get global palette
    +            self.info["background"] = i8(s[11])
    +            # check if palette contains colour indices
    +            p = self.fp.read(3 << bits)
    +            for i in range(0, len(p), 3):
    +                if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])):
    +                    p = ImagePalette.raw("RGB", p)
    +                    self.global_palette = self.palette = p
    +                    break
    +
    +        self.__fp = self.fp  # FIXME: hack
    +        self.__rewind = self.fp.tell()
    +        self._n_frames = None
    +        self._is_animated = None
    +        self._seek(0)  # get ready to read first frame
    +
    +    @property
    +    def n_frames(self):
    +        if self._n_frames is None:
    +            current = self.tell()
    +            try:
    +                while True:
    +                    self.seek(self.tell() + 1)
    +            except EOFError:
    +                self._n_frames = self.tell() + 1
    +            self.seek(current)
    +        return self._n_frames
    +
    +    @property
    +    def is_animated(self):
    +        if self._is_animated is None:
    +            current = self.tell()
    +
    +            try:
    +                self.seek(1)
    +                self._is_animated = True
    +            except EOFError:
    +                self._is_animated = False
    +
    +            self.seek(current)
    +        return self._is_animated
    +
    +    def seek(self, frame):
    +        if frame == self.__frame:
    +            return
    +        if frame < self.__frame:
    +            self._seek(0)
    +
    +        last_frame = self.__frame
    +        for f in range(self.__frame + 1, frame + 1):
    +            try:
    +                self._seek(f)
    +            except EOFError:
    +                self.seek(last_frame)
    +                raise EOFError("no more images in GIF file")
    +
    +    def _seek(self, frame):
    +
    +        if frame == 0:
    +            # rewind
    +            self.__offset = 0
    +            self.dispose = None
    +            self.dispose_extent = [0, 0, 0, 0]  # x0, y0, x1, y1
    +            self.__frame = -1
    +            self.__fp.seek(self.__rewind)
    +            self._prev_im = None
    +            self.disposal_method = 0
    +        else:
    +            # ensure that the previous frame was loaded
    +            if not self.im:
    +                self.load()
    +
    +        if frame != self.__frame + 1:
    +            raise ValueError("cannot seek to frame %d" % frame)
    +        self.__frame = frame
    +
    +        self.tile = []
    +
    +        self.fp = self.__fp
    +        if self.__offset:
    +            # backup to last frame
    +            self.fp.seek(self.__offset)
    +            while self.data():
    +                pass
    +            self.__offset = 0
    +
    +        if self.dispose:
    +            self.im.paste(self.dispose, self.dispose_extent)
    +
    +        from copy import copy
    +        self.palette = copy(self.global_palette)
    +
    +        while True:
    +
    +            s = self.fp.read(1)
    +            if not s or s == b";":
    +                break
    +
    +            elif s == b"!":
    +                #
    +                # extensions
    +                #
    +                s = self.fp.read(1)
    +                block = self.data()
    +                if i8(s) == 249:
    +                    #
    +                    # graphic control extension
    +                    #
    +                    flags = i8(block[0])
    +                    if flags & 1:
    +                        self.info["transparency"] = i8(block[3])
    +                    self.info["duration"] = i16(block[1:3]) * 10
    +
    +                    # disposal method - find the value of bits 4 - 6
    +                    dispose_bits = 0b00011100 & flags
    +                    dispose_bits = dispose_bits >> 2
    +                    if dispose_bits:
    +                        # only set the dispose if it is not
    +                        # unspecified. I'm not sure if this is
    +                        # correct, but it seems to prevent the last
    +                        # frame from looking odd for some animations
    +                        self.disposal_method = dispose_bits
    +                elif i8(s) == 254:
    +                    #
    +                    # comment extension
    +                    #
    +                    self.info["comment"] = block
    +                elif i8(s) == 255:
    +                    #
    +                    # application extension
    +                    #
    +                    self.info["extension"] = block, self.fp.tell()
    +                    if block[:11] == b"NETSCAPE2.0":
    +                        block = self.data()
    +                        if len(block) >= 3 and i8(block[0]) == 1:
    +                            self.info["loop"] = i16(block[1:3])
    +                while self.data():
    +                    pass
    +
    +            elif s == b",":
    +                #
    +                # local image
    +                #
    +                s = self.fp.read(9)
    +
    +                # extent
    +                x0, y0 = i16(s[0:]), i16(s[2:])
    +                x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:])
    +                self.dispose_extent = x0, y0, x1, y1
    +                flags = i8(s[8])
    +
    +                interlace = (flags & 64) != 0
    +
    +                if flags & 128:
    +                    bits = (flags & 7) + 1
    +                    self.palette =\
    +                        ImagePalette.raw("RGB", self.fp.read(3 << bits))
    +
    +                # image data
    +                bits = i8(self.fp.read(1))
    +                self.__offset = self.fp.tell()
    +                self.tile = [("gif",
    +                             (x0, y0, x1, y1),
    +                             self.__offset,
    +                             (bits, interlace))]
    +                break
    +
    +            else:
    +                pass
    +                # raise IOError, "illegal GIF tag `%x`" % i8(s)
    +
    +        try:
    +            if self.disposal_method < 2:
    +                # do not dispose or none specified
    +                self.dispose = None
    +            elif self.disposal_method == 2:
    +                # replace with background colour
    +                self.dispose = Image.core.fill("P", self.size,
    +                                               self.info["background"])
    +            else:
    +                # replace with previous contents
    +                if self.im:
    +                    self.dispose = self.im.copy()
    +
    +            # only dispose the extent in this frame
    +            if self.dispose:
    +                self.dispose = self.dispose.crop(self.dispose_extent)
    +        except (AttributeError, KeyError):
    +            pass
    +
    +        if not self.tile:
    +            # self.__fp = None
    +            raise EOFError
    +
    +        self.mode = "L"
    +        if self.palette:
    +            self.mode = "P"
    +
    +    def tell(self):
    +        return self.__frame
    +
    +    def load_end(self):
    +        ImageFile.ImageFile.load_end(self)
    +
    +        # if the disposal method is 'do not dispose', transparent
    +        # pixels should show the content of the previous frame
    +        if self._prev_im and self.disposal_method == 1:
    +            # we do this by pasting the updated area onto the previous
    +            # frame which we then use as the current image content
    +            updated = self.im.crop(self.dispose_extent)
    +            self._prev_im.paste(updated, self.dispose_extent,
    +                                updated.convert('RGBA'))
    +            self.im = self._prev_im
    +        self._prev_im = self.im.copy()
    +
    +# --------------------------------------------------------------------
    +# Write GIF files
    +
    +try:
    +    import _imaging_gif
    +except ImportError:
    +    _imaging_gif = None
    +
    +RAWMODE = {
    +    "1": "L",
    +    "L": "L",
    +    "P": "P",
    +}
    +
    +
    +def _convert_mode(im, initial_call=False):
    +    # convert on the fly (EXPERIMENTAL -- I'm not sure PIL
    +    # should automatically convert images on save...)
    +    if Image.getmodebase(im.mode) == "RGB":
    +        if initial_call:
    +            palette_size = 256
    +            if im.palette:
    +                palette_size = len(im.palette.getdata()[1]) // 3
    +            return im.convert("P", palette=1, colors=palette_size)
    +        else:
    +            return im.convert("P")
    +    return im.convert("L")
    +
    +
    +def _save_all(im, fp, filename):
    +    _save(im, fp, filename, save_all=True)
    +
    +
    +def _save(im, fp, filename, save_all=False):
    +
    +    im.encoderinfo.update(im.info)
    +    if _imaging_gif:
    +        # call external driver
    +        try:
    +            _imaging_gif.save(im, fp, filename)
    +            return
    +        except IOError:
    +            pass  # write uncompressed file
    +
    +    if im.mode in RAWMODE:
    +        im_out = im.copy()
    +    else:
    +        im_out = _convert_mode(im, True)
    +
    +    # header
    +    try:
    +        palette = im.encoderinfo["palette"]
    +    except KeyError:
    +        palette = None
    +        im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True)
    +
    +    if save_all:
    +        previous = None
    +
    +        first_frame = None
    +        for im_frame in ImageSequence.Iterator(im):
    +            im_frame = _convert_mode(im_frame)
    +
    +            # To specify duration, add the time in milliseconds to getdata(),
    +            # e.g. getdata(im_frame, duration=1000)
    +            if not previous:
    +                # global header
    +                first_frame = getheader(im_frame, palette, im.encoderinfo)[0]
    +                first_frame += getdata(im_frame, (0, 0), **im.encoderinfo)
    +            else:
    +                if first_frame:
    +                    for s in first_frame:
    +                        fp.write(s)
    +                    first_frame = None
    +
    +                # delta frame
    +                delta = ImageChops.subtract_modulo(im_frame, previous.copy())
    +                bbox = delta.getbbox()
    +
    +                if bbox:
    +                    # compress difference
    +                    for s in getdata(im_frame.crop(bbox),
    +                                     bbox[:2], **im.encoderinfo):
    +                        fp.write(s)
    +                else:
    +                    # FIXME: what should we do in this case?
    +                    pass
    +            previous = im_frame
    +        if first_frame:
    +            save_all = False
    +    if not save_all:
    +        header = getheader(im_out, palette, im.encoderinfo)[0]
    +        for s in header:
    +            fp.write(s)
    +
    +        flags = 0
    +
    +        if get_interlace(im):
    +            flags = flags | 64
    +
    +        # local image header
    +        _get_local_header(fp, im, (0, 0), flags)
    +
    +        im_out.encoderconfig = (8, get_interlace(im))
    +        ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0,
    +                                      RAWMODE[im_out.mode])])
    +
    +        fp.write(b"\0")  # end of image data
    +
    +    fp.write(b";")  # end of file
    +
    +    if hasattr(fp, "flush"):
    +        fp.flush()
    +
    +
    +def get_interlace(im):
    +    try:
    +        interlace = im.encoderinfo["interlace"]
    +    except KeyError:
    +        interlace = 1
    +
    +    # workaround for @PIL153
    +    if min(im.size) < 16:
    +        interlace = 0
    +
    +    return interlace
    +
    +
    +def _get_local_header(fp, im, offset, flags):
    +    transparent_color_exists = False
    +    try:
    +        transparency = im.encoderinfo["transparency"]
    +    except KeyError:
    +        pass
    +    else:
    +        transparency = int(transparency)
    +        # optimize the block away if transparent color is not used
    +        transparent_color_exists = True
    +
    +        if _get_optimize(im, im.encoderinfo):
    +            used_palette_colors = _get_used_palette_colors(im)
    +
    +            # adjust the transparency index after optimize
    +            if len(used_palette_colors) < 256:
    +                for i in range(len(used_palette_colors)):
    +                    if used_palette_colors[i] == transparency:
    +                        transparency = i
    +                        transparent_color_exists = True
    +                        break
    +                    else:
    +                        transparent_color_exists = False
    +
    +    if "duration" in im.encoderinfo:
    +        duration = int(im.encoderinfo["duration"] / 10)
    +    else:
    +        duration = 0
    +    if transparent_color_exists or duration != 0:
    +        transparency_flag = 1 if transparent_color_exists else 0
    +        if not transparent_color_exists:
    +            transparency = 0
    +
    +        fp.write(b"!" +
    +                 o8(249) +                # extension intro
    +                 o8(4) +                  # length
    +                 o8(transparency_flag) +  # transparency info present
    +                 o16(duration) +          # duration
    +                 o8(transparency) +       # transparency index
    +                 o8(0))
    +
    +    if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]) <= 255:
    +        fp.write(b"!" +
    +                 o8(254) +                # extension intro
    +                 o8(len(im.encoderinfo["comment"])) +
    +                 im.encoderinfo["comment"] +
    +                 o8(0))
    +    if "loop" in im.encoderinfo:
    +        number_of_loops = im.encoderinfo["loop"]
    +        fp.write(b"!" +
    +                 o8(255) +                # extension intro
    +                 o8(11) +
    +                 b"NETSCAPE2.0" +
    +                 o8(3) +
    +                 o8(1) +
    +                 o16(number_of_loops) +   # number of loops
    +                 o8(0))
    +    fp.write(b"," +
    +             o16(offset[0]) +             # offset
    +             o16(offset[1]) +
    +             o16(im.size[0]) +            # size
    +             o16(im.size[1]) +
    +             o8(flags) +                  # flags
    +             o8(8))                       # bits
    +
    +
    +def _save_netpbm(im, fp, filename):
    +
    +    #
    +    # If you need real GIF compression and/or RGB quantization, you
    +    # can use the external NETPBM/PBMPLUS utilities.  See comments
    +    # below for information on how to enable this.
    +
    +    import os
    +    from subprocess import Popen, check_call, PIPE, CalledProcessError
    +    import tempfile
    +    file = im._dump()
    +
    +    if im.mode != "RGB":
    +        with open(filename, 'wb') as f:
    +            stderr = tempfile.TemporaryFile()
    +            check_call(["ppmtogif", file], stdout=f, stderr=stderr)
    +    else:
    +        with open(filename, 'wb') as f:
    +
    +            # Pipe ppmquant output into ppmtogif
    +            # "ppmquant 256 %s | ppmtogif > %s" % (file, filename)
    +            quant_cmd = ["ppmquant", "256", file]
    +            togif_cmd = ["ppmtogif"]
    +            stderr = tempfile.TemporaryFile()
    +            quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=stderr)
    +            stderr = tempfile.TemporaryFile()
    +            togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, stdout=f,
    +                               stderr=stderr)
    +
    +            # Allow ppmquant to receive SIGPIPE if ppmtogif exits
    +            quant_proc.stdout.close()
    +
    +            retcode = quant_proc.wait()
    +            if retcode:
    +                raise CalledProcessError(retcode, quant_cmd)
    +
    +            retcode = togif_proc.wait()
    +            if retcode:
    +                raise CalledProcessError(retcode, togif_cmd)
    +
    +    try:
    +        os.unlink(file)
    +    except OSError:
    +        pass
    +
    +
    +# --------------------------------------------------------------------
    +# GIF utilities
    +
    +def _get_optimize(im, info):
    +    return im.mode in ("P", "L") and info and info.get("optimize", 0)
    +
    +
    +def _get_used_palette_colors(im):
    +    used_palette_colors = []
    +
    +    # check which colors are used
    +    i = 0
    +    for count in im.histogram():
    +        if count:
    +            used_palette_colors.append(i)
    +        i += 1
    +
    +    return used_palette_colors
    +
    +
    +def getheader(im, palette=None, info=None):
    +    """Return a list of strings representing a GIF header"""
    +
    +    # Header Block
    +    # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp
    +
    +    version = b"87a"
    +    for extensionKey in ["transparency", "duration", "loop", "comment"]:
    +        if info and extensionKey in info:
    +            if ((extensionKey == "duration" and info[extensionKey] == 0) or
    +                (extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255))):
    +                continue
    +            version = b"89a"
    +            break
    +    else:
    +        if im.info.get("version") == "89a":
    +            version = b"89a"
    +
    +    header = [
    +        b"GIF"+version +        # signature + version
    +        o16(im.size[0]) +       # canvas width
    +        o16(im.size[1])         # canvas height
    +    ]
    +
    +    if im.mode == "P":
    +        if palette and isinstance(palette, bytes):
    +            source_palette = palette[:768]
    +        else:
    +            source_palette = im.im.getpalette("RGB")[:768]
    +    else:  # L-mode
    +        if palette and isinstance(palette, bytes):
    +            source_palette = palette[:768]
    +        else:
    +            source_palette = bytearray([i//3 for i in range(768)])
    +
    +    used_palette_colors = palette_bytes = None
    +
    +    if _get_optimize(im, info):
    +        used_palette_colors = _get_used_palette_colors(im)
    +
    +        # create the new palette if not every color is used
    +        if len(used_palette_colors) < 256:
    +            palette_bytes = b""
    +            new_positions = {}
    +
    +            i = 0
    +            # pick only the used colors from the palette
    +            for oldPosition in used_palette_colors:
    +                palette_bytes += source_palette[oldPosition*3:oldPosition*3+3]
    +                new_positions[oldPosition] = i
    +                i += 1
    +
    +            # replace the palette color id of all pixel with the new id
    +            image_bytes = bytearray(im.tobytes())
    +            for i in range(len(image_bytes)):
    +                image_bytes[i] = new_positions[image_bytes[i]]
    +            im.frombytes(bytes(image_bytes))
    +            new_palette_bytes = (palette_bytes +
    +                                 (768 - len(palette_bytes)) * b'\x00')
    +            im.putpalette(new_palette_bytes)
    +            im.palette = ImagePalette.ImagePalette("RGB",
    +                                                   palette=palette_bytes,
    +                                                   size=len(palette_bytes))
    +
    +    if not palette_bytes:
    +        palette_bytes = source_palette
    +
    +    # Logical Screen Descriptor
    +    # calculate the palette size for the header
    +    import math
    +    color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1
    +    if color_table_size < 0:
    +        color_table_size = 0
    +    # size of global color table + global color table flag
    +    header.append(o8(color_table_size + 128))
    +    # background + reserved/aspect
    +    if info and "background" in info:
    +        background = info["background"]
    +    elif "background" in im.info:
    +        # This elif is redundant within GifImagePlugin
    +        # since im.info parameters are bundled into the info dictionary
    +        # However, external scripts may call getheader directly
    +        # So this maintains earlier behaviour
    +        background = im.info["background"]
    +    else:
    +        background = 0
    +    header.append(o8(background) + o8(0))
    +    # end of Logical Screen Descriptor
    +
    +    # add the missing amount of bytes
+    # the palette has to be 2<<n in size
+    actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3
+    if actual_target_size_diff > 0:
    +        palette_bytes += o8(0) * 3 * actual_target_size_diff
    +
    +    # Header + Logical Screen Descriptor + Global Color Table
    +    header.append(palette_bytes)
    +    return header, used_palette_colors
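
The size-field arithmetic in getheader is easy to verify by hand: the logical screen descriptor stores a value n such that the global color table holds 2<<n entries, and the palette is padded up to that count. A standalone check (not part of the plugin):

import math

for n_colors in (2, 5, 16, 256):
    field = max(int(math.ceil(math.log(n_colors, 2))) - 1, 0)
    assert (2 << field) >= n_colors   # table padded up to 2<<field entries
    print(n_colors, "->", field)      # 2->0, 5->2, 16->3, 256->7
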
    +
    +
    +def getdata(im, offset=(0, 0), **params):
    +    """Return a list of strings representing this image.
    +       The first string is a local image header, the rest contains
    +       encoded image data."""
    +
    +    class Collector(object):
    +        data = []
    +
    +        def write(self, data):
    +            self.data.append(data)
    +
    +    im.load()  # make sure raster data is available
    +
    +    fp = Collector()
    +
    +    try:
    +        im.encoderinfo = params
    +
    +        # local image header
    +        _get_local_header(fp, im, offset, 0)
    +
    +        ImageFile._save(im, fp, [("gif", (0, 0)+im.size, 0, RAWMODE[im.mode])])
    +
    +        fp.write(b"\0")  # end of image data
    +
    +    finally:
    +        del im.encoderinfo
    +
    +    return fp.data
    +
    +
    +# --------------------------------------------------------------------
    +# Registry
    +
    +Image.register_open(GifImageFile.format, GifImageFile, _accept)
    +Image.register_save(GifImageFile.format, _save)
    +Image.register_save_all(GifImageFile.format, _save_all)
    +Image.register_extension(GifImageFile.format, ".gif")
    +Image.register_mime(GifImageFile.format, "image/gif")
    +
    +#
    +# Uncomment the following line if you wish to use NETPBM/PBMPLUS
    +# instead of the built-in "uncompressed" GIF encoder
    +
    +# Image.register_save(GifImageFile.format, _save_netpbm)
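
A usage sketch against this vendored version, tying together the seek-based frame counting and the save_all path (input path and keyword values are illustrative):

from PIL import Image

im = Image.open("anim.gif")          # hypothetical animated GIF
print(im.n_frames, im.is_animated)   # both computed by seeking, as above
im.seek(0)
# duration and loop land in encoderinfo and are written by _get_local_header
im.save("copy.gif", save_all=True, duration=100, loop=0)
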
    diff --git a/server/www/packages/packages-linux/x64/PIL/GimpGradientFile.py b/server/www/packages/packages-linux/x64/PIL/GimpGradientFile.py
    new file mode 100644
    index 0000000..45af573
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/GimpGradientFile.py
    @@ -0,0 +1,137 @@
    +#
    +# Python Imaging Library
    +# $Id$
    +#
    +# stuff to read (and render) GIMP gradient files
    +#
    +# History:
    +#       97-08-23 fl     Created
    +#
    +# Copyright (c) Secret Labs AB 1997.
    +# Copyright (c) Fredrik Lundh 1997.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from math import pi, log, sin, sqrt
    +from PIL._binary import o8
    +
    +# --------------------------------------------------------------------
    +# Stuff to translate curve segments to palette values (derived from
    +# the corresponding code in GIMP, written by Federico Mena Quintero.
    +# See the GIMP distribution for more information.)
    +#
    +
    +EPSILON = 1e-10
    +
    +
    +def linear(middle, pos):
    +    if pos <= middle:
    +        if middle < EPSILON:
    +            return 0.0
    +        else:
    +            return 0.5 * pos / middle
    +    else:
    +        pos = pos - middle
    +        middle = 1.0 - middle
    +        if middle < EPSILON:
    +            return 1.0
    +        else:
    +            return 0.5 + 0.5 * pos / middle
    +
    +
    +def curved(middle, pos):
    +    return pos ** (log(0.5) / log(max(middle, EPSILON)))
    +
    +
    +def sine(middle, pos):
    +    return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0
    +
    +
    +def sphere_increasing(middle, pos):
    +    return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2)
    +
    +
    +def sphere_decreasing(middle, pos):
    +    return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2)
    +
    +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing]
    +
    +
    +class GradientFile(object):
    +
    +    gradient = None
    +
    +    def getpalette(self, entries=256):
    +
    +        palette = []
    +
    +        ix = 0
    +        x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]
    +
    +        for i in range(entries):
    +
    +            x = i / float(entries-1)
    +
    +            while x1 < x:
    +                ix += 1
    +                x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]
    +
    +            w = x1 - x0
    +
    +            if w < EPSILON:
    +                scale = segment(0.5, 0.5)
    +            else:
    +                scale = segment((xm - x0) / w, (x - x0) / w)
    +
    +            # expand to RGBA
    +            r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5))
    +            g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5))
    +            b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5))
    +            a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5))
    +
    +            # add to palette
    +            palette.append(r + g + b + a)
    +
    +        return b"".join(palette), "RGBA"
    +
    +
    +##
    +# File handler for GIMP's gradient format.
    +
    +class GimpGradientFile(GradientFile):
    +
    +    def __init__(self, fp):
    +
    +        if fp.readline()[:13] != b"GIMP Gradient":
    +            raise SyntaxError("not a GIMP gradient file")
    +
    +        line = fp.readline()
    +
    +        # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do
    +        if line.startswith(b"Name: "):
    +            line = fp.readline().strip()
    +
    +        count = int(line)
    +
    +        gradient = []
    +
    +        for i in range(count):
    +
    +            s = fp.readline().split()
    +            w = [float(x) for x in s[:11]]
    +
    +            x0, x1 = w[0], w[2]
    +            xm = w[1]
    +            rgb0 = w[3:7]
    +            rgb1 = w[7:11]
    +
    +            segment = SEGMENTS[int(s[11])]
    +            cspace = int(s[12])
    +
    +            if cspace != 0:
    +                raise IOError("cannot handle HSV colour space")
    +
    +            gradient.append((x0, x1, xm, rgb0, rgb1, segment))
    +
    +        self.gradient = gradient
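
A standalone sketch of the parser above on an in-memory gradient with one linear segment; the data literal is hand-written (13 fields per segment line: x0 xm x1, RGBA at each end, segment function index, colour space):

from io import BytesIO
from PIL.GimpGradientFile import GimpGradientFile

data = (b"GIMP Gradient\n"
        b"Name: example\n"
        b"1\n"
        b"0.0 0.5 1.0  0 0 0 1  1 1 1 1  0 0\n")
grad = GimpGradientFile(BytesIO(data))
palette, mode = grad.getpalette(16)   # 16 RGBA entries, black to white
assert mode == "RGBA" and len(palette) == 64
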
    diff --git a/server/www/packages/packages-linux/x64/PIL/GimpPaletteFile.py b/server/www/packages/packages-linux/x64/PIL/GimpPaletteFile.py
    new file mode 100644
    index 0000000..4bf3ca3
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/GimpPaletteFile.py
    @@ -0,0 +1,62 @@
    +#
    +# Python Imaging Library
    +# $Id$
    +#
    +# stuff to read GIMP palette files
    +#
    +# History:
    +# 1997-08-23 fl     Created
    +# 2004-09-07 fl     Support GIMP 2.0 palette files.
    +#
    +# Copyright (c) Secret Labs AB 1997-2004.  All rights reserved.
    +# Copyright (c) Fredrik Lundh 1997-2004.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +import re
    +from PIL._binary import o8
    +
    +
    +##
    +# File handler for GIMP's palette format.
    +
    +class GimpPaletteFile(object):
    +
    +    rawmode = "RGB"
    +
    +    def __init__(self, fp):
    +
    +        self.palette = [o8(i)*3 for i in range(256)]
    +
    +        if fp.readline()[:12] != b"GIMP Palette":
    +            raise SyntaxError("not a GIMP palette file")
    +
    +        i = 0
    +
    +        while i <= 255:
    +
    +            s = fp.readline()
    +
    +            if not s:
    +                break
    +            # skip fields and comment lines
    +            if re.match(b"\w+:|#", s):
    +                continue
    +            if len(s) > 100:
    +                raise SyntaxError("bad palette file")
    +
    +            v = tuple(map(int, s.split()[:3]))
    +            if len(v) != 3:
    +                raise ValueError("bad palette entry")
    +
    +            if 0 <= i <= 255:
    +                self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2])
    +
    +            i += 1
    +
    +        self.palette = b"".join(self.palette)
    +
    +    def getpalette(self):
    +
    +        return self.palette, self.rawmode
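
The same in-memory approach works for palettes; "Name:" and comment lines are skipped by the regex above (hand-written sample data):

from io import BytesIO
from PIL.GimpPaletteFile import GimpPaletteFile

data = (b"GIMP Palette\n"
        b"Name: demo\n"
        b"# unset slots keep the greyscale default\n"
        b"  0   0   0 black\n"
        b"255 255 255 white\n")
pal = GimpPaletteFile(BytesIO(data))
palette, rawmode = pal.getpalette()
assert rawmode == "RGB" and len(palette) == 768   # 256 packed RGB triples
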
    diff --git a/server/www/packages/packages-linux/x64/PIL/GribStubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/GribStubImagePlugin.py
    new file mode 100644
    index 0000000..8ffad81
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/GribStubImagePlugin.py
    @@ -0,0 +1,72 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# GRIB stub adapter
    +#
    +# Copyright (c) 1996-2003 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image, ImageFile
    +
    +_handler = None
    +
    +
    +##
    +# Install application-specific GRIB image handler.
    +#
    +# @param handler Handler object.
    +
    +def register_handler(handler):
    +    global _handler
    +    _handler = handler
    +
    +
    +# --------------------------------------------------------------------
    +# Image adapter
    +
    +def _accept(prefix):
    +    return prefix[0:4] == b"GRIB" and prefix[7] == b'\x01'
    +
    +
    +class GribStubImageFile(ImageFile.StubImageFile):
    +
    +    format = "GRIB"
    +    format_description = "GRIB"
    +
    +    def _open(self):
    +
    +        offset = self.fp.tell()
    +
    +        if not _accept(self.fp.read(8)):
    +            raise SyntaxError("Not a GRIB file")
    +
    +        self.fp.seek(offset)
    +
    +        # make something up
    +        self.mode = "F"
    +        self.size = 1, 1
    +
    +        loader = self._load()
    +        if loader:
    +            loader.open(self)
    +
    +    def _load(self):
    +        return _handler
    +
    +
    +def _save(im, fp, filename):
    +    if _handler is None or not hasattr("_handler", "save"):
    +        raise IOError("GRIB save handler not installed")
    +    _handler.save(im, fp, filename)
    +
    +
    +# --------------------------------------------------------------------
    +# Registry
    +
    +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept)
    +Image.register_save(GribStubImageFile.format, _save)
    +
    +Image.register_extension(GribStubImageFile.format, ".grib")
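
This stub (and the HDF5 stub below, which follows the same pattern) only validates the magic bytes and defers everything else to an application-registered handler. A hypothetical handler sketch (GribHandler and its behaviour are made up for illustration):

from PIL import GribStubImagePlugin

class GribHandler(object):
    # hypothetical application-specific handler
    def open(self, im):
        # called via loader.open(self) in _open; a real handler would
        # parse im.fp here and set im.size and im.mode properly
        pass

    def save(self, im, fp, filename):
        raise IOError("saving GRIB is not supported by this handler")

GribStubImagePlugin.register_handler(GribHandler())
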
    diff --git a/server/www/packages/packages-linux/x64/PIL/Hdf5StubImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/Hdf5StubImagePlugin.py
    new file mode 100644
    index 0000000..f7945be
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/Hdf5StubImagePlugin.py
    @@ -0,0 +1,73 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# HDF5 stub adapter
    +#
    +# Copyright (c) 2000-2003 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image, ImageFile
    +
    +_handler = None
    +
    +
    +##
    +# Install application-specific HDF5 image handler.
    +#
    +# @param handler Handler object.
    +
    +def register_handler(handler):
    +    global _handler
    +    _handler = handler
    +
    +
    +# --------------------------------------------------------------------
    +# Image adapter
    +
    +def _accept(prefix):
    +    return prefix[:8] == b"\x89HDF\r\n\x1a\n"
    +
    +
    +class HDF5StubImageFile(ImageFile.StubImageFile):
    +
    +    format = "HDF5"
    +    format_description = "HDF5"
    +
    +    def _open(self):
    +
    +        offset = self.fp.tell()
    +
    +        if not _accept(self.fp.read(8)):
    +            raise SyntaxError("Not an HDF file")
    +
    +        self.fp.seek(offset)
    +
    +        # make something up
    +        self.mode = "F"
    +        self.size = 1, 1
    +
    +        loader = self._load()
    +        if loader:
    +            loader.open(self)
    +
    +    def _load(self):
    +        return _handler
    +
    +
    +def _save(im, fp, filename):
    +    if _handler is None or not hasattr("_handler", "save"):
    +        raise IOError("HDF5 save handler not installed")
    +    _handler.save(im, fp, filename)
    +
    +
    +# --------------------------------------------------------------------
    +# Registry
    +
    +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept)
    +Image.register_save(HDF5StubImageFile.format, _save)
    +
    +Image.register_extension(HDF5StubImageFile.format, ".h5")
    +Image.register_extension(HDF5StubImageFile.format, ".hdf")
    diff --git a/server/www/packages/packages-linux/x64/PIL/IcnsImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/IcnsImagePlugin.py
    new file mode 100644
    index 0000000..a4366e9
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/IcnsImagePlugin.py
    @@ -0,0 +1,366 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# Mac OS X icns file decoder, based on icns.py by Bob Ippolito.
    +#
    +# history:
    +# 2004-10-09 fl   Turned into a PIL plugin; removed 2.3 dependencies.
    +#
    +# Copyright (c) 2004 by Bob Ippolito.
    +# Copyright (c) 2004 by Secret Labs.
    +# Copyright (c) 2004 by Fredrik Lundh.
    +# Copyright (c) 2014 by Alastair Houghton.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image, ImageFile, PngImagePlugin, _binary
    +import io
    +import os
    +import shutil
    +import struct
    +import sys
    +import tempfile
    +
    +enable_jpeg2k = hasattr(Image.core, 'jp2klib_version')
    +if enable_jpeg2k:
    +    from PIL import Jpeg2KImagePlugin
    +
    +i8 = _binary.i8
    +
    +HEADERSIZE = 8
    +
    +
    +def nextheader(fobj):
    +    return struct.unpack('>4sI', fobj.read(HEADERSIZE))
    +
    +
    +def read_32t(fobj, start_length, size):
    +    # The 128x128 icon seems to have an extra header for some reason.
    +    (start, length) = start_length
    +    fobj.seek(start)
    +    sig = fobj.read(4)
    +    if sig != b'\x00\x00\x00\x00':
    +        raise SyntaxError('Unknown signature, expecting 0x00000000')
    +    return read_32(fobj, (start + 4, length - 4), size)
    +
    +
    +def read_32(fobj, start_length, size):
    +    """
    +    Read a 32bit RGB icon resource.  Seems to be either uncompressed or
    +    an RLE packbits-like scheme.
    +    """
    +    (start, length) = start_length
    +    fobj.seek(start)
    +    pixel_size = (size[0] * size[2], size[1] * size[2])
    +    sizesq = pixel_size[0] * pixel_size[1]
    +    if length == sizesq * 3:
    +        # uncompressed ("RGBRGBGB")
    +        indata = fobj.read(length)
    +        im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1)
    +    else:
    +        # decode image
    +        im = Image.new("RGB", pixel_size, None)
    +        for band_ix in range(3):
    +            data = []
    +            bytesleft = sizesq
    +            while bytesleft > 0:
    +                byte = fobj.read(1)
    +                if not byte:
    +                    break
    +                byte = i8(byte)
    +                if byte & 0x80:
    +                    blocksize = byte - 125
    +                    byte = fobj.read(1)
    +                    for i in range(blocksize):
    +                        data.append(byte)
    +                else:
    +                    blocksize = byte + 1
    +                    data.append(fobj.read(blocksize))
    +                bytesleft -= blocksize
    +                if bytesleft <= 0:
    +                    break
    +            if bytesleft != 0:
    +                raise SyntaxError(
    +                    "Error reading channel [%r left]" % bytesleft
    +                    )
    +            band = Image.frombuffer(
    +                "L", pixel_size, b"".join(data), "raw", "L", 0, 1
    +                )
    +            im.im.putband(band.im, band_ix)
    +    return {"RGB": im}
    +
    +
    +def read_mk(fobj, start_length, size):
    +    # Alpha masks seem to be uncompressed
    +    start = start_length[0]
    +    fobj.seek(start)
    +    pixel_size = (size[0] * size[2], size[1] * size[2])
    +    sizesq = pixel_size[0] * pixel_size[1]
    +    band = Image.frombuffer(
    +        "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1
    +        )
    +    return {"A": band}
    +
    +
    +def read_png_or_jpeg2000(fobj, start_length, size):
    +    (start, length) = start_length
    +    fobj.seek(start)
    +    sig = fobj.read(12)
    +    if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a':
    +        fobj.seek(start)
    +        im = PngImagePlugin.PngImageFile(fobj)
    +        return {"RGBA": im}
    +    elif sig[:4] == b'\xff\x4f\xff\x51' \
    +            or sig[:4] == b'\x0d\x0a\x87\x0a' \
    +            or sig == b'\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a':
    +        if not enable_jpeg2k:
    +            raise ValueError('Unsupported icon subimage format (rebuild PIL '
    +                             'with JPEG 2000 support to fix this)')
    +        # j2k, jpc or j2c
    +        fobj.seek(start)
    +        jp2kstream = fobj.read(length)
    +        f = io.BytesIO(jp2kstream)
    +        im = Jpeg2KImagePlugin.Jpeg2KImageFile(f)
    +        if im.mode != 'RGBA':
    +            im = im.convert('RGBA')
    +        return {"RGBA": im}
    +    else:
    +        raise ValueError('Unsupported icon subimage format')
    +
    +
    +class IcnsFile(object):
    +
    +    SIZES = {
    +        (512, 512, 2): [
    +            (b'ic10', read_png_or_jpeg2000),
    +        ],
    +        (512, 512, 1): [
    +            (b'ic09', read_png_or_jpeg2000),
    +        ],
    +        (256, 256, 2): [
    +            (b'ic14', read_png_or_jpeg2000),
    +        ],
    +        (256, 256, 1): [
    +            (b'ic08', read_png_or_jpeg2000),
    +        ],
    +        (128, 128, 2): [
    +            (b'ic13', read_png_or_jpeg2000),
    +        ],
    +        (128, 128, 1): [
    +            (b'ic07', read_png_or_jpeg2000),
    +            (b'it32', read_32t),
    +            (b't8mk', read_mk),
    +        ],
    +        (64, 64, 1): [
    +            (b'icp6', read_png_or_jpeg2000),
    +        ],
    +        (32, 32, 2): [
    +            (b'ic12', read_png_or_jpeg2000),
    +        ],
    +        (48, 48, 1): [
    +            (b'ih32', read_32),
    +            (b'h8mk', read_mk),
    +        ],
    +        (32, 32, 1): [
    +            (b'icp5', read_png_or_jpeg2000),
    +            (b'il32', read_32),
    +            (b'l8mk', read_mk),
    +        ],
    +        (16, 16, 2): [
    +            (b'ic11', read_png_or_jpeg2000),
    +        ],
    +        (16, 16, 1): [
    +            (b'icp4', read_png_or_jpeg2000),
    +            (b'is32', read_32),
    +            (b's8mk', read_mk),
    +        ],
    +    }
    +
    +    def __init__(self, fobj):
    +        """
    +        fobj is a file-like object as an icns resource
    +        """
    +        # signature : (start, length)
    +        self.dct = dct = {}
    +        self.fobj = fobj
    +        sig, filesize = nextheader(fobj)
    +        if sig != b'icns':
    +            raise SyntaxError('not an icns file')
    +        i = HEADERSIZE
    +        while i < filesize:
    +            sig, blocksize = nextheader(fobj)
    +            if blocksize <= 0:
    +                raise SyntaxError('invalid block header')
    +            i += HEADERSIZE
    +            blocksize -= HEADERSIZE
    +            dct[sig] = (i, blocksize)
    +            fobj.seek(blocksize, 1)
    +            i += blocksize
    +
    +    def itersizes(self):
    +        sizes = []
    +        for size, fmts in self.SIZES.items():
    +            for (fmt, reader) in fmts:
    +                if fmt in self.dct:
    +                    sizes.append(size)
    +                    break
    +        return sizes
    +
    +    def bestsize(self):
    +        sizes = self.itersizes()
    +        if not sizes:
    +            raise SyntaxError("No 32bit icon resources found")
    +        return max(sizes)
    +
    +    def dataforsize(self, size):
    +        """
    +        Get an icon resource as {channel: array}.  Note that
    +        the arrays are bottom-up like windows bitmaps and will likely
    +        need to be flipped or transposed in some way.
    +        """
    +        dct = {}
    +        for code, reader in self.SIZES[size]:
    +            desc = self.dct.get(code)
    +            if desc is not None:
    +                dct.update(reader(self.fobj, desc, size))
    +        return dct
    +
    +    def getimage(self, size=None):
    +        if size is None:
    +            size = self.bestsize()
    +        if len(size) == 2:
    +            size = (size[0], size[1], 1)
    +        channels = self.dataforsize(size)
    +
    +        im = channels.get('RGBA', None)
    +        if im:
    +            return im
    +
    +        im = channels.get("RGB").copy()
    +        try:
    +            im.putalpha(channels["A"])
    +        except KeyError:
    +            pass
    +        return im
    +
    +
    +##
    +# Image plugin for Mac OS icons.
    +
    +class IcnsImageFile(ImageFile.ImageFile):
    +    """
    +    PIL image support for Mac OS .icns files.
    +    Chooses the best resolution, but will possibly load
    +    a different size image if you mutate the size attribute
    +    before calling 'load'.
    +
    +    The info dictionary has a key 'sizes' that is a list
    +    of sizes that the icns file has.
    +    """
    +
    +    format = "ICNS"
    +    format_description = "Mac OS icns resource"
    +
    +    def _open(self):
    +        self.icns = IcnsFile(self.fp)
    +        self.mode = 'RGBA'
    +        self.best_size = self.icns.bestsize()
    +        self.size = (self.best_size[0] * self.best_size[2],
    +                     self.best_size[1] * self.best_size[2])
    +        self.info['sizes'] = self.icns.itersizes()
    +        # Just use this to see if it's loaded or not yet.
    +        self.tile = ('',)
    +
    +    def load(self):
    +        if len(self.size) == 3:
    +            self.best_size = self.size
    +            self.size = (self.best_size[0] * self.best_size[2],
    +                         self.best_size[1] * self.best_size[2])
    +
    +        Image.Image.load(self)
    +        if not self.tile:
    +            return
    +        self.load_prepare()
    +        # This is likely NOT the best way to do it, but whatever.
    +        im = self.icns.getimage(self.best_size)
    +
    +        # If this is a PNG or JPEG 2000, it won't be loaded yet
    +        im.load()
    +
    +        self.im = im.im
    +        self.mode = im.mode
    +        self.size = im.size
    +        self.fp = None
    +        self.icns = None
    +        self.tile = ()
    +        self.load_end()
    +
    +
    +def _save(im, fp, filename):
    +    """
    +    Saves the image as a series of PNG files,
    +    that are then converted to a .icns file
    +    using the OS X command line utility 'iconutil'.
    +
    +    OS X only.
    +    """
    +    if hasattr(fp, "flush"):
    +        fp.flush()
    +
    +    # create the temporary set of pngs
    +    iconset = tempfile.mkdtemp('.iconset')
    +    last_w = None
    +    last_im = None
    +    for w in [16, 32, 128, 256, 512]:
    +        prefix = 'icon_{}x{}'.format(w, w)
    +
    +        if last_w == w:
    +            im_scaled = last_im
    +        else:
    +            im_scaled = im.resize((w, w), Image.LANCZOS)
    +        im_scaled.save(os.path.join(iconset, prefix+'.png'))
    +
    +        im_scaled = im.resize((w*2, w*2), Image.LANCZOS)
    +        im_scaled.save(os.path.join(iconset, prefix+'@2x.png'))
    +        last_im = im_scaled
    +
    +    # iconutil -c icns -o {} {}
    +    from subprocess import Popen, PIPE, CalledProcessError
    +
    +    convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset]
    +    stderr = tempfile.TemporaryFile()
    +    convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=stderr)
    +
    +    convert_proc.stdout.close()
    +
    +    retcode = convert_proc.wait()
    +
    +    # remove the temporary files
    +    shutil.rmtree(iconset)
    +
    +    if retcode:
    +        raise CalledProcessError(retcode, convert_cmd)
    +
    +Image.register_open(IcnsImageFile.format, IcnsImageFile,
    +                    lambda x: x[:4] == b'icns')
    +Image.register_extension(IcnsImageFile.format, '.icns')
    +
    +if sys.platform == 'darwin':
    +    Image.register_save(IcnsImageFile.format, _save)
    +
    +    Image.register_mime(IcnsImageFile.format, "image/icns")
    +
    +
    +if __name__ == '__main__':
    +    imf = IcnsImageFile(open(sys.argv[1], 'rb'))
    +    for size in imf.info['sizes']:
    +        imf.size = size
    +        imf.load()
    +        im = imf.im
    +        im.save('out-%s-%s-%s.png' % size)
    +    im = Image.open(open(sys.argv[1], "rb"))
    +    im.save("out.png")
    +    if sys.platform == 'windows':
    +        os.startfile("out.png")
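
A usage sketch for the size-selection behaviour described in the class docstring (icon path is illustrative):

from PIL import Image

im = Image.open("app.icns")   # hypothetical icon file
print(im.info["sizes"])       # e.g. [(16, 16, 1), (32, 32, 2), ...]
im.size = (16, 16, 1)         # (w, h, scale) -- mutate before load()
im.load()
print(im.size)                # pixel size of the chosen subimage
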
    diff --git a/server/www/packages/packages-linux/x64/PIL/IcoImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/IcoImagePlugin.py
    new file mode 100644
    index 0000000..a01aed3
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/IcoImagePlugin.py
    @@ -0,0 +1,283 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# Windows Icon support for PIL
    +#
    +# History:
    +#       96-05-27 fl     Created
    +#
    +# Copyright (c) Secret Labs AB 1997.
    +# Copyright (c) Fredrik Lundh 1996.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
+# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
+# <casadebender@gmail.com>.
    +# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki
    +#
    +# Icon format references:
    +#   * https://en.wikipedia.org/wiki/ICO_(file_format)
    +#   * https://msdn.microsoft.com/en-us/library/ms997538.aspx
    +
    +
    +import struct
    +from io import BytesIO
    +
    +from PIL import Image, ImageFile, BmpImagePlugin, PngImagePlugin, _binary
    +from math import log, ceil
    +
    +__version__ = "0.1"
    +
    +#
    +# --------------------------------------------------------------------
    +
    +i8 = _binary.i8
    +i16 = _binary.i16le
    +i32 = _binary.i32le
    +
    +_MAGIC = b"\0\0\1\0"
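+# ICONDIR header prefix: idReserved(2) == 0 and idType(2) == 1 (icon),
+# both little-endian.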
    +
    +
    +def _save(im, fp, filename):
    +    fp.write(_MAGIC)  # (2+2)
    +    sizes = im.encoderinfo.get("sizes",
    +                               [(16, 16), (24, 24), (32, 32), (48, 48),
    +                                (64, 64), (128, 128), (255, 255)])
    +    width, height = im.size
+    # drop sizes that exceed the source image or the 255-pixel ICO limit
+    sizes = [x for x in sizes if not (x[0] > width or x[1] > height or
+                                      x[0] > 255 or x[1] > 255)]
+    fp.write(struct.pack("<H", len(sizes)))  # idCount(2)
+    offset = fp.tell() + len(sizes)*16
+    for size in sizes:
+        width, height = size
+        fp.write(struct.pack("B", width))  # bWidth(1)
+        fp.write(struct.pack("B", height))  # bHeight(1)
+        fp.write(b"\0")  # bColorCount(1)
+        fp.write(b"\0")  # bReserved(1)
+        fp.write(b"\0\0")  # wPlanes(2)
+        fp.write(struct.pack("<H", 32))  # wBitCount(2)
+
+        image_io = BytesIO()
+        tmp = im.copy()
+        tmp.thumbnail(size, Image.LANCZOS)
+        tmp.save(image_io, "png")
+        image_io.seek(0)
+        image_bytes = image_io.read()
+        bytes_len = len(image_bytes)
+        fp.write(struct.pack("<I", bytes_len))  # dwBytesInRes(4)
+        fp.write(struct.pack("<I", offset))  # dwImageOffset(4)
+        current = fp.tell()
+        fp.seek(offset)
+        fp.write(image_bytes)
+        offset = offset + bytes_len
+        fp.seek(current)
+
+
+def _accept(prefix):
+    return prefix[:4] == _MAGIC
+
+
+class IcoFile(object):
+    def __init__(self, buf):
+        """
+        Parse image from file-like object containing ico file data
+        """
+
+        # check magic
+        s = buf.read(6)
+        if not _accept(s):
+            raise SyntaxError("not an ICO file")
+
+        self.buf = buf
+        self.entry = []
+
+        # Number of items in file
+        self.nb_items = i16(s[4:])
+
+        # Get headers for each item
+        for i in range(self.nb_items):
+            s = buf.read(16)
+
+            icon_header = {
+                'width': i8(s[0]),
+                'height': i8(s[1]),
+                # No. of colors in image (0 if >=8bpp)
+                'nb_color': i8(s[2]),
    +                'reserved': i8(s[3]),
    +                'planes': i16(s[4:]),
    +                'bpp': i16(s[6:]),
    +                'size': i32(s[8:]),
    +                'offset': i32(s[12:])
    +            }
    +
    +            # See Wikipedia
    +            for j in ('width', 'height'):
    +                if not icon_header[j]:
    +                    icon_header[j] = 256
    +
    +            # See Wikipedia notes about color depth.
+            # We need this just to distinguish images with equal sizes
    +            icon_header['color_depth'] = (icon_header['bpp'] or
    +                                          (icon_header['nb_color'] != 0 and
    +                                           ceil(log(icon_header['nb_color'],
    +                                                    2))) or 256)
    +
    +            icon_header['dim'] = (icon_header['width'], icon_header['height'])
    +            icon_header['square'] = (icon_header['width'] *
    +                                     icon_header['height'])
    +
    +            self.entry.append(icon_header)
    +
    +        self.entry = sorted(self.entry, key=lambda x: x['color_depth'])
    +        # ICO images are usually squares
    +        # self.entry = sorted(self.entry, key=lambda x: x['width'])
    +        self.entry = sorted(self.entry, key=lambda x: x['square'])
    +        self.entry.reverse()
    +
    +    def sizes(self):
    +        """
    +        Get a list of all available icon sizes and color depths.
    +        """
    +        return set((h['width'], h['height']) for h in self.entry)
    +
    +    def getimage(self, size, bpp=False):
    +        """
    +        Get an image from the icon
    +        """
    +        for (i, h) in enumerate(self.entry):
    +            if size == h['dim'] and (bpp is False or bpp == h['color_depth']):
    +                return self.frame(i)
    +        return self.frame(0)
    +
    +    def frame(self, idx):
    +        """
    +        Get an image from frame idx
    +        """
    +
    +        header = self.entry[idx]
    +
    +        self.buf.seek(header['offset'])
    +        data = self.buf.read(8)
    +        self.buf.seek(header['offset'])
    +
    +        if data[:8] == PngImagePlugin._MAGIC:
    +            # png frame
    +            im = PngImagePlugin.PngImageFile(self.buf)
    +        else:
    +            # XOR + AND mask bmp frame
    +            im = BmpImagePlugin.DibImageFile(self.buf)
    +
    +            # change tile dimension to only encompass XOR image
    +            im.size = (im.size[0], int(im.size[1] / 2))
    +            d, e, o, a = im.tile[0]
    +            im.tile[0] = d, (0, 0) + im.size, o, a
    +
    +            # figure out where AND mask image starts
    +            mode = a[0]
    +            bpp = 8
    +            for k in BmpImagePlugin.BIT2MODE.keys():
    +                if mode == BmpImagePlugin.BIT2MODE[k][1]:
    +                    bpp = k
    +                    break
    +
    +            if 32 == bpp:
    +                # 32-bit color depth icon image allows semitransparent areas
    +                # PIL's DIB format ignores transparency bits, recover them.
    +                # The DIB is packed in BGRX byte order where X is the alpha
    +                # channel.
    +
    +                # Back up to start of bmp data
    +                self.buf.seek(o)
    +                # extract every 4th byte (eg. 3,7,11,15,...)
    +                alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4]
    +
    +                # convert to an 8bpp grayscale image
    +                mask = Image.frombuffer(
    +                    'L',            # 8bpp
    +                    im.size,        # (w, h)
    +                    alpha_bytes,    # source chars
    +                    'raw',          # raw decoder
    +                    ('L', 0, -1)    # 8bpp inverted, unpadded, reversed
    +                )
    +            else:
    +                # get AND image from end of bitmap
    +                w = im.size[0]
    +                if (w % 32) > 0:
    +                    # bitmap row data is aligned to word boundaries
    +                    w += 32 - (im.size[0] % 32)
    +
    +                # the total mask data is
    +                # padded row size * height / bits per char
    +
    +                and_mask_offset = o + int(im.size[0] * im.size[1] *
    +                                          (bpp / 8.0))
    +                total_bytes = int((w * im.size[1]) / 8)
    +
    +                self.buf.seek(and_mask_offset)
    +                maskData = self.buf.read(total_bytes)
    +
    +                # convert raw data to image
    +                mask = Image.frombuffer(
    +                    '1',            # 1 bpp
    +                    im.size,        # (w, h)
    +                    maskData,       # source chars
    +                    'raw',          # raw decoder
    +                    ('1;I', int(w/8), -1)  # 1bpp inverted, padded, reversed
    +                )
    +
    +                # now we have two images, im is XOR image and mask is AND image
    +
    +            # apply mask image as alpha channel
    +            im = im.convert('RGBA')
    +            im.putalpha(mask)
    +
    +        return im
    +
    +
    +##
    +# Image plugin for Windows Icon files.
    +
    +class IcoImageFile(ImageFile.ImageFile):
    +    """
    +    PIL read-only image support for Microsoft Windows .ico files.
    +
    +    By default the largest resolution image in the file will be loaded. This
    +    can be changed by altering the 'size' attribute before calling 'load'.
    +
    +    The info dictionary has a key 'sizes' that is a list of the sizes available
    +    in the icon file.
    +
    +    Handles classic, XP and Vista icon formats.
    +
    +    This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
+    <casadebender@gmail.com>.
    +    https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki
    +    """
    +    format = "ICO"
    +    format_description = "Windows Icon"
    +
    +    def _open(self):
    +        self.ico = IcoFile(self.fp)
    +        self.info['sizes'] = self.ico.sizes()
    +        self.size = self.ico.entry[0]['dim']
    +        self.load()
    +
    +    def load(self):
    +        im = self.ico.getimage(self.size)
    +        # if tile is PNG, it won't really be loaded yet
    +        im.load()
    +        self.im = im.im
    +        self.mode = im.mode
    +        self.size = im.size
    +
    +    def load_seek(self):
    +        # Flag the ImageFile.Parser so that it
    +        # just does all the decode at the end.
    +        pass
    +#
    +# --------------------------------------------------------------------
    +
    +Image.register_open(IcoImageFile.format, IcoImageFile, _accept)
    +Image.register_save(IcoImageFile.format, _save)
    +Image.register_extension(IcoImageFile.format, ".ico")
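+
+# Usage sketch (hedged example; "app.ico" is a hypothetical file): select a
+# resolution before loading, as the IcoImageFile docstring describes:
+#
+#     from PIL import Image
+#     im = Image.open("app.ico")
+#     print(im.info['sizes'])   # available (width, height) pairs
+#     im.size = (16, 16)
+#     im.load()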
    diff --git a/server/www/packages/packages-linux/x64/PIL/ImImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/ImImagePlugin.py
    new file mode 100644
    index 0000000..dd4f829
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/ImImagePlugin.py
    @@ -0,0 +1,355 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# IFUNC IM file handling for PIL
    +#
    +# history:
    +# 1995-09-01 fl   Created.
    +# 1997-01-03 fl   Save palette images
    +# 1997-01-08 fl   Added sequence support
    +# 1997-01-23 fl   Added P and RGB save support
    +# 1997-05-31 fl   Read floating point images
    +# 1997-06-22 fl   Save floating point images
    +# 1997-08-27 fl   Read and save 1-bit images
    +# 1998-06-25 fl   Added support for RGB+LUT images
    +# 1998-07-02 fl   Added support for YCC images
    +# 1998-07-15 fl   Renamed offset attribute to avoid name clash
    +# 1998-12-29 fl   Added I;16 support
    +# 2001-02-17 fl   Use 're' instead of 'regex' (Python 2.1) (0.7)
    +# 2003-09-26 fl   Added LA/PA support
    +#
    +# Copyright (c) 1997-2003 by Secret Labs AB.
    +# Copyright (c) 1995-2001 by Fredrik Lundh.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +
    +import re
    +from PIL import Image, ImageFile, ImagePalette
    +from PIL._binary import i8
    +
    +__version__ = "0.7"
    +
    +
    +# --------------------------------------------------------------------
    +# Standard tags
    +
    +COMMENT = "Comment"
    +DATE = "Date"
    +EQUIPMENT = "Digitalization equipment"
    +FRAMES = "File size (no of images)"
    +LUT = "Lut"
    +NAME = "Name"
    +SCALE = "Scale (x,y)"
    +SIZE = "Image size (x*y)"
    +MODE = "Image type"
    +
    +TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0,
    +        SCALE: 0, SIZE: 0, MODE: 0}
    +
    +OPEN = {
    +    # ifunc93/p3cfunc formats
    +    "0 1 image": ("1", "1"),
    +    "L 1 image": ("1", "1"),
    +    "Greyscale image": ("L", "L"),
    +    "Grayscale image": ("L", "L"),
    +    "RGB image": ("RGB", "RGB;L"),
    +    "RLB image": ("RGB", "RLB"),
    +    "RYB image": ("RGB", "RLB"),
    +    "B1 image": ("1", "1"),
    +    "B2 image": ("P", "P;2"),
    +    "B4 image": ("P", "P;4"),
    +    "X 24 image": ("RGB", "RGB"),
    +    "L 32 S image": ("I", "I;32"),
    +    "L 32 F image": ("F", "F;32"),
    +    # old p3cfunc formats
    +    "RGB3 image": ("RGB", "RGB;T"),
    +    "RYB3 image": ("RGB", "RYB;T"),
    +    # extensions
    +    "LA image": ("LA", "LA;L"),
    +    "RGBA image": ("RGBA", "RGBA;L"),
    +    "RGBX image": ("RGBX", "RGBX;L"),
    +    "CMYK image": ("CMYK", "CMYK;L"),
    +    "YCC image": ("YCbCr", "YCbCr;L"),
    +}
    +
    +# ifunc95 extensions
    +for i in ["8", "8S", "16", "16S", "32", "32F"]:
    +    OPEN["L %s image" % i] = ("F", "F;%s" % i)
    +    OPEN["L*%s image" % i] = ("F", "F;%s" % i)
    +for i in ["16", "16L", "16B"]:
    +    OPEN["L %s image" % i] = ("I;%s" % i, "I;%s" % i)
    +    OPEN["L*%s image" % i] = ("I;%s" % i, "I;%s" % i)
    +for i in ["32S"]:
    +    OPEN["L %s image" % i] = ("I", "I;%s" % i)
    +    OPEN["L*%s image" % i] = ("I", "I;%s" % i)
    +for i in range(2, 33):
    +    OPEN["L*%s image" % i] = ("F", "F;%s" % i)
    +
    +
    +# --------------------------------------------------------------------
    +# Read IM directory
    +
    +split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$")
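+#
+# Example header lines this pattern matches (the writer in _save below emits
+# exactly these forms):
+#
+#     Image type: RGB image
+#     Image size (x*y): 512*512
+#     File size (no of images): 1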
    +
    +
    +def number(s):
    +    try:
    +        return int(s)
    +    except ValueError:
    +        return float(s)
    +
    +
    +##
    +# Image plugin for the IFUNC IM file format.
    +
    +class ImImageFile(ImageFile.ImageFile):
    +
    +    format = "IM"
    +    format_description = "IFUNC Image Memory"
    +
    +    def _open(self):
    +
    +        # Quick rejection: if there's not an LF among the first
    +        # 100 bytes, this is (probably) not a text header.
    +
    +        if b"\n" not in self.fp.read(100):
    +            raise SyntaxError("not an IM file")
    +        self.fp.seek(0)
    +
    +        n = 0
    +
    +        # Default values
    +        self.info[MODE] = "L"
    +        self.info[SIZE] = (512, 512)
    +        self.info[FRAMES] = 1
    +
    +        self.rawmode = "L"
    +
    +        while True:
    +
    +            s = self.fp.read(1)
    +
+            # Some versions of IFUNC use \n\r instead of \r\n...
    +            if s == b"\r":
    +                continue
    +
    +            if not s or s == b'\0' or s == b'\x1A':
    +                break
    +
    +            # FIXME: this may read whole file if not a text file
    +            s = s + self.fp.readline()
    +
    +            if len(s) > 100:
    +                raise SyntaxError("not an IM file")
    +
    +            if s[-2:] == b'\r\n':
    +                s = s[:-2]
    +            elif s[-1:] == b'\n':
    +                s = s[:-1]
    +
    +            try:
    +                m = split.match(s)
    +            except re.error as v:
    +                raise SyntaxError("not an IM file")
    +
    +            if m:
    +
    +                k, v = m.group(1, 2)
    +
    +                # Don't know if this is the correct encoding,
    +                # but a decent guess (I guess)
    +                k = k.decode('latin-1', 'replace')
    +                v = v.decode('latin-1', 'replace')
    +
    +                # Convert value as appropriate
    +                if k in [FRAMES, SCALE, SIZE]:
    +                    v = v.replace("*", ",")
    +                    v = tuple(map(number, v.split(",")))
    +                    if len(v) == 1:
    +                        v = v[0]
    +                elif k == MODE and v in OPEN:
    +                    v, self.rawmode = OPEN[v]
    +
    +                # Add to dictionary. Note that COMMENT tags are
    +                # combined into a list of strings.
    +                if k == COMMENT:
    +                    if k in self.info:
    +                        self.info[k].append(v)
    +                    else:
    +                        self.info[k] = [v]
    +                else:
    +                    self.info[k] = v
    +
    +                if k in TAGS:
    +                    n += 1
    +
    +            else:
    +
    +                raise SyntaxError("Syntax error in IM header: " +
    +                                  s.decode('ascii', 'replace'))
    +
    +        if not n:
    +            raise SyntaxError("Not an IM file")
    +
    +        # Basic attributes
    +        self.size = self.info[SIZE]
    +        self.mode = self.info[MODE]
    +
    +        # Skip forward to start of image data
    +        while s and s[0:1] != b'\x1A':
    +            s = self.fp.read(1)
    +        if not s:
    +            raise SyntaxError("File truncated")
    +
    +        if LUT in self.info:
    +            # convert lookup table to palette or lut attribute
    +            palette = self.fp.read(768)
    +            greyscale = 1  # greyscale palette
    +            linear = 1  # linear greyscale palette
    +            for i in range(256):
    +                if palette[i] == palette[i+256] == palette[i+512]:
    +                    if i8(palette[i]) != i:
    +                        linear = 0
    +                else:
    +                    greyscale = 0
    +            if self.mode == "L" or self.mode == "LA":
    +                if greyscale:
    +                    if not linear:
    +                        self.lut = [i8(c) for c in palette[:256]]
    +                else:
    +                    if self.mode == "L":
    +                        self.mode = self.rawmode = "P"
    +                    elif self.mode == "LA":
    +                        self.mode = self.rawmode = "PA"
    +                    self.palette = ImagePalette.raw("RGB;L", palette)
    +            elif self.mode == "RGB":
    +                if not greyscale or not linear:
    +                    self.lut = [i8(c) for c in palette]
    +
    +        self.frame = 0
    +
    +        self.__offset = offs = self.fp.tell()
    +
    +        self.__fp = self.fp  # FIXME: hack
    +
    +        if self.rawmode[:2] == "F;":
    +
    +            # ifunc95 formats
    +            try:
    +                # use bit decoder (if necessary)
    +                bits = int(self.rawmode[2:])
    +                if bits not in [8, 16, 32]:
    +                    self.tile = [("bit", (0, 0)+self.size, offs,
    +                                 (bits, 8, 3, 0, -1))]
    +                    return
    +            except ValueError:
    +                pass
    +
    +        if self.rawmode in ["RGB;T", "RYB;T"]:
    +            # Old LabEye/3PC files.  Would be very surprised if anyone
    +            # ever stumbled upon such a file ;-)
    +            size = self.size[0] * self.size[1]
    +            self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)),
    +                         ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)),
    +                         ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))]
    +        else:
    +            # LabEye/IFUNC files
    +            self.tile = [("raw", (0, 0)+self.size, offs,
    +                         (self.rawmode, 0, -1))]
    +
    +    @property
    +    def n_frames(self):
    +        return self.info[FRAMES]
    +
    +    @property
    +    def is_animated(self):
    +        return self.info[FRAMES] > 1
    +
    +    def seek(self, frame):
    +
    +        if frame < 0 or frame >= self.info[FRAMES]:
    +            raise EOFError("seek outside sequence")
    +
    +        if self.frame == frame:
    +            return
    +
    +        self.frame = frame
    +
    +        if self.mode == "1":
    +            bits = 1
    +        else:
    +            bits = 8 * len(self.mode)
    +
    +        size = ((self.size[0] * bits + 7) // 8) * self.size[1]
    +        offs = self.__offset + frame * size
    +
    +        self.fp = self.__fp
    +
    +        self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))]
    +
    +    def tell(self):
    +
    +        return self.frame
    +
    +#
    +# --------------------------------------------------------------------
    +# Save IM files
    +
    +SAVE = {
    +    # mode: (im type, raw mode)
    +    "1": ("0 1", "1"),
    +    "L": ("Greyscale", "L"),
    +    "LA": ("LA", "LA;L"),
    +    "P": ("Greyscale", "P"),
    +    "PA": ("LA", "PA;L"),
    +    "I": ("L 32S", "I;32S"),
    +    "I;16": ("L 16", "I;16"),
    +    "I;16L": ("L 16L", "I;16L"),
    +    "I;16B": ("L 16B", "I;16B"),
    +    "F": ("L 32F", "F;32F"),
    +    "RGB": ("RGB", "RGB;L"),
    +    "RGBA": ("RGBA", "RGBA;L"),
    +    "RGBX": ("RGBX", "RGBX;L"),
    +    "CMYK": ("CMYK", "CMYK;L"),
    +    "YCbCr": ("YCC", "YCbCr;L")
    +}
    +
    +
    +def _save(im, fp, filename, check=0):
    +
    +    try:
    +        image_type, rawmode = SAVE[im.mode]
    +    except KeyError:
    +        raise ValueError("Cannot save %s images as IM" % im.mode)
    +
    +    try:
    +        frames = im.encoderinfo["frames"]
    +    except KeyError:
    +        frames = 1
    +
    +    if check:
    +        return check
    +
    +    fp.write(("Image type: %s image\r\n" % image_type).encode('ascii'))
    +    if filename:
    +        fp.write(("Name: %s\r\n" % filename).encode('ascii'))
    +    fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii'))
    +    fp.write(("File size (no of images): %d\r\n" % frames).encode('ascii'))
    +    if im.mode == "P":
    +        fp.write(b"Lut: 1\r\n")
    +    fp.write(b"\000" * (511-fp.tell()) + b"\032")
    +    if im.mode == "P":
    +        fp.write(im.im.getpalette("RGB", "RGB;L"))  # 768 bytes
    +    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))])
    +
    +#
    +# --------------------------------------------------------------------
    +# Registry
    +
    +Image.register_open(ImImageFile.format, ImImageFile)
    +Image.register_save(ImImageFile.format, _save)
    +
    +Image.register_extension(ImImageFile.format, ".im")
    diff --git a/server/www/packages/packages-linux/x64/PIL/Image.py b/server/www/packages/packages-linux/x64/PIL/Image.py
    new file mode 100644
    index 0000000..64f4613
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/Image.py
    @@ -0,0 +1,2522 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# the Image class wrapper
    +#
    +# partial release history:
    +# 1995-09-09 fl   Created
    +# 1996-03-11 fl   PIL release 0.0 (proof of concept)
    +# 1996-04-30 fl   PIL release 0.1b1
    +# 1999-07-28 fl   PIL release 1.0 final
    +# 2000-06-07 fl   PIL release 1.1
    +# 2000-10-20 fl   PIL release 1.1.1
    +# 2001-05-07 fl   PIL release 1.1.2
    +# 2002-03-15 fl   PIL release 1.1.3
    +# 2003-05-10 fl   PIL release 1.1.4
    +# 2005-03-28 fl   PIL release 1.1.5
    +# 2006-12-02 fl   PIL release 1.1.6
    +# 2009-11-15 fl   PIL release 1.1.7
    +#
    +# Copyright (c) 1997-2009 by Secret Labs AB.  All rights reserved.
    +# Copyright (c) 1995-2009 by Fredrik Lundh.
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from __future__ import print_function
    +
    +from PIL import VERSION, PILLOW_VERSION, _plugins
    +
    +import logging
    +import warnings
    +import math
    +
    +logger = logging.getLogger(__name__)
    +
    +
    +class DecompressionBombWarning(RuntimeWarning):
    +    pass
    +
    +
    +class _imaging_not_installed(object):
    +    # module placeholder
    +    def __getattr__(self, id):
    +        raise ImportError("The _imaging C module is not installed")
    +
    +
    +# Limit to around a quarter gigabyte for a 24 bit (3 bpp) image
    +MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 / 4 / 3)
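+# (1 GiB / 4 / 3 bytes per pixel is roughly 89.5 million pixels; opening a
+# larger image warns with DecompressionBombWarning.)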
    +
    +try:
    +    # give Tk a chance to set up the environment, in case we're
    +    # using an _imaging module linked against libtcl/libtk (use
    +    # __import__ to hide this from naive packagers; we don't really
    +    # depend on Tk unless ImageTk is used, and that module already
    +    # imports Tkinter)
    +    __import__("FixTk")
    +except ImportError:
    +    pass
    +
    +try:
    +    # If the _imaging C module is not present, Pillow will not load.
    +    # Note that other modules should not refer to _imaging directly;
    +    # import Image and use the Image.core variable instead.
    +    # Also note that Image.core is not a publicly documented interface,
    +    # and should be considered private and subject to change.
    +    from PIL import _imaging as core
    +    if PILLOW_VERSION != getattr(core, 'PILLOW_VERSION', None):
+        raise ImportError("The _imaging extension was built for another "
+                          "version of Pillow or PIL")
    +
    +except ImportError as v:
    +    core = _imaging_not_installed()
    +    # Explanations for ways that we know we might have an import error
    +    if str(v).startswith("Module use of python"):
    +        # The _imaging C module is present, but not compiled for
    +        # the right version (windows only).  Print a warning, if
    +        # possible.
    +        warnings.warn(
    +            "The _imaging extension was built for another version "
    +            "of Python.",
    +            RuntimeWarning
    +            )
    +    elif str(v).startswith("The _imaging extension"):
    +        warnings.warn(str(v), RuntimeWarning)
    +    elif "Symbol not found: _PyUnicodeUCS2_" in str(v):
    +        # should match _PyUnicodeUCS2_FromString and
    +        # _PyUnicodeUCS2_AsLatin1String
    +        warnings.warn(
    +            "The _imaging extension was built for Python with UCS2 support; "
    +            "recompile Pillow or build Python --without-wide-unicode. ",
    +            RuntimeWarning
    +            )
    +    elif "Symbol not found: _PyUnicodeUCS4_" in str(v):
    +        # should match _PyUnicodeUCS4_FromString and
    +        # _PyUnicodeUCS4_AsLatin1String
    +        warnings.warn(
    +            "The _imaging extension was built for Python with UCS4 support; "
    +            "recompile Pillow or build Python --with-wide-unicode. ",
    +            RuntimeWarning
    +            )
    +    # Fail here anyway. Don't let people run with a mostly broken Pillow.
    +    # see docs/porting.rst
    +    raise
    +
    +try:
    +    import builtins
    +except ImportError:
    +    import __builtin__
    +    builtins = __builtin__
    +
    +from PIL import ImageMode
    +from PIL._binary import i8
    +from PIL._util import isPath
    +from PIL._util import isStringType
    +from PIL._util import deferred_error
    +
    +import os
    +import sys
    +import io
    +import struct
    +
    +# type stuff
    +import collections
    +import numbers
    +
    +# works everywhere, win for pypy, not cpython
    +USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info')
    +try:
    +    import cffi
    +    HAS_CFFI = True
    +except ImportError:
    +    HAS_CFFI = False
    +
    +
    +def isImageType(t):
    +    """
    +    Checks if an object is an image object.
    +
    +    .. warning::
    +
    +       This function is for internal use only.
    +
    +    :param t: object to check if it's an image
    +    :returns: True if the object is an image
    +    """
    +    return hasattr(t, "im")
    +
    +#
    +# Constants (also defined in _imagingmodule.c!)
    +
    +NONE = 0
    +
    +# transpose
    +FLIP_LEFT_RIGHT = 0
    +FLIP_TOP_BOTTOM = 1
    +ROTATE_90 = 2
    +ROTATE_180 = 3
    +ROTATE_270 = 4
    +TRANSPOSE = 5
    +
    +# transforms
    +AFFINE = 0
    +EXTENT = 1
    +PERSPECTIVE = 2
    +QUAD = 3
    +MESH = 4
    +
    +# resampling filters
    +NEAREST = NONE = 0
    +LANCZOS = ANTIALIAS = 1
    +BILINEAR = LINEAR = 2
    +BICUBIC = CUBIC = 3
    +
    +# dithers
    +NONE = 0
    +NEAREST = 0
    +ORDERED = 1  # Not yet implemented
    +RASTERIZE = 2  # Not yet implemented
    +FLOYDSTEINBERG = 3  # default
    +
    +# palettes/quantizers
    +WEB = 0
    +ADAPTIVE = 1
    +
    +MEDIANCUT = 0
    +MAXCOVERAGE = 1
    +FASTOCTREE = 2
    +LIBIMAGEQUANT = 3
    +
    +# categories
    +NORMAL = 0
    +SEQUENCE = 1
    +CONTAINER = 2
    +
    +if hasattr(core, 'DEFAULT_STRATEGY'):
    +    DEFAULT_STRATEGY = core.DEFAULT_STRATEGY
    +    FILTERED = core.FILTERED
    +    HUFFMAN_ONLY = core.HUFFMAN_ONLY
    +    RLE = core.RLE
    +    FIXED = core.FIXED
    +
    +
    +# --------------------------------------------------------------------
    +# Registries
    +
    +ID = []
    +OPEN = {}
    +MIME = {}
    +SAVE = {}
    +SAVE_ALL = {}
    +EXTENSION = {}
    +
    +# --------------------------------------------------------------------
    +# Modes supported by this version
    +
    +_MODEINFO = {
    +    # NOTE: this table will be removed in future versions.  use
    +    # getmode* functions or ImageMode descriptors instead.
    +
    +    # official modes
    +    "1": ("L", "L", ("1",)),
    +    "L": ("L", "L", ("L",)),
    +    "I": ("L", "I", ("I",)),
    +    "F": ("L", "F", ("F",)),
    +    "P": ("RGB", "L", ("P",)),
    +    "RGB": ("RGB", "L", ("R", "G", "B")),
    +    "RGBX": ("RGB", "L", ("R", "G", "B", "X")),
    +    "RGBA": ("RGB", "L", ("R", "G", "B", "A")),
    +    "CMYK": ("RGB", "L", ("C", "M", "Y", "K")),
    +    "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")),
    +    "LAB": ("RGB", "L", ("L", "A", "B")),
    +    "HSV": ("RGB", "L", ("H", "S", "V")),
    +
    +    # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and
    +    # BGR;24.  Use these modes only if you know exactly what you're
    +    # doing...
    +
    +}
    +
    +if sys.byteorder == 'little':
    +    _ENDIAN = '<'
    +else:
    +    _ENDIAN = '>'
    +
    +_MODE_CONV = {
    +    # official modes
    +    "1": ('|b1', None),  # broken
    +    "L": ('|u1', None),
    +    "LA": ('|u1', 2),
    +    "I": (_ENDIAN + 'i4', None),
    +    "F": (_ENDIAN + 'f4', None),
    +    "P": ('|u1', None),
    +    "RGB": ('|u1', 3),
    +    "RGBX": ('|u1', 4),
    +    "RGBA": ('|u1', 4),
    +    "CMYK": ('|u1', 4),
    +    "YCbCr": ('|u1', 3),
    +    "LAB": ('|u1', 3),  # UNDONE - unsigned |u1i1i1
    +    "HSV": ('|u1', 3),
    +    # I;16 == I;16L, and I;32 == I;32L
    +    "I;16": ('u2', None),
    +    "I;16L": ('i2', None),
    +    "I;16LS": ('u4', None),
    +    "I;32L": ('i4', None),
    +    "I;32LS": ('= 1:
    +        return
    +
    +    try:
    +        from PIL import BmpImagePlugin
    +    except ImportError:
    +        pass
    +    try:
    +        from PIL import GifImagePlugin
    +    except ImportError:
    +        pass
    +    try:
    +        from PIL import JpegImagePlugin
    +    except ImportError:
    +        pass
    +    try:
    +        from PIL import PpmImagePlugin
    +    except ImportError:
    +        pass
    +    try:
    +        from PIL import PngImagePlugin
    +    except ImportError:
    +        pass
    +#   try:
    +#       import TiffImagePlugin
    +#   except ImportError:
    +#       pass
    +
    +    _initialized = 1
    +
    +
    +def init():
    +    """
    +    Explicitly initializes the Python Imaging Library. This function
    +    loads all available file format drivers.
    +    """
    +
    +    global _initialized
    +    if _initialized >= 2:
    +        return 0
    +
    +    for plugin in _plugins:
    +        try:
    +            logger.debug("Importing %s", plugin)
    +            __import__("PIL.%s" % plugin, globals(), locals(), [])
    +        except ImportError as e:
    +            logger.debug("Image: failed to import %s: %s", plugin, e)
    +
    +    if OPEN or SAVE:
    +        _initialized = 2
    +        return 1
    +
    +
    +# --------------------------------------------------------------------
    +# Codec factories (used by tobytes/frombytes and ImageFile.load)
    +
    +def _getdecoder(mode, decoder_name, args, extra=()):
    +
    +    # tweak arguments
    +    if args is None:
    +        args = ()
    +    elif not isinstance(args, tuple):
    +        args = (args,)
    +
    +    try:
    +        # get decoder
    +        decoder = getattr(core, decoder_name + "_decoder")
    +        # print(decoder, mode, args + extra)
    +        return decoder(mode, *args + extra)
    +    except AttributeError:
    +        raise IOError("decoder %s not available" % decoder_name)
    +
    +
    +def _getencoder(mode, encoder_name, args, extra=()):
    +
    +    # tweak arguments
    +    if args is None:
    +        args = ()
    +    elif not isinstance(args, tuple):
    +        args = (args,)
    +
    +    try:
    +        # get encoder
    +        encoder = getattr(core, encoder_name + "_encoder")
    +        # print(encoder, mode, args + extra)
    +        return encoder(mode, *args + extra)
    +    except AttributeError:
    +        raise IOError("encoder %s not available" % encoder_name)
    +
    +
    +# --------------------------------------------------------------------
    +# Simple expression analyzer
    +
    +def coerce_e(value):
    +    return value if isinstance(value, _E) else _E(value)
    +
    +
    +class _E(object):
    +    def __init__(self, data):
    +        self.data = data
    +
    +    def __add__(self, other):
    +        return _E((self.data, "__add__", coerce_e(other).data))
    +
    +    def __mul__(self, other):
    +        return _E((self.data, "__mul__", coerce_e(other).data))
    +
    +
    +def _getscaleoffset(expr):
    +    stub = ["stub"]
    +    data = expr(_E(stub)).data
    +    try:
    +        (a, b, c) = data  # simplified syntax
    +        if (a is stub and b == "__mul__" and isinstance(c, numbers.Number)):
    +            return c, 0.0
    +        if a is stub and b == "__add__" and isinstance(c, numbers.Number):
    +            return 1.0, c
    +    except TypeError:
    +        pass
    +    try:
    +        ((a, b, c), d, e) = data  # full syntax
    +        if (a is stub and b == "__mul__" and isinstance(c, numbers.Number) and
    +                d == "__add__" and isinstance(e, numbers.Number)):
    +            return c, e
    +    except TypeError:
    +        pass
    +    raise ValueError("illegal expression")
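+
+# Example: for "I" and "F" images, point() reduces a lambda such as
+# (lambda i: i * 1.2 + 10) via _getscaleoffset to the (scale, offset)
+# pair (1.2, 10).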
    +
    +
    +# --------------------------------------------------------------------
    +# Implementation wrapper
    +
    +class Image(object):
    +    """
    +    This class represents an image object.  To create
    +    :py:class:`~PIL.Image.Image` objects, use the appropriate factory
    +    functions.  There's hardly ever any reason to call the Image constructor
    +    directly.
    +
    +    * :py:func:`~PIL.Image.open`
    +    * :py:func:`~PIL.Image.new`
    +    * :py:func:`~PIL.Image.frombytes`
    +    """
    +    format = None
    +    format_description = None
    +
    +    def __init__(self):
    +        # FIXME: take "new" parameters / other image?
    +        # FIXME: turn mode and size into delegating properties?
    +        self.im = None
    +        self.mode = ""
    +        self.size = (0, 0)
    +        self.palette = None
    +        self.info = {}
    +        self.category = NORMAL
    +        self.readonly = 0
    +        self.pyaccess = None
    +
    +    @property
    +    def width(self):
    +        return self.size[0]
    +
    +    @property
    +    def height(self):
    +        return self.size[1]
    +
    +    def _new(self, im):
    +        new = Image()
    +        new.im = im
    +        new.mode = im.mode
    +        new.size = im.size
    +        if self.palette:
    +            new.palette = self.palette.copy()
    +        if im.mode == "P" and not new.palette:
    +            from PIL import ImagePalette
    +            new.palette = ImagePalette.ImagePalette()
    +        new.info = self.info.copy()
    +        return new
    +
    +    _makeself = _new  # compatibility
    +
    +    # Context Manager Support
    +    def __enter__(self):
    +        return self
    +
    +    def __exit__(self, *args):
    +        self.close()
    +
    +    def close(self):
    +        """
    +        Closes the file pointer, if possible.
    +
    +        This operation will destroy the image core and release its memory.
    +        The image data will be unusable afterward.
    +
    +        This function is only required to close images that have not
    +        had their file read and closed by the
    +        :py:meth:`~PIL.Image.Image.load` method.
    +        """
    +        try:
    +            self.fp.close()
    +        except Exception as msg:
    +            logger.debug("Error closing: %s", msg)
    +
    +        # Instead of simply setting to None, we're setting up a
    +        # deferred error that will better explain that the core image
    +        # object is gone.
    +        self.im = deferred_error(ValueError("Operation on closed image"))
    +
    +    def _copy(self):
    +        self.load()
    +        self.im = self.im.copy()
    +        self.pyaccess = None
    +        self.readonly = 0
    +
    +    def _dump(self, file=None, format=None):
    +        import tempfile
    +        suffix = ''
    +        if format:
    +            suffix = '.'+format
    +        if not file:
    +            f, file = tempfile.mkstemp(suffix)
    +            os.close(f)
    +
    +        self.load()
    +        if not format or format == "PPM":
    +            self.im.save_ppm(file)
    +        else:
    +            if not file.endswith(format):
    +                file = file + "." + format
    +            self.save(file, format)
    +        return file
    +
    +    def __eq__(self, other):
    +        return (self.__class__.__name__ == other.__class__.__name__ and
    +                self.mode == other.mode and
    +                self.size == other.size and
    +                self.info == other.info and
    +                self.category == other.category and
    +                self.readonly == other.readonly and
    +                self.getpalette() == other.getpalette() and
    +                self.tobytes() == other.tobytes())
    +
    +    def __ne__(self, other):
    +        eq = (self == other)
    +        return not eq
    +
    +    def __repr__(self):
    +        return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % (
    +            self.__class__.__module__, self.__class__.__name__,
    +            self.mode, self.size[0], self.size[1],
    +            id(self)
    +            )
    +
    +    def _repr_png_(self):
    +        """ iPython display hook support
    +
    +        :returns: png version of the image as bytes
    +        """
    +        from io import BytesIO
    +        b = BytesIO()
    +        self.save(b, 'PNG')
    +        return b.getvalue()
    +
    +    def __getattr__(self, name):
    +        if name == "__array_interface__":
    +            # numpy array interface support
    +            new = {}
    +            shape, typestr = _conv_type_shape(self)
    +            new['shape'] = shape
    +            new['typestr'] = typestr
    +            new['data'] = self.tobytes()
    +            new['version'] = 3
    +            return new
    +        raise AttributeError(name)
    +
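+    # The dict above implements the numpy __array_interface__ protocol, so
+    # (assuming numpy is installed) np.asarray(im) yields an array shaped
+    # per _conv_type_shape.
+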
    +    def __getstate__(self):
    +        return [
    +            self.info,
    +            self.mode,
    +            self.size,
    +            self.getpalette(),
    +            self.tobytes()]
    +
    +    def __setstate__(self, state):
    +        Image.__init__(self)
    +        self.tile = []
    +        info, mode, size, palette, data = state
    +        self.info = info
    +        self.mode = mode
    +        self.size = size
    +        self.im = core.new(mode, size)
    +        if mode in ("L", "P") and palette:
    +            self.putpalette(palette)
    +        self.frombytes(data)
    +
    +    def tobytes(self, encoder_name="raw", *args):
    +        """
    +        Return image as a bytes object.
    +
    +        .. warning::
    +
    +            This method returns the raw image data from the internal
    +            storage.  For compressed image data (e.g. PNG, JPEG) use
    +            :meth:`~.save`, with a BytesIO parameter for in-memory
    +            data.
    +
    +        :param encoder_name: What encoder to use.  The default is to
    +                             use the standard "raw" encoder.
    +        :param args: Extra arguments to the encoder.
    +        :rtype: A bytes object.
    +        """
    +
    +        # may pass tuple instead of argument list
    +        if len(args) == 1 and isinstance(args[0], tuple):
    +            args = args[0]
    +
    +        if encoder_name == "raw" and args == ():
    +            args = self.mode
    +
    +        self.load()
    +
    +        # unpack data
    +        e = _getencoder(self.mode, encoder_name, args)
    +        e.setimage(self.im)
    +
    +        bufsize = max(65536, self.size[0] * 4)  # see RawEncode.c
    +
    +        data = []
    +        while True:
    +            l, s, d = e.encode(bufsize)
    +            data.append(d)
    +            if s:
    +                break
    +        if s < 0:
    +            raise RuntimeError("encoder error %d in tobytes" % s)
    +
    +        return b"".join(data)
    +
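+    # Usage sketch (hedged example): round-trip raw pixel data through the
+    # module-level frombytes() factory:
+    #     data = im.tobytes()
+    #     im2 = Image.frombytes(im.mode, im.size, data)
+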
    +    def tostring(self, *args, **kw):
    +        raise NotImplementedError("tostring() has been removed. " +
    +                        "Please call tobytes() instead.")
    +
    +    def tobitmap(self, name="image"):
    +        """
    +        Returns the image converted to an X11 bitmap.
    +
    +        .. note:: This method only works for mode "1" images.
    +
    +        :param name: The name prefix to use for the bitmap variables.
    +        :returns: A string containing an X11 bitmap.
    +        :raises ValueError: If the mode is not "1"
    +        """
    +
    +        self.load()
    +        if self.mode != "1":
    +            raise ValueError("not a bitmap")
    +        data = self.tobytes("xbm")
    +        return b"".join([
    +            ("#define %s_width %d\n" % (name, self.size[0])).encode('ascii'),
    +            ("#define %s_height %d\n" % (name, self.size[1])).encode('ascii'),
    +            ("static char %s_bits[] = {\n" % name).encode('ascii'), data, b"};"
    +            ])
    +
    +    def frombytes(self, data, decoder_name="raw", *args):
    +        """
    +        Loads this image with pixel data from a bytes object.
    +
    +        This method is similar to the :py:func:`~PIL.Image.frombytes` function,
    +        but loads data into this image instead of creating a new image object.
    +        """
    +
    +        # may pass tuple instead of argument list
    +        if len(args) == 1 and isinstance(args[0], tuple):
    +            args = args[0]
    +
    +        # default format
    +        if decoder_name == "raw" and args == ():
    +            args = self.mode
    +
    +        # unpack data
    +        d = _getdecoder(self.mode, decoder_name, args)
    +        d.setimage(self.im)
    +        s = d.decode(data)
    +
    +        if s[0] >= 0:
    +            raise ValueError("not enough image data")
    +        if s[1] != 0:
    +            raise ValueError("cannot decode image data")
    +
    +    def fromstring(self, *args, **kw):
    +        raise NotImplementedError("fromstring() has been removed. " +
    +                        "Please call frombytes() instead.")
    +
    +    def load(self):
    +        """
    +        Allocates storage for the image and loads the pixel data.  In
    +        normal cases, you don't need to call this method, since the
    +        Image class automatically loads an opened image when it is
    +        accessed for the first time. This method will close the file
    +        associated with the image.
    +
    +        :returns: An image access object.
    +        :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess`
    +        """
    +        if self.im and self.palette and self.palette.dirty:
    +            # realize palette
    +            self.im.putpalette(*self.palette.getdata())
    +            self.palette.dirty = 0
    +            self.palette.mode = "RGB"
    +            self.palette.rawmode = None
    +            if "transparency" in self.info:
    +                if isinstance(self.info["transparency"], int):
    +                    self.im.putpalettealpha(self.info["transparency"], 0)
    +                else:
    +                    self.im.putpalettealphas(self.info["transparency"])
    +                self.palette.mode = "RGBA"
    +
    +        if self.im:
    +            if HAS_CFFI and USE_CFFI_ACCESS:
    +                if self.pyaccess:
    +                    return self.pyaccess
    +                from PIL import PyAccess
    +                self.pyaccess = PyAccess.new(self, self.readonly)
    +                if self.pyaccess:
    +                    return self.pyaccess
    +            return self.im.pixel_access(self.readonly)
    +
    +    def verify(self):
    +        """
    +        Verifies the contents of a file. For data read from a file, this
    +        method attempts to determine if the file is broken, without
    +        actually decoding the image data.  If this method finds any
    +        problems, it raises suitable exceptions.  If you need to load
    +        the image after using this method, you must reopen the image
    +        file.
    +        """
    +        pass
    +
    +    def convert(self, mode=None, matrix=None, dither=None,
    +                palette=WEB, colors=256):
    +        """
    +        Returns a converted copy of this image. For the "P" mode, this
    +        method translates pixels through the palette.  If mode is
    +        omitted, a mode is chosen so that all information in the image
    +        and the palette can be represented without a palette.
    +
    +        The current version supports all possible conversions between
    +        "L", "RGB" and "CMYK." The **matrix** argument only supports "L"
    +        and "RGB".
    +
    +        When translating a color image to black and white (mode "L"),
    +        the library uses the ITU-R 601-2 luma transform::
    +
    +            L = R * 299/1000 + G * 587/1000 + B * 114/1000
    +
    +        The default method of converting a greyscale ("L") or "RGB"
    +        image into a bilevel (mode "1") image uses Floyd-Steinberg
    +        dither to approximate the original image luminosity levels. If
    +        dither is NONE, all non-zero values are set to 255 (white). To
    +        use other thresholds, use the :py:meth:`~PIL.Image.Image.point`
    +        method.
    +
    +        :param mode: The requested mode. See: :ref:`concept-modes`.
+        :param matrix: An optional conversion matrix.  If given, this
+           should be a 4- or 12-tuple containing floating point values.
    +        :param dither: Dithering method, used when converting from
    +           mode "RGB" to "P" or from "RGB" or "L" to "1".
    +           Available methods are NONE or FLOYDSTEINBERG (default).
    +        :param palette: Palette to use when converting from mode "RGB"
    +           to "P".  Available palettes are WEB or ADAPTIVE.
    +        :param colors: Number of colors to use for the ADAPTIVE palette.
    +           Defaults to 256.
    +        :rtype: :py:class:`~PIL.Image.Image`
    +        :returns: An :py:class:`~PIL.Image.Image` object.
    +        """
    +
    +        if not mode:
    +            # determine default mode
    +            if self.mode == "P":
    +                self.load()
    +                if self.palette:
    +                    mode = self.palette.mode
    +                else:
    +                    mode = "RGB"
    +            else:
    +                return self.copy()
    +
    +        self.load()
    +
    +        if matrix:
    +            # matrix conversion
    +            if mode not in ("L", "RGB"):
    +                raise ValueError("illegal conversion")
    +            im = self.im.convert_matrix(mode, matrix)
    +            return self._new(im)
    +
    +        if mode == "P" and self.mode == "RGBA":
    +            return self.quantize(colors)
    +
    +        trns = None
    +        delete_trns = False
    +        # transparency handling
    +        if "transparency" in self.info and \
    +                self.info['transparency'] is not None:
    +            if self.mode in ('L', 'RGB') and mode == 'RGBA':
    +                # Use transparent conversion to promote from transparent
    +                # color to an alpha channel.
    +                return self._new(self.im.convert_transparent(
    +                    mode, self.info['transparency']))
    +            elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'):
    +                t = self.info['transparency']
    +                if isinstance(t, bytes):
    +                    # Dragons. This can't be represented by a single color
+                    warnings.warn('Palette images with Transparency '
+                                  'expressed in bytes should be converted '
+                                  'to RGBA images')
    +                    delete_trns = True
    +                else:
    +                    # get the new transparency color.
    +                    # use existing conversions
    +                    trns_im = Image()._new(core.new(self.mode, (1, 1)))
    +                    if self.mode == 'P':
    +                        trns_im.putpalette(self.palette)
    +                        if type(t) == tuple:
    +                            try:
    +                                t = trns_im.palette.getcolor(t)
    +                            except:
    +                                raise ValueError("Couldn't allocate a palette "+
    +                                                 "color for transparency")
    +                    trns_im.putpixel((0, 0), t)
    +
    +                    if mode in ('L', 'RGB'):
    +                        trns_im = trns_im.convert(mode)
    +                    else:
    +                        # can't just retrieve the palette number, got to do it
    +                        # after quantization.
    +                        trns_im = trns_im.convert('RGB')
    +                    trns = trns_im.getpixel((0, 0))
    +
    +            elif self.mode == 'P' and mode == 'RGBA':
    +                t = self.info['transparency']
    +                delete_trns = True
    +
    +                if isinstance(t, bytes):
    +                    self.im.putpalettealphas(t)
    +                elif isinstance(t, int):
    +                    self.im.putpalettealpha(t, 0)
    +                else:
    +                    raise ValueError("Transparency for P mode should" +
    +                                     " be bytes or int")
    +
    +        if mode == "P" and palette == ADAPTIVE:
    +            im = self.im.quantize(colors)
    +            new = self._new(im)
    +            from PIL import ImagePalette
    +            new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB"))
    +            if delete_trns:
    +                # This could possibly happen if we requantize to fewer colors.
    +                # The transparency would be totally off in that case.
    +                del(new.info['transparency'])
    +            if trns is not None:
    +                try:
    +                    new.info['transparency'] = new.palette.getcolor(trns)
    +                except:
    +                    # if we can't make a transparent color, don't leave the old
    +                    # transparency hanging around to mess us up.
    +                    del(new.info['transparency'])
    +                    warnings.warn("Couldn't allocate palette entry " +
    +                                  "for transparency")
    +            return new
    +
    +        # colorspace conversion
    +        if dither is None:
    +            dither = FLOYDSTEINBERG
    +
    +        try:
    +            im = self.im.convert(mode, dither)
    +        except ValueError:
    +            try:
    +                # normalize source image and try again
    +                im = self.im.convert(getmodebase(self.mode))
    +                im = im.convert(mode, dither)
    +            except KeyError:
    +                raise ValueError("illegal conversion")
    +
    +        new_im = self._new(im)
    +        if delete_trns:
    +            # crash fail if we leave a bytes transparency in an rgb/l mode.
    +            del(new_im.info['transparency'])
    +        if trns is not None:
    +            if new_im.mode == 'P':
    +                try:
    +                    new_im.info['transparency'] = new_im.palette.getcolor(trns)
    +                except:
    +                    del(new_im.info['transparency'])
    +                    warnings.warn("Couldn't allocate palette entry " +
    +                                  "for transparency")
    +            else:
    +                new_im.info['transparency'] = trns
    +        return new_im
    +
    +    def quantize(self, colors=256, method=None, kmeans=0, palette=None):
    +        """
    +        Convert the image to 'P' mode with the specified number
    +        of colors.
    +
    +        :param colors: The desired number of colors, <= 256
    +        :param method: 0 = median cut
    +                       1 = maximum coverage
    +                       2 = fast octree
    +                       3 = libimagequant
    +        :param kmeans: Integer
    +        :param palette: Quantize to the :py:class:`PIL.ImagingPalette` palette.
    +        :returns: A new image
    +
    +        """
    +
    +        self.load()
    +
    +        if method is None:
    +            # defaults:
    +            method = 0
    +            if self.mode == 'RGBA':
    +                method = 2
    +
    +        if self.mode == 'RGBA' and method not in (2, 3):
    +            # Caller specified an invalid mode.
    +            raise ValueError(
    +                'Fast Octree (method == 2) and libimagequant (method == 3) ' +
    +                'are the only valid methods for quantizing RGBA images')
    +
    +        if palette:
    +            # use palette from reference image
    +            palette.load()
    +            if palette.mode != "P":
    +                raise ValueError("bad mode for palette image")
    +            if self.mode != "RGB" and self.mode != "L":
    +                raise ValueError(
    +                    "only RGB or L mode images can be quantized to a palette"
    +                    )
    +            im = self.im.convert("P", 1, palette.im)
    +            return self._makeself(im)
    +
    +        return self._new(self.im.quantize(colors, method, kmeans))
    +
    +    def copy(self):
    +        """
    +        Copies this image. Use this method if you wish to paste things
    +        into an image, but still retain the original.
    +
    +        :rtype: :py:class:`~PIL.Image.Image`
    +        :returns: An :py:class:`~PIL.Image.Image` object.
    +        """
    +        self.load()
    +        return self._new(self.im.copy())
    +
    +    __copy__ = copy
    +
    +    def crop(self, box=None):
    +        """
    +        Returns a rectangular region from this image. The box is a
    +        4-tuple defining the left, upper, right, and lower pixel
    +        coordinate.
    +
    +        This is a lazy operation.  Changes to the source image may or
    +        may not be reflected in the cropped image.  To break the
    +        connection, call the :py:meth:`~PIL.Image.Image.load` method on
    +        the cropped copy.
    +
    +        :param box: The crop rectangle, as a (left, upper, right, lower)-tuple.
    +        :rtype: :py:class:`~PIL.Image.Image`
    +        :returns: An :py:class:`~PIL.Image.Image` object.
    +        """
    +
    +        self.load()
    +        if box is None:
    +            return self.copy()
    +
    +        # lazy operation
    +        return _ImageCrop(self, box)
    +
    +    def draft(self, mode, size):
    +        """
    +        Configures the image file loader so it returns a version of the
    +        image that as closely as possible matches the given mode and
    +        size.  For example, you can use this method to convert a color
    +        JPEG to greyscale while loading it, or to extract a 128x192
    +        version from a PCD file.
    +
    +        Note that this method modifies the :py:class:`~PIL.Image.Image` object
    +        in place.  If the image has already been loaded, this method has no
    +        effect.
    +
    +        :param mode: The requested mode.
    +        :param size: The requested size.
    +        """
    +        pass
    +
    +    def _expand(self, xmargin, ymargin=None):
    +        if ymargin is None:
    +            ymargin = xmargin
    +        self.load()
    +        return self._new(self.im.expand(xmargin, ymargin, 0))
    +
    +    def filter(self, filter):
    +        """
    +        Filters this image using the given filter.  For a list of
    +        available filters, see the :py:mod:`~PIL.ImageFilter` module.
    +
    +        :param filter: Filter kernel.
    +        :returns: An :py:class:`~PIL.Image.Image` object.  """
    +
    +        self.load()
    +
    +        if isinstance(filter, collections.Callable):
    +            filter = filter()
    +        if not hasattr(filter, "filter"):
    +            raise TypeError("filter argument should be ImageFilter.Filter " +
    +                            "instance or class")
    +
    +        if self.im.bands == 1:
    +            return self._new(filter.filter(self.im))
    +        # fix to handle multiband images since _imaging doesn't
    +        ims = []
    +        for c in range(self.im.bands):
    +            ims.append(self._new(filter.filter(self.im.getband(c))))
    +        return merge(self.mode, ims)
    +
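+    # Usage sketch for filter() (hypothetical example; "photo.jpg" is an
+    # assumed file). ImageFilter supplies ready-made kernels such as BLUR:
+    #
+    #     from PIL import Image, ImageFilter
+    #     im = Image.open("photo.jpg")
+    #     blurred = im.filter(ImageFilter.BLUR)
+    #     sharpened = im.filter(ImageFilter.SHARPEN)
+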
    +    def getbands(self):
    +        """
    +        Returns a tuple containing the name of each band in this image.
    +        For example, **getbands** on an RGB image returns ("R", "G", "B").
    +
    +        :returns: A tuple containing band names.
    +        :rtype: tuple
    +        """
    +        return ImageMode.getmode(self.mode).bands
    +
    +    def getbbox(self):
    +        """
    +        Calculates the bounding box of the non-zero regions in the
    +        image.
    +
    +        :returns: The bounding box is returned as a 4-tuple defining the
    +           left, upper, right, and lower pixel coordinate. If the image
    +           is completely empty, this method returns None.
    +
    +        """
    +
    +        self.load()
    +        return self.im.getbbox()
    +
    +    def getcolors(self, maxcolors=256):
    +        """
    +        Returns a list of colors used in this image.
    +
    +        :param maxcolors: Maximum number of colors.  If this number is
    +           exceeded, this method returns None.  The default limit is
    +           256 colors.
    +        :returns: An unsorted list of (count, pixel) values.
    +        """
    +
    +        self.load()
    +        if self.mode in ("1", "L", "P"):
    +            h = self.im.histogram()
    +            out = []
    +            for i in range(256):
    +                if h[i]:
    +                    out.append((h[i], i))
    +            if len(out) > maxcolors:
    +                return None
    +            return out
    +        return self.im.getcolors(maxcolors)
    +
    +    def getdata(self, band=None):
    +        """
    +        Returns the contents of this image as a sequence object
    +        containing pixel values.  The sequence object is flattened, so
    +        that values for line one follow directly after the values of
    +        line zero, and so on.
    +
    +        Note that the sequence object returned by this method is an
    +        internal PIL data type, which only supports certain sequence
    +        operations.  To convert it to an ordinary sequence (e.g. for
    +        printing), use **list(im.getdata())**.
    +
    +        :param band: What band to return.  The default is to return
    +           all bands.  To return a single band, pass in the index
    +           value (e.g. 0 to get the "R" band from an "RGB" image).
    +        :returns: A sequence-like object.
    +        """
    +
    +        self.load()
    +        if band is not None:
    +            return self.im.getband(band)
    +        return self.im  # could be abused
    +
    +    def getextrema(self):
    +        """
+        Gets the minimum and maximum pixel values for each band in
    +        the image.
    +
    +        :returns: For a single-band image, a 2-tuple containing the
    +           minimum and maximum pixel value.  For a multi-band image,
    +           a tuple containing one 2-tuple for each band.
    +        """
    +
    +        self.load()
    +        if self.im.bands > 1:
    +            extrema = []
    +            for i in range(self.im.bands):
    +                extrema.append(self.im.getband(i).getextrema())
    +            return tuple(extrema)
    +        return self.im.getextrema()
    +
    +    def getim(self):
    +        """
    +        Returns a capsule that points to the internal image memory.
    +
    +        :returns: A capsule object.
    +        """
    +
    +        self.load()
    +        return self.im.ptr
    +
    +    def getpalette(self):
    +        """
    +        Returns the image palette as a list.
    +
    +        :returns: A list of color values [r, g, b, ...], or None if the
    +           image has no palette.
    +        """
    +
    +        self.load()
    +        try:
    +            if bytes is str:
    +                return [i8(c) for c in self.im.getpalette()]
    +            else:
    +                return list(self.im.getpalette())
    +        except ValueError:
    +            return None  # no palette
    +
    +    def getpixel(self, xy):
    +        """
    +        Returns the pixel value at a given position.
    +
    +        :param xy: The coordinate, given as (x, y).
    +        :returns: The pixel value.  If the image is a multi-layer image,
    +           this method returns a tuple.
    +        """
    +
    +        self.load()
    +        if self.pyaccess:
    +            return self.pyaccess.getpixel(xy)
    +        return self.im.getpixel(xy)
    +
    +    def getprojection(self):
    +        """
    +        Get projection to x and y axes
    +
    +        :returns: Two sequences, indicating where there are non-zero
    +            pixels along the X-axis and the Y-axis, respectively.
    +        """
    +
    +        self.load()
    +        x, y = self.im.getprojection()
    +        return [i8(c) for c in x], [i8(c) for c in y]
    +
    +    def histogram(self, mask=None, extrema=None):
    +        """
    +        Returns a histogram for the image. The histogram is returned as
    +        a list of pixel counts, one for each pixel value in the source
    +        image. If the image has more than one band, the histograms for
    +        all bands are concatenated (for example, the histogram for an
    +        "RGB" image contains 768 values).
    +
    +        A bilevel image (mode "1") is treated as a greyscale ("L") image
    +        by this method.
    +
    +        If a mask is provided, the method returns a histogram for those
    +        parts of the image where the mask image is non-zero. The mask
    +        image must have the same size as the image, and be either a
    +        bi-level image (mode "1") or a greyscale image ("L").
    +
    +        :param mask: An optional mask.
    +        :returns: A list containing pixel counts.
    +        """
    +        self.load()
    +        if mask:
    +            mask.load()
    +            return self.im.histogram((0, 0), mask.im)
    +        if self.mode in ("I", "F"):
    +            if extrema is None:
    +                extrema = self.getextrema()
    +            return self.im.histogram(extrema)
    +        return self.im.histogram()
    +
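+    # Usage sketch for histogram() (hypothetical example). For an "RGB"
+    # image the result concatenates three 256-entry band histograms:
+    #
+    #     from PIL import Image
+    #     im = Image.new("RGB", (8, 8), (255, 0, 0))
+    #     h = im.histogram()
+    #     assert len(h) == 768 and h[255] == 64  # 64 red pixels at 255
+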
    +    def offset(self, xoffset, yoffset=None):
    +        raise NotImplementedError("offset() has been removed. " +
    +                        "Please call ImageChops.offset() instead.")
    +
    +    def paste(self, im, box=None, mask=None):
    +        """
    +        Pastes another image into this image. The box argument is either
    +        a 2-tuple giving the upper left corner, a 4-tuple defining the
    +        left, upper, right, and lower pixel coordinate, or None (same as
    +        (0, 0)).  If a 4-tuple is given, the size of the pasted image
    +        must match the size of the region.
    +
    +        If the modes don't match, the pasted image is converted to the mode of
    +        this image (see the :py:meth:`~PIL.Image.Image.convert` method for
    +        details).
    +
+        Instead of an image, the source can be an integer or tuple
    +        containing pixel values.  The method then fills the region
    +        with the given color.  When creating RGB images, you can
    +        also use color strings as supported by the ImageColor module.
    +
    +        If a mask is given, this method updates only the regions
    +        indicated by the mask.  You can use either "1", "L" or "RGBA"
    +        images (in the latter case, the alpha band is used as mask).
    +        Where the mask is 255, the given image is copied as is.  Where
    +        the mask is 0, the current value is preserved.  Intermediate
    +        values will mix the two images together, including their alpha
    +        channels if they have them.
    +
    +        See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to
    +        combine images with respect to their alpha channels.
    +
    +        :param im: Source image or pixel value (integer or tuple).
    +        :param box: An optional 4-tuple giving the region to paste into.
    +           If a 2-tuple is used instead, it's treated as the upper left
    +           corner.  If omitted or None, the source is pasted into the
    +           upper left corner.
    +
    +           If an image is given as the second argument and there is no
    +           third, the box defaults to (0, 0), and the second argument
    +           is interpreted as a mask image.
    +        :param mask: An optional mask image.
    +        """
    +
    +        if isImageType(box) and mask is None:
    +            # abbreviated paste(im, mask) syntax
    +            mask = box
    +            box = None
    +
    +        if box is None:
    +            # cover all of self
    +            box = (0, 0) + self.size
    +
    +        if len(box) == 2:
    +            # upper left corner given; get size from image or mask
    +            if isImageType(im):
    +                size = im.size
    +            elif isImageType(mask):
    +                size = mask.size
    +            else:
    +                # FIXME: use self.size here?
    +                raise ValueError(
    +                    "cannot determine region size; use 4-item box"
    +                    )
    +            box = box + (box[0]+size[0], box[1]+size[1])
    +
    +        if isStringType(im):
    +            from PIL import ImageColor
    +            im = ImageColor.getcolor(im, self.mode)
    +
    +        elif isImageType(im):
    +            im.load()
    +            if self.mode != im.mode:
    +                if self.mode != "RGB" or im.mode not in ("RGBA", "RGBa"):
    +                    # should use an adapter for this!
    +                    im = im.convert(self.mode)
    +            im = im.im
    +
    +        self.load()
    +        if self.readonly:
    +            self._copy()
    +
    +        if mask:
    +            mask.load()
    +            self.im.paste(im, box, mask.im)
    +        else:
    +            self.im.paste(im, box)
    +
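+    # Usage sketch for paste() (hypothetical example). A solid colour fills
+    # a region, then a second image is blended in through an "L" mode mask:
+    #
+    #     from PIL import Image
+    #     base = Image.new("RGB", (100, 100), "white")
+    #     base.paste("red", (0, 0, 50, 50))       # fill region with a colour
+    #     mask = Image.new("L", (100, 100), 128)  # 50% blend everywhere
+    #     blue = Image.new("RGB", (100, 100), "blue")
+    #     base.paste(blue, (0, 0), mask)
+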
    +    def point(self, lut, mode=None):
    +        """
    +        Maps this image through a lookup table or function.
    +
+        :param lut: A lookup table, containing 256 (or 65536 if
    +           self.mode=="I" and mode == "L") values per band in the
    +           image.  A function can be used instead, it should take a
    +           single argument. The function is called once for each
    +           possible pixel value, and the resulting table is applied to
    +           all bands of the image.
    +        :param mode: Output mode (default is same as input).  In the
    +           current version, this can only be used if the source image
    +           has mode "L" or "P", and the output has mode "1" or the
    +           source image mode is "I" and the output mode is "L".
    +        :returns: An :py:class:`~PIL.Image.Image` object.
    +        """
    +
    +        self.load()
    +
    +        if isinstance(lut, ImagePointHandler):
    +            return lut.point(self)
    +
    +        if callable(lut):
    +            # if it isn't a list, it should be a function
    +            if self.mode in ("I", "I;16", "F"):
    +                # check if the function can be used with point_transform
    +                # UNDONE wiredfool -- I think this prevents us from ever doing
    +                # a gamma function point transform on > 8bit images.
    +                scale, offset = _getscaleoffset(lut)
    +                return self._new(self.im.point_transform(scale, offset))
    +            # for other modes, convert the function to a table
    +            lut = [lut(i) for i in range(256)] * self.im.bands
    +
    +        if self.mode == "F":
    +            # FIXME: _imaging returns a confusing error message for this case
    +            raise ValueError("point operation not supported for this mode")
    +
    +        return self._new(self.im.point(lut, mode))
    +
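+    # Usage sketch for point() (hypothetical example; im is an assumed "L"
+    # mode image). The function is evaluated once per possible pixel value
+    # (0..255) to build the lookup table:
+    #
+    #     brighter = im.point(lambda i: min(255, i * 2))
+    #     binary = im.point(lambda i: 255 if i > 128 else 0, "1")
+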
    +    def putalpha(self, alpha):
    +        """
    +        Adds or replaces the alpha layer in this image.  If the image
    +        does not have an alpha layer, it's converted to "LA" or "RGBA".
    +        The new layer must be either "L" or "1".
    +
    +        :param alpha: The new alpha layer.  This can either be an "L" or "1"
    +           image having the same size as this image, or an integer or
    +           other color value.
    +        """
    +
    +        self.load()
    +        if self.readonly:
    +            self._copy()
    +
    +        if self.mode not in ("LA", "RGBA"):
    +            # attempt to promote self to a matching alpha mode
    +            try:
    +                mode = getmodebase(self.mode) + "A"
    +                try:
    +                    self.im.setmode(mode)
    +                    self.pyaccess = None
    +                except (AttributeError, ValueError):
    +                    # do things the hard way
    +                    im = self.im.convert(mode)
    +                    if im.mode not in ("LA", "RGBA"):
    +                        raise ValueError  # sanity check
    +                    self.im = im
    +                    self.pyaccess = None
    +                self.mode = self.im.mode
    +            except (KeyError, ValueError):
    +                raise ValueError("illegal image mode")
    +
    +        if self.mode == "LA":
    +            band = 1
    +        else:
    +            band = 3
    +
    +        if isImageType(alpha):
    +            # alpha layer
    +            if alpha.mode not in ("1", "L"):
    +                raise ValueError("illegal image mode")
    +            alpha.load()
    +            if alpha.mode == "1":
    +                alpha = alpha.convert("L")
    +        else:
    +            # constant alpha
    +            try:
    +                self.im.fillband(band, alpha)
    +            except (AttributeError, ValueError):
    +                # do things the hard way
    +                alpha = new("L", self.size, alpha)
    +            else:
    +                return
    +
    +        self.im.putband(alpha.im, band)
    +
    +    def putdata(self, data, scale=1.0, offset=0.0):
    +        """
    +        Copies pixel data to this image.  This method copies data from a
    +        sequence object into the image, starting at the upper left
    +        corner (0, 0), and continuing until either the image or the
    +        sequence ends.  The scale and offset values are used to adjust
    +        the sequence values: **pixel = value*scale + offset**.
    +
    +        :param data: A sequence object.
    +        :param scale: An optional scale value.  The default is 1.0.
    +        :param offset: An optional offset value.  The default is 0.0.
    +        """
    +
    +        self.load()
    +        if self.readonly:
    +            self._copy()
    +
    +        self.im.putdata(data, scale, offset)
    +
    +    def putpalette(self, data, rawmode="RGB"):
    +        """
    +        Attaches a palette to this image.  The image must be a "P" or
    +        "L" image, and the palette sequence must contain 768 integer
    +        values, where each group of three values represent the red,
    +        green, and blue values for the corresponding pixel
    +        index. Instead of an integer sequence, you can use an 8-bit
    +        string.
    +
    +        :param data: A palette sequence (either a list or a string).
    +        """
    +        from PIL import ImagePalette
    +
    +        if self.mode not in ("L", "P"):
    +            raise ValueError("illegal image mode")
    +        self.load()
    +        if isinstance(data, ImagePalette.ImagePalette):
    +            palette = ImagePalette.raw(data.rawmode, data.palette)
    +        else:
    +            if not isinstance(data, bytes):
    +                if bytes is str:
    +                    data = "".join(chr(x) for x in data)
    +                else:
    +                    data = bytes(data)
    +            palette = ImagePalette.raw(rawmode, data)
    +        self.mode = "P"
    +        self.palette = palette
    +        self.palette.mode = "RGB"
    +        self.load()  # install new palette
    +
    +    def putpixel(self, xy, value):
    +        """
    +        Modifies the pixel at the given position. The color is given as
    +        a single numerical value for single-band images, and a tuple for
    +        multi-band images.
    +
    +        Note that this method is relatively slow.  For more extensive changes,
    +        use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw`
    +        module instead.
    +
    +        See:
    +
    +        * :py:meth:`~PIL.Image.Image.paste`
    +        * :py:meth:`~PIL.Image.Image.putdata`
    +        * :py:mod:`~PIL.ImageDraw`
    +
    +        :param xy: The pixel coordinate, given as (x, y).
    +        :param value: The pixel value.
    +        """
    +
    +        self.load()
    +        if self.readonly:
    +            self._copy()
    +            self.pyaccess = None
    +            self.load()
    +
    +        if self.pyaccess:
    +            return self.pyaccess.putpixel(xy, value)
    +        return self.im.putpixel(xy, value)
    +
    +    def resize(self, size, resample=NEAREST):
    +        """
    +        Returns a resized copy of this image.
    +
    +        :param size: The requested size in pixels, as a 2-tuple:
    +           (width, height).
    +        :param resample: An optional resampling filter.  This can be
    +           one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour),
    +           :py:attr:`PIL.Image.BILINEAR` (linear interpolation),
    +           :py:attr:`PIL.Image.BICUBIC` (cubic spline interpolation), or
    +           :py:attr:`PIL.Image.LANCZOS` (a high-quality downsampling filter).
    +           If omitted, or if the image has mode "1" or "P", it is
+           set to :py:attr:`PIL.Image.NEAREST`.
    +        :returns: An :py:class:`~PIL.Image.Image` object.
    +        """
    +
    +        if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS):
    +            raise ValueError("unknown resampling filter")
    +
    +        self.load()
    +
    +        size = tuple(size)
    +        if self.size == size:
    +            return self._new(self.im)
    +
    +        if self.mode in ("1", "P"):
    +            resample = NEAREST
    +
    +        if self.mode == 'LA':
    +            return self.convert('La').resize(size, resample).convert('LA')
    +
    +        if self.mode == 'RGBA':
    +            return self.convert('RGBa').resize(size, resample).convert('RGBA')
    +
    +        return self._new(self.im.resize(size, resample))
    +
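+    # Usage sketch for resize() (hypothetical example; im is an assumed
+    # Image instance). LANCZOS is the usual choice when downscaling:
+    #
+    #     from PIL import Image
+    #     w, h = im.size
+    #     half = im.resize((w // 2, h // 2), Image.LANCZOS)
+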
    +    def rotate(self, angle, resample=NEAREST, expand=0):
    +        """
    +        Returns a rotated copy of this image.  This method returns a
    +        copy of this image, rotated the given number of degrees counter
    +        clockwise around its centre.
    +
    +        :param angle: In degrees counter clockwise.
    +        :param resample: An optional resampling filter.  This can be
    +           one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour),
    +           :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2
    +           environment), or :py:attr:`PIL.Image.BICUBIC`
    +           (cubic spline interpolation in a 4x4 environment).
    +           If omitted, or if the image has mode "1" or "P", it is
+           set to :py:attr:`PIL.Image.NEAREST`.
    +        :param expand: Optional expansion flag.  If true, expands the output
    +           image to make it large enough to hold the entire rotated image.
    +           If false or omitted, make the output image the same size as the
    +           input image.
    +        :returns: An :py:class:`~PIL.Image.Image` object.
    +        """
    +
    +        angle = angle % 360.0
    +
    +        # Fast paths regardless of filter
    +        if angle == 0:
    +            return self._new(self.im)
    +        if angle == 180:
    +            return self.transpose(ROTATE_180)
    +        if angle == 90 and expand:
    +            return self.transpose(ROTATE_90)
    +        if angle == 270 and expand:
    +            return self.transpose(ROTATE_270)
    +
    +        angle = - math.radians(angle)
    +        matrix = [
    +            round(math.cos(angle), 15), round(math.sin(angle), 15), 0.0,
    +            round(-math.sin(angle), 15), round(math.cos(angle), 15), 0.0
    +            ]
    +
    +        def transform(x, y, matrix=matrix):
    +            (a, b, c, d, e, f) = matrix
    +            return a*x + b*y + c, d*x + e*y + f
    +
    +        w, h = self.size
    +        if expand:
    +            # calculate output size
    +            xx = []
    +            yy = []
    +            for x, y in ((0, 0), (w, 0), (w, h), (0, h)):
    +                x, y = transform(x, y)
    +                xx.append(x)
    +                yy.append(y)
    +            w = int(math.ceil(max(xx)) - math.floor(min(xx)))
    +            h = int(math.ceil(max(yy)) - math.floor(min(yy)))
    +
    +        # adjust center
    +        x, y = transform(w / 2.0, h / 2.0)
    +        matrix[2] = self.size[0] / 2.0 - x
    +        matrix[5] = self.size[1] / 2.0 - y
    +
    +        return self.transform((w, h), AFFINE, matrix, resample)
    +
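+    # Usage sketch for rotate() (hypothetical example; im is an assumed
+    # Image instance). With expand=1 the output grows to hold every corner
+    # of the rotated image instead of clipping to the input size:
+    #
+    #     from PIL import Image
+    #     tilted = im.rotate(45, resample=Image.BICUBIC, expand=1)
+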
    +    def save(self, fp, format=None, **params):
    +        """
    +        Saves this image under the given filename.  If no format is
    +        specified, the format to use is determined from the filename
    +        extension, if possible.
    +
    +        Keyword options can be used to provide additional instructions
    +        to the writer. If a writer doesn't recognise an option, it is
    +        silently ignored. The available options are described in the
    +        :doc:`image format documentation
    +        <../handbook/image-file-formats>` for each writer.
    +
    +        You can use a file object instead of a filename. In this case,
    +        you must always specify the format. The file object must
    +        implement the ``seek``, ``tell``, and ``write``
    +        methods, and be opened in binary mode.
    +
    +        :param fp: A filename (string), pathlib.Path object or file object.
    +        :param format: Optional format override.  If omitted, the
    +           format to use is determined from the filename extension.
    +           If a file object was used instead of a filename, this
    +           parameter should always be used.
    +        :param options: Extra parameters to the image writer.
    +        :returns: None
    +        :exception KeyError: If the output format could not be determined
    +           from the file name.  Use the format option to solve this.
    +        :exception IOError: If the file could not be written.  The file
    +           may have been created, and may contain partial data.
    +        """
    +
    +        filename = ""
    +        open_fp = False
    +        if isPath(fp):
    +            filename = fp
    +            open_fp = True
    +        elif sys.version_info >= (3, 4):
    +            from pathlib import Path
    +            if isinstance(fp, Path):
    +                filename = str(fp)
    +                open_fp = True
    +        elif hasattr(fp, "name") and isPath(fp.name):
    +            # only set the name for metadata purposes
    +            filename = fp.name
    +
    +        # may mutate self!
    +        self.load()
    +
    +        save_all = False
    +        if 'save_all' in params:
    +            save_all = params.pop('save_all')
    +        self.encoderinfo = params
    +        self.encoderconfig = ()
    +
    +        preinit()
    +
    +        ext = os.path.splitext(filename)[1].lower()
    +
    +        if not format:
    +            if ext not in EXTENSION:
    +                init()
    +            format = EXTENSION[ext]
    +
    +        if format.upper() not in SAVE:
    +            init()
    +        if save_all:
    +            save_handler = SAVE_ALL[format.upper()]
    +        else:
    +            save_handler = SAVE[format.upper()]
    +
    +        if open_fp:
    +            fp = builtins.open(filename, "wb")
    +
    +        try:
    +            save_handler(self, fp, filename)
    +        finally:
    +            # do what we can to clean up
    +            if open_fp:
    +                fp.close()
    +
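+    # Usage sketch for save() (hypothetical example; im is an assumed Image
+    # instance). When writing to a file object rather than a filename, the
+    # format must be given explicitly:
+    #
+    #     import io
+    #     buf = io.BytesIO()
+    #     im.save(buf, format="PNG")
+    #     data = buf.getvalue()
+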
    +    def seek(self, frame):
    +        """
    +        Seeks to the given frame in this sequence file. If you seek
    +        beyond the end of the sequence, the method raises an
    +        **EOFError** exception. When a sequence file is opened, the
    +        library automatically seeks to frame 0.
    +
    +        Note that in the current version of the library, most sequence
+        formats only allow you to seek to the next frame.
    +
    +        See :py:meth:`~PIL.Image.Image.tell`.
    +
    +        :param frame: Frame number, starting at 0.
    +        :exception EOFError: If the call attempts to seek beyond the end
    +            of the sequence.
    +        """
    +
    +        # overridden by file handlers
    +        if frame != 0:
    +            raise EOFError
    +
    +    def show(self, title=None, command=None):
    +        """
    +        Displays this image. This method is mainly intended for
    +        debugging purposes.
    +
    +        On Unix platforms, this method saves the image to a temporary
    +        PPM file, and calls either the **xv** utility or the **display**
    +        utility, depending on which one can be found.
    +
    +        On OS X, this method saves the image to a temporary BMP file, and opens
    +        it with the native Preview application.
    +
    +        On Windows, it saves the image to a temporary BMP file, and uses
    +        the standard BMP display utility to show it (usually Paint).
    +
    +        :param title: Optional title to use for the image window,
    +           where possible.
    +        :param command: command used to show the image
    +        """
    +
    +        _show(self, title=title, command=command)
    +
    +    def split(self):
    +        """
    +        Split this image into individual bands. This method returns a
    +        tuple of individual image bands from an image. For example,
    +        splitting an "RGB" image creates three new images each
    +        containing a copy of one of the original bands (red, green,
    +        blue).
    +
    +        :returns: A tuple containing bands.
    +        """
    +
    +        self.load()
    +        if self.im.bands == 1:
    +            ims = [self.copy()]
    +        else:
    +            ims = []
    +            for i in range(self.im.bands):
    +                ims.append(self._new(self.im.getband(i)))
    +        return tuple(ims)
    +
    +    def tell(self):
    +        """
    +        Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`.
    +
    +        :returns: Frame number, starting with 0.
    +        """
    +        return 0
    +
    +    def thumbnail(self, size, resample=BICUBIC):
    +        """
    +        Make this image into a thumbnail.  This method modifies the
    +        image to contain a thumbnail version of itself, no larger than
    +        the given size.  This method calculates an appropriate thumbnail
    +        size to preserve the aspect of the image, calls the
    +        :py:meth:`~PIL.Image.Image.draft` method to configure the file reader
    +        (where applicable), and finally resizes the image.
    +
    +        Note that this function modifies the :py:class:`~PIL.Image.Image`
    +        object in place.  If you need to use the full resolution image as well,
    +        apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original
    +        image.
    +
    +        :param size: Requested size.
    +        :param resample: Optional resampling filter.  This can be one
    +           of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`,
    +           :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`.
    +           If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`.
    +           (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0)
    +        :returns: None
    +        """
    +
    +        # preserve aspect ratio
    +        x, y = self.size
    +        if x > size[0]:
    +            y = int(max(y * size[0] / x, 1))
    +            x = int(size[0])
    +        if y > size[1]:
    +            x = int(max(x * size[1] / y, 1))
    +            y = int(size[1])
    +        size = x, y
    +
    +        if size == self.size:
    +            return
    +
    +        self.draft(None, size)
    +
    +        im = self.resize(size, resample)
    +
    +        self.im = im.im
    +        self.mode = im.mode
    +        self.size = size
    +
    +        self.readonly = 0
    +        self.pyaccess = None
    +
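+    # Usage sketch for thumbnail() (hypothetical example; im is an assumed
+    # Image instance). The image is modified in place, so work on a copy()
+    # when the original is still needed:
+    #
+    #     thumb = im.copy()
+    #     thumb.thumbnail((128, 128))  # at most 128x128, aspect preserved
+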
    +    # FIXME: the different transform methods need further explanation
    +    # instead of bloating the method docs, add a separate chapter.
    +    def transform(self, size, method, data=None, resample=NEAREST, fill=1):
    +        """
    +        Transforms this image.  This method creates a new image with the
    +        given size, and the same mode as the original, and copies data
    +        to the new image using the given transform.
    +
    +        :param size: The output size.
    +        :param method: The transformation method.  This is one of
    +          :py:attr:`PIL.Image.EXTENT` (cut out a rectangular subregion),
    +          :py:attr:`PIL.Image.AFFINE` (affine transform),
    +          :py:attr:`PIL.Image.PERSPECTIVE` (perspective transform),
    +          :py:attr:`PIL.Image.QUAD` (map a quadrilateral to a rectangle), or
    +          :py:attr:`PIL.Image.MESH` (map a number of source quadrilaterals
    +          in one operation).
    +        :param data: Extra data to the transformation method.
    +        :param resample: Optional resampling filter.  It can be one of
    +           :py:attr:`PIL.Image.NEAREST` (use nearest neighbour),
    +           :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2
    +           environment), or :py:attr:`PIL.Image.BICUBIC` (cubic spline
    +           interpolation in a 4x4 environment). If omitted, or if the image
    +           has mode "1" or "P", it is set to :py:attr:`PIL.Image.NEAREST`.
    +        :returns: An :py:class:`~PIL.Image.Image` object.
    +        """
    +
    +        if self.mode == 'LA':
    +            return self.convert('La').transform(
    +                size, method, data, resample, fill).convert('LA')
    +
    +        if self.mode == 'RGBA':
    +            return self.convert('RGBa').transform(
    +                size, method, data, resample, fill).convert('RGBA')
    +
    +        if isinstance(method, ImageTransformHandler):
    +            return method.transform(size, self, resample=resample, fill=fill)
    +
    +        if hasattr(method, "getdata"):
    +            # compatibility w. old-style transform objects
    +            method, data = method.getdata()
    +
    +        if data is None:
    +            raise ValueError("missing method data")
    +
    +        im = new(self.mode, size, None)
    +        if method == MESH:
    +            # list of quads
    +            for box, quad in data:
    +                im.__transformer(box, self, QUAD, quad, resample, fill)
    +        else:
    +            im.__transformer((0, 0)+size, self, method, data, resample, fill)
    +
    +        return im
    +
    +    def __transformer(self, box, image, method, data,
    +                      resample=NEAREST, fill=1):
    +        w = box[2] - box[0]
    +        h = box[3] - box[1]
    +
    +        if method == AFFINE:
    +            data = data[0:6]
    +
    +        elif method == EXTENT:
    +            # convert extent to an affine transform
    +            x0, y0, x1, y1 = data
    +            xs = float(x1 - x0) / w
    +            ys = float(y1 - y0) / h
    +            method = AFFINE
    +            data = (xs, 0, x0 + xs/2, 0, ys, y0 + ys/2)
    +
    +        elif method == PERSPECTIVE:
    +            data = data[0:8]
    +
    +        elif method == QUAD:
    +            # quadrilateral warp.  data specifies the four corners
    +            # given as NW, SW, SE, and NE.
    +            nw = data[0:2]
    +            sw = data[2:4]
    +            se = data[4:6]
    +            ne = data[6:8]
    +            x0, y0 = nw
    +            As = 1.0 / w
    +            At = 1.0 / h
    +            data = (x0, (ne[0]-x0)*As, (sw[0]-x0)*At,
    +                    (se[0]-sw[0]-ne[0]+x0)*As*At,
    +                    y0, (ne[1]-y0)*As, (sw[1]-y0)*At,
    +                    (se[1]-sw[1]-ne[1]+y0)*As*At)
    +
    +        else:
    +            raise ValueError("unknown transformation method")
    +
    +        if resample not in (NEAREST, BILINEAR, BICUBIC):
    +            raise ValueError("unknown resampling filter")
    +
    +        image.load()
    +
    +        self.load()
    +
    +        if image.mode in ("1", "P"):
    +            resample = NEAREST
    +
    +        self.im.transform2(box, image.im, method, data, resample, fill)
    +
    +    def transpose(self, method):
    +        """
    +        Transpose image (flip or rotate in 90 degree steps)
    +
    +        :param method: One of :py:attr:`PIL.Image.FLIP_LEFT_RIGHT`,
    +          :py:attr:`PIL.Image.FLIP_TOP_BOTTOM`, :py:attr:`PIL.Image.ROTATE_90`,
    +          :py:attr:`PIL.Image.ROTATE_180`, :py:attr:`PIL.Image.ROTATE_270` or
    +          :py:attr:`PIL.Image.TRANSPOSE`.
    +        :returns: Returns a flipped or rotated copy of this image.
    +        """
    +
    +        self.load()
    +        return self._new(self.im.transpose(method))
    +
    +    def effect_spread(self, distance):
    +        """
    +        Randomly spread pixels in an image.
    +
    +        :param distance: Distance to spread pixels.
    +        """
    +        self.load()
    +        return self._new(self.im.effect_spread(distance))
    +
    +    def toqimage(self):
    +        """Returns a QImage copy of this image"""
    +        from PIL import ImageQt
    +        if not ImageQt.qt_is_installed:
    +            raise ImportError("Qt bindings are not installed")
    +        return ImageQt.toqimage(self)
    +
    +    def toqpixmap(self):
    +        """Returns a QPixmap copy of this image"""
    +        from PIL import ImageQt
    +        if not ImageQt.qt_is_installed:
    +            raise ImportError("Qt bindings are not installed")
    +        return ImageQt.toqpixmap(self)
    +
    +
    +# --------------------------------------------------------------------
    +# Lazy operations
    +
    +class _ImageCrop(Image):
    +
    +    def __init__(self, im, box):
    +
    +        Image.__init__(self)
    +
+        # Round each box coordinate to the nearest integer, i.e. int(round(x))
    +        x0, y0, x1, y1 = map(int, map(round, box))
    +
    +        if x1 < x0:
    +            x1 = x0
    +        if y1 < y0:
    +            y1 = y0
    +
    +        self.mode = im.mode
    +        self.size = x1-x0, y1-y0
    +
    +        self.__crop = x0, y0, x1, y1
    +
    +        self.im = im.im
    +
    +    def load(self):
    +
    +        # lazy evaluation!
    +        if self.__crop:
    +            self.im = self.im.crop(self.__crop)
    +            self.__crop = None
    +
    +        if self.im:
    +            return self.im.pixel_access(self.readonly)
    +
    +        # FIXME: future versions should optimize crop/paste
    +        # sequences!
    +
    +
    +# --------------------------------------------------------------------
    +# Abstract handlers.
    +
    +class ImagePointHandler(object):
    +    # used as a mixin by point transforms (for use with im.point)
    +    pass
    +
    +
    +class ImageTransformHandler(object):
    +    # used as a mixin by geometry transforms (for use with im.transform)
    +    pass
    +
    +
    +# --------------------------------------------------------------------
    +# Factories
    +
    +#
    +# Debugging
    +
    +def _wedge():
    +    "Create greyscale wedge (for debugging only)"
    +
    +    return Image()._new(core.wedge("L"))
    +
    +
    +def new(mode, size, color=0):
    +    """
    +    Creates a new image with the given mode and size.
    +
    +    :param mode: The mode to use for the new image. See:
    +       :ref:`concept-modes`.
    +    :param size: A 2-tuple, containing (width, height) in pixels.
    +    :param color: What color to use for the image.  Default is black.
    +       If given, this should be a single integer or floating point value
    +       for single-band modes, and a tuple for multi-band modes (one value
    +       per band).  When creating RGB images, you can also use color
    +       strings as supported by the ImageColor module.  If the color is
    +       None, the image is not initialised.
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +    """
    +
    +    if color is None:
    +        # don't initialize
    +        return Image()._new(core.new(mode, size))
    +
    +    if isStringType(color):
    +        # css3-style specifier
    +
    +        from PIL import ImageColor
    +        color = ImageColor.getcolor(color, mode)
    +
    +    return Image()._new(core.fill(mode, size, color))
    +
    +
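+# Usage sketch for new() (hypothetical examples):
+#
+#     from PIL import Image
+#     black = Image.new("RGB", (64, 64))        # default fill is black
+#     red = Image.new("RGB", (64, 64), "red")   # CSS3-style colour string
+#     empty = Image.new("F", (64, 64), None)    # memory is not initialised
+
+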
    +def frombytes(mode, size, data, decoder_name="raw", *args):
    +    """
    +    Creates a copy of an image memory from pixel data in a buffer.
    +
    +    In its simplest form, this function takes three arguments
    +    (mode, size, and unpacked pixel data).
    +
    +    You can also use any pixel decoder supported by PIL.  For more
    +    information on available decoders, see the section
+    :ref:`Writing Your Own File Decoder <file-decoders>`.
    +
    +    Note that this function decodes pixel data only, not entire images.
    +    If you have an entire image in a string, wrap it in a
    +    :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load
    +    it.
    +
    +    :param mode: The image mode. See: :ref:`concept-modes`.
    +    :param size: The image size.
    +    :param data: A byte buffer containing raw data for the given mode.
    +    :param decoder_name: What decoder to use.
    +    :param args: Additional parameters for the given decoder.
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +    """
    +
    +    # may pass tuple instead of argument list
    +    if len(args) == 1 and isinstance(args[0], tuple):
    +        args = args[0]
    +
    +    if decoder_name == "raw" and args == ():
    +        args = mode
    +
    +    im = new(mode, size)
    +    im.frombytes(data, decoder_name, args)
    +    return im
    +
    +
    +def fromstring(*args, **kw):
    +    raise NotImplementedError("fromstring() has been removed. " +
    +                    "Please call frombytes() instead.")
    +
    +
    +def frombuffer(mode, size, data, decoder_name="raw", *args):
    +    """
    +    Creates an image memory referencing pixel data in a byte buffer.
    +
    +    This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data
    +    in the byte buffer, where possible.  This means that changes to the
+    original buffer object are reflected in this image.  Not all modes can
    +    share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK".
    +
    +    Note that this function decodes pixel data only, not entire images.
    +    If you have an entire image file in a string, wrap it in a
    +    **BytesIO** object, and use :py:func:`~PIL.Image.open` to load it.
    +
    +    In the current version, the default parameters used for the "raw" decoder
+    differ from those used for :py:func:`~PIL.Image.frombytes`.  This is a
    +    bug, and will probably be fixed in a future release.  The current release
    +    issues a warning if you do this; to disable the warning, you should provide
    +    the full set of parameters.  See below for details.
    +
    +    :param mode: The image mode. See: :ref:`concept-modes`.
    +    :param size: The image size.
    +    :param data: A bytes or other buffer object containing raw
    +        data for the given mode.
    +    :param decoder_name: What decoder to use.
    +    :param args: Additional parameters for the given decoder.  For the
    +        default encoder ("raw"), it's recommended that you provide the
    +        full set of parameters::
    +
    +            frombuffer(mode, size, data, "raw", mode, 0, 1)
    +
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +
    +    .. versionadded:: 1.1.4
    +    """
    +
    +    # may pass tuple instead of argument list
    +    if len(args) == 1 and isinstance(args[0], tuple):
    +        args = args[0]
    +
    +    if decoder_name == "raw":
    +        if args == ():
    +            warnings.warn(
    +                "the frombuffer defaults may change in a future release; "
    +                "for portability, change the call to read:\n"
    +                "  frombuffer(mode, size, data, 'raw', mode, 0, 1)",
    +                RuntimeWarning, stacklevel=2
    +            )
    +            args = mode, 0, -1  # may change to (mode, 0, 1) post-1.1.6
    +        if args[0] in _MAPMODES:
    +            im = new(mode, (1, 1))
    +            im = im._new(
    +                core.map_buffer(data, size, decoder_name, None, 0, args)
    +                )
    +            im.readonly = 1
    +            return im
    +
    +    return frombytes(mode, size, data, decoder_name, args)
    +
    +
    +def fromarray(obj, mode=None):
    +    """
    +    Creates an image memory from an object exporting the array interface
    +    (using the buffer protocol).
    +
    +    If obj is not contiguous, then the tobytes method is called
    +    and :py:func:`~PIL.Image.frombuffer` is used.
    +
    +    :param obj: Object with array interface
    +    :param mode: Mode to use (will be determined from type if None)
    +      See: :ref:`concept-modes`.
    +    :returns: An image object.
    +
    +    .. versionadded:: 1.1.6
    +    """
    +    arr = obj.__array_interface__
    +    shape = arr['shape']
    +    ndim = len(shape)
    +    try:
    +        strides = arr['strides']
    +    except KeyError:
    +        strides = None
    +    if mode is None:
    +        try:
    +            typekey = (1, 1) + shape[2:], arr['typestr']
    +            mode, rawmode = _fromarray_typemap[typekey]
    +        except KeyError:
    +            # print typekey
    +            raise TypeError("Cannot handle this data type")
    +    else:
    +        rawmode = mode
    +    if mode in ["1", "L", "I", "P", "F"]:
    +        ndmax = 2
    +    elif mode == "RGB":
    +        ndmax = 3
    +    else:
    +        ndmax = 4
    +    if ndim > ndmax:
    +        raise ValueError("Too many dimensions: %d > %d." % (ndim, ndmax))
    +
    +    size = shape[1], shape[0]
    +    if strides is not None:
    +        if hasattr(obj, 'tobytes'):
    +            obj = obj.tobytes()
    +        else:
    +            obj = obj.tostring()
    +
    +    return frombuffer(mode, size, obj, "raw", rawmode, 0, 1)
    +
    +
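+# Usage sketch for fromarray() (hypothetical example; assumes numpy is
+# available). A (height, width, 3) uint8 array maps to mode "RGB":
+#
+#     import numpy as np
+#     from PIL import Image
+#     arr = np.zeros((100, 200, 3), dtype=np.uint8)  # 200x100, all black
+#     im = Image.fromarray(arr)                      # im.size == (200, 100)
+
+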
    +def fromqimage(im):
    +    """Creates an image instance from a QImage image"""
    +    from PIL import ImageQt
    +    if not ImageQt.qt_is_installed:
    +        raise ImportError("Qt bindings are not installed")
    +    return ImageQt.fromqimage(im)
    +
    +
    +def fromqpixmap(im):
    +    """Creates an image instance from a QPixmap image"""
    +    from PIL import ImageQt
    +    if not ImageQt.qt_is_installed:
    +        raise ImportError("Qt bindings are not installed")
    +    return ImageQt.fromqpixmap(im)
    +
    +_fromarray_typemap = {
    +    # (shape, typestr) => mode, rawmode
    +    # first two members of shape are set to one
    +    # ((1, 1), "|b1"): ("1", "1"), # broken
    +    ((1, 1), "|u1"): ("L", "L"),
    +    ((1, 1), "|i1"): ("I", "I;8"),
    +    ((1, 1), "u2"): ("I", "I;16B"),
    +    ((1, 1), "i2"): ("I", "I;16BS"),
    +    ((1, 1), "u4"): ("I", "I;32B"),
    +    ((1, 1), "i4"): ("I", "I;32BS"),
    +    ((1, 1), "f4"): ("F", "F;32BF"),
    +    ((1, 1), "f8"): ("F", "F;64BF"),
    +    ((1, 1, 2), "|u1"): ("LA", "LA"),
    +    ((1, 1, 3), "|u1"): ("RGB", "RGB"),
    +    ((1, 1, 4), "|u1"): ("RGBA", "RGBA"),
    +    }
    +
    +# shortcuts
    +_fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I")
    +_fromarray_typemap[((1, 1), _ENDIAN + "f4")] = ("F", "F")
    +
    +
    +def _decompression_bomb_check(size):
    +    if MAX_IMAGE_PIXELS is None:
    +        return
    +
    +    pixels = size[0] * size[1]
    +
    +    if pixels > MAX_IMAGE_PIXELS:
    +        warnings.warn(
    +            "Image size (%d pixels) exceeds limit of %d pixels, "
    +            "could be decompression bomb DOS attack." %
    +            (pixels, MAX_IMAGE_PIXELS),
    +            DecompressionBombWarning)
    +
    +
    +def open(fp, mode="r"):
    +    """
    +    Opens and identifies the given image file.
    +
    +    This is a lazy operation; this function identifies the file, but
    +    the file remains open and the actual image data is not read from
    +    the file until you try to process the data (or call the
    +    :py:meth:`~PIL.Image.Image.load` method).  See
    +    :py:func:`~PIL.Image.new`.
    +
    +    :param fp: A filename (string), pathlib.Path object or a file object.
    +       The file object must implement :py:meth:`~file.read`,
    +       :py:meth:`~file.seek`, and :py:meth:`~file.tell` methods,
    +       and be opened in binary mode.
    +    :param mode: The mode.  If given, this argument must be "r".
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +    :exception IOError: If the file cannot be found, or the image cannot be
    +       opened and identified.
    +    """
    +
    +    if mode != "r":
    +        raise ValueError("bad mode %r" % mode)
    +
    +    filename = ""
    +    if isPath(fp):
    +        filename = fp
    +    elif sys.version_info >= (3, 4):
    +        from pathlib import Path
    +        if isinstance(fp, Path):
    +            filename = str(fp.resolve())
    +    if filename:
    +        fp = builtins.open(filename, "rb")
    +
    +    try:
    +        fp.seek(0)
    +    except (AttributeError, io.UnsupportedOperation):
    +        fp = io.BytesIO(fp.read())
    +
    +    prefix = fp.read(16)
    +
    +    preinit()
    +
    +    def _open_core(fp, filename, prefix):
    +        for i in ID:
    +            try:
    +                factory, accept = OPEN[i]
    +                if not accept or accept(prefix):
    +                    fp.seek(0)
    +                    im = factory(fp, filename)
    +                    _decompression_bomb_check(im.size)
    +                    return im
    +            except (SyntaxError, IndexError, TypeError, struct.error):
    +                # Leave disabled by default, spams the logs with image
    +                # opening failures that are entirely expected.
    +                # logger.debug("", exc_info=True)
    +                continue
    +        return None
    +
    +    im = _open_core(fp, filename, prefix)
    +
    +    if im is None:
    +        if init():
    +            im = _open_core(fp, filename, prefix)
    +
    +    if im:
    +        return im
    +
    +    raise IOError("cannot identify image file %r"
    +                  % (filename if filename else fp))
    +
    +#
    +# Image processing.
    +
    +
    +def alpha_composite(im1, im2):
    +    """
    +    Alpha composite im2 over im1.
    +
    +    :param im1: The first image. Must have mode RGBA.
    +    :param im2: The second image.  Must have mode RGBA, and the same size as
    +       the first image.
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +    """
    +
    +    im1.load()
    +    im2.load()
    +    return im1._new(core.alpha_composite(im1.im, im2.im))
    +
    +
    +def blend(im1, im2, alpha):
    +    """
    +    Creates a new image by interpolating between two input images, using
    +    a constant alpha.::
    +
    +        out = image1 * (1.0 - alpha) + image2 * alpha
    +
    +    :param im1: The first image.
    +    :param im2: The second image.  Must have the same mode and size as
    +       the first image.
    +    :param alpha: The interpolation alpha factor.  If alpha is 0.0, a
    +       copy of the first image is returned. If alpha is 1.0, a copy of
    +       the second image is returned. There are no restrictions on the
    +       alpha value. If necessary, the result is clipped to fit into
    +       the allowed output range.
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +    """
    +
    +    im1.load()
    +    im2.load()
    +    return im1._new(core.blend(im1.im, im2.im, alpha))
    +
    +
    +def composite(image1, image2, mask):
    +    """
    +    Create composite image by blending images using a transparency mask.
    +
    +    :param image1: The first image.
    +    :param image2: The second image.  Must have the same mode and
    +       size as the first image.
    +    :param mask: A mask image.  This image can have mode
    +       "1", "L", or "RGBA", and must have the same size as the
    +       other two images.
    +    """
    +
    +    image = image2.copy()
    +    image.paste(image1, None, mask)
    +    return image
    +
    +
    +def eval(image, *args):
    +    """
    +    Applies the function (which should take one argument) to each pixel
    +    in the given image. If the image has more than one band, the same
    +    function is applied to each band. Note that the function is
    +    evaluated once for each possible pixel value, so you cannot use
    +    random components or other generators.
    +
    +    :param image: The input image.
    +    :param function: A function object, taking one integer argument.
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +    """
    +
    +    return image.point(args[0])
    +
    +
    +def merge(mode, bands):
    +    """
    +    Merge a set of single band images into a new multiband image.
    +
    +    :param mode: The mode to use for the output image. See:
    +        :ref:`concept-modes`.
    +    :param bands: A sequence containing one single-band image for
    +        each band in the output image.  All bands must have the
    +        same size.
    +    :returns: An :py:class:`~PIL.Image.Image` object.
    +    """
    +
    +    if getmodebands(mode) != len(bands) or "*" in mode:
    +        raise ValueError("wrong number of bands")
    +    for im in bands[1:]:
    +        if im.mode != getmodetype(mode):
    +            raise ValueError("mode mismatch")
    +        if im.size != bands[0].size:
    +            raise ValueError("size mismatch")
    +    im = core.new(mode, bands[0].size)
    +    for i in range(getmodebands(mode)):
    +        bands[i].load()
    +        im.putband(bands[i].im, i)
    +    return bands[0]._new(im)
    +
    +
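+# Usage sketch pairing split() and merge() (hypothetical example; im is an
+# assumed "RGB" Image). Swapping the red and blue bands is a simple round
+# trip:
+#
+#     from PIL import Image
+#     r, g, b = im.split()
+#     swapped = Image.merge("RGB", (b, g, r))
+
+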
    +# --------------------------------------------------------------------
    +# Plugin registry
    +
    +def register_open(id, factory, accept=None):
    +    """
    +    Register an image file plugin.  This function should not be used
    +    in application code.
    +
    +    :param id: An image format identifier.
    +    :param factory: An image file factory method.
    +    :param accept: An optional function that can be used to quickly
    +       reject images having another format.
    +    """
    +    id = id.upper()
    +    ID.append(id)
    +    OPEN[id] = factory, accept
    +
    +
    +def register_mime(id, mimetype):
    +    """
    +    Registers an image MIME type.  This function should not be used
    +    in application code.
    +
    +    :param id: An image format identifier.
    +    :param mimetype: The image MIME type for this format.
    +    """
    +    MIME[id.upper()] = mimetype
    +
    +
    +def register_save(id, driver):
    +    """
    +    Registers an image save function.  This function should not be
    +    used in application code.
    +
    +    :param id: An image format identifier.
    +    :param driver: A function to save images in this format.
    +    """
    +    SAVE[id.upper()] = driver
    +
    +
    +def register_save_all(id, driver):
    +    """
    +    Registers an image function to save all the frames
    +    of a multiframe format.  This function should not be
    +    used in application code.
    +
    +    :param id: An image format identifier.
    +    :param driver: A function to save images in this format.
    +    """
    +    SAVE_ALL[id.upper()] = driver
    +
    +
    +def register_extension(id, extension):
    +    """
    +    Registers an image extension.  This function should not be
    +    used in application code.
    +
    +    :param id: An image format identifier.
    +    :param extension: An extension used for this format.
    +    """
    +    EXTENSION[extension.lower()] = id.upper()
    +
    +
    +# --------------------------------------------------------------------
    +# Simple display support.  User code may override this.
    +
    +def _show(image, **options):
    +    # override me, as necessary
    +    _showxv(image, **options)
    +
    +
    +def _showxv(image, title=None, **options):
    +    from PIL import ImageShow
    +    ImageShow.show(image, title, **options)
    +
    +
    +# --------------------------------------------------------------------
    +# Effects
    +
    +def effect_mandelbrot(size, extent, quality):
    +    """
    +    Generate a Mandelbrot set covering the given extent.
    +
    +    :param size: The requested size in pixels, as a 2-tuple:
    +       (width, height).
    +    :param extent: The extent to cover, as a 4-tuple:
+       (x0, y0, x1, y1).
    +    :param quality: Quality.
    +    """
    +    return Image()._new(core.effect_mandelbrot(size, extent, quality))
    +
    +
    +def effect_noise(size, sigma):
    +    """
    +    Generate Gaussian noise centered around 128.
    +
    +    :param size: The requested size in pixels, as a 2-tuple:
    +       (width, height).
    +    :param sigma: Standard deviation of noise.
    +    """
    +    return Image()._new(core.effect_noise(size, sigma))
    +
    +# End of file
    diff --git a/server/www/packages/packages-linux/x64/PIL/ImageChops.py b/server/www/packages/packages-linux/x64/PIL/ImageChops.py
    new file mode 100644
    index 0000000..ba5350e
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/ImageChops.py
    @@ -0,0 +1,283 @@
    +#
    +# The Python Imaging Library.
    +# $Id$
    +#
    +# standard channel operations
    +#
    +# History:
    +# 1996-03-24 fl   Created
    +# 1996-08-13 fl   Added logical operations (for "1" images)
    +# 2000-10-12 fl   Added offset method (from Image.py)
    +#
    +# Copyright (c) 1997-2000 by Secret Labs AB
    +# Copyright (c) 1996-2000 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image
    +
    +
    +def constant(image, value):
    +    """Fill a channel with a given grey level.
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    return Image.new("L", image.size, value)
    +
    +
    +def duplicate(image):
    +    """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`.
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    return image.copy()
    +
    +
    +def invert(image):
    +    """
    +    Invert an image (channel).
    +
    +    .. code-block:: python
    +
    +        out = MAX - image
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image.load()
    +    return image._new(image.im.chop_invert())
    +
    +
    +def lighter(image1, image2):
    +    """
    +    Compares the two images, pixel by pixel, and returns a new image containing
    +    the lighter values.
    +
    +    .. code-block:: python
    +
    +        out = max(image1, image2)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_lighter(image2.im))
    +
    +
    +def darker(image1, image2):
    +    """
    +    Compares the two images, pixel by pixel, and returns a new image
    +    containing the darker values.
    +
    +    .. code-block:: python
    +
    +        out = min(image1, image2)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_darker(image2.im))
    +
    +
    +def difference(image1, image2):
    +    """
    +    Returns the absolute value of the pixel-by-pixel difference between the two
    +    images.
    +
    +    .. code-block:: python
    +
    +        out = abs(image1 - image2)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_difference(image2.im))
    +
    +
    +def multiply(image1, image2):
    +    """
    +    Superimposes two images on top of each other.
    +
    +    If you multiply an image with a solid black image, the result is black. If
    +    you multiply with a solid white image, the image is unaffected.
    +
    +    .. code-block:: python
    +
    +        out = image1 * image2 / MAX
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_multiply(image2.im))
    +
    +
    +def screen(image1, image2):
    +    """
    +    Superimposes two inverted images on top of each other.
    +
    +    .. code-block:: python
    +
    +        out = MAX - ((MAX - image1) * (MAX - image2) / MAX)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_screen(image2.im))
    +
    +
    +def add(image1, image2, scale=1.0, offset=0):
    +    """
    +    Adds two images, dividing the result by scale and adding the
    +    offset. If omitted, scale defaults to 1.0, and offset to 0.0.
    +
    +    .. code-block:: python
    +
    +        out = ((image1 + image2) / scale + offset)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_add(image2.im, scale, offset))
    +
    +
    +def subtract(image1, image2, scale=1.0, offset=0):
    +    """
    +    Subtracts two images, dividing the result by scale and adding the
    +    offset. If omitted, scale defaults to 1.0, and offset to 0.0.
    +
    +    .. code-block:: python
    +
    +        out = ((image1 - image2) / scale + offset)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_subtract(image2.im, scale, offset))
    +
    +
    +def add_modulo(image1, image2):
    +    """Add two images, without clipping the result.
    +
    +    .. code-block:: python
    +
    +        out = ((image1 + image2) % MAX)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_add_modulo(image2.im))
    +
    +
    +def subtract_modulo(image1, image2):
    +    """Subtract two images, without clipping the result.
    +
    +    .. code-block:: python
    +
    +        out = ((image1 - image2) % MAX)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_subtract_modulo(image2.im))
    +
    +
    +def logical_and(image1, image2):
    +    """Logical AND between two images.
    +
    +    .. code-block:: python
    +
    +        out = ((image1 and image2) % MAX)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_and(image2.im))
    +
    +
    +def logical_or(image1, image2):
    +    """Logical OR between two images.
    +
    +    .. code-block:: python
    +
    +        out = ((image1 or image2) % MAX)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_or(image2.im))
    +
    +
    +def logical_xor(image1, image2):
    +    """Logical XOR between two images.
    +
    +    .. code-block:: python
    +
    +        out = ((bool(image1) != bool(image2)) % MAX)
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    image1.load()
    +    image2.load()
    +    return image1._new(image1.im.chop_xor(image2.im))
    +
    +
    +def blend(image1, image2, alpha):
    +    """Blend images using constant transparency weight. Alias for
    +    :py:meth:`PIL.Image.Image.blend`.
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    return Image.blend(image1, image2, alpha)
    +
    +
    +def composite(image1, image2, mask):
    +    """Create composite using transparency mask. Alias for
    +    :py:meth:`PIL.Image.Image.composite`.
    +
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    return Image.composite(image1, image2, mask)
    +
    +
    +def offset(image, xoffset, yoffset=None):
    +    """Returns a copy of the image where data has been offset by the given
    +    distances. Data wraps around the edges. If **yoffset** is omitted, it
    +    is assumed to be equal to **xoffset**.
    +
    +    :param xoffset: The horizontal distance.
    +    :param yoffset: The vertical distance.  If omitted, both
    +        distances are set to the same value.
    +    :rtype: :py:class:`~PIL.Image.Image`
    +    """
    +
    +    if yoffset is None:
    +        yoffset = xoffset
    +    image.load()
    +    return image._new(image.im.offset(xoffset, yoffset))
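A common use of this module is image comparison: `difference` plus `getbbox` tells you whether, and where, two images differ. A small self-contained sketch (the inputs are created inline):

```python
from PIL import Image, ImageChops

im1 = Image.new("RGB", (64, 64), "white")
im2 = im1.copy()
im2.putpixel((10, 10), (255, 0, 0))

diff = ImageChops.difference(im1, im2)
# getbbox() returns None when the difference image is all black,
# i.e. when the two inputs are pixel-identical.
print(diff.getbbox())  # (10, 10, 11, 11)

# offset() wraps pixel data around the edges; yoffset defaults to xoffset.
shifted = ImageChops.offset(im1, 16)
```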
    diff --git a/server/www/packages/packages-linux/x64/PIL/ImageCms.py b/server/www/packages/packages-linux/x64/PIL/ImageCms.py
    new file mode 100644
    index 0000000..6d5801a
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/ImageCms.py
    @@ -0,0 +1,973 @@
    +# The Python Imaging Library.
    +# $Id$
    +
    +# Optional color management support, based on Kevin Cazabon's PyCMS
    +# library.
    +
    +# History:
    +
    +# 2009-03-08 fl   Added to PIL.
    +
    +# Copyright (C) 2002-2003 Kevin Cazabon
    +# Copyright (c) 2009 by Fredrik Lundh
    +# Copyright (c) 2013 by Eric Soroos
    +
    +# See the README file for information on usage and redistribution.  See
    +# below for the original description.
    +
    +from __future__ import print_function
    +import sys
    +
    +from PIL import Image
    +try:
    +    from PIL import _imagingcms
    +except ImportError as ex:
    +    # Allow error import for doc purposes, but error out when accessing
    +    # anything in core.
+    from PIL._util import deferred_error
    +    _imagingcms = deferred_error(ex)
    +from PIL._util import isStringType
    +
    +DESCRIPTION = """
    +pyCMS
    +
    +    a Python / PIL interface to the littleCMS ICC Color Management System
    +    Copyright (C) 2002-2003 Kevin Cazabon
    +    kevin@cazabon.com
    +    http://www.cazabon.com
    +
    +    pyCMS home page:  http://www.cazabon.com/pyCMS
    +    littleCMS home page:  http://www.littlecms.com
    +    (littleCMS is Copyright (C) 1998-2001 Marti Maria)
    +
    +    Originally released under LGPL.  Graciously donated to PIL in
    +    March 2009, for distribution under the standard PIL license
    +
    +    The pyCMS.py module provides a "clean" interface between Python/PIL and
    +    pyCMSdll, taking care of some of the more complex handling of the direct
    +    pyCMSdll functions, as well as error-checking and making sure that all
    +    relevant data is kept together.
    +
    +    While it is possible to call pyCMSdll functions directly, it's not highly
    +    recommended.
    +
    +    Version History:
    +
    +        1.0.0 pil       Oct 2013 Port to LCMS 2.
    +
    +        0.1.0 pil mod   March 10, 2009
    +
    +                        Renamed display profile to proof profile. The proof
    +                        profile is the profile of the device that is being
    +                        simulated, not the profile of the device which is
    +                        actually used to display/print the final simulation
    +                        (that'd be the output profile) - also see LCMSAPI.txt
    +                        input colorspace -> using 'renderingIntent' -> proof
    +                        colorspace -> using 'proofRenderingIntent' -> output
    +                        colorspace
    +
    +                        Added LCMS FLAGS support.
    +                        Added FLAGS["SOFTPROOFING"] as default flag for
    +                        buildProofTransform (otherwise the proof profile/intent
    +                        would be ignored).
    +
    +        0.1.0 pil       March 2009 - added to PIL, as PIL.ImageCms
    +
    +        0.0.2 alpha     Jan 6, 2002
    +
    +                        Added try/except statements around type() checks of
    +                        potential CObjects... Python won't let you use type()
    +                        on them, and raises a TypeError (stupid, if you ask
    +                        me!)
    +
    +                        Added buildProofTransformFromOpenProfiles() function.
    +                        Additional fixes in DLL, see DLL code for details.
    +
    +        0.0.1 alpha     first public release, Dec. 26, 2002
    +
    +    Known to-do list with current version (of Python interface, not pyCMSdll):
    +
    +        none
    +
    +"""
    +
    +VERSION = "1.0.0 pil"
    +
    +# --------------------------------------------------------------------.
    +
    +core = _imagingcms
    +
    +#
    +# intent/direction values
    +
    +INTENT_PERCEPTUAL = 0
    +INTENT_RELATIVE_COLORIMETRIC = 1
    +INTENT_SATURATION = 2
    +INTENT_ABSOLUTE_COLORIMETRIC = 3
    +
    +DIRECTION_INPUT = 0
    +DIRECTION_OUTPUT = 1
    +DIRECTION_PROOF = 2
    +
    +#
    +# flags
    +
    +FLAGS = {
    +    "MATRIXINPUT": 1,
    +    "MATRIXOUTPUT": 2,
    +    "MATRIXONLY": (1 | 2),
    +    "NOWHITEONWHITEFIXUP": 4,  # Don't hot fix scum dot
    +    # Don't create prelinearization tables on precalculated transforms
    +    # (internal use):
    +    "NOPRELINEARIZATION": 16,
    +    "GUESSDEVICECLASS": 32,  # Guess device class (for transform2devicelink)
    +    "NOTCACHE": 64,  # Inhibit 1-pixel cache
    +    "NOTPRECALC": 256,
    +    "NULLTRANSFORM": 512,  # Don't transform anyway
    +    "HIGHRESPRECALC": 1024,  # Use more memory to give better accuracy
    +    "LOWRESPRECALC": 2048,  # Use less memory to minimize resources
    +    "WHITEBLACKCOMPENSATION": 8192,
    +    "BLACKPOINTCOMPENSATION": 8192,
    +    "GAMUTCHECK": 4096,  # Out of Gamut alarm
    +    "SOFTPROOFING": 16384,  # Do softproofing
    +    "PRESERVEBLACK": 32768,  # Black preservation
    +    "NODEFAULTRESOURCEDEF": 16777216,  # CRD special
    +    "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16  # Gridpoints
    +}
    +
    +_MAX_FLAG = 0
    +for flag in FLAGS.values():
    +    if isinstance(flag, int):
    +        _MAX_FLAG = _MAX_FLAG | flag
    +
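Since the flag values are plain integers, they can be OR-ed together before being passed to the build functions below; for example:

```python
from PIL import ImageCms

# Combine two flags for use with buildTransform()/buildProofTransform().
flags = (ImageCms.FLAGS["SOFTPROOFING"] |
         ImageCms.FLAGS["BLACKPOINTCOMPENSATION"])
```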
    +
    +# --------------------------------------------------------------------.
    +# Experimental PIL-level API
    +# --------------------------------------------------------------------.
    +
    +##
    +# Profile.
    +
    +class ImageCmsProfile(object):
    +
    +    def __init__(self, profile):
    +        """
    +        :param profile: Either a string representing a filename,
    +            a file like object containing a profile or a
    +            low-level profile object
    +
    +        """
    +
    +        if isStringType(profile):
    +            self._set(core.profile_open(profile), profile)
    +        elif hasattr(profile, "read"):
    +            self._set(core.profile_frombytes(profile.read()))
    +        else:
    +            self._set(profile)  # assume it's already a profile
    +
    +    def _set(self, profile, filename=None):
    +        self.profile = profile
    +        self.filename = filename
    +        if profile:
    +            self.product_name = None  # profile.product_name
    +            self.product_info = None  # profile.product_info
    +        else:
    +            self.product_name = None
    +            self.product_info = None
    +
    +    def tobytes(self):
    +        """
    +        Returns the profile in a format suitable for embedding in
    +        saved images.
    +
    +        :returns: a bytes object containing the ICC profile.
    +        """
    +
    +        return core.profile_tobytes(self.profile)
    +
    +
    +class ImageCmsTransform(Image.ImagePointHandler):
    +
    +    """
    +    Transform.  This can be used with the procedural API, or with the standard
    +    Image.point() method.
    +
    +    Will return the output profile in the output.info['icc_profile'].
    +    """
    +
    +    def __init__(self, input, output, input_mode, output_mode,
    +                 intent=INTENT_PERCEPTUAL, proof=None,
    +                 proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0):
    +        if proof is None:
    +            self.transform = core.buildTransform(
    +                input.profile, output.profile,
    +                input_mode, output_mode,
    +                intent,
    +                flags
    +            )
    +        else:
    +            self.transform = core.buildProofTransform(
    +                input.profile, output.profile, proof.profile,
    +                input_mode, output_mode,
    +                intent, proof_intent,
    +                flags
    +            )
    +        # Note: inputMode and outputMode are for pyCMS compatibility only
    +        self.input_mode = self.inputMode = input_mode
    +        self.output_mode = self.outputMode = output_mode
    +
    +        self.output_profile = output
    +
    +    def point(self, im):
    +        return self.apply(im)
    +
    +    def apply(self, im, imOut=None):
    +        im.load()
    +        if imOut is None:
    +            imOut = Image.new(self.output_mode, im.size, None)
    +        self.transform.apply(im.im.id, imOut.im.id)
    +        imOut.info['icc_profile'] = self.output_profile.tobytes()
    +        return imOut
    +
    +    def apply_in_place(self, im):
    +        im.load()
    +        if im.mode != self.output_mode:
    +            raise ValueError("mode mismatch")  # wrong output mode
    +        self.transform.apply(im.im.id, im.im.id)
    +        im.info['icc_profile'] = self.output_profile.tobytes()
    +        return im
    +
    +
    +def get_display_profile(handle=None):
    +    """ (experimental) Fetches the profile for the current display device.
    +    :returns: None if the profile is not known.
    +    """
    +
    +    if sys.platform == "win32":
    +        from PIL import ImageWin
    +        if isinstance(handle, ImageWin.HDC):
    +            profile = core.get_display_profile_win32(handle, 1)
    +        else:
    +            profile = core.get_display_profile_win32(handle or 0)
    +    else:
    +        try:
    +            get = _imagingcms.get_display_profile
    +        except AttributeError:
    +            return None
    +        else:
    +            profile = get()
    +    return ImageCmsProfile(profile)
    +
    +
    +# --------------------------------------------------------------------.
    +# pyCMS compatible layer
    +# --------------------------------------------------------------------.
    +
    +class PyCMSError(Exception):
    +
    +    """ (pyCMS) Exception class.
    +    This is used for all errors in the pyCMS API. """
    +    pass
    +
    +
    +def profileToProfile(
    +        im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL,
    +        outputMode=None, inPlace=0, flags=0):
    +    """
    +    (pyCMS) Applies an ICC transformation to a given image, mapping from
    +    inputProfile to outputProfile.
    +
    +    If the input or output profiles specified are not valid filenames, a
    +    PyCMSError will be raised.  If inPlace == TRUE and outputMode != im.mode,
    +    a PyCMSError will be raised.  If an error occurs during application of
    +    the profiles, a PyCMSError will be raised.  If outputMode is not a mode
    +    supported by the outputProfile (or by pyCMS), a PyCMSError will be
    +    raised.
    +
    +    This function applies an ICC transformation to im from inputProfile's
    +    color space to outputProfile's color space using the specified rendering
    +    intent to decide how to handle out-of-gamut colors.
    +
    +    OutputMode can be used to specify that a color mode conversion is to
    +    be done using these profiles, but the specified profiles must be able
    +    to handle that mode.  I.e., if converting im from RGB to CMYK using
    +    profiles, the input profile must handle RGB data, and the output
    +    profile must handle CMYK data.
    +
    +    :param im: An open PIL image object (i.e. Image.new(...) or
    +        Image.open(...), etc.)
    +    :param inputProfile: String, as a valid filename path to the ICC input
    +        profile you wish to use for this image, or a profile object
    +    :param outputProfile: String, as a valid filename path to the ICC output
    +        profile you wish to use for this image, or a profile object
    +    :param renderingIntent: Integer (0-3) specifying the rendering intent you
    +        wish to use for the transform
    +
    +            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    +            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    +            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    +            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +
    +        see the pyCMS documentation for details on rendering intents and what
    +        they do.
    +    :param outputMode: A valid PIL mode for the output image (i.e. "RGB",
    +        "CMYK", etc.).  Note: if rendering the image "inPlace", outputMode
    +        MUST be the same mode as the input, or omitted completely.  If
    +        omitted, the outputMode will be the same as the mode of the input
    +        image (im.mode)
    +    :param inPlace: Boolean (1 = True, None or 0 = False).  If True, the
    +        original image is modified in-place, and None is returned.  If False
    +        (default), a new Image object is returned with the transform applied.
    +    :param flags: Integer (0-...) specifying additional flags
    +    :returns: Either None or a new PIL image object, depending on value of
    +        inPlace
    +    :exception PyCMSError:
    +    """
    +
    +    if outputMode is None:
    +        outputMode = im.mode
    +
    +    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
    +        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
    +
    +    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+        raise PyCMSError(
+            "flags must be an integer between 0 and %s" % _MAX_FLAG)
    +
    +    try:
    +        if not isinstance(inputProfile, ImageCmsProfile):
    +            inputProfile = ImageCmsProfile(inputProfile)
    +        if not isinstance(outputProfile, ImageCmsProfile):
    +            outputProfile = ImageCmsProfile(outputProfile)
    +        transform = ImageCmsTransform(
    +            inputProfile, outputProfile, im.mode, outputMode,
    +            renderingIntent, flags=flags
    +        )
    +        if inPlace:
    +            transform.apply_in_place(im)
    +            imOut = None
    +        else:
    +            imOut = transform.apply(im)
    +    except (IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +    return imOut
    +
    +
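A one-shot conversion sketch for the function above; the input image and the two ICC profile paths are placeholders, not files shipped with this package:

```python
from PIL import Image, ImageCms

im = Image.open("photo.jpg")  # placeholder input image
# Placeholder profile paths; any valid RGB input / CMYK output
# profiles would do.
cmyk = ImageCms.profileToProfile(
    im, "sRGB.icc", "printer-cmyk.icc",
    renderingIntent=ImageCms.INTENT_PERCEPTUAL,
    outputMode="CMYK")
```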
    +def getOpenProfile(profileFilename):
    +    """
    +    (pyCMS) Opens an ICC profile file.
    +
    +    The PyCMSProfile object can be passed back into pyCMS for use in creating
    +    transforms and such (as in ImageCms.buildTransformFromOpenProfiles()).
    +
+    If profileFilename is not a valid filename for an ICC profile, a PyCMSError
    +    will be raised.
    +
    +    :param profileFilename: String, as a valid filename path to the ICC profile
    +        you wish to open, or a file-like object.
    +    :returns: A CmsProfile class object.
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        return ImageCmsProfile(profileFilename)
    +    except (IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def buildTransform(
    +        inputProfile, outputProfile, inMode, outMode,
    +        renderingIntent=INTENT_PERCEPTUAL, flags=0):
    +    """
    +    (pyCMS) Builds an ICC transform mapping from the inputProfile to the
    +    outputProfile.  Use applyTransform to apply the transform to a given
    +    image.
    +
    +    If the input or output profiles specified are not valid filenames, a
    +    PyCMSError will be raised.  If an error occurs during creation of the
    +    transform, a PyCMSError will be raised.
    +
    +    If inMode or outMode are not a mode supported by the outputProfile (or
    +    by pyCMS), a PyCMSError will be raised.
    +
    +    This function builds and returns an ICC transform from the inputProfile
    +    to the outputProfile using the renderingIntent to determine what to do
    +    with out-of-gamut colors.  It will ONLY work for converting images that
    +    are in inMode to images that are in outMode color format (PIL mode,
    +    i.e. "RGB", "RGBA", "CMYK", etc.).
    +
    +    Building the transform is a fair part of the overhead in
    +    ImageCms.profileToProfile(), so if you're planning on converting multiple
    +    images using the same input/output settings, this can save you time.
    +    Once you have a transform object, it can be used with
+    ImageCms.applyTransform() to convert images without the need to re-compute
    +    the lookup table for the transform.
    +
    +    The reason pyCMS returns a class object rather than a handle directly
    +    to the transform is that it needs to keep track of the PIL input/output
    +    modes that the transform is meant for.  These attributes are stored in
    +    the "inMode" and "outMode" attributes of the object (which can be
    +    manually overridden if you really want to, but I don't know of any
    +    time that would be of use, or would even work).
    +
    +    :param inputProfile: String, as a valid filename path to the ICC input
    +        profile you wish to use for this transform, or a profile object
    +    :param outputProfile: String, as a valid filename path to the ICC output
    +        profile you wish to use for this transform, or a profile object
    +    :param inMode: String, as a valid PIL mode that the appropriate profile
    +        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    +    :param outMode: String, as a valid PIL mode that the appropriate profile
    +        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    +    :param renderingIntent: Integer (0-3) specifying the rendering intent you
    +        wish to use for the transform
    +
    +            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    +            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    +            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    +            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +
    +        see the pyCMS documentation for details on rendering intents and what
    +        they do.
    +    :param flags: Integer (0-...) specifying additional flags
    +    :returns: A CmsTransform class object.
    +    :exception PyCMSError:
    +    """
    +
    +    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
    +        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
    +
    +    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+        raise PyCMSError(
+            "flags must be an integer between 0 and %s" % _MAX_FLAG)
    +
    +    try:
    +        if not isinstance(inputProfile, ImageCmsProfile):
    +            inputProfile = ImageCmsProfile(inputProfile)
    +        if not isinstance(outputProfile, ImageCmsProfile):
    +            outputProfile = ImageCmsProfile(outputProfile)
    +        return ImageCmsTransform(
    +            inputProfile, outputProfile, inMode, outMode,
    +            renderingIntent, flags=flags)
    +    except (IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def buildProofTransform(
    +        inputProfile, outputProfile, proofProfile, inMode, outMode,
    +        renderingIntent=INTENT_PERCEPTUAL,
    +        proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC,
    +        flags=FLAGS["SOFTPROOFING"]):
    +    """
    +    (pyCMS) Builds an ICC transform mapping from the inputProfile to the
    +    outputProfile, but tries to simulate the result that would be
    +    obtained on the proofProfile device.
    +
    +    If the input, output, or proof profiles specified are not valid
    +    filenames, a PyCMSError will be raised.
    +
    +    If an error occurs during creation of the transform, a PyCMSError will
    +    be raised.
    +
    +    If inMode or outMode are not a mode supported by the outputProfile
    +    (or by pyCMS), a PyCMSError will be raised.
    +
    +    This function builds and returns an ICC transform from the inputProfile
    +    to the outputProfile, but tries to simulate the result that would be
    +    obtained on the proofProfile device using renderingIntent and
    +    proofRenderingIntent to determine what to do with out-of-gamut
    +    colors.  This is known as "soft-proofing".  It will ONLY work for
    +    converting images that are in inMode to images that are in outMode
    +    color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.).
    +
    +    Usage of the resulting transform object is exactly the same as with
    +    ImageCms.buildTransform().
    +
    +    Proof profiling is generally used when using an output device to get a
    +    good idea of what the final printed/displayed image would look like on
    +    the proofProfile device when it's quicker and easier to use the
    +    output device for judging color.  Generally, this means that the
    +    output device is a monitor, or a dye-sub printer (etc.), and the simulated
    +    device is something more expensive, complicated, or time consuming
    +    (making it difficult to make a real print for color judgement purposes).
    +
    +    Soft-proofing basically functions by adjusting the colors on the
    +    output device to match the colors of the device being simulated. However,
    +    when the simulated device has a much wider gamut than the output
    +    device, you may obtain marginal results.
    +
    +    :param inputProfile: String, as a valid filename path to the ICC input
    +        profile you wish to use for this transform, or a profile object
    +    :param outputProfile: String, as a valid filename path to the ICC output
    +        (monitor, usually) profile you wish to use for this transform, or a
    +        profile object
    +    :param proofProfile: String, as a valid filename path to the ICC proof
    +        profile you wish to use for this transform, or a profile object
    +    :param inMode: String, as a valid PIL mode that the appropriate profile
    +        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    +    :param outMode: String, as a valid PIL mode that the appropriate profile
    +        also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
    +    :param renderingIntent: Integer (0-3) specifying the rendering intent you
    +        wish to use for the input->proof (simulated) transform
    +
    +            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    +            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    +            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    +            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +
    +        see the pyCMS documentation for details on rendering intents and what
    +        they do.
    +    :param proofRenderingIntent: Integer (0-3) specifying the rendering intent you
    +        wish to use for proof->output transform
    +
    +            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    +            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    +            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    +            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +
    +        see the pyCMS documentation for details on rendering intents and what
    +        they do.
    +    :param flags: Integer (0-...) specifying additional flags
    +    :returns: A CmsTransform class object.
    +    :exception PyCMSError:
    +    """
    +
    +    if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
    +        raise PyCMSError("renderingIntent must be an integer between 0 and 3")
    +
    +    if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+        raise PyCMSError(
+            "flags must be an integer between 0 and %s" % _MAX_FLAG)
    +
    +    try:
    +        if not isinstance(inputProfile, ImageCmsProfile):
    +            inputProfile = ImageCmsProfile(inputProfile)
    +        if not isinstance(outputProfile, ImageCmsProfile):
    +            outputProfile = ImageCmsProfile(outputProfile)
    +        if not isinstance(proofProfile, ImageCmsProfile):
    +            proofProfile = ImageCmsProfile(proofProfile)
    +        return ImageCmsTransform(
    +            inputProfile, outputProfile, inMode, outMode, renderingIntent,
    +            proofProfile, proofRenderingIntent, flags)
    +    except (IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +buildTransformFromOpenProfiles = buildTransform
    +buildProofTransformFromOpenProfiles = buildProofTransform
    +
    +
    +def applyTransform(im, transform, inPlace=0):
    +    """
    +    (pyCMS) Applies a transform to a given image.
    +
    +    If im.mode != transform.inMode, a PyCMSError is raised.
    +
    +    If inPlace == TRUE and transform.inMode != transform.outMode, a
    +    PyCMSError is raised.
    +
+    If im.mode, transform.inMode, or transform.outMode is not supported by
    +    pyCMSdll or the profiles you used for the transform, a PyCMSError is
    +    raised.
    +
    +    If an error occurs while the transform is being applied, a PyCMSError
    +    is raised.
    +
    +    This function applies a pre-calculated transform (from
    +    ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles())
    +    to an image.  The transform can be used for multiple images, saving
    +    considerable calculation time if doing the same conversion multiple times.
    +
    +    If you want to modify im in-place instead of receiving a new image as
    +    the return value, set inPlace to TRUE.  This can only be done if
    +    transform.inMode and transform.outMode are the same, because we can't
    +    change the mode in-place (the buffer sizes for some modes are
    +    different).  The  default behavior is to return a new Image object of
    +    the same dimensions in mode transform.outMode.
    +
    +    :param im: A PIL Image object, and im.mode must be the same as the inMode
    +        supported by the transform.
    +    :param transform: A valid CmsTransform class object
    +    :param inPlace: Bool (1 == True, 0 or None == False).  If True, im is
    +        modified in place and None is returned, if False, a new Image object
    +        with the transform applied is returned (and im is not changed). The
    +        default is False.
    +    :returns: Either None, or a new PIL Image object, depending on the value of
    +        inPlace. The profile will be returned in the image's
    +        info['icc_profile'].
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        if inPlace:
    +            transform.apply_in_place(im)
    +            imOut = None
    +        else:
    +            imOut = transform.apply(im)
    +    except (TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +    return imOut
    +
    +
    +def createProfile(colorSpace, colorTemp=-1):
    +    """
    +    (pyCMS) Creates a profile.
    +
    +    If colorSpace not in ["LAB", "XYZ", "sRGB"], a PyCMSError is raised
    +
+    If using LAB and colorTemp is not a positive integer, a PyCMSError is raised.
    +
    +    If an error occurs while creating the profile, a PyCMSError is raised.
    +
    +    Use this function to create common profiles on-the-fly instead of
    +    having to supply a profile on disk and knowing the path to it.  It
    +    returns a normal CmsProfile object that can be passed to
    +    ImageCms.buildTransformFromOpenProfiles() to create a transform to apply
    +    to images.
    +
    +    :param colorSpace: String, the color space of the profile you wish to
    +        create.
    +        Currently only "LAB", "XYZ", and "sRGB" are supported.
+    :param colorTemp: Positive integer for the white point of the profile, in
+        degrees Kelvin (e.g. 5000, 6500, 9600).  If omitted, the default is
+        5000 (the D50 illuminant).  colorTemp is ONLY applied to LAB profiles,
+        and is ignored for XYZ and sRGB.
    +    :returns: A CmsProfile class object
    +    :exception PyCMSError:
    +    """
    +
    +    if colorSpace not in ["LAB", "XYZ", "sRGB"]:
    +        raise PyCMSError(
    +            "Color space not supported for on-the-fly profile creation (%s)"
    +            % colorSpace)
    +
    +    if colorSpace == "LAB":
    +        try:
    +            colorTemp = float(colorTemp)
+        except (TypeError, ValueError):
    +            raise PyCMSError(
    +                "Color temperature must be numeric, \"%s\" not valid"
    +                % colorTemp)
    +
    +    try:
    +        return core.createProfile(colorSpace, colorTemp)
    +    except (TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def getProfileName(profile):
    +    """
    +
    +    (pyCMS) Gets the internal product name for the given profile.
    +
    +    If profile isn't a valid CmsProfile object or filename to a profile,
    +    a PyCMSError is raised If an error occurs while trying to obtain the
    +    name tag, a PyCMSError is raised.
    +
    +    Use this function to obtain the INTERNAL name of the profile (stored
    +    in an ICC tag in the profile itself), usually the one used when the
    +    profile was originally created.  Sometimes this tag also contains
    +    additional information supplied by the creator.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :returns: A string containing the internal name of the profile as stored
    +        in an ICC tag.
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        # add an extra newline to preserve pyCMS compatibility
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        # do it in python, not c.
    +        #    // name was "%s - %s" (model, manufacturer) || Description ,
    +        #    // but if the Model and Manufacturer were the same or the model
    +        #    // was long, Just the model,  in 1.x
    +        model = profile.profile.product_model
    +        manufacturer = profile.profile.product_manufacturer
    +
    +        if not (model or manufacturer):
    +            return profile.profile.product_description + "\n"
    +        if not manufacturer or len(model) > 30:
    +            return model + "\n"
    +        return "%s - %s\n" % (model, manufacturer)
    +
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def getProfileInfo(profile):
    +    """
    +    (pyCMS) Gets the internal product information for the given profile.
    +
    +    If profile isn't a valid CmsProfile object or filename to a profile,
    +    a PyCMSError is raised.
    +
    +    If an error occurs while trying to obtain the info tag, a PyCMSError
    +    is raised
    +
    +    Use this function to obtain the information stored in the profile's
    +    info tag.  This often contains details about the profile, and how it
    +    was created, as supplied by the creator.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :returns: A string containing the internal profile information stored in
    +        an ICC tag.
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        # add an extra newline to preserve pyCMS compatibility
    +        # Python, not C. the white point bits weren't working well,
    +        # so skipping.
    +        #    // info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint
    +        description = profile.profile.product_description
    +        cpright = profile.profile.product_copyright
    +        arr = []
    +        for elt in (description, cpright):
    +            if elt:
    +                arr.append(elt)
    +        return "\r\n\r\n".join(arr) + "\r\n\r\n"
    +
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def getProfileCopyright(profile):
    +    """
    +    (pyCMS) Gets the copyright for the given profile.
    +
    +    If profile isn't a valid CmsProfile object or filename to a profile,
    +    a PyCMSError is raised.
    +
    +    If an error occurs while trying to obtain the copyright tag, a PyCMSError
    +    is raised
    +
    +    Use this function to obtain the information stored in the profile's
    +    copyright tag.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :returns: A string containing the internal profile information stored in
    +        an ICC tag.
    +    :exception PyCMSError:
    +    """
    +    try:
    +        # add an extra newline to preserve pyCMS compatibility
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        return profile.profile.product_copyright + "\n"
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def getProfileManufacturer(profile):
    +    """
    +    (pyCMS) Gets the manufacturer for the given profile.
    +
    +    If profile isn't a valid CmsProfile object or filename to a profile,
    +    a PyCMSError is raised.
    +
    +    If an error occurs while trying to obtain the manufacturer tag, a
    +    PyCMSError is raised
    +
    +    Use this function to obtain the information stored in the profile's
    +    manufacturer tag.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :returns: A string containing the internal profile information stored in
    +        an ICC tag.
    +    :exception PyCMSError:
    +    """
    +    try:
    +        # add an extra newline to preserve pyCMS compatibility
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        return profile.profile.product_manufacturer + "\n"
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def getProfileModel(profile):
    +    """
    +    (pyCMS) Gets the model for the given profile.
    +
    +    If profile isn't a valid CmsProfile object or filename to a profile,
    +    a PyCMSError is raised.
    +
    +    If an error occurs while trying to obtain the model tag, a PyCMSError
    +    is raised
    +
    +    Use this function to obtain the information stored in the profile's
    +    model tag.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :returns: A string containing the internal profile information stored in
    +        an ICC tag.
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        # add an extra newline to preserve pyCMS compatibility
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        return profile.profile.product_model + "\n"
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def getProfileDescription(profile):
    +    """
    +    (pyCMS) Gets the description for the given profile.
    +
    +    If profile isn't a valid CmsProfile object or filename to a profile,
    +    a PyCMSError is raised.
    +
    +    If an error occurs while trying to obtain the description tag, a PyCMSError
    +    is raised
    +
    +    Use this function to obtain the information stored in the profile's
    +    description tag.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :returns: A string containing the internal profile information stored in an
    +        ICC tag.
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        # add an extra newline to preserve pyCMS compatibility
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        return profile.profile.product_description + "\n"
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def getDefaultIntent(profile):
    +    """
    +    (pyCMS) Gets the default intent name for the given profile.
    +
    +    If profile isn't a valid CmsProfile object or filename to a profile,
    +    a PyCMSError is raised.
    +
    +    If an error occurs while trying to obtain the default intent, a
    +    PyCMSError is raised.
    +
    +    Use this function to determine the default (and usually best optimized)
    +    rendering intent for this profile.  Most profiles support multiple
    +    rendering intents, but are intended mostly for one type of conversion.
    +    If you wish to use a different intent than returned, use
    +    ImageCms.isIntentSupported() to verify it will work first.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :returns: Integer 0-3 specifying the default rendering intent for this
    +        profile.
    +
    +            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    +            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    +            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    +            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +
    +        see the pyCMS documentation for details on rendering intents and what
    +            they do.
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        return profile.profile.rendering_intent
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def isIntentSupported(profile, intent, direction):
    +    """
    +    (pyCMS) Checks if a given intent is supported.
    +
    +    Use this function to verify that you can use your desired
    +    renderingIntent with profile, and that profile can be used for the
    +    input/output/proof profile as you desire.
    +
+    Some profiles are created specifically for one "direction", and cannot
    +    be used for others.  Some profiles can only be used for certain
    +    rendering intents... so it's best to either verify this before trying
    +    to create a transform with them (using this function), or catch the
    +    potential PyCMSError that will occur if they don't support the modes
    +    you select.
    +
    +    :param profile: EITHER a valid CmsProfile object, OR a string of the
    +        filename of an ICC profile.
    +    :param intent: Integer (0-3) specifying the rendering intent you wish to
    +        use with this profile
    +
    +            INTENT_PERCEPTUAL            = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
    +            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
    +            INTENT_SATURATION            = 2 (ImageCms.INTENT_SATURATION)
    +            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
    +
    +        see the pyCMS documentation for details on rendering intents and what
    +            they do.
    +    :param direction: Integer specifying if the profile is to be used for input,
    +        output, or proof
    +
    +            INPUT  = 0 (or use ImageCms.DIRECTION_INPUT)
    +            OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT)
    +            PROOF  = 2 (or use ImageCms.DIRECTION_PROOF)
    +
    +    :returns: 1 if the intent/direction are supported, -1 if they are not.
    +    :exception PyCMSError:
    +    """
    +
    +    try:
    +        if not isinstance(profile, ImageCmsProfile):
    +            profile = ImageCmsProfile(profile)
    +        # FIXME: I get different results for the same data w. different
    +        # compilers.  Bug in LittleCMS or in the binding?
    +        if profile.profile.is_intent_supported(intent, direction):
    +            return 1
    +        else:
    +            return -1
    +    except (AttributeError, IOError, TypeError, ValueError) as v:
    +        raise PyCMSError(v)
    +
    +
    +def versions():
    +    """
    +    (pyCMS) Fetches versions.
    +    """
    +
    +    return (
    +        VERSION, core.littlecms_version,
    +        sys.version.split()[0], Image.VERSION
    +    )
    +
    +# --------------------------------------------------------------------
    +
    +if __name__ == "__main__":
    +    # create a cheap manual from the __doc__ strings for the functions above
    +
    +    print(__doc__)
    +
    +    for f in dir(sys.modules[__name__]):
    +        doc = None
    +        try:
    +            exec("doc = %s.__doc__" % (f))
    +            if "pyCMS" in doc:
    +                # so we don't get the __doc__ string for imported modules
    +                print("=" * 80)
    +                print("%s" % f)
    +                print(doc)
    +        except (AttributeError, TypeError):
    +            pass
    +
    +# End of file
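Tying the pieces of this module together: for batch work, build the transform once and reuse it via applyTransform. A self-contained sketch using on-the-fly profiles from createProfile (no profile files needed):

```python
from PIL import Image, ImageCms

srgb = ImageCms.createProfile("sRGB")
lab = ImageCms.createProfile("LAB")
# Reusable transform; building it is the expensive step.
rgb2lab = ImageCms.buildTransform(srgb, lab, "RGB", "LAB")

im = Image.new("RGB", (32, 32), "orange")
lab_im = ImageCms.applyTransform(im, rgb2lab)
print(lab_im.mode)  # LAB
```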
    diff --git a/server/www/packages/packages-linux/x64/PIL/ImageColor.py b/server/www/packages/packages-linux/x64/PIL/ImageColor.py
    new file mode 100644
    index 0000000..56c38e4
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/ImageColor.py
    @@ -0,0 +1,280 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# map CSS3-style colour description strings to RGB
    +#
    +# History:
    +# 2002-10-24 fl   Added support for CSS-style color strings
    +# 2002-12-15 fl   Added RGBA support
    +# 2004-03-27 fl   Fixed remaining int() problems for Python 1.5.2
    +# 2004-07-19 fl   Fixed gray/grey spelling issues
    +# 2009-03-05 fl   Fixed rounding error in grayscale calculation
    +#
    +# Copyright (c) 2002-2004 by Secret Labs AB
    +# Copyright (c) 2002-2004 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +from PIL import Image
    +import re
    +
    +
    +def getrgb(color):
    +    """
    +     Convert a color string to an RGB tuple. If the string cannot be parsed,
    +     this function raises a :py:exc:`ValueError` exception.
    +
    +    .. versionadded:: 1.1.4
    +
    +    :param color: A color string
    +    :return: ``(red, green, blue[, alpha])``
    +    """
    +    try:
    +        rgb = colormap[color]
    +    except KeyError:
    +        try:
    +            # fall back on case-insensitive lookup
    +            rgb = colormap[color.lower()]
    +        except KeyError:
    +            rgb = None
    +    # found color in cache
    +    if rgb:
    +        if isinstance(rgb, tuple):
    +            return rgb
    +        colormap[color] = rgb = getrgb(rgb)
    +        return rgb
    +    # check for known string formats
    +    m = re.match("#\w\w\w$", color)
    +    if m:
    +        return (
    +            int(color[1]*2, 16),
    +            int(color[2]*2, 16),
    +            int(color[3]*2, 16)
    +            )
    +    m = re.match("#\w\w\w\w\w\w$", color)
    +    if m:
    +        return (
    +            int(color[1:3], 16),
    +            int(color[3:5], 16),
    +            int(color[5:7], 16)
    +            )
    +    m = re.match("rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
    +    if m:
    +        return (
    +            int(m.group(1)),
    +            int(m.group(2)),
    +            int(m.group(3))
    +            )
    +    m = re.match("rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color)
    +    if m:
    +        return (
    +            int((int(m.group(1)) * 255) / 100.0 + 0.5),
    +            int((int(m.group(2)) * 255) / 100.0 + 0.5),
    +            int((int(m.group(3)) * 255) / 100.0 + 0.5)
    +            )
    +    m = re.match("hsl\(\s*(\d+)\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color)
    +    if m:
    +        from colorsys import hls_to_rgb
    +        rgb = hls_to_rgb(
    +            float(m.group(1)) / 360.0,
    +            float(m.group(3)) / 100.0,
    +            float(m.group(2)) / 100.0,
    +            )
    +        return (
    +            int(rgb[0] * 255 + 0.5),
    +            int(rgb[1] * 255 + 0.5),
    +            int(rgb[2] * 255 + 0.5)
    +            )
    +    m = re.match("rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$",
    +                 color)
    +    if m:
    +        return (
    +            int(m.group(1)),
    +            int(m.group(2)),
    +            int(m.group(3)),
    +            int(m.group(4))
    +            )
    +    raise ValueError("unknown color specifier: %r" % color)
    +
    +
    +def getcolor(color, mode):
    +    """
    +    Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a
    +    greyscale value if the mode is not color or a palette image. If the string
    +    cannot be parsed, this function raises a :py:exc:`ValueError` exception.
    +
    +    .. versionadded:: 1.1.4
    +
    +    :param color: A color string
    +    :return: ``(graylevel [, alpha]) or (red, green, blue[, alpha])``
    +    """
    +    # same as getrgb, but converts the result to the given mode
    +    color, alpha = getrgb(color), 255
    +    if len(color) == 4:
    +        color, alpha = color[0:3], color[3]
    +
    +    if Image.getmodebase(mode) == "L":
    +        r, g, b = color
    +        color = (r*299 + g*587 + b*114)//1000
    +        if mode[-1] == 'A':
    +            return (color, alpha)
    +    else:
    +        if mode[-1] == 'A':
    +            return color + (alpha,)
    +    return color
    +
    +colormap = {
    +    # X11 colour table from https://drafts.csswg.org/css-color-4/, with
    +    # gray/grey spelling issues fixed.  This is a superset of HTML 4.0
    +    # colour names used in CSS 1.
    +    "aliceblue": "#f0f8ff",
    +    "antiquewhite": "#faebd7",
    +    "aqua": "#00ffff",
    +    "aquamarine": "#7fffd4",
    +    "azure": "#f0ffff",
    +    "beige": "#f5f5dc",
    +    "bisque": "#ffe4c4",
    +    "black": "#000000",
    +    "blanchedalmond": "#ffebcd",
    +    "blue": "#0000ff",
    +    "blueviolet": "#8a2be2",
    +    "brown": "#a52a2a",
    +    "burlywood": "#deb887",
    +    "cadetblue": "#5f9ea0",
    +    "chartreuse": "#7fff00",
    +    "chocolate": "#d2691e",
    +    "coral": "#ff7f50",
    +    "cornflowerblue": "#6495ed",
    +    "cornsilk": "#fff8dc",
    +    "crimson": "#dc143c",
    +    "cyan": "#00ffff",
    +    "darkblue": "#00008b",
    +    "darkcyan": "#008b8b",
    +    "darkgoldenrod": "#b8860b",
    +    "darkgray": "#a9a9a9",
    +    "darkgrey": "#a9a9a9",
    +    "darkgreen": "#006400",
    +    "darkkhaki": "#bdb76b",
    +    "darkmagenta": "#8b008b",
    +    "darkolivegreen": "#556b2f",
    +    "darkorange": "#ff8c00",
    +    "darkorchid": "#9932cc",
    +    "darkred": "#8b0000",
    +    "darksalmon": "#e9967a",
    +    "darkseagreen": "#8fbc8f",
    +    "darkslateblue": "#483d8b",
    +    "darkslategray": "#2f4f4f",
    +    "darkslategrey": "#2f4f4f",
    +    "darkturquoise": "#00ced1",
    +    "darkviolet": "#9400d3",
    +    "deeppink": "#ff1493",
    +    "deepskyblue": "#00bfff",
    +    "dimgray": "#696969",
    +    "dimgrey": "#696969",
    +    "dodgerblue": "#1e90ff",
    +    "firebrick": "#b22222",
    +    "floralwhite": "#fffaf0",
    +    "forestgreen": "#228b22",
    +    "fuchsia": "#ff00ff",
    +    "gainsboro": "#dcdcdc",
    +    "ghostwhite": "#f8f8ff",
    +    "gold": "#ffd700",
    +    "goldenrod": "#daa520",
    +    "gray": "#808080",
    +    "grey": "#808080",
    +    "green": "#008000",
    +    "greenyellow": "#adff2f",
    +    "honeydew": "#f0fff0",
    +    "hotpink": "#ff69b4",
    +    "indianred": "#cd5c5c",
    +    "indigo": "#4b0082",
    +    "ivory": "#fffff0",
    +    "khaki": "#f0e68c",
    +    "lavender": "#e6e6fa",
    +    "lavenderblush": "#fff0f5",
    +    "lawngreen": "#7cfc00",
    +    "lemonchiffon": "#fffacd",
    +    "lightblue": "#add8e6",
    +    "lightcoral": "#f08080",
    +    "lightcyan": "#e0ffff",
    +    "lightgoldenrodyellow": "#fafad2",
    +    "lightgreen": "#90ee90",
    +    "lightgray": "#d3d3d3",
    +    "lightgrey": "#d3d3d3",
    +    "lightpink": "#ffb6c1",
    +    "lightsalmon": "#ffa07a",
    +    "lightseagreen": "#20b2aa",
    +    "lightskyblue": "#87cefa",
    +    "lightslategray": "#778899",
    +    "lightslategrey": "#778899",
    +    "lightsteelblue": "#b0c4de",
    +    "lightyellow": "#ffffe0",
    +    "lime": "#00ff00",
    +    "limegreen": "#32cd32",
    +    "linen": "#faf0e6",
    +    "magenta": "#ff00ff",
    +    "maroon": "#800000",
    +    "mediumaquamarine": "#66cdaa",
    +    "mediumblue": "#0000cd",
    +    "mediumorchid": "#ba55d3",
    +    "mediumpurple": "#9370db",
    +    "mediumseagreen": "#3cb371",
    +    "mediumslateblue": "#7b68ee",
    +    "mediumspringgreen": "#00fa9a",
    +    "mediumturquoise": "#48d1cc",
    +    "mediumvioletred": "#c71585",
    +    "midnightblue": "#191970",
    +    "mintcream": "#f5fffa",
    +    "mistyrose": "#ffe4e1",
    +    "moccasin": "#ffe4b5",
    +    "navajowhite": "#ffdead",
    +    "navy": "#000080",
    +    "oldlace": "#fdf5e6",
    +    "olive": "#808000",
    +    "olivedrab": "#6b8e23",
    +    "orange": "#ffa500",
    +    "orangered": "#ff4500",
    +    "orchid": "#da70d6",
    +    "palegoldenrod": "#eee8aa",
    +    "palegreen": "#98fb98",
    +    "paleturquoise": "#afeeee",
    +    "palevioletred": "#db7093",
    +    "papayawhip": "#ffefd5",
    +    "peachpuff": "#ffdab9",
    +    "peru": "#cd853f",
    +    "pink": "#ffc0cb",
    +    "plum": "#dda0dd",
    +    "powderblue": "#b0e0e6",
    +    "purple": "#800080",
    +    "rebeccapurple": "#663399",
    +    "red": "#ff0000",
    +    "rosybrown": "#bc8f8f",
    +    "royalblue": "#4169e1",
    +    "saddlebrown": "#8b4513",
    +    "salmon": "#fa8072",
    +    "sandybrown": "#f4a460",
    +    "seagreen": "#2e8b57",
    +    "seashell": "#fff5ee",
    +    "sienna": "#a0522d",
    +    "silver": "#c0c0c0",
    +    "skyblue": "#87ceeb",
    +    "slateblue": "#6a5acd",
    +    "slategray": "#708090",
    +    "slategrey": "#708090",
    +    "snow": "#fffafa",
    +    "springgreen": "#00ff7f",
    +    "steelblue": "#4682b4",
    +    "tan": "#d2b48c",
    +    "teal": "#008080",
    +    "thistle": "#d8bfd8",
    +    "tomato": "#ff6347",
    +    "turquoise": "#40e0d0",
    +    "violet": "#ee82ee",
    +    "wheat": "#f5deb3",
    +    "white": "#ffffff",
    +    "whitesmoke": "#f5f5f5",
    +    "yellow": "#ffff00",
    +    "yellowgreen": "#9acd32",
    +}
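
A quick usage sketch for the ImageColor module added above (illustrative only, not part of the vendored diff; it assumes the standard Pillow API as vendored here):

from PIL import ImageColor

# Named colours resolve through the colormap table; hex strings are
# parsed by getrgb() directly.
assert ImageColor.getrgb("rebeccapurple") == (102, 51, 153)
assert ImageColor.getrgb("#ff0000") == (255, 0, 0)

# getcolor() additionally adapts the result to a target mode, e.g. it
# appends an alpha band for "RGBA" per the mode[-1] == 'A' branch above.
assert ImageColor.getcolor("red", "RGBA") == (255, 0, 0, 255)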
    diff --git a/server/www/packages/packages-linux/x64/PIL/ImageDraw.py b/server/www/packages/packages-linux/x64/PIL/ImageDraw.py
    new file mode 100644
    index 0000000..a3e5270
    --- /dev/null
    +++ b/server/www/packages/packages-linux/x64/PIL/ImageDraw.py
    @@ -0,0 +1,384 @@
    +#
    +# The Python Imaging Library
    +# $Id$
    +#
    +# drawing interface operations
    +#
    +# History:
    +# 1996-04-13 fl   Created (experimental)
    +# 1996-08-07 fl   Filled polygons, ellipses.
    +# 1996-08-13 fl   Added text support
    +# 1998-06-28 fl   Handle I and F images
    +# 1998-12-29 fl   Added arc; use arc primitive to draw ellipses
    +# 1999-01-10 fl   Added shape stuff (experimental)
    +# 1999-02-06 fl   Added bitmap support
    +# 1999-02-11 fl   Changed all primitives to take options
    +# 1999-02-20 fl   Fixed backwards compatibility
    +# 2000-10-12 fl   Copy on write, when necessary
    +# 2001-02-18 fl   Use default ink for bitmap/text also in fill mode
    +# 2002-10-24 fl   Added support for CSS-style color strings
    +# 2002-12-10 fl   Added experimental support for RGBA-on-RGB drawing
    +# 2002-12-11 fl   Refactored low-level drawing API (work in progress)
    +# 2004-08-26 fl   Made Draw() a factory function, added getdraw() support
    +# 2004-09-04 fl   Added width support to line primitive
    +# 2004-09-10 fl   Added font mode handling
    +# 2006-06-19 fl   Added font bearing support (getmask2)
    +#
    +# Copyright (c) 1997-2006 by Secret Labs AB
    +# Copyright (c) 1996-2006 by Fredrik Lundh
    +#
    +# See the README file for information on usage and redistribution.
    +#
    +
    +import numbers
    +import warnings
    +
    +from PIL import Image, ImageColor
    +from PIL._util import isStringType
    +
    +"""
    +A simple 2D drawing interface for PIL images.
    +
+Application code should use the Draw factory, instead of
+directly.
+"""
+
+
+class ImageDraw(object):
+
+    def __init__(self, im, mode=None):
+        """
+        Create a drawing instance.
+
+        @param im The image to draw in.
+        @param mode Optional mode to use for color values.  For RGB
+           images, this argument can be RGB or RGBA (to blend the
+           drawing into the image).  For all other modes, this argument
+           must be the same as the image mode.  If omitted, the mode
+           defaults to the mode of the image.
+        """
+        im.load()
+        if im.readonly:
+            im._copy()  # make it writeable
+        blend = 0
+        if mode is None:
+            mode = im.mode
+        if mode != im.mode:
+            if mode == "RGBA" and im.mode == "RGB":
+                blend = 1
+            else:
+                raise ValueError("mode mismatch")
+        if mode == "P":
+            self.palette = im.palette
+        else:
+            self.palette = None
+        self.im = im.im
+        self.draw = Image.core.draw(self.im, blend)
+        self.mode = mode
+        if mode in ("I", "F"):
+            self.ink = self.draw.draw_ink(1, mode)
+        else:
+            self.ink = self.draw.draw_ink(-1, mode)
+        if mode in ("1", "P", "I", "F"):
+            # FIXME: fix Fill2 to properly support matte for I+F images
+            self.fontmode = "1"
+        else:
+            self.fontmode = "L"  # aliasing is okay for other modes
+        self.fill = 0
+        self.font = None
+
+    def setink(self, ink):
+        raise NotImplementedError("setink() has been removed. " +
+                                  "Please use keyword arguments instead.")
+
+    def setfill(self, onoff):
+        raise NotImplementedError("setfill() has been removed. " +
+                                  "Please use keyword arguments instead.")
+
+    def setfont(self, font):
+        warnings.warn("setfont() is deprecated. " +
+                      "Please set the attribute directly instead.")
+        # compatibility
+        self.font = font
+
+    def getfont(self):
+        """Get the current default font."""
+        if not self.font:
+            # FIXME: should add a font repository
+            from PIL import ImageFont
+            self.font = ImageFont.load_default()
+        return self.font
+
+    def _getink(self, ink, fill=None):
+        if ink is None and fill is None:
+            if self.fill:
+                fill = self.ink
+            else:
+                ink = self.ink
+        else:
+            if ink is not None:
+                if isStringType(ink):
+                    ink = ImageColor.getcolor(ink, self.mode)
+                if self.palette and not isinstance(ink, numbers.Number):
+                    ink = self.palette.getcolor(ink)
+                ink = self.draw.draw_ink(ink, self.mode)
+            if fill is not None:
+                if isStringType(fill):
+                    fill = ImageColor.getcolor(fill, self.mode)
+                if self.palette and not isinstance(fill, numbers.Number):
+                    fill = self.palette.getcolor(fill)
+                fill = self.draw.draw_ink(fill, self.mode)
+        return ink, fill
+
+    def arc(self, xy, start, end, fill=None):
+        """Draw an arc."""
+        ink, fill = self._getink(fill)
+        if ink is not None:
+            self.draw.draw_arc(xy, start, end, ink)
+
+    def bitmap(self, xy, bitmap, fill=None):
+        """Draw a bitmap."""
+        bitmap.load()
+        ink, fill = self._getink(fill)
+        if ink is None:
+            ink = fill
+        if ink is not None:
+            self.draw.draw_bitmap(xy, bitmap.im, ink)
+
+    def chord(self, xy, start, end, fill=None, outline=None):
+        """Draw a chord."""
+        ink, fill = self._getink(outline, fill)
+        if fill is not None:
+            self.draw.draw_chord(xy, start, end, fill, 1)
+        if ink is not None:
+            self.draw.draw_chord(xy, start, end, ink, 0)
+
+    def ellipse(self, xy, fill=None, outline=None):
+        """Draw an ellipse."""
+        ink, fill = self._getink(outline, fill)
+        if fill is not None:
+            self.draw.draw_ellipse(xy, fill, 1)
+        if ink is not None:
+            self.draw.draw_ellipse(xy, ink, 0)
+
+    def line(self, xy, fill=None, width=0):
+        """Draw a line, or a connected sequence of line segments."""
+        ink, fill = self._getink(fill)
+        if ink is not None:
+            self.draw.draw_lines(xy, ink, width)
+
+    def shape(self, shape, fill=None, outline=None):
+        """(Experimental) Draw a shape."""
+        shape.close()
+        ink, fill = self._getink(outline, fill)
+        if fill is not None:
+            self.draw.draw_outline(shape, fill, 1)
+        if ink is not None:
+            self.draw.draw_outline(shape, ink, 0)
+
+    def pieslice(self, xy, start, end, fill=None, outline=None):
+        """Draw a pieslice."""
+        ink, fill = self._getink(outline, fill)
+        if fill is not None:
+            self.draw.draw_pieslice(xy, start, end, fill, 1)
+        if ink is not None:
+            self.draw.draw_pieslice(xy, start, end, ink, 0)
+
+    def point(self, xy, fill=None):
+        """Draw one or more individual pixels."""
+        ink, fill = self._getink(fill)
+        if ink is not None:
+            self.draw.draw_points(xy, ink)
+
+    def polygon(self, xy, fill=None, outline=None):
+        """Draw a polygon."""
+        ink, fill = self._getink(outline, fill)
+        if fill is not None:
+            self.draw.draw_polygon(xy, fill, 1)
+        if ink is not None:
+            self.draw.draw_polygon(xy, ink, 0)
+
+    def rectangle(self, xy, fill=None, outline=None):
+        """Draw a rectangle."""
+        ink, fill = self._getink(outline, fill)
+        if fill is not None:
+            self.draw.draw_rectangle(xy, fill, 1)
+        if ink is not None:
+            self.draw.draw_rectangle(xy, ink, 0)
+
+    def _multiline_check(self, text):
+        """Draw text."""
+        split_character = "\n" if isinstance(text, type("")) else b"\n"
+
+        return split_character in text
+
+    def _multiline_split(self, text):
+        split_character = "\n" if isinstance(text, type("")) else b"\n"
+
+        return text.split(split_character)
+
+    def text(self, xy, text, fill=None, font=None, anchor=None,
+             *args, **kwargs):
+        if self._multiline_check(text):
+            return self.multiline_text(xy, text, fill, font, anchor,
+                                       *args, **kwargs)
+
+        ink, fill = self._getink(fill)
+        if font is None:
+            font = self.getfont()
+        if ink is None:
+            ink = fill
+        if ink is not None:
+            try:
+                mask, offset = font.getmask2(text, self.fontmode)
+                xy = xy[0] + offset[0], xy[1] + offset[1]
+            except AttributeError:
+                try:
+                    mask = font.getmask(text, self.fontmode)
+                except TypeError:
+                    mask = font.getmask(text)
+            self.draw.draw_bitmap(xy, mask, ink)
+
+    def multiline_text(self, xy, text, fill=None, font=None, anchor=None,
+                       spacing=4, align="left"):
+        widths = []
+        max_width = 0
+        lines = self._multiline_split(text)
+        line_spacing = self.textsize('A', font=font)[1] + spacing
+        for line in lines:
+            line_width, line_height = self.textsize(line, font)
+            widths.append(line_width)
+            max_width = max(max_width, line_width)
+        left, top = xy
+        for idx, line in enumerate(lines):
+            if align == "left":
+                pass  # left = x
+            elif align == "center":
+                left += (max_width - widths[idx]) / 2.0
+            elif align == "right":
+                left += (max_width - widths[idx])
+            else:
+                assert False, 'align must be "left", "center" or "right"'
+            self.text((left, top), line, fill, font, anchor)
+            top += line_spacing
+            left = xy[0]
+
+    def textsize(self, text, font=None, *args, **kwargs):
+        """Get the size of a given string, in pixels."""
+        if self._multiline_check(text):
+            return self.multiline_textsize(text, font, *args, **kwargs)
+
+        if font is None:
+            font = self.getfont()
+        return font.getsize(text)
+
+    def multiline_textsize(self, text, font=None, spacing=4):
+        max_width = 0
+        lines = self._multiline_split(text)
+        line_spacing = self.textsize('A', font=font)[1] + spacing
+        for line in lines:
+            line_width, line_height = self.textsize(line, font)
+            max_width = max(max_width, line_width)
+        return max_width, len(lines)*line_spacing
+
+
+def Draw(im, mode=None):
+    """
+    A simple 2D drawing interface for PIL images.
+
+    @param im The image to draw in.
+    @param mode Optional mode to use for color values.  For RGB
+       images, this argument can be RGB or RGBA (to blend the
+       drawing into the image).  For all other modes, this argument
+       must be the same as the image mode.  If omitted, the mode
+       defaults to the mode of the image.
+    """
+    try:
+        return im.getdraw(mode)
+    except AttributeError:
+        return ImageDraw(im, mode)
+
+# experimental access to the outline API
+try:
+    Outline = Image.core.outline
+except AttributeError:
+    Outline = None
+
+
+def getdraw(im=None, hints=None):
+    """
+    (Experimental) A more advanced 2D drawing interface for PIL images,
+    based on the WCK interface.
+
+    @param im The image to draw in.
+    @param hints An optional list of hints.
+    @return A (drawing context, drawing resource factory) tuple.
+    """
+    # FIXME: this needs more work!
+    # FIXME: come up with a better 'hints' scheme.
+    handler = None
+    if not hints or "nicest" in hints:
+        try:
+            from PIL import _imagingagg as handler
+        except ImportError:
+            pass
+    if handler is None:
+        from PIL import ImageDraw2 as handler
+    if im:
+        im = handler.Draw(im)
+    return im, handler
+
+
+def floodfill(image, xy, value, border=None):
+    """
+    (experimental) Fills a bounded region with a given color.
+
+    @param image Target image.
+    @param xy Seed position (a 2-item coordinate tuple).
+    @param value Fill color.
+    @param border Optional border value.  If given, the region consists of
+        pixels with a color different from the border color.  If not given,
+        the region consists of pixels having the same color as the seed
+        pixel.
+    """
+    # based on an implementation by Eric S. Raymond
+    pixel = image.load()
+    x, y = xy
+    try:
+        background = pixel[x, y]
+        if background == value:
+            return  # seed point already has fill color
+        pixel[x, y] = value
+    except IndexError:
+        return  # seed point outside image
+    edge = [(x, y)]
+    if border is None:
+        while edge:
+            newedge = []
+            for (x, y) in edge:
+                for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
+                    try:
+                        p = pixel[s, t]
+                    except IndexError:
+                        pass
+                    else:
+                        if p == background:
+                            pixel[s, t] = value
+                            newedge.append((s, t))
+            edge = newedge
+    else:
+        while edge:
+            newedge = []
+            for (x, y) in edge:
+                for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
+                    try:
+                        p = pixel[s, t]
+                    except IndexError:
+                        pass
+                    else:
+                        if p != value and p != border:
+                            pixel[s, t] = value
+                            newedge.append((s, t))
+            edge = newedge
+
+# End of file
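
A quick usage sketch for the ImageDraw module above (illustrative only, not part of the vendored diff; it follows the Draw-factory convention the docstring recommends):

from PIL import Image, ImageDraw

im = Image.new("RGB", (200, 100), "white")
d = ImageDraw.Draw(im)                      # factory, not ImageDraw() directly
d.line([(0, 0), (199, 99)], fill="navy", width=3)
d.rectangle([10, 10, 60, 60], outline="red")
d.text((70, 40), "hello\nworld", fill="black")  # multiline text is split on "\n"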
diff --git a/server/www/packages/packages-linux/x64/PIL/ImageDraw2.py b/server/www/packages/packages-linux/x64/PIL/ImageDraw2.py
new file mode 100644
index 0000000..62ee116
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/ImageDraw2.py
@@ -0,0 +1,111 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# WCK-style drawing interface operations
+#
+# History:
+# 2003-12-07 fl   created
+# 2005-05-15 fl   updated; added to PIL as ImageDraw2
+# 2005-05-15 fl   added text support
+# 2005-05-20 fl   added arc/chord/pieslice support
+#
+# Copyright (c) 2003-2005 by Secret Labs AB
+# Copyright (c) 2003-2005 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image, ImageColor, ImageDraw, ImageFont, ImagePath
+
+
+class Pen(object):
+    def __init__(self, color, width=1, opacity=255):
+        self.color = ImageColor.getrgb(color)
+        self.width = width
+
+
+class Brush(object):
+    def __init__(self, color, opacity=255):
+        self.color = ImageColor.getrgb(color)
+
+
+class Font(object):
+    def __init__(self, color, file, size=12):
+        # FIXME: add support for bitmap fonts
+        self.color = ImageColor.getrgb(color)
+        self.font = ImageFont.truetype(file, size)
+
+
+class Draw(object):
+
+    def __init__(self, image, size=None, color=None):
+        if not hasattr(image, "im"):
+            image = Image.new(image, size, color)
+        self.draw = ImageDraw.Draw(image)
+        self.image = image
+        self.transform = None
+
+    def flush(self):
+        return self.image
+
+    def render(self, op, xy, pen, brush=None):
+        # handle color arguments
+        outline = fill = None
+        width = 1
+        if isinstance(pen, Pen):
+            outline = pen.color
+            width = pen.width
+        elif isinstance(brush, Pen):
+            outline = brush.color
+            width = brush.width
+        if isinstance(brush, Brush):
+            fill = brush.color
+        elif isinstance(pen, Brush):
+            fill = pen.color
+        # handle transformation
+        if self.transform:
+            xy = ImagePath.Path(xy)
+            xy.transform(self.transform)
+        # render the item
+        if op == "line":
+            self.draw.line(xy, fill=outline, width=width)
+        else:
+            getattr(self.draw, op)(xy, fill=fill, outline=outline)
+
+    def settransform(self, offset):
+        (xoffset, yoffset) = offset
+        self.transform = (1, 0, xoffset, 0, 1, yoffset)
+
+    def arc(self, xy, start, end, *options):
+        self.render("arc", xy, start, end, *options)
+
+    def chord(self, xy, start, end, *options):
+        self.render("chord", xy, start, end, *options)
+
+    def ellipse(self, xy, *options):
+        self.render("ellipse", xy, *options)
+
+    def line(self, xy, *options):
+        self.render("line", xy, *options)
+
+    def pieslice(self, xy, start, end, *options):
+        self.render("pieslice", xy, start, end, *options)
+
+    def polygon(self, xy, *options):
+        self.render("polygon", xy, *options)
+
+    def rectangle(self, xy, *options):
+        self.render("rectangle", xy, *options)
+
+    def symbol(self, xy, symbol, *options):
+        raise NotImplementedError("not in this version")
+
+    def text(self, xy, text, font):
+        if self.transform:
+            xy = ImagePath.Path(xy)
+            xy.transform(self.transform)
+        self.draw.text(xy, text, font=font.font, fill=font.color)
+
+    def textsize(self, text, font):
+        return self.draw.textsize(text, font=font.font)
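
A quick usage sketch for the WCK-style ImageDraw2 wrapper above (illustrative only, not part of the vendored diff; render() decides outline vs. fill from the Pen/Brush argument types):

from PIL import Image, ImageDraw2

im = Image.new("RGB", (120, 80), "white")
d = ImageDraw2.Draw(im)
pen = ImageDraw2.Pen("blue", width=2)       # outline colour and line width
brush = ImageDraw2.Brush("yellow")          # fill colour
d.rectangle((10, 10, 110, 70), pen, brush)
d.settransform((5, 5))                      # simple x/y offset for later calls
d.line((0, 0, 100, 60), pen)
im = d.flush()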
diff --git a/server/www/packages/packages-linux/x64/PIL/ImageEnhance.py b/server/www/packages/packages-linux/x64/PIL/ImageEnhance.py
new file mode 100644
index 0000000..56b5c01
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/ImageEnhance.py
@@ -0,0 +1,100 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# image enhancement classes
+#
+# For a background, see "Image Processing By Interpolation and
+# Extrapolation", Paul Haeberli and Douglas Voorhies.  Available
+# at http://www.graficaobscura.com/interp/index.html
+#
+# History:
+# 1996-03-23 fl  Created
+# 2009-06-16 fl  Fixed mean calculation
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image, ImageFilter, ImageStat
+
+
+class _Enhance(object):
+
+    def enhance(self, factor):
+        """
+        Returns an enhanced image.
+
+        :param factor: A floating point value controlling the enhancement.
+                       Factor 1.0 always returns a copy of the original image,
+                       lower factors mean less color (brightness, contrast,
+                       etc), and higher values more. There are no restrictions
+                       on this value.
+        :rtype: :py:class:`~PIL.Image.Image`
+        """
+        return Image.blend(self.degenerate, self.image, factor)
+
+
+class Color(_Enhance):
+    """Adjust image color balance.
+
+    This class can be used to adjust the colour balance of an image, in
+    a manner similar to the controls on a colour TV set. An enhancement
+    factor of 0.0 gives a black and white image. A factor of 1.0 gives
+    the original image.
+    """
+    def __init__(self, image):
+        self.image = image
+        self.intermediate_mode = 'L'
+        if 'A' in image.getbands():
+            self.intermediate_mode = 'LA'
+
+        self.degenerate = image.convert(self.intermediate_mode).convert(image.mode)
+
+
+class Contrast(_Enhance):
+    """Adjust image contrast.
+
+    This class can be used to control the contrast of an image, similar
+    to the contrast control on a TV set. An enhancement factor of 0.0
+    gives a solid grey image. A factor of 1.0 gives the original image.
+    """
+    def __init__(self, image):
+        self.image = image
+        mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5)
+        self.degenerate = Image.new("L", image.size, mean).convert(image.mode)
+
+        if 'A' in image.getbands():
+            self.degenerate.putalpha(image.split()[-1])
+
+
+class Brightness(_Enhance):
+    """Adjust image brightness.
+
+    This class can be used to control the brightness of an image.  An
+    enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the
+    original image.
+    """
+    def __init__(self, image):
+        self.image = image
+        self.degenerate = Image.new(image.mode, image.size, 0)
+
+        if 'A' in image.getbands():
+            self.degenerate.putalpha(image.split()[-1])
+
+
+class Sharpness(_Enhance):
+    """Adjust image sharpness.
+
+    This class can be used to adjust the sharpness of an image. An
+    enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the
+    original image, and a factor of 2.0 gives a sharpened image.
+    """
+    def __init__(self, image):
+        self.image = image
+        self.degenerate = image.filter(ImageFilter.SMOOTH)
+
+        if 'A' in image.getbands():
+            self.degenerate.putalpha(image.split()[-1])
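
A quick usage sketch for ImageEnhance above (illustrative only, not part of the vendored diff; each enhancer blends a "degenerate" image with the original, so factor 1.0 is a no-op; the file name is hypothetical):

from PIL import Image, ImageEnhance

im = Image.open("photo.jpg")                  # hypothetical input file
im = ImageEnhance.Contrast(im).enhance(1.3)   # 30% more contrast
im = ImageEnhance.Sharpness(im).enhance(0.5)  # soften halfway toward SMOOTH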
diff --git a/server/www/packages/packages-linux/x64/PIL/ImageFile.py b/server/www/packages/packages-linux/x64/PIL/ImageFile.py
new file mode 100644
index 0000000..b21e9e3
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/ImageFile.py
@@ -0,0 +1,526 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# base class for image file handlers
+#
+# history:
+# 1995-09-09 fl   Created
+# 1996-03-11 fl   Fixed load mechanism.
+# 1996-04-15 fl   Added pcx/xbm decoders.
+# 1996-04-30 fl   Added encoders.
+# 1996-12-14 fl   Added load helpers
+# 1997-01-11 fl   Use encode_to_file where possible
+# 1997-08-27 fl   Flush output in _save
+# 1998-03-05 fl   Use memory mapping for some modes
+# 1999-02-04 fl   Use memory mapping also for "I;16" and "I;16B"
+# 1999-05-31 fl   Added image parser
+# 2000-10-12 fl   Set readonly flag on memory-mapped images
+# 2002-03-20 fl   Use better messages for common decoder errors
+# 2003-04-21 fl   Fall back on mmap/map_buffer if map is not available
+# 2003-10-30 fl   Added StubImageFile class
+# 2004-02-25 fl   Made incremental parser more robust
+#
+# Copyright (c) 1997-2004 by Secret Labs AB
+# Copyright (c) 1995-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image
+from PIL._util import isPath
+import io
+import os
+import sys
+import struct
+
+MAXBLOCK = 65536
+
+SAFEBLOCK = 1024*1024
+
+LOAD_TRUNCATED_IMAGES = False
+
+ERRORS = {
+    -1: "image buffer overrun error",
+    -2: "decoding error",
+    -3: "unknown error",
+    -8: "bad configuration",
+    -9: "out of memory error"
+}
+
+
+def raise_ioerror(error):
+    try:
+        message = Image.core.getcodecstatus(error)
+    except AttributeError:
+        message = ERRORS.get(error)
+    if not message:
+        message = "decoder error %d" % error
+    raise IOError(message + " when reading image file")
+
+
+#
+# --------------------------------------------------------------------
+# Helpers
+
+def _tilesort(t):
+    # sort on offset
+    return t[2]
+
+
+#
+# --------------------------------------------------------------------
+# ImageFile base class
+
+class ImageFile(Image.Image):
+    "Base class for image file format handlers."
+
+    def __init__(self, fp=None, filename=None):
+        Image.Image.__init__(self)
+
+        self.tile = None
+        self.readonly = 1  # until we know better
+
+        self.decoderconfig = ()
+        self.decodermaxblock = MAXBLOCK
+
+        if isPath(fp):
+            # filename
+            self.fp = open(fp, "rb")
+            self.filename = fp
+        else:
+            # stream
+            self.fp = fp
+            self.filename = filename
+
+        try:
+            self._open()
+        except (IndexError,  # end of data
+                TypeError,  # end of data (ord)
+                KeyError,  # unsupported mode
+                EOFError,  # got header but not the first frame
+                struct.error) as v:
+            raise SyntaxError(v)
+
+        if not self.mode or self.size[0] <= 0:
+            raise SyntaxError("not identified by this driver")
+
+    def draft(self, mode, size):
+        "Set draft mode"
+
+        pass
+
+    def verify(self):
+        "Check file integrity"
+
+        # raise exception if something's wrong.  must be called
+        # directly after open, and closes file when finished.
+        self.fp = None
+
+    def load(self):
+        "Load image data based on tile list"
+
+        pixel = Image.Image.load(self)
+
+        if self.tile is None:
+            raise IOError("cannot load this image")
+        if not self.tile:
+            return pixel
+
+        self.map = None
+        use_mmap = self.filename and len(self.tile) == 1
+        # As of pypy 2.1.0, memory mapping was failing here.
+        use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info')
+
+        readonly = 0
+
+        # look for read/seek overrides
+        try:
+            read = self.load_read
+            # don't use mmap if there are custom read/seek functions
+            use_mmap = False
+        except AttributeError:
+            read = self.fp.read
+
+        try:
+            seek = self.load_seek
+            use_mmap = False
+        except AttributeError:
+            seek = self.fp.seek
+
+        if use_mmap:
+            # try memory mapping
+            d, e, o, a = self.tile[0]
+            if d == "raw" and a[0] == self.mode and a[0] in Image._MAPMODES:
+                try:
+                    if hasattr(Image.core, "map"):
+                        # use built-in mapper
+                        self.map = Image.core.map(self.filename)
+                        self.map.seek(o)
+                        self.im = self.map.readimage(
+                            self.mode, self.size, a[1], a[2]
+                            )
+                    else:
+                        # use mmap, if possible
+                        import mmap
+                        fp = open(self.filename, "r")
+                        size = os.path.getsize(self.filename)
+                        self.map = mmap.mmap(fp.fileno(), size, access=mmap.ACCESS_READ)
+                        self.im = Image.core.map_buffer(
+                            self.map, self.size, d, e, o, a
+                            )
+                    readonly = 1
+                except (AttributeError, EnvironmentError, ImportError):
+                    self.map = None
+
+        self.load_prepare()
+
+        if not self.map:
+            # sort tiles in file order
+            self.tile.sort(key=_tilesort)
+
+            try:
+                # FIXME: This is a hack to handle TIFF's JpegTables tag.
+                prefix = self.tile_prefix
+            except AttributeError:
+                prefix = b""
+
+            for decoder_name, extents, offset, args in self.tile:
+                decoder = Image._getdecoder(self.mode, decoder_name,
+                                            args, self.decoderconfig)
+                seek(offset)
+                try:
+                    decoder.setimage(self.im, extents)
+                except ValueError:
+                    continue
+                if decoder.pulls_fd:
+                    decoder.setfd(self.fp)
+                    status, err_code = decoder.decode(b"")
+                else:
+                    b = prefix
+                    while True:
+                        try:
+                            s = read(self.decodermaxblock)
+                        except (IndexError, struct.error):  # truncated png/gif
+                            if LOAD_TRUNCATED_IMAGES:
+                                break
+                            else:
+                                raise IOError("image file is truncated")
+
+                        if not s and not decoder.handles_eof:  # truncated jpeg
+                            self.tile = []
+
+                            # JpegDecode needs to clean things up here either way
+                            # If we don't destroy the decompressor,
+                            # we have a memory leak.
+                            decoder.cleanup()
+
+                            if LOAD_TRUNCATED_IMAGES:
+                                break
+                            else:
+                                raise IOError("image file is truncated "
+                                              "(%d bytes not processed)" % len(b))
+
+                        b = b + s
+                        n, err_code = decoder.decode(b)
+                        if n < 0:
+                            break
+                        b = b[n:]
+
+                    # Need to cleanup here to prevent leaks in PyPy
+                    decoder.cleanup()
+
+        self.tile = []
+        self.readonly = readonly
+
+        self.fp = None  # might be shared
+
+        if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0:
+            # still raised if decoder fails to return anything
+            raise_ioerror(err_code)
+
+        # post processing
+        if hasattr(self, "tile_post_rotate"):
+            # FIXME: This is a hack to handle rotated PCD's
+            self.im = self.im.rotate(self.tile_post_rotate)
+            self.size = self.im.size
+
+        self.load_end()
+
+        return Image.Image.load(self)
+
+    def load_prepare(self):
+        # create image memory if necessary
+        if not self.im or\
+           self.im.mode != self.mode or self.im.size != self.size:
+            self.im = Image.core.new(self.mode, self.size)
+        # create palette (optional)
+        if self.mode == "P":
+            Image.Image.load(self)
+
+    def load_end(self):
+        # may be overridden
+        pass
+
+    # may be defined for contained formats
+    # def load_seek(self, pos):
+    #     pass
+
+    # may be defined for blocked formats (e.g. PNG)
+    # def load_read(self, bytes):
+    #     pass
+
+
+class StubImageFile(ImageFile):
+    """
+    Base class for stub image loaders.
+
+    A stub loader is an image loader that can identify files of a
+    certain format, but relies on external code to load the file.
+    """
+
+    def _open(self):
+        raise NotImplementedError(
+            "StubImageFile subclass must implement _open"
+            )
+
+    def load(self):
+        loader = self._load()
+        if loader is None:
+            raise IOError("cannot find loader for this %s file" % self.format)
+        image = loader.load(self)
+        assert image is not None
+        # become the other object (!)
+        self.__class__ = image.__class__
+        self.__dict__ = image.__dict__
+
+    def _load(self):
+        "(Hook) Find actual image loader."
+        raise NotImplementedError(
+            "StubImageFile subclass must implement _load"
+            )
+
+
+class Parser(object):
+    """
+    Incremental image parser.  This class implements the standard
+    feed/close consumer interface.
+
+    In Python 2.x, this is an old-style class.
+    """
+    incremental = None
+    image = None
+    data = None
+    decoder = None
+    offset = 0
+    finished = 0
+
+    def reset(self):
+        """
+        (Consumer) Reset the parser.  Note that you can only call this
+        method immediately after you've created a parser; parser
+        instances cannot be reused.
+        """
+        assert self.data is None, "cannot reuse parsers"
+
+    def feed(self, data):
+        """
+        (Consumer) Feed data to the parser.
+
+        :param data: A string buffer.
+        :exception IOError: If the parser failed to parse the image file.
+        """
+        # collect data
+
+        if self.finished:
+            return
+
+        if self.data is None:
+            self.data = data
+        else:
+            self.data = self.data + data
+
+        # parse what we have
+        if self.decoder:
+
+            if self.offset > 0:
+                # skip header
+                skip = min(len(self.data), self.offset)
+                self.data = self.data[skip:]
+                self.offset = self.offset - skip
+                if self.offset > 0 or not self.data:
+                    return
+
+            n, e = self.decoder.decode(self.data)
+
+            if n < 0:
+                # end of stream
+                self.data = None
+                self.finished = 1
+                if e < 0:
+                    # decoding error
+                    self.image = None
+                    raise_ioerror(e)
+                else:
+                    # end of image
+                    return
+            self.data = self.data[n:]
+
+        elif self.image:
+
+            # if we end up here with no decoder, this file cannot
+            # be incrementally parsed.  wait until we've gotten all
+            # available data
+            pass
+
+        else:
+
+            # attempt to open this file
+            try:
+                try:
+                    fp = io.BytesIO(self.data)
+                    im = Image.open(fp)
+                finally:
+                    fp.close()  # explicitly close the virtual file
+            except IOError:
+                # traceback.print_exc()
+                pass  # not enough data
+            else:
+                flag = hasattr(im, "load_seek") or hasattr(im, "load_read")
+                if flag or len(im.tile) != 1:
+                    # custom load code, or multiple tiles
+                    self.decode = None
+                else:
+                    # initialize decoder
+                    im.load_prepare()
+                    d, e, o, a = im.tile[0]
+                    im.tile = []
+                    self.decoder = Image._getdecoder(
+                        im.mode, d, a, im.decoderconfig
+                        )
+                    self.decoder.setimage(im.im, e)
+
+                    # calculate decoder offset
+                    self.offset = o
+                    if self.offset <= len(self.data):
+                        self.data = self.data[self.offset:]
+                        self.offset = 0
+
+                self.image = im
+
+    def close(self):
+        """
+        (Consumer) Close the stream.
+
+        :returns: An image object.
+        :exception IOError: If the parser failed to parse the image file either
+                            because it cannot be identified or cannot be
+                            decoded.
+        """
+        # finish decoding
+        if self.decoder:
+            # get rid of what's left in the buffers
+            self.feed(b"")
+            self.data = self.decoder = None
+            if not self.finished:
+                raise IOError("image was incomplete")
+        if not self.image:
+            raise IOError("cannot parse this image")
+        if self.data:
+            # incremental parsing not possible; reopen the file
+            # not that we have all data
+            try:
+                fp = io.BytesIO(self.data)
+                self.image = Image.open(fp)
+            finally:
+                self.image.load()
+                fp.close()  # explicitly close the virtual file
+        return self.image
+
+
+# --------------------------------------------------------------------
+
+def _save(im, fp, tile, bufsize=0):
+    """Helper to save image based on tile list
+
+    :param im: Image object.
+    :param fp: File object.
+    :param tile: Tile list.
+    :param bufsize: Optional buffer size
+    """
+
+    im.load()
+    if not hasattr(im, "encoderconfig"):
+        im.encoderconfig = ()
+    tile.sort(key=_tilesort)
+    # FIXME: make MAXBLOCK a configuration parameter
+    # It would be great if we could have the encoder specify what it needs
+    # But, it would need at least the image size in most cases. RawEncode is
+    # a tricky case.
+    bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4)  # see RawEncode.c
+    if fp == sys.stdout:
+        fp.flush()
+        return
+    try:
+        fh = fp.fileno()
+        fp.flush()
+    except (AttributeError, io.UnsupportedOperation):
+        # compress to Python file-compatible object
+        for e, b, o, a in tile:
+            e = Image._getencoder(im.mode, e, a, im.encoderconfig)
+            if o > 0:
+                fp.seek(o, 0)
+            e.setimage(im.im, b)
+            if e.pushes_fd:
+                e.setfd(fp)
+                l, s = e.encode_to_pyfd()
+            else:
+                while True:
+                    l, s, d = e.encode(bufsize)
+                    fp.write(d)
+                    if s:
+                        break
+            if s < 0:
+                raise IOError("encoder error %d when writing image file" % s)
+            e.cleanup()
+    else:
+        # slight speedup: compress to real file object
+        for e, b, o, a in tile:
+            e = Image._getencoder(im.mode, e, a, im.encoderconfig)
+            if o > 0:
+                fp.seek(o, 0)
+            e.setimage(im.im, b)
+            if e.pushes_fd:
+                e.setfd(fp)
+                l, s = e.encode_to_pyfd()
+            else:
+                s = e.encode_to_file(fh, bufsize)
+            if s < 0:
+                raise IOError("encoder error %d when writing image file" % s)
+            e.cleanup()
+    if hasattr(fp, "flush"):
+        fp.flush()
+
+
+def _safe_read(fp, size):
+    """
+    Reads large blocks in a safe way.  Unlike fp.read(n), this function
+    doesn't trust the user.  If the requested size is larger than
+    SAFEBLOCK, the file is read block by block.
+
+    :param fp: File handle.  Must implement a read method.
+    :param size: Number of bytes to read.
+    :returns: A string containing up to size bytes of data.
+    """
+    if size <= 0:
+        return b""
+    if size <= SAFEBLOCK:
+        return fp.read(size)
+    data = []
+    while size > 0:
+        block = fp.read(min(size, SAFEBLOCK))
+        if not block:
+            break
+        data.append(block)
+        size -= len(block)
+    return b"".join(data)
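
A quick usage sketch for ImageFile.Parser above (illustrative only, not part of the vendored diff; it shows the feed/close consumer interface on data arriving in chunks, with a hypothetical input file standing in for a socket):

from PIL import ImageFile

parser = ImageFile.Parser()
with open("photo.jpg", "rb") as f:      # hypothetical input file
    while True:
        chunk = f.read(1024)
        if not chunk:
            break
        parser.feed(chunk)              # buffers and decodes incrementally
im = parser.close()                     # raises IOError if the image is incomplete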
diff --git a/server/www/packages/packages-linux/x64/PIL/ImageFilter.py b/server/www/packages/packages-linux/x64/PIL/ImageFilter.py
new file mode 100644
index 0000000..baa168a
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/ImageFilter.py
@@ -0,0 +1,275 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard filters
+#
+# History:
+# 1995-11-27 fl   Created
+# 2002-06-08 fl   Added rank and mode filters
+# 2003-09-15 fl   Fixed rank calculation in rank filter; added expand call
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2002 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import functools
+
+
+class Filter(object):
+    pass
+
+
+class Kernel(Filter):
+    """
+    Create a convolution kernel.  The current version only
+    supports 3x3 and 5x5 integer and floating point kernels.
+
+    In the current version, kernels can only be applied to
+    "L" and "RGB" images.
+
+    :param size: Kernel size, given as (width, height). In the current
+                 version, this must be (3,3) or (5,5).
+    :param kernel: A sequence containing kernel weights.
+    :param scale: Scale factor. If given, the result for each pixel is
+                  divided by this value.  the default is the sum of the
+                  kernel weights.
+    :param offset: Offset. If given, this value is added to the result,
+                   after it has been divided by the scale factor.
+    """
+
+    def __init__(self, size, kernel, scale=None, offset=0):
+        if scale is None:
+            # default scale is sum of kernel
+            scale = functools.reduce(lambda a, b: a+b, kernel)
+        if size[0] * size[1] != len(kernel):
+            raise ValueError("not enough coefficients in kernel")
+        self.filterargs = size, scale, offset, kernel
+
+    def filter(self, image):
+        if image.mode == "P":
+            raise ValueError("cannot filter palette images")
+        return image.filter(*self.filterargs)
+
+
+class BuiltinFilter(Kernel):
+    def __init__(self):
+        pass
+
+
+class RankFilter(Filter):
+    """
+    Create a rank filter.  The rank filter sorts all pixels in
+    a window of the given size, and returns the **rank**'th value.
+
+    :param size: The kernel size, in pixels.
+    :param rank: What pixel value to pick.  Use 0 for a min filter,
+                 ``size * size / 2`` for a median filter, ``size * size - 1``
+                 for a max filter, etc.
+    """
+    name = "Rank"
+
+    def __init__(self, size, rank):
+        self.size = size
+        self.rank = rank
+
+    def filter(self, image):
+        if image.mode == "P":
+            raise ValueError("cannot filter palette images")
+        image = image.expand(self.size//2, self.size//2)
+        return image.rankfilter(self.size, self.rank)
+
+
+class MedianFilter(RankFilter):
+    """
+    Create a median filter. Picks the median pixel value in a window with the
+    given size.
+
+    :param size: The kernel size, in pixels.
+    """
+    name = "Median"
+
+    def __init__(self, size=3):
+        self.size = size
+        self.rank = size*size//2
+
+
+class MinFilter(RankFilter):
+    """
+    Create a min filter.  Picks the lowest pixel value in a window with the
+    given size.
+
+    :param size: The kernel size, in pixels.
+    """
+    name = "Min"
+
+    def __init__(self, size=3):
+        self.size = size
+        self.rank = 0
+
+
+class MaxFilter(RankFilter):
+    """
+    Create a max filter.  Picks the largest pixel value in a window with the
+    given size.
+
+    :param size: The kernel size, in pixels.
+    """
+    name = "Max"
+
+    def __init__(self, size=3):
+        self.size = size
+        self.rank = size*size-1
+
+
+class ModeFilter(Filter):
+    """
+
+    Create a mode filter. Picks the most frequent pixel value in a box with the
+    given size.  Pixel values that occur only once or twice are ignored; if no
+    pixel value occurs more than twice, the original pixel value is preserved.
+
+    :param size: The kernel size, in pixels.
+    """
+    name = "Mode"
+
+    def __init__(self, size=3):
+        self.size = size
+
+    def filter(self, image):
+        return image.modefilter(self.size)
+
+
+class GaussianBlur(Filter):
+    """Gaussian blur filter.
+
+    :param radius: Blur radius.
+    """
+    name = "GaussianBlur"
+
+    def __init__(self, radius=2):
+        self.radius = radius
+
+    def filter(self, image):
+        return image.gaussian_blur(self.radius)
+
+
+class UnsharpMask(Filter):
+    """Unsharp mask filter.
+
+    See Wikipedia's entry on `digital unsharp masking`_ for an explanation of
+    the parameters.
+
+    :param radius: Blur Radius
+    :param percent: Unsharp strength, in percent
+    :param threshold: Threshold controls the minimum brightness change that
+      will be sharpened
+
+    .. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking
+
+    """
+    name = "UnsharpMask"
+
+    def __init__(self, radius=2, percent=150, threshold=3):
+        self.radius = radius
+        self.percent = percent
+        self.threshold = threshold
+
+    def filter(self, image):
+        return image.unsharp_mask(self.radius, self.percent, self.threshold)
+
+
+class BLUR(BuiltinFilter):
+    name = "Blur"
+    filterargs = (5, 5), 16, 0, (
+        1, 1, 1, 1, 1,
+        1, 0, 0, 0, 1,
+        1, 0, 0, 0, 1,
+        1, 0, 0, 0, 1,
+        1, 1, 1, 1, 1
+        )
+
+
+class CONTOUR(BuiltinFilter):
+    name = "Contour"
+    filterargs = (3, 3), 1, 255, (
+        -1, -1, -1,
+        -1, 8, -1,
+        -1, -1, -1
+        )
+
+
+class DETAIL(BuiltinFilter):
+    name = "Detail"
+    filterargs = (3, 3), 6, 0, (
+        0, -1, 0,
+        -1, 10, -1,
+        0, -1, 0
+        )
+
+
+class EDGE_ENHANCE(BuiltinFilter):
+    name = "Edge-enhance"
+    filterargs = (3, 3), 2, 0, (
+        -1, -1, -1,
+        -1, 10, -1,
+        -1, -1, -1
+        )
+
+
+class EDGE_ENHANCE_MORE(BuiltinFilter):
+    name = "Edge-enhance More"
+    filterargs = (3, 3), 1, 0, (
+        -1, -1, -1,
+        -1, 9, -1,
+        -1, -1, -1
+        )
+
+
+class EMBOSS(BuiltinFilter):
+    name = "Emboss"
+    filterargs = (3, 3), 1, 128, (
+        -1, 0, 0,
+        0, 1, 0,
+        0, 0, 0
+        )
+
+
+class FIND_EDGES(BuiltinFilter):
+    name = "Find Edges"
+    filterargs = (3, 3), 1, 0, (
+        -1, -1, -1,
+        -1, 8, -1,
+        -1, -1, -1
+        )
+
+
+class SMOOTH(BuiltinFilter):
+    name = "Smooth"
+    filterargs = (3, 3), 13, 0, (
+        1, 1, 1,
+        1, 5, 1,
+        1, 1, 1
+        )
+
+
+class SMOOTH_MORE(BuiltinFilter):
+    name = "Smooth More"
+    filterargs = (5, 5), 100, 0, (
+        1, 1, 1, 1, 1,
+        1, 5, 5, 5, 1,
+        1, 5, 44, 5, 1,
+        1, 5, 5, 5, 1,
+        1, 1, 1, 1, 1
+        )
+
+
+class SHARPEN(BuiltinFilter):
+    name = "Sharpen"
+    filterargs = (3, 3), 16, 0, (
+        -2, -2, -2,
+        -2, 32, -2,
+        -2, -2, -2
+        )
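
A quick usage sketch for ImageFilter above (illustrative only, not part of the vendored diff; predefined kernels are passed as classes, parameterised filters as instances, and the file name is hypothetical):

from PIL import Image, ImageFilter

im = Image.open("photo.jpg")                      # hypothetical input file
blurred = im.filter(ImageFilter.GaussianBlur(radius=4))
edges = im.filter(ImageFilter.FIND_EDGES)
# A custom 3x3 convolution; scale defaults to the sum of the weights.
soft = im.filter(ImageFilter.Kernel((3, 3), [1, 1, 1, 1, 4, 1, 1, 1, 1]))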
diff --git a/server/www/packages/packages-linux/x64/PIL/ImageFont.py b/server/www/packages/packages-linux/x64/PIL/ImageFont.py
new file mode 100644
index 0000000..af1166d
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/ImageFont.py
@@ -0,0 +1,437 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PIL raster font management
+#
+# History:
+# 1996-08-07 fl   created (experimental)
+# 1997-08-25 fl   minor adjustments to handle fonts from pilfont 0.3
+# 1999-02-06 fl   rewrote most font management stuff in C
+# 1999-03-17 fl   take pth files into account in load_path (from Richard Jones)
+# 2001-02-17 fl   added freetype support
+# 2001-05-09 fl   added TransposedFont wrapper class
+# 2002-03-04 fl   make sure we have a "L" or "1" font
+# 2002-12-04 fl   skip non-directory entries in the system path
+# 2003-04-29 fl   add embedded default font
+# 2003-09-27 fl   added support for truetype charmap encodings
+#
+# Todo:
+# Adapt to PILFONT2 format (16-bit fonts, compressed, single file)
+#
+# Copyright (c) 1997-2003 by Secret Labs AB
+# Copyright (c) 1996-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image
+from PIL._util import isDirectory, isPath
+import os
+import sys
+
+
+class _imagingft_not_installed(object):
+    # module placeholder
+    def __getattr__(self, id):
+        raise ImportError("The _imagingft C module is not installed")
+
+try:
+    from PIL import _imagingft as core
+except ImportError:
+    core = _imagingft_not_installed()
+
+# FIXME: add support for pilfont2 format (see FontFile.py)
+
+# --------------------------------------------------------------------
+# Font metrics format:
+#       "PILfont" LF
+#       fontdescriptor LF
+#       (optional) key=value... LF
+#       "DATA" LF
+#       binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox)
+#
+# To place a character, cut out srcbox and paste at dstbox,
+# relative to the character position.  Then move the character
+# position according to dx, dy.
+# --------------------------------------------------------------------
+
+
+class ImageFont(object):
+    "PIL font wrapper"
+
+    def _load_pilfont(self, filename):
+
+        fp = open(filename, "rb")
+
+        for ext in (".png", ".gif", ".pbm"):
+            try:
+                fullname = os.path.splitext(filename)[0] + ext
+                image = Image.open(fullname)
+            except:
+                pass
+            else:
+                if image and image.mode in ("1", "L"):
+                    break
+        else:
+            raise IOError("cannot find glyph data file")
+
+        self.file = fullname
+
+        return self._load_pilfont_data(fp, image)
+
+    def _load_pilfont_data(self, file, image):
+
+        # read PILfont header
+        if file.readline() != b"PILfont\n":
+            raise SyntaxError("Not a PILfont file")
+        file.readline().split(b";")
+        self.info = []  # FIXME: should be a dictionary
+        while True:
+            s = file.readline()
+            if not s or s == b"DATA\n":
+                break
+            self.info.append(s)
+
+        # read PILfont metrics
+        data = file.read(256*20)
+
+        # check image
+        if image.mode not in ("1", "L"):
+            raise TypeError("invalid font image mode")
+
+        image.load()
+
+        self.font = Image.core.font(image.im, data)
+
+        # delegate critical operations to internal type
+        self.getsize = self.font.getsize
+        self.getmask = self.font.getmask
+
+
+##
+# Wrapper for FreeType fonts.  Application code should use the
+# truetype factory function to create font objects.
+
+class FreeTypeFont(object):
+    "FreeType font wrapper (requires _imagingft service)"
+
+    def __init__(self, font=None, size=10, index=0, encoding=""):
+        # FIXME: use service provider instead
+
+        self.path = font
+        self.size = size
+        self.index = index
+        self.encoding = encoding
+
+        if isPath(font):
+            self.font = core.getfont(font, size, index, encoding)
+        else:
+            self.font_bytes = font.read()
+            self.font = core.getfont(
+                "", size, index, encoding, self.font_bytes)
+
+    def getname(self):
+        return self.font.family, self.font.style
+
+    def getmetrics(self):
+        return self.font.ascent, self.font.descent
+
+    def getsize(self, text):
+        size, offset = self.font.getsize(text)
+        return (size[0] + offset[0], size[1] + offset[1])
+
+    def getoffset(self, text):
+        return self.font.getsize(text)[1]
+
+    def getmask(self, text, mode=""):
+        return self.getmask2(text, mode)[0]
+
+    def getmask2(self, text, mode="", fill=Image.core.fill):
+        size, offset = self.font.getsize(text)
+        im = fill("L", size, 0)
+        self.font.render(text, im.id, mode == "1")
+        return im, offset
+
+    def font_variant(self, font=None, size=None, index=None, encoding=None):
+        """
+        Create a copy of this FreeTypeFont object,
+        using any specified arguments to override the settings.
+
+        Parameters are identical to the parameters used to initialize this
+        object.
+
+        :return: A FreeTypeFont object.
+        """
+        return FreeTypeFont(font=self.path if font is None else font,
+                            size=self.size if size is None else size,
+                            index=self.index if index is None else index,
+                            encoding=self.encoding if encoding is None else
+                            encoding)
+
+##
+# Wrapper that creates a transposed font from any existing font
+# object.
+#
+# @param font A font object.
+# @param orientation An optional orientation.  If given, this should
+#     be one of Image.FLIP_LEFT_RIGHT, Image.FLIP_TOP_BOTTOM,
+#     Image.ROTATE_90, Image.ROTATE_180, or Image.ROTATE_270.
+
+
+class TransposedFont(object):
+    "Wrapper for writing rotated or mirrored text"
+
+    def __init__(self, font, orientation=None):
+        self.font = font
+        self.orientation = orientation  # any 'transpose' argument, or None
+
+    def getsize(self, text):
+        w, h = self.font.getsize(text)
+        if self.orientation in (Image.ROTATE_90, Image.ROTATE_270):
+            return h, w
+        return w, h
+
+    def getmask(self, text, mode=""):
+        im = self.font.getmask(text, mode)
+        if self.orientation is not None:
+            return im.transpose(self.orientation)
+        return im
+
+
+def load(filename):
+    """
+    Load a font file.  This function loads a font object from the given
+    bitmap font file, and returns the corresponding font object.
+
+    :param filename: Name of font file.
+    :return: A font object.
+    :exception IOError: If the file could not be read.
+    """
+    f = ImageFont()
+    f._load_pilfont(filename)
+    return f
+
+
+def truetype(font=None, size=10, index=0, encoding=""):
+    """
+    Load a TrueType or OpenType font file, and create a font object.
+    This function loads a font object from the given file, and creates
+    a font object for a font of the given size.
+
+    This function requires the _imagingft service.
+
+    :param font: A truetype font file.  Under Windows, if the file
+                 is not found in this filename, the loader also looks in
+                 Windows :file:`fonts/` directory.
+    :param size: The requested size, in points.
+    :param index: Which font face to load (default is first available face).
+    :param encoding: Which font encoding to use (default is Unicode). Common
+                     encodings are "unic" (Unicode), "symb" (Microsoft
+                     Symbol), "ADOB" (Adobe Standard), "ADBE" (Adobe Expert),
+                     and "armn" (Apple Roman). See the FreeType documentation
+                     for more information.
+    :return: A font object.
+    :exception IOError: If the file could not be read.
+    """
+
+    try:
+        return FreeTypeFont(font, size, index, encoding)
+    except IOError:
+        ttf_filename = os.path.basename(font)
+
+        dirs = []
+        if sys.platform == "win32":
+            # check the windows font repository
+            # NOTE: must use uppercase WINDIR, to work around bugs in
+            # 1.5.2's os.environ.get()
+            windir = os.environ.get("WINDIR")
+            if windir:
+                dirs.append(os.path.join(windir, "fonts"))
+        elif sys.platform in ('linux', 'linux2'):
+            lindirs = os.environ.get("XDG_DATA_DIRS", "")
+            if not lindirs:
+                # According to the freedesktop spec, XDG_DATA_DIRS should
+                # default to /usr/share
+                lindirs = '/usr/share'
+            dirs += [os.path.join(lindir, "fonts")
+                     for lindir in lindirs.split(":")]
+        elif sys.platform == 'darwin':
+            dirs += ['/Library/Fonts', '/System/Library/Fonts',
+                     os.path.expanduser('~/Library/Fonts')]
+
+        ext = os.path.splitext(ttf_filename)[1]
+        first_font_with_a_different_extension = None
+        for directory in dirs:
+            for walkroot, walkdir, walkfilenames in os.walk(directory):
+                for walkfilename in walkfilenames:
+                    if ext and walkfilename == ttf_filename:
+                        fontpath = os.path.join(walkroot, walkfilename)
+                        return FreeTypeFont(fontpath, size, index, encoding)
+                    elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename:
+                        fontpath = os.path.join(walkroot, walkfilename)
+                        if os.path.splitext(fontpath)[1] == '.ttf':
+                            return FreeTypeFont(fontpath, size, index, encoding)
+                        if not ext and first_font_with_a_different_extension is None:
+                            first_font_with_a_different_extension = fontpath
+        if first_font_with_a_different_extension:
+            return FreeTypeFont(first_font_with_a_different_extension, size,
+                                index, encoding)
+        raise
+
+
+def load_path(filename):
+    """
+    Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a
+    bitmap font along the Python path.
+
+    :param filename: Name of font file.
+    :return: A font object.
+    :exception IOError: If the file could not be read.
+    """
+    for directory in sys.path:
+        if isDirectory(directory):
+            if not isinstance(filename, str):
+                if bytes is str:
+                    filename = filename.encode("utf-8")
+                else:
+                    filename = filename.decode("utf-8")
+            try:
+                return load(os.path.join(directory, filename))
+            except IOError:
+                pass
+    raise IOError("cannot find font file")
+
+
+def load_default():
+    """Load a "better than nothing" default font.
+
+    .. versionadded:: 1.1.4
+
+    :return: A font object.
+    """
+    from io import BytesIO
+    import base64
+    f = ImageFont()
+    f._load_pilfont_data(
+        # courB08
+        BytesIO(base64.decodestring(b'''
+UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA
+BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL
+AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA
+AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB
+ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A
+BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB
+//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA
+AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH
+AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA
+ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv
+AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/
+/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5
+AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA
+AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG
+AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA
+BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA
+AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA
+2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF
+AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA////
++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA
+////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA
+BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv
+AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA
+AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA
+AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB +mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 +AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA +Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC +eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA 
+LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5
+AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA
+AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5
+AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA
+AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG
+AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA
+EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK
+AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA
+pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG
+AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA////
++QAGAAIAzgAKANUAEw==
+''')), Image.open(BytesIO(base64.decodestring(b'''
+iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u
+Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9
+M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g
+LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F
+IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA
+Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791
+NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx
+in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9
+SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5
+chkGABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9
+VeqYlODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzA
+jv/H/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsM
+Zjk3AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu
+5/47c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAA
+AAD//yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB8
+1iSwpEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBY
+M1TvoJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoa
+aGMRevta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6Ca
+JQAAAAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z3
+5v//Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//
+AwDRw7IkEbzhVQAAAABJRU5ErkJggg==
+''''))))
+    return f
+
+# End of file
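
A quick usage sketch for ImageFont above (illustrative only, not part of the vendored diff; the font name is hypothetical, and load_default() falls back to the embedded courB08 bitmap font):

from PIL import Image, ImageDraw, ImageFont

try:
    font = ImageFont.truetype("DejaVuSans.ttf", 24)  # hypothetical font name
except IOError:
    font = ImageFont.load_default()                  # embedded fallback

im = Image.new("RGB", (300, 60), "white")
ImageDraw.Draw(im).text((10, 10), "Teleport", font=font, fill="black")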
+# + +from PIL import Image + +import sys +if sys.platform not in ["win32", "darwin"]: + raise ImportError("ImageGrab is OS X and Windows only") + +if sys.platform == "win32": + grabber = Image.core.grabscreen +elif sys.platform == "darwin": + import os + import tempfile + import subprocess + + +def grab(bbox=None): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp('.png') + os.close(fh) + subprocess.call(['screencapture', '-x', filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + else: + size, data = grabber() + im = Image.frombytes( + "RGB", size, data, + # RGB, 32-bit line padding, origo in lower left corner + "raw", "BGR", (size[0]*3 + 3) & -4, -1 + ) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp('.jpg') + os.close(fh) + commands = [ + "set theFile to (open for access POSIX file \""+filepath+"\" with write permission)", + "try", + "write (the clipboard as JPEG picture) to theFile", + "end try", + "close access theFile" + ] + script = ["osascript"] + for command in commands: + script += ["-e", command] + subprocess.call(script) + + im = None + if os.stat(filepath).st_size != 0: + im = Image.open(filepath) + im.load() + os.unlink(filepath) + return im + else: + debug = 0 # temporary interface + data = Image.core.grabclipboard(debug) + if isinstance(data, bytes): + from PIL import BmpImagePlugin + import io + return BmpImagePlugin.DibImageFile(io.BytesIO(data)) + return data diff --git a/server/www/packages/packages-linux/x64/PIL/ImageMath.py b/server/www/packages/packages-linux/x64/PIL/ImageMath.py new file mode 100644 index 0000000..c0f3820 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageMath.py @@ -0,0 +1,272 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import _imagingmath + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +VERBOSE = 0 + + +def _isconstant(v): + return isinstance(v, int) or isinstance(v, float) + + +class _Operand(object): + """Wraps an image operand, providing standard operators""" + + def __init__(self, im): + self.im = im + + def __fixup(self, im1): + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. 
+ if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + raise ValueError("unsupported mode: %s" % im1.im.mode) + else: + # argument was a constant + if _isconstant(im1) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply(self, op, im1, im2=None, mode=None): + im1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.unop(op, out.im.id, im1.im.id) + else: + # binary operation + im2 = self.__fixup(im2) + if im1.mode != im2.mode: + # convert both arguments to floating point + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") + if im1.mode != im2.mode: + raise ValueError("mode mismatch") + if im1.size != im2.size: + # crop both arguments to a common size + size = (min(im1.size[0], im2.size[0]), + min(im1.size[1], im2.size[1])) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) + out = Image.new(mode or im1.mode, size, None) + else: + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + im2.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self): + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + if bytes is str: + # Provide __nonzero__ for pre-Py3k + __nonzero__ = __bool__ + del __bool__ + + def __abs__(self): + return self.apply("abs", self) + + def __pos__(self): + return self + + def __neg__(self): + return self.apply("neg", self) + + # binary operators + def __add__(self, other): + return self.apply("add", self, other) + + def __radd__(self, other): + return self.apply("add", other, self) + + def __sub__(self, other): + return self.apply("sub", self, other) + + def __rsub__(self, other): + return self.apply("sub", other, self) + + def __mul__(self, other): + return self.apply("mul", self, other) + + def __rmul__(self, other): + return self.apply("mul", other, self) + + def __truediv__(self, other): + return self.apply("div", self, other) + + def __rtruediv__(self, other): + return self.apply("div", other, self) + + def __mod__(self, other): + return self.apply("mod", self, other) + + def __rmod__(self, other): + return self.apply("mod", other, self) + + def __pow__(self, other): + return self.apply("pow", self, other) + + def __rpow__(self, other): + return self.apply("pow", other, self) + + if bytes is str: + # Provide __div__ and __rdiv__ for pre-Py3k + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + del __truediv__ + del __rtruediv__ + + # bitwise + def __invert__(self): + return self.apply("invert", self) + + def __and__(self, other): + return self.apply("and", self, other) + + def __rand__(self, other): + return self.apply("and", other, self) + + def __or__(self, other): + return self.apply("or", self, other) + + def __ror__(self, other): + return self.apply("or", other, self) + + def __xor__(self, other): + return self.apply("xor", self, other) + + def __rxor__(self, other): + return self.apply("xor", other, self) + + def __lshift__(self, other): + 
return self.apply("lshift", self, other) + + def __rshift__(self, other): + return self.apply("rshift", self, other) + + # logical + def __eq__(self, other): + return self.apply("eq", self, other) + + def __ne__(self, other): + return self.apply("ne", self, other) + + def __lt__(self, other): + return self.apply("lt", self, other) + + def __le__(self, other): + return self.apply("le", self, other) + + def __gt__(self, other): + return self.apply("gt", self, other) + + def __ge__(self, other): + return self.apply("ge", self, other) + + +# conversions +def imagemath_int(self): + return _Operand(self.im.convert("I")) + + +def imagemath_float(self): + return _Operand(self.im.convert("F")) + + +# logical +def imagemath_equal(self, other): + return self.apply("eq", self, other, mode="I") + + +def imagemath_notequal(self, other): + return self.apply("ne", self, other, mode="I") + + +def imagemath_min(self, other): + return self.apply("min", self, other) + + +def imagemath_max(self, other): + return self.apply("max", self, other) + + +def imagemath_convert(self, mode): + return _Operand(self.im.convert(mode)) + +ops = {} +for k, v in list(globals().items()): + if k[:10] == "imagemath_": + ops[k[10:]] = v + + +def eval(expression, _dict={}, **kw): + """ + Evaluates an image expression. + + :param expression: A string containing a Python-style expression. + :param options: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + # build execution namespace + args = ops.copy() + args.update(_dict) + args.update(kw) + for k, v in list(args.items()): + if hasattr(v, "im"): + args[k] = _Operand(v) + + out = builtins.eval(expression, args) + try: + return out.im + except AttributeError: + return out + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImageMode.py b/server/www/packages/packages-linux/x64/PIL/ImageMode.py new file mode 100644 index 0000000..583fd7e --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageMode.py @@ -0,0 +1,52 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard mode descriptors +# +# History: +# 2006-03-20 fl Added +# +# Copyright (c) 2006 by Secret Labs AB. +# Copyright (c) 2006 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
+# + +# mode descriptor cache +_modes = {} + + +class ModeDescriptor(object): + """Wrapper for mode strings.""" + + def __init__(self, mode, bands, basemode, basetype): + self.mode = mode + self.bands = bands + self.basemode = basemode + self.basetype = basetype + + def __str__(self): + return self.mode + + +def getmode(mode): + """Gets a mode descriptor for the given mode.""" + if not _modes: + # initialize mode cache + from PIL import Image + # core modes + for m, (basemode, basetype, bands) in Image._MODEINFO.items(): + _modes[m] = ModeDescriptor(m, bands, basemode, basetype) + # extra experimental modes + _modes["RGBa"] = ModeDescriptor("RGBa", ("R", "G", "B", "a"), "RGB", "L") + _modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L") + _modes["La"] = ModeDescriptor("La", ("L", "a"), "L", "L") + _modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L") + # mapping modes + _modes["I;16"] = ModeDescriptor("I;16", "I", "L", "L") + _modes["I;16L"] = ModeDescriptor("I;16L", "I", "L", "L") + _modes["I;16B"] = ModeDescriptor("I;16B", "I", "L", "L") + return _modes[mode] + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImageMorph.py b/server/www/packages/packages-linux/x64/PIL/ImageMorph.py new file mode 100644 index 0000000..902ed8d --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageMorph.py @@ -0,0 +1,251 @@ +# A binary morphology add-on for the Python Imaging Library +# +# History: +# 2014-06-04 Initial version. +# +# Copyright (c) 2014 Dov Grobgeld + +from PIL import Image +from PIL import _imagingmorph +import re + +LUT_SIZE = 1 << 9 + + +class LutBuilder(object): + """A class for building a MorphLut from a descriptive language + + The input patterns is a list of a strings sequences like these:: + + 4:(... + .1. + 111)->1 + + (whitespaces including linebreaks are ignored). The option 4 + describes a series of symmetry operations (in this case a + 4-rotation), the pattern is described by: + + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off + + The result of the operation is described after "->" string. + + The default is to return the current pixel value, which is + returned if no other match is found. + + Operations: + + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring + + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() + + """ + def __init__(self, patterns=None, op_name=None): + if patterns is not None: + self.patterns = patterns + else: + self.patterns = [] + self.lut = None + if op_name is not None: + known_patterns = { + 'corner': ['1:(... ... ...)->0', + '4:(00. 01. ...)->1'], + 'dilation4': ['4:(... .0. .1.)->1'], + 'dilation8': ['4:(... .0. .1.)->1', + '4:(... .0. ..1)->1'], + 'erosion4': ['4:(... .1. .0.)->0'], + 'erosion8': ['4:(... .1. .0.)->0', + '4:(... .1. ..0)->0'], + 'edge': ['1:(... ... ...)->0', + '4:(.0. .1. ...)->1', + '4:(01. .1. 
...)->1'] + } + if op_name not in known_patterns: + raise Exception('Unknown pattern '+op_name+'!') + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns): + self.patterns += patterns + + def build_default_lut(self): + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray([symbols[(i & m) > 0] for i in range(LUT_SIZE)]) + + def get_lut(self): + return self.lut + + def _string_permute(self, pattern, permutation): + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. + """ + assert(len(permutation) == 9) + return ''.join([pattern[p] for p in permutation]) + + def _pattern_permute(self, basic_pattern, options, basic_result): + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if '4' in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], [6, 3, 0, + 7, 4, 1, + 8, 5, 2]), res)) + # mirror + if 'M' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + patterns.append( + (self._string_permute(pattern, [2, 1, 0, + 5, 4, 3, + 8, 7, 6]), res)) + + # negate + if 'N' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + # Swap 0 and 1 + pattern = (pattern + .replace('0', 'Z') + .replace('1', '0') + .replace('Z', '1')) + res = '%d' % (1-int(res)) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self): + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search( + r'(\w*):?\s*\((.+?)\)\s*->\s*(\d)', p.replace('\n', '')) + if not m: + raise Exception('Syntax error in pattern "'+p+'"') + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(' ', '').replace('\n', '') + + patterns += self._pattern_permute(pattern, options, result) + +# # Debugging +# for p,r in patterns: +# print p,r +# print '--' + + # compile the patterns into regular expressions for speed + for i in range(len(patterns)): + p = patterns[i][0].replace('.', 'X').replace('X', '[01]') + p = re.compile(p) + patterns[i] = (p, patterns[i][1]) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. 
The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ('0'*(9-len(bitpattern)) + bitpattern)[::-1] + + for p, r in patterns: + if p.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp(object): + """A class for binary morphological operators""" + + def __init__(self, + lut=None, + op_name=None, + patterns=None): + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image): + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply( + bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image): + """Get a list of coordinates matching the morphological operation on + an image. + + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image): + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename): + """Load an operator from an mrl file""" + with open(filename, 'rb') as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != 8192: + self.lut = None + raise Exception('Wrong size operator file!') + + def save_lut(self, filename): + """Save an operator to an mrl file""" + if self.lut is None: + raise Exception('No operator loaded') + with open(filename, 'wb') as f: + f.write(self.lut) + + def set_lut(self, lut): + """Set the lut from an external source""" + self.lut = lut + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImageOps.py b/server/www/packages/packages-linux/x64/PIL/ImageOps.py new file mode 100644 index 0000000..f317645 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageOps.py @@ -0,0 +1,461 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image +from PIL._util import isStringType +import operator +import functools + + +# +# helpers + +def _border(border): + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color, mode): + if isStringType(color): + from PIL import ImageColor + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image, lut): + if image.mode == "P": + # FIXME: apply to lookup table, not image data + raise NotImplementedError("mode P support coming soon") + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + raise IOError("not supported for this image mode") + +# +# actions + + +def autocontrast(image, cutoff=0, ignore=None): + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image, removes **cutoff** percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: How many percent to cut off from the histogram. + :param ignore: The background pixel value (use None for no background). + :return: An image. + """ + histogram = image.histogram() + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer:layer+256] + if ignore is not None: + # get rid of outliers + try: + h[ignore] = 0 + except TypeError: + # assume sequence + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = n * cutoff // 100 + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the hi end + cut = n * cutoff // 100 + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize(image, black, white): + """ + Colorize grayscale image. The **black** and **white** + arguments should be RGB tuples; this function calculates a color + wedge mapping all black pixels in the source image to the first + color, and all white pixels to the second color. + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :return: An image. 
+ """ + assert image.mode == "L" + black = _color(black, "RGB") + white = _color(white, "RGB") + red = [] + green = [] + blue = [] + for i in range(256): + red.append(black[0]+i*(white[0]-black[0])//255) + green.append(black[1]+i*(white[1]-black[1])//255) + blue.append(black[2]+i*(white[2]-black[2])//255) + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def crop(image, border=0): + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. + """ + left, top, right, bottom = _border(border) + return image.crop( + (left, top, image.size[0]-right, image.size[1]-bottom) + ) + + +def deform(image, deformer, resample=Image.BILINEAR): + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + **getmesh** method can be used. + :param resample: What resampling filter to use. + :return: An image. + """ + return image.transform( + image.size, Image.MESH, deformer.getmesh(image), resample + ) + + +def equalize(image, mask=None): + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. + """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b:b+256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i+b] + return _lut(image, lut) + + +def expand(image, border=0, fill=0): + """ + Add border to the image + + :param image: The image to expand. + :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + out = Image.new(image.mode, (width, height), _color(fill, image.mode)) + out.paste(image, (left, top)) + return out + + +def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): + """ + Returns a sized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.NEAREST`. + :param bleed: Remove a border around the outside of the image (from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). + (0.0, 0.0) will crop from the top left corner (i.e. 
if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. + """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # http://www.cazabon.com + + # ensure inputs are valid + if not isinstance(centering, list): + centering = [centering[0], centering[1]] + + if centering[0] > 1.0 or centering[0] < 0.0: + centering[0] = 0.50 + if centering[1] > 1.0 or centering[1] < 0.0: + centering[1] = 0.50 + + if bleed > 0.49999 or bleed < 0.0: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleedPixels = ( + int((float(bleed) * float(image.size[0])) + 0.5), + int((float(bleed) * float(image.size[1])) + 0.5) + ) + + liveArea = (0, 0, image.size[0], image.size[1]) + if bleed > 0.0: + liveArea = ( + bleedPixels[0], bleedPixels[1], image.size[0] - bleedPixels[0] - 1, + image.size[1] - bleedPixels[1] - 1 + ) + + liveSize = (liveArea[2] - liveArea[0], liveArea[3] - liveArea[1]) + + # calculate the aspect ratio of the liveArea + liveAreaAspectRatio = float(liveSize[0])/float(liveSize[1]) + + # calculate the aspect ratio of the output image + aspectRatio = float(size[0]) / float(size[1]) + + # figure out if the sides or top/bottom will be cropped off + if liveAreaAspectRatio >= aspectRatio: + # liveArea is wider than what's needed, crop the sides + cropWidth = int((aspectRatio * float(liveSize[1])) + 0.5) + cropHeight = liveSize[1] + else: + # liveArea is taller than what's needed, crop the top and bottom + cropWidth = liveSize[0] + cropHeight = int((float(liveSize[0])/aspectRatio) + 0.5) + + # make the crop + leftSide = int(liveArea[0] + (float(liveSize[0]-cropWidth) * centering[0])) + if leftSide < 0: + leftSide = 0 + topSide = int(liveArea[1] + (float(liveSize[1]-cropHeight) * centering[1])) + if topSide < 0: + topSide = 0 + + out = image.crop( + (leftSide, topSide, leftSide + cropWidth, topSide + cropHeight) + ) + + # resize the image and return it + return out.resize(size, method) + + +def flip(image): + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.FLIP_TOP_BOTTOM) + + +def grayscale(image): + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image): + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = [] + for i in range(256): + lut.append(255-i) + return _lut(image, lut) + + +def mirror(image): + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.FLIP_LEFT_RIGHT) + + +def posterize(image, bits): + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + lut = [] + mask = ~(2**(8-bits)-1) + for i in range(256): + lut.append(i & mask) + return _lut(image, lut) + + +def solarize(image, threshold=128): + """ + Invert all pixel values above a threshold. 
+ + :param image: The image to solarize. + :param threshold: All pixels above this greyscale level are inverted. + :return: An image. + """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255-i) + return _lut(image, lut) + + +# -------------------------------------------------------------------- +# PIL USM components, from Kevin Cazabon. + +def gaussian_blur(im, radius=None): + """ PIL_usm.gblur(im, [radius])""" + + if radius is None: + radius = 5.0 + + im.load() + + return im.im.gaussian_blur(radius) + +gblur = gaussian_blur + + +def unsharp_mask(im, radius=None, percent=None, threshold=None): + """ PIL_usm.usm(im, [radius, percent, threshold])""" + + if radius is None: + radius = 5.0 + if percent is None: + percent = 150 + if threshold is None: + threshold = 3 + + im.load() + + return im.im.unsharp_mask(radius, percent, threshold) + +usm = unsharp_mask + + +def box_blur(image, radius): + """ + Blur the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param image: The image to blur. + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + :return: An image. + """ + image.load() + + return image._new(image.im.box_blur(radius)) diff --git a/server/www/packages/packages-linux/x64/PIL/ImagePalette.py b/server/www/packages/packages-linux/x64/PIL/ImagePalette.py new file mode 100644 index 0000000..3b60068 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImagePalette.py @@ -0,0 +1,219 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import array +from PIL import ImageColor +from PIL import GimpPaletteFile +from PIL import GimpGradientFile +from PIL import PaletteFile + + +class ImagePalette(object): + """ + Color palette for palette mapped images + + :param mode: The mode to use for the Palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255 and of length ``size`` + times the number of colors in ``mode``. The list must be aligned + by channel (All R values must be contiguous in the list before G + and B values.) Defaults to 0 through 255 per channel. + :param size: An optional palette size. If given, it cannot be equal to + or greater than 256. Defaults to 0. 
+ """ + + def __init__(self, mode="RGB", palette=None, size=0): + self.mode = mode + self.rawmode = None # if set, palette contains raw data + self.palette = palette or bytearray(range(256))*len(self.mode) + self.colors = {} + self.dirty = None + if ((size == 0 and len(self.mode)*256 != len(self.palette)) or + (size != 0 and size != len(self.palette))): + raise ValueError("wrong palette size") + + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.colors = self.colors.copy() + new.dirty = self.dirty + + return new + + def getdata(self): + """ + Get palette contents in format suitable # for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. + """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode + ";L", self.tobytes() + + def tobytes(self): + """Convert palette to bytes. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + if hasattr(arr, 'tobytes'): + return arr.tobytes() + return arr.tostring() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def getcolor(self, color): + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(color, tuple): + try: + return self.colors[color] + except KeyError: + # allocate new color slot + if isinstance(self.palette, bytes): + self.palette = bytearray(self.palette) + index = len(self.colors) + if index >= 256: + raise ValueError("cannot allocate more than 256 colors") + self.colors[color] = index + self.palette[index] = color[0] + self.palette[index+256] = color[1] + self.palette[index+512] = color[2] + self.dirty = 1 + return index + else: + raise ValueError("unknown color specifier: %r" % color) + + def save(self, fp): + """Save palette to text file. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write("# Mode: %s\n" % self.mode) + for i in range(256): + fp.write("%d" % i) + for j in range(i*len(self.mode), (i+1)*len(self.mode)): + try: + fp.write(" %d" % self.palette[j]) + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + +def raw(rawmode, data): + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + +def make_linear_lut(black, white): + lut = [] + if black == 0: + for i in range(256): + lut.append(white*i//255) + else: + raise NotImplementedError # FIXME + return lut + + +def make_gamma_lut(exp): + lut = [] + for i in range(256): + lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) + return lut + + +def negative(mode="RGB"): + palette = list(range(256)) + palette.reverse() + return ImagePalette(mode, palette * len(mode)) + + +def random(mode="RGB"): + from random import randint + palette = [] + for i in range(256*len(mode)): + palette.append(randint(0, 255)) + return ImagePalette(mode, palette) + + +def sepia(white="#fff0c0"): + r, g, b = ImageColor.getrgb(white) + r = make_linear_lut(0, r) + g = make_linear_lut(0, g) + b = make_linear_lut(0, b) + return ImagePalette("RGB", r + g + b) + + +def wedge(mode="RGB"): + return ImagePalette(mode, list(range(256)) * len(mode)) + + +def load(filename): + + # FIXME: supports GIMP gradients only + + fp = open(filename, "rb") + + for paletteHandler in [ + GimpPaletteFile.GimpPaletteFile, + GimpGradientFile.GimpGradientFile, + PaletteFile.PaletteFile + ]: + try: + fp.seek(0) + lut = paletteHandler(fp).getpalette() + if lut: + break + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + else: + raise IOError("cannot load palette") + + return lut # data, rawmode diff --git a/server/www/packages/packages-linux/x64/PIL/ImagePath.py b/server/www/packages/packages-linux/x64/PIL/ImagePath.py new file mode 100644 index 0000000..b308749 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImagePath.py @@ -0,0 +1,62 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +# the Python class below is overridden by the C implementation. + + +class Path(object): + + def __init__(self, xy): + pass + + def compact(self, distance=2): + """ + Compacts the path, by removing points that are close to each other. + This method modifies the path in place. + """ + pass + + def getbbox(self): + """Gets the bounding box.""" + pass + + def map(self, function): + """Maps the path through a function.""" + pass + + def tolist(self, flat=0): + """ + Converts the path to Python list. + # + @param flat By default, this function returns a list of 2-tuples + [(x, y), ...]. If this argument is true, it returns a flat list + [x, y, ...] instead. + @return A list of coordinates. 
+ """ + pass + + def transform(self, matrix): + """Transforms the path.""" + pass + + +# override with C implementation +Path = Image.core.path + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImageQt.py b/server/www/packages/packages-linux/x64/PIL/ImageQt.py new file mode 100644 index 0000000..4eb3654 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageQt.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isPath +from io import BytesIO + +qt_is_installed = True +qt_version = None +try: + from PyQt5.QtGui import QImage, qRgba, QPixmap + from PyQt5.QtCore import QBuffer, QIODevice + qt_version = '5' +except (ImportError, RuntimeError): + try: + from PyQt4.QtGui import QImage, qRgba, QPixmap + from PyQt4.QtCore import QBuffer, QIODevice + qt_version = '4' + except (ImportError, RuntimeError): + try: + from PySide.QtGui import QImage, qRgba, QPixmap + from PySide.QtCore import QBuffer, QIODevice + qt_version = 'side' + except ImportError: + qt_is_installed = False + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. + return (qRgba(r, g, b, a) & 0xffffffff) + + +# :param im A PIL Image object, or a file name +# (given either as Python string or a PyQt string object) + +def fromqimage(im): + buffer = QBuffer() + buffer.open(QIODevice.ReadWrite) + # preserve alha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, 'png') + else: + im.save(buffer, 'ppm') + + b = BytesIO() + try: + b.write(buffer.data()) + except TypeError: + # workaround for Python 2 + b.write(str(buffer.data())) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? 
+ # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return Image.open(bytes_io) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = { + '1': 1, + 'L': 8, + 'P': 8, + }[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding) + + return b''.join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? + if str is bytes: + im = unicode(im.toUtf8(), "utf-8") + else: + im = str(im.toUtf8(), "utf-8") + if isPath(im): + im = Image.open(im) + + if im.mode == "1": + format = QImage.Format_Mono + elif im.mode == "L": + format = QImage.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = QImage.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i:i+3])) + elif im.mode == "RGB": + data = im.tobytes("raw", "BGRX") + format = QImage.Format_RGB32 + elif im.mode == "RGBA": + try: + data = im.tobytes("raw", "BGRA") + except SystemError: + # workaround for earlier versions + r, g, b, a = im.split() + im = Image.merge("RGBA", (b, g, r, a)) + format = QImage.Format_ARGB32 + else: + raise ValueError("unsupported image mode %r" % im.mode) + + # must keep a reference, or Qt will crash! + __data = data or align8to32(im.tobytes(), im.size[0], im.mode) + return { + 'data': __data, 'im': im, 'format': format, 'colortable': colortable + } + +## +# An PIL image wrapper for Qt. This is a subclass of PyQt's QImage +# class. +# +# @param im A PIL Image object, or a file name (given either as Python +# string or a PyQt string object). + +if qt_is_installed: + class ImageQt(QImage): + + def __init__(self, im): + im_data = _toqclass_helper(im) + QImage.__init__(self, + im_data['data'], im_data['im'].size[0], + im_data['im'].size[1], im_data['format']) + if im_data['colortable']: + self.setColorTable(im_data['colortable']) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. For now using a dumb approach. + # im_data = _toqclass_helper(im) + # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) + # result.loadFromData(im_data['data']) + # Fix some strange bug that causes + if im.mode == 'RGB': + im = im.convert('RGBA') + + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/server/www/packages/packages-linux/x64/PIL/ImageSequence.py b/server/www/packages/packages-linux/x64/PIL/ImageSequence.py new file mode 100644 index 0000000..1fc6e5d --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageSequence.py @@ -0,0 +1,56 @@ +# +# The Python Imaging Library. +# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1997 by Fredrik Lundh. 
+# +# See the README file for information on usage and redistribution. +# + +## + + +class Iterator(object): + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. + """ + + def __init__(self, im): + if not hasattr(im, "seek"): + raise AttributeError("im must have seek method") + self.im = im + self.position = 0 + + def __getitem__(self, ix): + try: + self.im.seek(ix) + return self.im + except EOFError: + raise IndexError # end of sequence + + def __iter__(self): + return self + + def __next__(self): + try: + self.im.seek(self.position) + self.position += 1 + return self.im + except EOFError: + raise StopIteration + + def next(self): + return self.__next__() diff --git a/server/www/packages/packages-linux/x64/PIL/ImageShow.py b/server/www/packages/packages-linux/x64/PIL/ImageShow.py new file mode 100644 index 0000000..c18ff22 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageShow.py @@ -0,0 +1,178 @@ +# +# The Python Imaging Library. +# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image +import os +import sys + +if sys.version_info >= (3, 3): + from shlex import quote +else: + from pipes import quote + +_viewers = [] + + +def register(viewer, order=1): + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + elif order < 0: + _viewers.insert(0, viewer) + + +def show(image, title=None, **options): + """ + Display a given image. + + @param image An image object. + @param title Optional title. Not all viewers can display the title. + @param **options Additional viewer options. + @return True if a suitable viewer was found, false otherwise. + """ + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return 1 + return 0 + + +class Viewer(object): + """Base class for viewers.""" + + # main api + + def show(self, image, **options): + + # save temporary image to disk + if image.mode[:4] == "I;16": + # @PIL88 @PIL101 + # "I;16" isn't an 'official' mode, but we still want to + # provide a simple way to show 16-bit images. + base = "L" + # FIXME: auto-contrast if max() > 255? 
+ else: + base = Image.getmodebase(image.mode) + if base != image.mode and image.mode != "1": + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format = None + + def get_format(self, image): + """Return format name, or None to save as PGM/PPM""" + return self.format + + def get_command(self, file, **options): + raise NotImplementedError + + def save_image(self, image): + """Save to temporary file, and return filename""" + return image._dump(format=self.get_format(image)) + + def show_image(self, image, **options): + """Display given image""" + return self.show_file(self.save_image(image), **options) + + def show_file(self, file, **options): + """Display given file""" + os.system(self.get_command(file, **options)) + return 1 + +# -------------------------------------------------------------------- + +if sys.platform == "win32": + + class WindowsViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + return ('start "Pillow" /WAIT "%s" ' + '&& ping -n 2 127.0.0.1 >NUL ' + '&& del /f "%s"' % (file, file)) + + register(WindowsViewer) + +elif sys.platform == "darwin": + + class MacViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a /Applications/Preview.app" + command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), + quote(file)) + return command + + register(MacViewer) + +else: + + # unixoids + + def which(executable): + path = os.environ.get("PATH") + if not path: + return None + for dirname in path.split(os.pathsep): + filename = os.path.join(dirname, executable) + if os.path.isfile(filename) and os.access(filename, os.X_OK): + return filename + return None + + class UnixViewer(Viewer): + def show_file(self, file, **options): + command, executable = self.get_command_ex(file, **options) + command = "(%s %s; rm -f %s)&" % (command, quote(file), + quote(file)) + os.system(command) + return 1 + + # implementations + + class DisplayViewer(UnixViewer): + def get_command_ex(self, file, **options): + command = executable = "display" + return command, executable + + if which("display"): + register(DisplayViewer) + + class XVViewer(UnixViewer): + def get_command_ex(self, file, title=None, **options): + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += " -name %s" % quote(title) + return command, executable + + if which("xv"): + register(XVViewer) + +if __name__ == "__main__": + # usage: python ImageShow.py imagefile [title] + print(show(Image.open(sys.argv[1]), *sys.argv[2:])) + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImageStat.py b/server/www/packages/packages-linux/x64/PIL/ImageStat.py new file mode 100644 index 0000000..f3c138b --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageStat.py @@ -0,0 +1,147 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. 
+# Copyright (c) Fredrik Lundh 1996-97. +# +# See the README file for information on usage and redistribution. +# + +import math +import operator +import functools + + +class Stat(object): + + def __init__(self, image_or_list, mask=None): + try: + if mask: + self.h = image_or_list.histogram(mask) + else: + self.h = image_or_list.histogram() + except AttributeError: + self.h = image_or_list # assume it to be a histogram list + if not isinstance(self.h, list): + raise TypeError("first argument must be image or list") + self.bands = list(range(len(self.h) // 256)) + + def __getattr__(self, id): + "Calculate missing attribute" + if id[:4] == "_get": + raise AttributeError(id) + # calculate missing attribute + v = getattr(self, "_get" + id)() + setattr(self, id, v) + return v + + def _getextrema(self): + "Get min/max values for each band in the image" + + def minmax(histogram): + n = 255 + x = 0 + for i in range(256): + if histogram[i]: + n = min(n, i) + x = max(x, i) + return n, x # returns (255, 0) if there's no data in the histogram + + v = [] + for i in range(0, len(self.h), 256): + v.append(minmax(self.h[i:])) + return v + + def _getcount(self): + "Get total number of pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + v.append(functools.reduce(operator.add, self.h[i:i+256])) + return v + + def _getsum(self): + "Get sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + layerSum = 0.0 + for j in range(256): + layerSum += j * self.h[i + j] + v.append(layerSum) + return v + + def _getsum2(self): + "Get squared sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j ** 2) * float(self.h[i + j]) + v.append(sum2) + return v + + def _getmean(self): + "Get average pixel level for each layer" + + v = [] + for i in self.bands: + v.append(self.sum[i] / self.count[i]) + return v + + def _getmedian(self): + "Get median pixel level for each layer" + + v = [] + for i in self.bands: + s = 0 + l = self.count[i]//2 + b = i * 256 + for j in range(256): + s = s + self.h[b+j] + if s > l: + break + v.append(j) + return v + + def _getrms(self): + "Get RMS for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.sum2[i] / self.count[i])) + return v + + def _getvar(self): + "Get variance for each layer" + + v = [] + for i in self.bands: + n = self.count[i] + v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n) + return v + + def _getstddev(self): + "Get standard deviation for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.var[i])) + return v + +Global = Stat # compatibility diff --git a/server/www/packages/packages-linux/x64/PIL/ImageTk.py b/server/www/packages/packages-linux/x64/PIL/ImageTk.py new file mode 100644 index 0000000..6d47130 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageTk.py @@ -0,0 +1,290 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +try: + import tkinter +except ImportError: + import Tkinter + tkinter = Tkinter + del Tkinter + +from PIL import Image +from io import BytesIO + + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check(): + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data="PIL:%d" % im.im.id) + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +def _get_image_from_kw(kw): + source = None + if "file" in kw: + source = kw.pop("file") + elif "data" in kw: + source = BytesIO(kw.pop("data")) + if source: + return Image.open(source) + + +# -------------------------------------------------------------------- +# PhotoImage + +class PhotoImage(object): + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the **file** or **data** options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). + """ + + def __init__(self, image=None, size=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def __str__(self): + """ + Get the Tkinter photo image identifier. 
This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. + + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im, box=None): + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and lower pixel + coordinate. If None is given instead of a tuple, all of + the image is assumed. + """ + + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + tk = self.__photo.tk + + try: + tk.call("PyImagingPhoto", self.__photo, block.id) + except tkinter.TclError: + # activate Tkinter hook + try: + from PIL import _imagingtk + try: + _imagingtk.tkinit(tk.interpaddr(), 1) + except AttributeError: + _imagingtk.tkinit(id(tk), 0) + tk.call("PyImagingPhoto", self.__photo, block.id) + except (ImportError, AttributeError, tkinter.TclError): + raise # configuration problem; cannot attach to Tkinter + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage(object): + """ + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is **foreground**, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. + """ + + def __init__(self, image=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = "PIL:%d" % image.im.id + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self): + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). 
+ """ + return str(self.__photo) + + +def getimage(photo): + """Copies the contents of a PhotoImage to a PIL image memory.""" + photo.tk.call("PyImagingPhotoGet", photo) + + +def _show(image, title): + """Helper for the Image.show method.""" + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + tkinter.Label.__init__(self, master, image=self.image, + bg="black", bd=0) + + if not tkinter._default_root: + raise IOError("tkinter not initialized") + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImageTransform.py b/server/www/packages/packages-linux/x64/PIL/ImageTransform.py new file mode 100644 index 0000000..9f48833 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageTransform.py @@ -0,0 +1,100 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class Transform(Image.ImageTransformHandler): + def __init__(self, data): + self.data = data + + def getdata(self): + return self.method, self.data + + def transform(self, size, image, **options): + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +class AffineTransform(Transform): + """ + Define an affine image transform. + + This function takes a 6-tuple (a, b, c, d, e, f) which contain the first + two rows from an affine transform matrix. For each pixel (x, y) in the + output image, the new value is taken from a position (a x + b y + c, + d x + e y + f) in the input image, rounded to nearest pixel. + + This function can be used to scale, translate, rotate, and shear the + original image. + + @def AffineTransform(matrix) + @param matrix A 6-tuple (a, b, c, d, e, f) containing the first two rows + from an affine transform matrix. + @see Image#Image.transform + """ + method = Image.AFFINE + + +class ExtentTransform(Transform): + """ + Define a transform to extract a subregion from an image. + + Maps a rectangle (defined by two corners) from the image to a rectangle of + the given size. The resulting image will contain data sampled from between + the corners, such that (x0, y0) in the input image will end up at (0,0) in + the output image, and (x1, y1) at size. + + This method can be used to crop, stretch, shrink, or mirror an arbitrary + rectangle in the current image. It is slightly slower than crop, but about + as fast as a corresponding resize operation. + + @def ExtentTransform(bbox) + @param bbox A 4-tuple (x0, y0, x1, y1) which specifies two points in the + input image's coordinate system. + @see Image#Image.transform + """ + method = Image.EXTENT + + +class QuadTransform(Transform): + """ + Define a quad image transform. + + Maps a quadrilateral (a region defined by four corners) from the image to a + rectangle of the given size. + + @def QuadTransform(xy) + @param xy An 8-tuple (x0, y0, x1, y1, x2, y2, y3, y3) which contain the + upper left, lower left, lower right, and upper right corner of the + source quadrilateral. + @see Image#Image.transform + """ + method = Image.QUAD + + +class MeshTransform(Transform): + """ + Define a mesh image transform. 
A mesh transform consists of one or more + individual quad transforms. + + @def MeshTransform(data) + @param data A list of (bbox, quad) tuples. + @see Image#Image.transform + """ + method = Image.MESH + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImageWin.py b/server/www/packages/packages-linux/x64/PIL/ImageWin.py new file mode 100644 index 0000000..1e408da --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImageWin.py @@ -0,0 +1,237 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class HDC(object): + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + def __init__(self, dc): + self.dc = dc + + def __int__(self): + return self.dc + + +class HWND(object): + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + def __init__(self, wnd): + self.wnd = wnd + + def __int__(self): + return self.wnd + + +class Dib(object): + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 greylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 greylevels. + + To make sure that palettes work properly under Windows, you must call the + **palette** method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. + """ + + def __init__(self, image, size=None): + if hasattr(image, "mode") and hasattr(image, "size"): + mode = image.mode + size = image.size + else: + mode = image + image = None + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use the + :py:meth:`CDC.GetHandleAttrib` to get a suitable handle. + """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. If the source and + the destination have different sizes, the image is resized as + necessary. 
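+
+        A minimal usage sketch (a hedged example; ``dib`` and ``dc`` are
+        illustrative names for a Dib instance and a wrapped device
+        context)::
+
+            dib.draw(dc, (0, 0, 64, 64), (0, 0) + dib.size)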
+ """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste(self, im, box=None): + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. If None is given instead of a + tuple, all of the image is assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer): + """ + Load display memory contents from byte data. + + :param buffer: A buffer containing display data (usually + data returned from tobytes) + """ + return self.image.frombytes(buffer) + + def tobytes(self): + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. + """ + return self.image.tobytes() + + def fromstring(self, *args, **kw): + raise NotImplementedError("fromstring() has been removed. " + + "Please use frombytes() instead.") + + def tostring(self, *args, **kw): + raise NotImplementedError("tostring() has been removed. 
" + + "Please use tobytes() instead.") + + +class Window(object): + """Create a Window with the given title size.""" + + def __init__(self, title="PIL", width=None, height=None): + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, "ui_handle_" + action)(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self): + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self): + Image.core.eventloop() + + +class ImageWindow(Window): + """Create an image window which displays the given image.""" + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + Window.__init__(self, title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/ImtImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/ImtImagePlugin.py new file mode 100644 index 0000000..63e8924 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/ImtImagePlugin.py @@ -0,0 +1,95 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from PIL import Image, ImageFile + +__version__ = "0.2" + + +# +# -------------------------------------------------------------------- + +field = re.compile(br"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. + +class ImtImageFile(ImageFile.ImageFile): + + format = "IMT" + format_description = "IM Tools" + + def _open(self): + + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + xsize = ysize = 0 + + while True: + + s = self.fp.read(1) + if not s: + break + + if s == b'\x0C': + + # image data begins + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), + (self.mode, 0, 1))] + + break + + else: + + # read key/value pair + # FIXME: dangerous, may read whole file + s = s + self.fp.readline() + if len(s) == 1 or len(s) > 100: + break + if s[0] == b"*": + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == "width": + xsize = int(v) + self.size = xsize, ysize + elif k == "height": + ysize = int(v) + self.size = xsize, ysize + elif k == "pixel" and v == "n8": + self.mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/server/www/packages/packages-linux/x64/PIL/IptcImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/IptcImagePlugin.py new file mode 100644 index 0000000..56d1de4 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/IptcImagePlugin.py @@ -0,0 +1,267 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image, ImageFile, _binary +import os +import tempfile + +__version__ = "0.3" + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be +o8 = _binary.o8 + +COMPRESSION = { + 1: "raw", + 5: "jpeg" +} + +PAD = o8(0) * 4 + + +# +# Helpers + +def i(c): + return i32((PAD + c)[-4:]) + + +def dump(c): + for i in c: + print("%02x" % i8(i), end=' ') + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. + +class IptcImageFile(ImageFile.ImageFile): + + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key): + return i(self.info[key]) + + def field(self): + # + # get a IPTC field header + s = self.fp.read(5) + if not len(s): + return None, 0 + + tag = i8(s[1]), i8(s[2]) + + # syntax + if i8(s[0]) != 0x1C or tag[0] < 1 or tag[0] > 9: + raise SyntaxError("invalid IPTC/NAA file") + + # field size + size = i8(s[3]) + if size > 132: + raise IOError("illegal field length in IPTC/NAA file") + elif size == 128: + size = 0 + elif size > 128: + size = i(self.fp.read(size-128)) + else: + size = i16(s[3:]) + + return tag, size + + def _open(self): + + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in list(self.info.keys()): + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # print tag, self.info[tag] + + # mode + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0])-1 + else: + id = 0 + if layers == 1 and not component: + self.mode = "L" + elif layers == 3 and component: + self.mode = "RGB"[id] + elif layers == 4 and component: + self.mode = "CMYK"[id] + + # size + self.size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] + except KeyError: + raise IOError("Unknown IPTC image compression") + + # tile + if tag == (8, 10): + self.tile = [("iptc", (compression, offset), + (0, 0, self.size[0], self.size[1]))] + + def load(self): + + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + type, tile, box = self.tile[0] + + encoding, offset = tile + + self.fp.seek(offset) + + # Copy image data to temporary file + o_fd, outfile = tempfile.mkstemp(text=False) + o = os.fdopen(o_fd) + if encoding == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write("P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + o.close() + + try: + try: + # fast + self.im = Image.core.open_ppm(outfile) + except: + # slightly slower + im = Image.open(outfile) + im.load() + self.im = im.im + finally: + try: + os.unlink(outfile) + except OSError: + pass + + +Image.register_open(IptcImageFile.format, IptcImageFile) + 
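+# A hedged usage sketch (the filename is illustrative): IPTC data embedded
+# in JPEG or TIFF files is normally reached through the getiptcinfo()
+# helper defined below, rather than by opening a file with this plugin:
+#
+#     from PIL import Image, IptcImagePlugin
+#     iptc = IptcImagePlugin.getiptcinfo(Image.open("photo.jpg"))
+#     if iptc:
+#         print(iptc.get((2, 120)))   # (2, 120) is the caption dataset
+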
+Image.register_extension(IptcImageFile.format, ".iim") + + +## +# Get IPTC information from TIFF, JPEG, or IPTC file. +# +# @param im An image containing IPTC data. +# @return A dictionary containing IPTC information, or None if +# no IPTC information block was found. + +def getiptcinfo(im): + + from PIL import TiffImagePlugin, JpegImagePlugin + import io + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + try: + app = im.app["APP13"] + if app[:14] == b"Photoshop 3.0\x00": + app = app[14:] + # parse the image resource block + offset = 0 + while app[offset:offset+4] == b"8BIM": + offset += 4 + # resource code + code = i16(app, offset) + offset += 2 + # resource name (usually empty) + name_len = i8(app[offset]) + # name = app[offset+1:offset+1+name_len] + offset = 1 + offset + name_len + if offset & 1: + offset += 1 + # resource data block + size = i32(app, offset) + offset += 4 + if code == 0x0404: + # 0x0404 contains IPTC/NAA data + data = app[offset:offset+size] + break + offset = offset + size + if offset & 1: + offset += 1 + except (AttributeError, KeyError): + pass + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) + try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage(object): + pass + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = io.BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/server/www/packages/packages-linux/x64/PIL/Jpeg2KImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000..02b1e53 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,280 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. 
+# +from PIL import Image, ImageFile +import struct +import os +import io + +__version__ = "0.1" + + +def _parse_codestream(fp): + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = struct.unpack('>H', hdr)[0] + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, xtsiz, ytsiz, \ + xtosiz, ytosiz, csiz \ + = struct.unpack('>HHIIIIIIIIH', siz[:38]) + ssiz = [None]*csiz + xrsiz = [None]*csiz + yrsiz = [None]*csiz + for i in range(csiz): + ssiz[i], xrsiz[i], yrsiz[i] \ + = struct.unpack('>BBB', siz[36 + 3 * i:39 + 3 * i]) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + if (yrsiz[0] & 0x7f) > 8: + mode = 'I;16' + else: + mode = 'L' + elif csiz == 2: + mode = 'LA' + elif csiz == 3: + mode = 'RGB' + elif csiz == 4: + mode = 'RGBA' + else: + mode = None + + return (size, mode) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count and + color space information, returning a PIL (size, mode) tuple.""" + + # Find the JP2 header box + header = None + while True: + lbox, tbox = struct.unpack('>I4s', fp.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', fp.read(8))[0] + hlen = 16 + else: + hlen = 8 + + if lbox < hlen: + raise SyntaxError('Invalid JP2 header length') + + if tbox == b'jp2h': + header = fp.read(lbox - hlen) + break + else: + fp.seek(lbox - hlen, os.SEEK_CUR) + + if header is None: + raise SyntaxError('could not find JP2 header') + + size = None + mode = None + bpc = None + nc = None + + hio = io.BytesIO(header) + while True: + lbox, tbox = struct.unpack('>I4s', hio.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', hio.read(8))[0] + hlen = 16 + else: + hlen = 8 + + content = hio.read(lbox - hlen) + + if tbox == b'ihdr': + height, width, nc, bpc, c, unkc, ipr \ + = struct.unpack('>IIHBBBB', content) + size = (width, height) + if unkc: + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif tbox == b'colr': + meth, prec, approx = struct.unpack('>BBB', content[:3]) + if meth == 1: + cs = struct.unpack('>I', content[3:7])[0] + if cs == 16: # sRGB + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif cs == 17: # grayscale + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + break + elif cs == 18: # sYCC + if nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + + if size is None or mode is None: + raise SyntaxError("Malformed jp2 header") + + return (size, mode) + +## +# Image plugin for JPEG2000 images. 
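+#
+# A short usage sketch (the filename is illustrative): _accept() below
+# recognises both the raw codestream signature (ff 4f ff 51) and the JP2
+# signature box, so .j2k and .jp2 files both land in this plugin:
+#
+#     from PIL import Image
+#     im = Image.open("example.jp2")
+#     im.reduce = 2          # decode at 1/4 linear resolution
+#     im.load()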
+ + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self): + sig = self.fp.read(4) + if sig == b'\xff\x4f\xff\x51': + self.codec = "j2k" + self.size, self.mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + self.codec = "jp2" + self.size, self.mode = _parse_jp2_header(self.fp) + else: + raise SyntaxError('not a JPEG 2000 file') + + if self.size is None or self.mode is None: + raise SyntaxError('unable to determine size/mode') + + self.reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, 2) + length = self.fp.tell() + self.fp.seek(pos, 0) + except: + length = -1 + + self.tile = [('jpeg2k', (0, 0) + self.size, 0, + (self.codec, self.reduce, self.layers, fd, length, self.fp))] + + def load(self): + if self.reduce: + power = 1 << self.reduce + adjust = power >> 1 + self.size = (int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power)) + + if self.tile: + # Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self.reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + ImageFile.ImageFile.load(self) + + +def _accept(prefix): + return (prefix[:4] == b'\xff\x4f\xff\x51' or + prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') + + +# ------------------------------------------------------------ +# Save support + +def _save(im, fp, filename): + if filename.endswith('.j2k'): + kind = 'j2k' + else: + kind = 'jp2' + + # Get the keyword arguments + info = im.encoderinfo + + offset = info.get('offset', None) + tile_offset = info.get('tile_offset', None) + tile_size = info.get('tile_size', None) + quality_mode = info.get('quality_mode', 'rates') + quality_layers = info.get('quality_layers', None) + num_resolutions = info.get('num_resolutions', 0) + cblk_size = info.get('codeblock_size', None) + precinct_size = info.get('precinct_size', None) + irreversible = info.get('irreversible', False) + progression = info.get('progression', 'LRCP') + cinema_mode = info.get('cinema_mode', 'no') + fd = -1 + + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + fd + ) + + ImageFile._save(im, fp, [('jpeg2k', (0, 0)+im.size, 0, kind)]) + +# ------------------------------------------------------------ +# Registry stuff + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) + +Image.register_extension(Jpeg2KImageFile.format, '.jp2') +Image.register_extension(Jpeg2KImageFile.format, '.j2k') +Image.register_extension(Jpeg2KImageFile.format, '.jpc') +Image.register_extension(Jpeg2KImageFile.format, '.jpf') +Image.register_extension(Jpeg2KImageFile.format, '.jpx') +Image.register_extension(Jpeg2KImageFile.format, '.j2c') + +Image.register_mime(Jpeg2KImageFile.format, 'image/jp2') +Image.register_mime(Jpeg2KImageFile.format, 'image/jpx') diff --git a/server/www/packages/packages-linux/x64/PIL/JpegImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/JpegImagePlugin.py new file mode 100644 index 0000000..9d4eaab --- /dev/null +++ 
b/server/www/packages/packages-linux/x64/PIL/JpegImagePlugin.py @@ -0,0 +1,755 @@ +# +# The Python Imaging Library. +# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import array +import struct +import io +import warnings +from struct import unpack_from +from PIL import Image, ImageFile, TiffImagePlugin, _binary +from PIL.JpegPresets import presets +from PIL._util import isStringType + +i8 = _binary.i8 +o8 = _binary.o8 +i16 = _binary.i16be +i32 = _binary.i32be + +__version__ = "0.6" + + +# +# Parser + +def Skip(self, marker): + n = i16(self.fp.read(2))-2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = i8(s[7]) + jfif_density = i16(s, 8), i16(s, 10) + except: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:5] == b"Exif\0": + # extract Exif information (incomplete) + self.info["exif"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. 
The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. + self.icclist.append(s) + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = i8(s[1]) + except: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + +def COM(self, marker): + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self, marker): + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + self.size = i16(s[3:]), i16(s[1:]) + + self.bits = i8(s[0]) + if self.bits != 8: + raise SyntaxError("cannot handle %d-bit layers" % self.bits) + + self.layers = i8(s[5]) + if self.layers == 1: + self.mode = "L" + elif self.layers == 3: + self.mode = "RGB" + elif self.layers == 4: + self.mode = "CMYK" + else: + raise SyntaxError("cannot handle %d-layer images" % self.layers) + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if i8(self.icclist[0][13]) == len(self.icclist): + profile = [] + for p in self.icclist: + profile.append(p[14:]) + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = None + + for i in range(6, len(s), 3): + t = s[i:i+3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], i8(t[1])//16, i8(t[1]) & 15, i8(t[2]))) + + +def DQT(self, marker): + # + # Define quantization table. Support baseline 8-bit tables + # only. Note that there might be more than one table in + # each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + if len(s) < 65: + raise SyntaxError("bad quantization table marker") + v = i8(s[0]) + if v//16 == 0: + self.quantization[v & 15] = array.array("B", s[1:65]) + s = s[65:] + else: + return # FIXME: add code to read 16-bit tables! 
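+            # One possible shape for the missing 16-bit path (a hedged,
+            # untested sketch rather than the library's confirmed fix): a
+            # 16-bit table is 64 big-endian words, so the entry would span
+            # 1 + 128 bytes:
+            #
+            #     self.quantization[v & 15] = array.array(
+            #         "H", struct.unpack(">64H", s[1:129]))
+            #     s = s[129:]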
+ # raise SyntaxError, "bad quantization table element size" + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), + 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM) +} + + +def _accept(prefix): + return prefix[0:1] == b"\377" + + +## +# Image plugin for JPEG and JFIF images. 
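+#
+# Parsing dispatches through the MARKER table above; for example,
+# MARKER[0xFFD8] is ("SOI", "Start of image", None). Entries with a None
+# handler delimit the stream and carry no payload, while APP, SOF and DQT
+# segments are handed to their parser functions.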
+ +class JpegImageFile(ImageFile.ImageFile): + + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + + s = self.fp.read(1) + + if i8(s) != 255: + raise SyntaxError("not a JPEG file") + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + + i = i8(s) + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + # print hex(i), name, description + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, + (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i == 0 or i == 0xFFFF: + # padded marker or junk; move on + s = b"\xff" + elif i == 0xFF00: # Skip extraneous data (escaped 0xFF) + s = self.fp.read(1) + else: + raise SyntaxError("no marker found") + + def draft(self, mode, size): + + if len(self.tile) != 1: + return + + d, e, o, a = self.tile[0] + scale = 0 + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self.mode = mode + a = mode, "" + + if size: + scale = max(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1] + self.size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + return self + + def load_djpeg(self): + + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + import subprocess + import tempfile + import os + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + raise ValueError("Invalid Filename") + + try: + self.im = Image.core.open_ppm(path) + finally: + try: + os.unlink(path) + except OSError: + pass + + self.mode = self.im.mode + self.size = self.im.size + + self.tile = [] + + def _getexif(self): + return _getexif(self) + + def _getmp(self): + return _getmp(self) + + +def _fixup_dict(src_dict): + # Helper function for _getexif() + # returns a dict with any single item tuples/lists as individual values + def _fixup(value): + try: + if len(value) == 1 and not isinstance(value, dict): + return value[0] + except: pass + return value + + return dict([(k, _fixup(v)) for k, v in src_dict.items()]) + + +def _getexif(self): + # Extract EXIF information. This method is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). + try: + data = self.info["exif"] + except KeyError: + return None + file = io.BytesIO(data[6:]) + head = file.read(8) + # process dictionary + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif = dict(_fixup_dict(info)) + # get exif extension + try: + # exif field 0x8769 is an offset pointer to the location + # of the nested embedded exif ifd. + # It should be a long, but may be corrupted. 
+ file.seek(exif[0x8769]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif.update(_fixup_dict(info)) + # get gpsinfo extension + try: + # exif field 0x8825 is an offset pointer to the location + # of the nested embedded gps exif ifd. + # It should be a long, but may be corrupted. + file.seek(exif[0x8825]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif[0x8825] = _fixup_dict(info) + + return exif + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plug-in. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. + try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<' + # process dictionary + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + info.load(file_contents) + mp = dict(info) + except: + raise SyntaxError("malformed MP Index (unreadable directory)") + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError: + raise SyntaxError("malformed MP Index (no number of images)") + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = unpack_from( + '{0}LLLHH'.format(endianness), rawmpentries, entrynum * 16) + labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1', + 'EntryNo2') + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + 'DependentParentImageFlag': bool(mpentry['Attribute'] & + (1 << 31)), + 'DependentChildImageFlag': bool(mpentry['Attribute'] & + (1 << 30)), + 'RepresentativeImageFlag': bool(mpentry['Attribute'] & + (1 << 29)), + 'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27, + 'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24, + 'MPType': mpentry['Attribute'] & 0x00FFFFFF + } + if mpentryattr['ImageDataFormat'] == 0: + mpentryattr['ImageDataFormat'] = 'JPEG' + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: 'Undefined', + 0x010001: 'Large Thumbnail (VGA Equivalent)', + 0x010002: 'Large Thumbnail (Full HD Equivalent)', + 0x020001: 'Multi-Frame Image (Panorama)', + 0x020002: 'Multi-Frame Image: (Disparity)', + 0x020003: 'Multi-Frame Image: (Multi-Angle)', + 0x030000: 'Baseline MP Primary Image' + } + mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'], + 'Unknown') + mpentry['Attribute'] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError: + raise SyntaxError("malformed MP Index (bad MP Entry)") + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. 
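+    # For example, a two-frame MPO yields mp[0xB001] == 2 and mp[0xB002] a
+    # list of two entry dicts whose 'DataOffset' values locate each frame's
+    # JPEG data (MpoImagePlugin builds its frame offsets from exactly this).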
+ return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBA": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +zigzag_index = (0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63) + +samplings = {(1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, + } + + +def convert_dict_qtables(qtables): + qtables = [qtables[key] for key in range(len(qtables)) if key in qtables] + for idx, table in enumerate(qtables): + qtables[idx] = [table[i] for i in zigzag_index] + return qtables + + +def get_sampling(im): + # There's no subsampling when image have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. + if not hasattr(im, 'layers') or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im, fp, filename): + + try: + rawmode = RAWMODE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as JPEG" % im.mode) + + info = im.encoderinfo + + dpi = info.get("dpi", (0, 0)) + + quality = info.get("quality", 0) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = 0 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = 0 + subsampling = preset.get('subsampling', -1) + qtables = preset.get('quantization') + elif not isinstance(quality, int): + raise ValueError("Invalid quality setting") + else: + if subsampling in presets: + subsampling = presets[subsampling].get('subsampling', -1) + if isStringType(qtables) and qtables in presets: + qtables = presets[qtables].get('quantization') + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:1:1": + subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isStringType(qtables): + try: + lines = [int(num) for line in qtables.splitlines() + for num in line.split('#', 1)[0].split()] + except ValueError: + raise ValueError("Invalid quantization table") + else: + qtables = [lines[s:s+64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = convert_dict_qtables(qtables) + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + raise ValueError("None or too many quantization tables") + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + raise + table = array.array('B', table) + except TypeError: + raise ValueError("Invalid quantization table") + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a 
JPEG") + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = b"" + + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_BYTES_IN_MARKER = 65533 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) + extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + + o8(len(markers)) + marker) + i += 1 + + # get keyword arguments + im.encoderconfig = ( + quality, + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + "progressive" in info or "progression" in info, + info.get("smooth", 0), + "optimize" in info, + info.get("streamtype", 0), + dpi[0], dpi[1], + subsampling, + qtables, + extra, + info.get("exif", b"") + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pizel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if "optimize" in info or "progressive" in info or "progression" in info: + # keep sets quality to 0, but the actual value may be high. + if quality >= 95 or quality == 0: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + + # The exif info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(info.get("exif", b"")) + 5) + + ImageFile._save(im, fp, [("jpeg", (0, 0)+im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im, fp, filename): + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. + import os + import subprocess + tempfile = im._dump() + subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) + try: + os.unlink(tempfile) + except OSError: + pass + + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + im = MpoImageFile(fp, filename) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn("Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file") + return im + + +# -------------------------------------------------------------------q- +# Registry stuff + +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) + +Image.register_extension(JpegImageFile.format, ".jfif") +Image.register_extension(JpegImageFile.format, ".jpe") +Image.register_extension(JpegImageFile.format, ".jpg") +Image.register_extension(JpegImageFile.format, ".jpeg") + +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/server/www/packages/packages-linux/x64/PIL/JpegPresets.py b/server/www/packages/packages-linux/x64/PIL/JpegPresets.py new file mode 100644 index 0000000..ece33bb --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/JpegPresets.py @@ -0,0 +1,241 @@ +""" +JPEG quality settings equivalent to the Photoshop settings. 
+
+More presets can be added to the presets dict if needed.
+
+These presets can be used when saving JPEG files.
+
+To apply a preset, specify::
+
+    quality="preset_name"
+
+To apply only the quantization table::
+
+    qtables="preset_name"
+
+To apply only the subsampling setting::
+
+    subsampling="preset_name"
+
+Example::
+
+    im.save("image_name.jpg", quality="web_high")
+
+
+Subsampling
+-----------
+
+Subsampling is the practice of encoding images with less resolution for
+chroma information than for luma information.
+(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling)
+
+Possible subsampling values are 0, 1 and 2, corresponding to 4:4:4, 4:2:2
+and 4:1:1 (which libjpeg encodes as 2x2 chroma subsampling, i.e. 4:2:0).
+
+You can get the subsampling of a JPEG with the
+`JpegImagePlugin.get_sampling(im)` function.
+
+
+Quantization tables
+-------------------
+
+These are the values used by the DCT (Discrete Cosine Transform) to remove
+*unnecessary* information from the image (the lossy part of the compression).
+(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices,
+https://en.wikipedia.org/wiki/JPEG#Quantization)
+
+You can get the quantization tables of a JPEG with::
+
+    im.quantization
+
+This returns a dict with a number of arrays. You can pass this dict
+directly as the qtables argument when saving a JPEG.
+
+The table format used by im.quantization and the format used in presets
+differ in three ways:
+
+1. The base container of a preset is a list with sublists instead of a dict.
+   dict[0] -> list[0], dict[1] -> list[1], ...
+2. Each table in a preset is a list instead of an array.
+3. The zigzag order is removed in the preset (needed by libjpeg >= 6a).
+
+You can convert the dict format to the preset format with the
+`JpegImagePlugin.convert_dict_qtables(dict_qtables)` function.
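+
+For example (a hedged sketch; ``im`` is assumed to be a JPEG image opened
+with ``Image.open``)::
+
+    from PIL.JpegImagePlugin import convert_dict_qtables
+    preset_style_qtables = convert_dict_qtables(im.quantization)
+    im.save("out.jpg", qtables=preset_style_qtables)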
+ +Libjpeg ref.: http://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +presets = { + 'web_low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 
12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} diff --git a/server/www/packages/packages-linux/x64/PIL/McIdasImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000..b753603 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/McIdasImagePlugin.py @@ -0,0 +1,74 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. +# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +import struct +from PIL import Image, ImageFile + +__version__ = "0.2" + + +def _accept(s): + return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. 
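+#
+# The 256-byte area directory is unpacked below into 64 big-endian ints
+# (stored 1-based in self.area_descriptor), so that w[9] and w[10] give the
+# image height and width and w[11] the number of bytes per element.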
+ +class McIdasImageFile(ImageFile.ImageFile): + + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self): + + # parse area file directory + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + raise SyntaxError("not an McIdas area file") + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + raise SyntaxError("unsupported McIdas format") + + self.mode = mode + self.size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10]*w[11]*w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/server/www/packages/packages-linux/x64/PIL/MicImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MicImagePlugin.py new file mode 100644 index 0000000..3c91244 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/MicImagePlugin.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, TiffImagePlugin +from PIL.OleFileIO import MAGIC, OleFileIO + +__version__ = "0.1" + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. + +class MicImageFile(TiffImagePlugin.TiffImageFile): + + format = "MIC" + format_description = "Microsoft Image Composer" + + def _open(self): + + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an MIC file; invalid OLE file") + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [] + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) + + # if we didn't find any images, this is probably not + # an MIC file. 
+ if not self.images: + raise SyntaxError("not an MIC file; no image entries") + + self.__fp = self.fp + self.frame = 0 + + if len(self.images) > 1: + self.category = Image.CONTAINER + + self.seek(0) + + @property + def n_frames(self): + return len(self.images) + + @property + def is_animated(self): + return len(self.images) > 1 + + def seek(self, frame): + + try: + filename = self.images[frame] + except IndexError: + raise EOFError("no such frame") + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self): + + return self.frame + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/server/www/packages/packages-linux/x64/PIL/MpegImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MpegImagePlugin.py new file mode 100644 index 0000000..6671b86 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/MpegImagePlugin.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile +from PIL._binary import i8 + +__version__ = "0.1" + + +# +# Bitstream parser + +class BitStream(object): + + def __init__(self, fp): + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self): + return i8(self.fp.read(1)) + + def peek(self, bits): + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits): + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits): + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. + +class MpegImageFile(ImageFile.ImageFile): + + format = "MPEG" + format_description = "MPEG" + + def _open(self): + + s = BitStream(self.fp) + + if s.read(32) != 0x1B3: + raise SyntaxError("not an MPEG file") + + self.mode = "RGB" + self.size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile) + +Image.register_extension(MpegImageFile.format, ".mpg") +Image.register_extension(MpegImageFile.format, ".mpeg") + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/server/www/packages/packages-linux/x64/PIL/MpoImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..1d26021 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/MpoImagePlugin.py @@ -0,0 +1,99 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. 
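# A compact sketch of the bit arithmetic used by BitStream in MpegImagePlugin
# above (an illustration with synthetic header bytes, not part of any plugin):
# a 32-bit start code followed by two 12-bit dimension fields, the same reads
# MpegImageFile._open performs. Python 3 is assumed (bytes indexing -> int).
import io
import struct

class _Bits(object):
    # same accumulate-then-shift scheme as BitStream
    def __init__(self, fp):
        self.fp, self.bits, self.bitbuffer = fp, 0, 0

    def read(self, n):
        while self.bits < n:
            self.bitbuffer = (self.bitbuffer << 8) + self.fp.read(1)[0]
            self.bits += 8
        self.bits -= n
        return (self.bitbuffer >> self.bits) & ((1 << n) - 1)

# hypothetical 1280x720 sequence header: 0x000001B3, then 0x500 and 0x2D0
s = _Bits(io.BytesIO(struct.pack(">I", 0x1B3) + b"\x50\x02\xd0"))
assert s.read(32) == 0x1B3
assert (s.read(12), s.read(12)) == (1280, 720)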
While it can theoretically be used much like
+# a GIF animation, it is commonly used to represent 3D photographs and is (as
+# of this writing) the most commonly used format by 3D cameras.
+#
+# History:
+# 2014-03-13 Feneric Created
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image, JpegImagePlugin
+
+__version__ = "0.1"
+
+
+def _accept(prefix):
+    return JpegImagePlugin._accept(prefix)
+
+
+def _save(im, fp, filename):
+    # Note that we can only save the current frame at present
+    return JpegImagePlugin._save(im, fp, filename)
+
+
+##
+# Image plugin for MPO images.
+
+class MpoImageFile(JpegImagePlugin.JpegImageFile):
+
+    format = "MPO"
+    format_description = "MPO (CIPA DC-007)"
+
+    def _open(self):
+        self.fp.seek(0)  # prep the fp in order to pass the JPEG test
+        JpegImagePlugin.JpegImageFile._open(self)
+        self.mpinfo = self._getmp()
+        self.__framecount = self.mpinfo[0xB001]
+        self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset']
+                            for mpent in self.mpinfo[0xB002]]
+        self.__mpoffsets[0] = 0
+        # Note that the following assertion will only be invalid if something
+        # gets broken within JpegImagePlugin.
+        assert self.__framecount == len(self.__mpoffsets)
+        del self.info['mpoffset']  # no longer needed
+        self.__fp = self.fp  # FIXME: hack
+        self.__fp.seek(self.__mpoffsets[0])  # get ready to read first frame
+        self.__frame = 0
+        self.offset = 0
+        # for now we can only handle reading and individual frame extraction
+        self.readonly = 1
+
+    def load_seek(self, pos):
+        self.__fp.seek(pos)
+
+    @property
+    def n_frames(self):
+        return self.__framecount
+
+    @property
+    def is_animated(self):
+        return self.__framecount > 1
+
+    def seek(self, frame):
+        if frame < 0 or frame >= self.__framecount:
+            raise EOFError("no more images in MPO file")
+        else:
+            self.fp = self.__fp
+            self.offset = self.__mpoffsets[frame]
+            self.tile = [
+                ("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))
+            ]
+            self.__frame = frame
+
+    def tell(self):
+        return self.__frame
+
+
+# --------------------------------------------------------------------
+# Registry stuff
+
+# Note that since MPO shares a factory with JPEG, we do not need to do a
+# separate registration for it here.
+# Image.register_open(MpoImageFile.format,
+#                     JpegImagePlugin.jpeg_factory, _accept)
+Image.register_save(MpoImageFile.format, _save)
+
+Image.register_extension(MpoImageFile.format, ".mpo")
+
+Image.register_mime(MpoImageFile.format, "image/mpo")
diff --git a/server/www/packages/packages-linux/x64/PIL/MspImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/MspImagePlugin.py
new file mode 100644
index 0000000..85f8e76
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/MspImagePlugin.py
@@ -0,0 +1,104 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# MSP file handling
+#
+# This is the format used by the Paint program in Windows 1 and 2.
+#
+# History:
+#       95-09-05 fl     Created
+#       97-01-03 fl     Read/write MSP images
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995-97.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+from PIL import Image, ImageFile, _binary
+
+__version__ = "0.1"
+
+
+#
+# read MSP files
+
+i16 = _binary.i16le
+
+
+def _accept(prefix):
+    return prefix[:4] in [b"DanM", b"LinS"]
+
+
+##
+# Image plugin for Windows MSP images. This plugin supports both
+# uncompressed (Windows 1.0) and compressed (Windows 2.0) images.
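# A small sketch of the header checksum rule enforced in MspImageFile._open
# below (an illustration, not part of the plugin): XOR-ing the sixteen
# little-endian 16-bit words of a valid 32-byte MSP header must yield zero.
import struct

def msp_header_ok(header):
    # header: the first 32 bytes of an MSP file
    checksum = 0
    for word in struct.unpack("<16H", header[:32]):
        checksum ^= word
    return checksum == 0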
+ +class MspImageFile(ImageFile.ImageFile): + + format = "MSP" + format_description = "Windows Paint" + + def _open(self): + + # Header + s = self.fp.read(32) + if s[:4] not in [b"DanM", b"LinS"]: + raise SyntaxError("not an MSP file") + + # Header checksum + checksum = 0 + for i in range(0, 32, 2): + checksum = checksum ^ i16(s[i:i+2]) + if checksum != 0: + raise SyntaxError("bad MSP checksum") + + self.mode = "1" + self.size = i16(s[4:]), i16(s[6:]) + + if s[:4] == b"DanM": + self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))] + else: + self.tile = [("msp", (0, 0)+self.size, 32+2*self.size[1], None)] + +# +# write MSP files (uncompressed only) + +o16 = _binary.o16le + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as MSP" % im.mode) + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in header: + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))]) + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/server/www/packages/packages-linux/x64/PIL/OleFileIO-README.md b/server/www/packages/packages-linux/x64/PIL/OleFileIO-README.md new file mode 100644 index 0000000..eb6c9bc --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/OleFileIO-README.md @@ -0,0 +1,180 @@ +olefile (formerly OleFileIO_PL) +=============================== + +[olefile](http://www.decalage.info/olefile) is a Python package to parse, read and write +[Microsoft OLE2 files](http://en.wikipedia.org/wiki/Compound_File_Binary_Format) +(also called Structured Storage, Compound File Binary Format or Compound Document File Format), +such as Microsoft Office 97-2003 documents, vbaProject.bin in MS Office 2007+ files, Image Composer +and FlashPix files, Outlook messages, StickyNotes, several Microscopy file formats, McAfee antivirus quarantine files, +etc. + + +**Quick links:** [Home page](http://www.decalage.info/olefile) - +[Download/Install](https://bitbucket.org/decalage/olefileio_pl/wiki/Install) - +[Documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) - +[Report Issues/Suggestions/Questions](https://bitbucket.org/decalage/olefileio_pl/issues?status=new&status=open) - +[Contact the author](http://decalage.info/contact) - +[Repository](https://bitbucket.org/decalage/olefileio_pl) - +[Updates on Twitter](https://twitter.com/decalage2) + + +News +---- + +Follow all updates and news on Twitter: + +- **2015-01-25 v0.42**: improved handling of special characters in stream/storage names on Python 2.x (using UTF-8 + instead of Latin-1), fixed bug in listdir with empty storages. +- 2014-11-25 v0.41: OleFileIO.open and isOleFile now support OLE files stored in byte strings, fixed installer for + python 3, added support for Jython (Niko Ehrenfeuchter) +- 2014-10-01 v0.40: renamed OleFileIO_PL to olefile, added initial write support for streams >4K, updated doc and + license, improved the setup script. +- 2014-07-27 v0.31: fixed support for large files with 4K sectors, thanks to Niko Ehrenfeuchter, Martijn Berger and + Dave Jones. 
Added test scripts from Pillow (by hugovk). Fixed setup for Python 3 (Martin Panter) +- 2014-02-04 v0.30: now compatible with Python 3.x, thanks to Martin Panter who did most of the hard work. +- 2013-07-24 v0.26: added methods to parse stream/storage timestamps, improved listdir to include storages, fixed + parsing of direntry timestamps +- 2013-05-27 v0.25: improved metadata extraction, properties parsing and exception handling, fixed + [issue #12](https://bitbucket.org/decalage/olefileio_pl/issue/12/error-when-converting-timestamps-in-ole) +- 2013-05-07 v0.24: new features to extract metadata (get\_metadata method and OleMetadata class), improved + getproperties to convert timestamps to Python datetime +- 2012-10-09: published [python-oletools](http://www.decalage.info/python/oletools), a package of analysis tools based + on OleFileIO_PL +- 2012-09-11 v0.23: added support for file-like objects, fixed [issue #8](https://bitbucket.org/decalage/olefileio_pl/issue/8/bug-with-file-object) +- 2012-02-17 v0.22: fixed issues #7 (bug in getproperties) and #2 (added close method) +- 2011-10-20: code hosted on bitbucket to ease contributions and bug tracking +- 2010-01-24 v0.21: fixed support for big-endian CPUs, such as PowerPC Macs. +- 2009-12-11 v0.20: small bugfix in OleFileIO.open when filename is not plain str. +- 2009-12-10 v0.19: fixed support for 64 bits platforms (thanks to Ben G. and Martijn for reporting the bug) +- see changelog in source code for more info. + +Download/Install +---------------- + +If you have pip or setuptools installed (pip is included in Python 2.7.9+), you may simply run **pip install olefile** +or **easy_install olefile** for the first installation. + +To update olefile, run **pip install -U olefile**. + +Otherwise, see https://bitbucket.org/decalage/olefileio_pl/wiki/Install + +Features +-------- + +- Parse, read and write any OLE file such as Microsoft Office 97-2003 legacy document formats (Word .doc, Excel .xls, + PowerPoint .ppt, Visio .vsd, Project .mpp), Image Composer and FlashPix files, Outlook messages, StickyNotes, + Zeiss AxioVision ZVI files, Olympus FluoView OIB files, etc +- List all the streams and storages contained in an OLE file +- Open streams as files +- Parse and read property streams, containing metadata of the file +- Portable, pure Python module, no dependency + +olefile can be used as an independent package or with PIL/Pillow. + +olefile is mostly meant for developers. If you are looking for tools to analyze OLE files or to extract data (especially +for security purposes such as malware analysis and forensics), then please also check my +[python-oletools](http://www.decalage.info/python/oletools), which are built upon olefile and provide a higher-level interface. + + +History +------- + +olefile is based on the OleFileIO module from [PIL](http://www.pythonware.com/products/pil/index.htm), the excellent +Python Imaging Library, created and maintained by Fredrik Lundh. The olefile API is still compatible with PIL, but +since 2005 I have improved the internal implementation significantly, with new features, bugfixes and a more robust +design. From 2005 to 2014 the project was called OleFileIO_PL, and in 2014 I changed its name to olefile to celebrate +its 9 years and its new write features. + +As far as I know, olefile is the most complete and robust Python implementation to read MS OLE2 files, portable on +several operating systems. 
(please tell me if you know other similar Python modules) + +Since 2014 olefile/OleFileIO_PL has been integrated into [Pillow](http://python-pillow.org), the friendly fork +of PIL. olefile will continue to be improved as a separate project, and new versions will be merged into Pillow +regularly. + + +Main improvements over the original version of OleFileIO in PIL: +---------------------------------------------------------------- + +- Compatible with Python 3.x and 2.6+ +- Many bug fixes +- Support for files larger than 6.8MB +- Support for 64 bits platforms and big-endian CPUs +- Robust: many checks to detect malformed files +- Runtime option to choose if malformed files should be parsed or raise exceptions +- Improved API +- Metadata extraction, stream/storage timestamps (e.g. for document forensics) +- Can open file-like objects +- Added setup.py and install.bat to ease installation +- More convenient slash-based syntax for stream paths +- Write features + +Documentation +------------- + +Please see the [online documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) for more information, +especially the [OLE overview](https://bitbucket.org/decalage/olefileio_pl/wiki/OLE_Overview) and the +[API page](https://bitbucket.org/decalage/olefileio_pl/wiki/API) which describe how to use olefile in Python applications. +A copy of the same documentation is also provided in the doc subfolder of the olefile package. + + +## Real-life examples ## + +A real-life example: [using OleFileIO_PL for malware analysis and forensics](http://blog.gregback.net/2011/03/using-remnux-for-forensic-puzzle-6/). + +See also [this paper](https://computer-forensics.sans.org/community/papers/gcfa/grow-forensic-tools-taxonomy-python-libraries-helpful-forensic-analysis_6879) about python tools for forensics, which features olefile. + + +License +------- + +olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec +([http://www.decalage.info](http://www.decalage.info)) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
+
+----------
+
+olefile is based on source code from the OleFileIO module of the Python Imaging Library (PIL) published by Fredrik
+Lundh under the following license:
+
+The Python Imaging Library (PIL) is
+
+    Copyright © 1997-2011 by Secret Labs AB
+    Copyright © 1995-2011 by Fredrik Lundh
+
+By obtaining, using, and/or copying this software and/or its associated documentation, you agree that you have read,
+understood, and will comply with the following terms and conditions:
+
+Permission to use, copy, modify, and distribute this software and its associated documentation for any purpose and
+without fee is hereby granted, provided that the above copyright notice appears in all copies, and that both that
+copyright notice and this permission notice appear in supporting documentation, and that the name of Secret Labs AB or
+the author not be used in advertising or publicity pertaining to distribution of the software without specific, written
+prior permission.
+
+SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+SOFTWARE.
diff --git a/server/www/packages/packages-linux/x64/PIL/OleFileIO.py b/server/www/packages/packages-linux/x64/PIL/OleFileIO.py
new file mode 100644
index 0000000..1998e3c
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/OleFileIO.py
@@ -0,0 +1,2305 @@
+#!/usr/bin/env python
+
+# olefile (formerly OleFileIO_PL) version 0.42 2015-01-25
+#
+# Module to read/write Microsoft OLE2 files (also called Structured Storage or
+# Microsoft Compound Document File Format), such as Microsoft Office 97-2003
+# documents, Image Composer and FlashPix files, Outlook messages, ...
+# This version is compatible with Python 2.6+ and 3.x
+#
+# Project website: http://www.decalage.info/olefile
+#
+# olefile is copyright (c) 2005-2015 Philippe Lagadec (http://www.decalage.info)
+#
+# olefile is based on the OleFileIO module from the PIL library v1.1.6
+# See: http://www.pythonware.com/products/pil/index.htm
+#
+# The Python Imaging Library (PIL) is
+# Copyright (c) 1997-2005 by Secret Labs AB
+# Copyright (c) 1995-2005 by Fredrik Lundh
+#
+# See source code and LICENSE.txt for information on usage and redistribution.
+
+
+# Since OleFileIO_PL v0.30, only Python 2.6+ and 3.x is supported
+# This import enables print() as a function rather than a keyword
+# (main requirement to be compatible with Python 3.x)
+# The comment on the line below should be printed on Python 2.5 or older:
+from __future__ import print_function  # This version of olefile requires Python 2.6+ or 3.x.
+
+
+__author__ = "Philippe Lagadec"
+__date__ = "2015-01-25"
+__version__ = '0.42b'
+
+#--- LICENSE ------------------------------------------------------------------
+
+# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec
+# (http://www.decalage.info)
+#
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# ---------- +# PIL License: +# +# olefile is based on source code from the OleFileIO module of the Python +# Imaging Library (PIL) published by Fredrik Lundh under the following license: + +# The Python Imaging Library (PIL) is +# Copyright (c) 1997-2005 by Secret Labs AB +# Copyright (c) 1995-2005 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its associated +# documentation, you agree that you have read, understood, and will comply with +# the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and its +# associated documentation for any purpose and without fee is hereby granted, +# provided that the above copyright notice appears in all copies, and that both +# that copyright notice and this permission notice appear in supporting +# documentation, and that the name of Secret Labs AB or the author(s) not be used +# in advertising or publicity pertaining to distribution of the software +# without specific, written prior permission. +# +# SECRET LABS AB AND THE AUTHORS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +# IN NO EVENT SHALL SECRET LABS AB OR THE AUTHORS BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +#----------------------------------------------------------------------------- +# CHANGELOG: (only olefile/OleFileIO_PL changes compared to PIL 1.1.6) +# 2005-05-11 v0.10 PL: - a few fixes for Python 2.4 compatibility +# (all changes flagged with [PL]) +# 2006-02-22 v0.11 PL: - a few fixes for some Office 2003 documents which raise +# exceptions in _OleStream.__init__() +# 2006-06-09 v0.12 PL: - fixes for files above 6.8MB (DIFAT in loadfat) +# - added some constants +# - added header values checks +# - added some docstrings +# - getsect: bugfix in case sectors >512 bytes +# - getsect: added conformity checks +# - DEBUG_MODE constant to activate debug display +# 2007-09-04 v0.13 PL: - improved/translated (lots of) comments +# - updated license +# - converted tabs to 4 spaces +# 2007-11-19 v0.14 PL: - added OleFileIO._raise_defect() to adapt sensitivity +# - improved _unicode() to use Python 2.x unicode support +# - fixed bug in _OleDirectoryEntry +# 2007-11-25 v0.15 PL: - added safety checks to detect FAT loops +# - fixed _OleStream which didn't check stream size +# - added/improved many docstrings and comments +# - moved helper functions _unicode and _clsid out of +# OleFileIO class +# - improved OleFileIO._find() to add Unix path syntax +# - OleFileIO._find() is now case-insensitive +# - added get_type() and get_rootentry_name() +# - rewritten loaddirectory and _OleDirectoryEntry +# 2007-11-27 v0.16 PL: - added _OleDirectoryEntry.kids_dict +# - added detection of duplicate filenames in storages +# - added detection of duplicate references to streams +# - added get_size() and exists() to _OleDirectoryEntry +# - added isOleFile to check header before parsing +# - added __all__ list to control public keywords in pydoc +# 2007-12-04 v0.17 PL: - added _load_direntry to fix a bug in loaddirectory +# - improved _unicode(), added workarounds for Python <2.3 +# - added set_debug_mode and -d option to set debug mode +# - fixed bugs in OleFileIO.open and _OleDirectoryEntry +# - added safety check in main for large or binary +# properties +# - allow size>0 for storages for some implementations +# 2007-12-05 v0.18 PL: - fixed several bugs in handling of FAT, MiniFAT and +# streams +# - added option '-c' in main to check all streams +# 2009-12-10 v0.19 PL: - bugfix for 32 bit arrays on 64 bits platforms +# (thanks to Ben G. 
and Martijn for reporting the bug) +# 2009-12-11 v0.20 PL: - bugfix in OleFileIO.open when filename is not plain str +# 2010-01-22 v0.21 PL: - added support for big-endian CPUs such as PowerPC Macs +# 2012-02-16 v0.22 PL: - fixed bug in getproperties, patch by chuckleberryfinn +# (https://bitbucket.org/decalage/olefileio_pl/issue/7) +# - added close method to OleFileIO (fixed issue #2) +# 2012-07-25 v0.23 PL: - added support for file-like objects (patch by mete0r_kr) +# 2013-05-05 v0.24 PL: - getproperties: added conversion from filetime to python +# datetime +# - main: displays properties with date format +# - new class OleMetadata to parse standard properties +# - added get_metadata method +# 2013-05-07 v0.24 PL: - a few improvements in OleMetadata +# 2013-05-24 v0.25 PL: - getproperties: option to not convert some timestamps +# - OleMetaData: total_edit_time is now a number of seconds, +# not a timestamp +# - getproperties: added support for VT_BOOL, VT_INT, V_UINT +# - getproperties: filter out null chars from strings +# - getproperties: raise non-fatal defects instead of +# exceptions when properties cannot be parsed properly +# 2013-05-27 PL: - getproperties: improved exception handling +# - _raise_defect: added option to set exception type +# - all non-fatal issues are now recorded, and displayed +# when run as a script +# 2013-07-11 v0.26 PL: - added methods to get modification and creation times +# of a directory entry or a storage/stream +# - fixed parsing of direntry timestamps +# 2013-07-24 PL: - new options in listdir to list storages and/or streams +# 2014-02-04 v0.30 PL: - upgraded code to support Python 3.x by Martin Panter +# - several fixes for Python 2.6 (xrange, MAGIC) +# - reused i32 from Pillow's _binary +# 2014-07-18 v0.31 - preliminary support for 4K sectors +# 2014-07-27 v0.31 PL: - a few improvements in OleFileIO.open (header parsing) +# - Fixed loadfat for large files with 4K sectors (issue #3) +# 2014-07-30 v0.32 PL: - added write_sect to write sectors to disk +# - added write_mode option to OleFileIO.__init__ and open +# 2014-07-31 PL: - fixed padding in write_sect for Python 3, added checks +# - added write_stream to write a stream to disk +# 2014-09-26 v0.40 PL: - renamed OleFileIO_PL to olefile +# 2014-11-09 NE: - added support for Jython (Niko Ehrenfeuchter) +# 2014-11-13 v0.41 PL: - improved isOleFile and OleFileIO.open to support OLE +# data in a string buffer and file-like objects. +# 2014-11-21 PL: - updated comments according to Pillow's commits +# 2015-01-24 v0.42 PL: - changed the default path name encoding from Latin-1 +# to UTF-8 on Python 2.x (Unicode on Python 3.x) +# - added path_encoding option to override the default +# - fixed a bug in _list when a storage is empty + +#----------------------------------------------------------------------------- +# TODO (for version 1.0): +# + get rid of print statements, to simplify Python 2.x and 3.x support +# + add is_stream and is_storage +# + remove leading and trailing slashes where a path is used +# + add functions path_list2str and path_str2list +# + fix how all the methods handle unicode str and/or bytes as arguments +# + add path attrib to _OleDirEntry, set it once and for all in init or +# append_kids (then listdir/_list can be simplified) +# - TESTS with Linux, MacOSX, Python 1.5.2, various files, PIL, ... 
+# - add underscore to each private method, to avoid their display in +# pydoc/epydoc documentation - Remove it for classes to be documented +# - replace all raised exceptions with _raise_defect (at least in OleFileIO) +# - merge code from _OleStream and OleFileIO.getsect to read sectors +# (maybe add a class for FAT and MiniFAT ?) +# - add method to check all streams (follow sectors chains without storing all +# stream in memory, and report anomalies) +# - use _OleDirectoryEntry.kids_dict to improve _find and _list ? +# - fix Unicode names handling (find some way to stay compatible with Py1.5.2) +# => if possible avoid converting names to Latin-1 +# - review DIFAT code: fix handling of DIFSECT blocks in FAT (not stop) +# - rewrite OleFileIO.getproperties +# - improve docstrings to show more sample uses +# - see also original notes and FIXME below +# - remove all obsolete FIXMEs +# - OleMetadata: fix version attrib according to +# http://msdn.microsoft.com/en-us/library/dd945671%28v=office.12%29.aspx + +# IDEAS: +# - in OleFileIO._open and _OleStream, use size=None instead of 0x7FFFFFFF for +# streams with unknown size +# - use arrays of int instead of long integers for FAT/MiniFAT, to improve +# performance and reduce memory usage ? (possible issue with values >2^31) +# - provide tests with unittest (may need write support to create samples) +# - move all debug code (and maybe dump methods) to a separate module, with +# a class which inherits OleFileIO ? +# - fix docstrings to follow epydoc format +# - add support for big endian byte order ? +# - create a simple OLE explorer with wxPython + +# FUTURE EVOLUTIONS to add write support: +# see issue #6 on Bitbucket: +# https://bitbucket.org/decalage/olefileio_pl/issue/6/improve-olefileio_pl-to-write-ole-files + +#----------------------------------------------------------------------------- +# NOTES from PIL 1.1.6: + +# History: +# 1997-01-20 fl Created +# 1997-01-22 fl Fixed 64-bit portability quirk +# 2003-09-09 fl Fixed typo in OleFileIO.loadfat (noted by Daniel Haertle) +# 2004-02-29 fl Changed long hex constants to signed integers +# +# Notes: +# FIXME: sort out sign problem (eliminate long hex constants) +# FIXME: change filename to use "a/b/c" instead of ["a", "b", "c"] +# FIXME: provide a glob mechanism function (using fnmatchcase) +# +# Literature: +# +# "FlashPix Format Specification, Appendix A", Kodak and Microsoft, +# September 1996. 
+# +# Quotes: +# +# "If this document and functionality of the Software conflict, +# the actual functionality of the Software represents the correct +# functionality" -- Microsoft, in the OLE format specification + +#------------------------------------------------------------------------------ + + +import io +import sys +import struct +import array +import os.path +import datetime + +#=== COMPATIBILITY WORKAROUNDS ================================================ + +# [PL] Define explicitly the public API to avoid private objects in pydoc: +#TODO: add more +# __all__ = ['OleFileIO', 'isOleFile', 'MAGIC'] + +# For Python 3.x, need to redefine long as int: +if str is not bytes: + long = int + +# Need to make sure we use xrange both on Python 2 and 3.x: +try: + # on Python 2 we need xrange: + iterrange = xrange +except: + # no xrange, for Python 3 it was renamed as range: + iterrange = range + +# [PL] workaround to fix an issue with array item size on 64 bits systems: +if array.array('L').itemsize == 4: + # on 32 bits platforms, long integers in an array are 32 bits: + UINT32 = 'L' +elif array.array('I').itemsize == 4: + # on 64 bits platforms, integers in an array are 32 bits: + UINT32 = 'I' +elif array.array('i').itemsize == 4: + # On 64 bit Jython, signed integers ('i') are the only way to store our 32 + # bit values in an array in a *somewhat* reasonable way, as the otherwise + # perfectly suited 'H' (unsigned int, 32 bits) results in a completely + # unusable behaviour. This is most likely caused by the fact that Java + # doesn't have unsigned values, and thus Jython's "array" implementation, + # which is based on "jarray", doesn't have them either. + # NOTE: to trick Jython into converting the values it would normally + # interpret as "signed" into "unsigned", a binary-and operation with + # 0xFFFFFFFF can be used. This way it is possible to use the same comparing + # operations on all platforms / implementations. The corresponding code + # lines are flagged with a 'JYTHON-WORKAROUND' tag below. + UINT32 = 'i' +else: + raise ValueError('Need to fix a bug with 32 bit arrays, please contact author...') + + +# [PL] These workarounds were inspired from the Path module +# (see http://www.jorendorff.com/articles/python/path/) +try: + basestring +except NameError: + basestring = str + +# [PL] Experimental setting: if True, OLE filenames will be kept in Unicode +# if False (default PIL behaviour), all filenames are converted to Latin-1. +KEEP_UNICODE_NAMES = True + +if sys.version_info[0] < 3: + # On Python 2.x, the default encoding for path names is UTF-8: + DEFAULT_PATH_ENCODING = 'utf-8' +else: + # On Python 3.x, the default encoding for path names is Unicode (None): + DEFAULT_PATH_ENCODING = None + + +#=== DEBUGGING =============================================================== + +#TODO: replace this by proper logging + +# [PL] DEBUG display mode: False by default, use set_debug_mode() or "-d" on +# command line to change it. +DEBUG_MODE = False + + +def debug_print(msg): + print(msg) + + +def debug_pass(msg): + pass + + +debug = debug_pass + + +def set_debug_mode(debug_mode): + """ + Set debug mode on or off, to control display of debugging messages. 
+ :param mode: True or False + """ + global DEBUG_MODE, debug + DEBUG_MODE = debug_mode + if debug_mode: + debug = debug_print + else: + debug = debug_pass + + +#=== CONSTANTS =============================================================== + +# magic bytes that should be at the beginning of every OLE file: +MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' + +# [PL]: added constants for Sector IDs (from AAF specifications) +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector + +# [PL]: added constants for Directory Entry IDs (from AAF specifications) +MAXREGSID = 0xFFFFFFFA # (-6) maximum directory entry ID +NOSTREAM = 0xFFFFFFFF # (-1) unallocated directory entry + +# [PL] object types in storage (from AAF specifications) +STGTY_EMPTY = 0 # empty directory entry (according to OpenOffice.org doc) +STGTY_STORAGE = 1 # element is a storage object +STGTY_STREAM = 2 # element is a stream object +STGTY_LOCKBYTES = 3 # element is an ILockBytes object +STGTY_PROPERTY = 4 # element is an IPropertyStorage object +STGTY_ROOT = 5 # element is a root storage + + +# +# -------------------------------------------------------------------- +# property types + +VT_EMPTY = 0; VT_NULL = 1; VT_I2 = 2; VT_I4 = 3; VT_R4 = 4; VT_R8 = 5; VT_CY = 6; +VT_DATE = 7; VT_BSTR = 8; VT_DISPATCH = 9; VT_ERROR = 10; VT_BOOL = 11; +VT_VARIANT = 12; VT_UNKNOWN = 13; VT_DECIMAL = 14; VT_I1 = 16; VT_UI1 = 17; +VT_UI2 = 18; VT_UI4 = 19; VT_I8 = 20; VT_UI8 = 21; VT_INT = 22; VT_UINT = 23; +VT_VOID = 24; VT_HRESULT = 25; VT_PTR = 26; VT_SAFEARRAY = 27; VT_CARRAY = 28; +VT_USERDEFINED = 29; VT_LPSTR = 30; VT_LPWSTR = 31; VT_FILETIME = 64; +VT_BLOB = 65; VT_STREAM = 66; VT_STORAGE = 67; VT_STREAMED_OBJECT = 68; +VT_STORED_OBJECT = 69; VT_BLOB_OBJECT = 70; VT_CF = 71; VT_CLSID = 72; +VT_VECTOR = 0x1000; + +# map property id to name (for debugging purposes) + +VT = {} +for keyword, var in list(vars().items()): + if keyword[:3] == "VT_": + VT[var] = keyword + +# +# -------------------------------------------------------------------- +# Some common document types (root.clsid fields) + +WORD_CLSID = "00020900-0000-0000-C000-000000000046" +#TODO: check Excel, PPT, ... + +# [PL]: Defect levels to classify parsing errors - see OleFileIO._raise_defect() +DEFECT_UNSURE = 10 # a case which looks weird, but not sure it's a defect +DEFECT_POTENTIAL = 20 # a potential defect +DEFECT_INCORRECT = 30 # an error according to specifications, but parsing + # can go on +DEFECT_FATAL = 40 # an error which cannot be ignored, parsing is + # impossible + +# Minimal size of an empty OLE file, with 512-bytes sectors = 1536 bytes +# (this is used in isOleFile and OleFile.open) +MINIMAL_OLEFILE_SIZE = 1536 + +# [PL] add useful constants to __all__: +# for key in list(vars().keys()): +# if key.startswith('STGTY_') or key.startswith('DEFECT_'): +# __all__.append(key) + + +#=== FUNCTIONS =============================================================== + +def isOleFile(filename): + """ + Test if a file is an OLE container (according to the magic bytes in its header). + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. 
(bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read and seek methods), + it is parsed as-is. + + :returns: True if OLE, False otherwise. + """ + # check if filename is a string-like or file-like object: + if hasattr(filename, 'read'): + # file-like object: use it directly + header = filename.read(len(MAGIC)) + # just in case, seek back to start of file: + filename.seek(0) + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + header = filename[:len(MAGIC)] + else: + # string-like object: filename of file on disk + header = open(filename, 'rb').read(len(MAGIC)) + if header == MAGIC: + return True + else: + return False + + +if bytes is str: + # version for Python 2.x + def i8(c): + return ord(c) +else: + # version for Python 3.x + def i8(c): + return c if c.__class__ is int else c[0] + + +#TODO: replace i16 and i32 with more readable struct.unpack equivalent? + +def i16(c, o = 0): + """ + Converts a 2-bytes (16 bits) string to an integer. + + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return struct.unpack(" len(fat): + raise IOError('malformed OLE document, stream too large') + # optimization(?): data is first a list of strings, and join() is called + # at the end to concatenate all in one string. + # (this may not be really useful with recent Python versions) + data = [] + # if size is zero, then first sector index should be ENDOFCHAIN: + if size == 0 and sect != ENDOFCHAIN: + debug('size == 0 and sect != ENDOFCHAIN:') + raise IOError('incorrect OLE sector index for empty stream') + # [PL] A fixed-length for loop is used instead of an undefined while + # loop to avoid DoS attacks: + for i in range(nb_sectors): + # Sector index may be ENDOFCHAIN, but only if size was unknown + if sect == ENDOFCHAIN: + if unknown_size: + break + else: + # else this means that the stream is smaller than declared: + debug('sect=ENDOFCHAIN before expected size') + raise IOError('incomplete OLE stream') + # sector index should be within FAT: + if sect < 0 or sect >= len(fat): + debug('sect=%d (%X) / len(fat)=%d' % (sect, sect, len(fat))) + debug('i=%d / nb_sectors=%d' % (i, nb_sectors)) +## tmp_data = b"".join(data) +## f = open('test_debug.bin', 'wb') +## f.write(tmp_data) +## f.close() +## debug('data read so far: %d bytes' % len(tmp_data)) + raise IOError('incorrect OLE FAT, sector index out of range') + #TODO: merge this code with OleFileIO.getsect() ? + #TODO: check if this works with 4K sectors: + try: + fp.seek(offset + sectorsize * sect) + except: + debug('sect=%d, seek=%d, filesize=%d' % + (sect, offset+sectorsize*sect, filesize)) + raise IOError('OLE sector index out of range') + sector_data = fp.read(sectorsize) + # [PL] check if there was enough data: + # Note: if sector is the last of the file, sometimes it is not a + # complete sector (of 512 or 4K), so we may read less than + # sectorsize. 
+ if len(sector_data) != sectorsize and sect != (len(fat)-1): + debug('sect=%d / len(fat)=%d, seek=%d / filesize=%d, len read=%d' % + (sect, len(fat), offset+sectorsize*sect, filesize, len(sector_data))) + debug('seek+len(read)=%d' % (offset+sectorsize*sect+len(sector_data))) + raise IOError('incomplete OLE sector') + data.append(sector_data) + # jump to next sector in the FAT: + try: + sect = fat[sect] & 0xFFFFFFFF # JYTHON-WORKAROUND + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') + data = b"".join(data) + # Data is truncated to the actual stream size: + if len(data) >= size: + data = data[:size] + # actual stream size is stored for future use: + self.size = size + elif unknown_size: + # actual stream size was not known, now we know the size of read + # data: + self.size = len(data) + else: + # read data is less than expected: + debug('len(data)=%d, size=%d' % (len(data), size)) + raise IOError('OLE stream size is less than declared') + # when all data is read in memory, BytesIO constructor is called + io.BytesIO.__init__(self, data) + # Then the _OleStream object can be used as a read-only file object. + + +#--- _OleDirectoryEntry ------------------------------------------------------- + +class _OleDirectoryEntry(object): + + """ + OLE2 Directory Entry + """ + # [PL] parsing code moved from OleFileIO.loaddirectory + + # struct to parse directory entries: + # <: little-endian byte order, standard sizes + # (note: this should guarantee that Q returns a 64 bits int) + # 64s: string containing entry name in unicode (max 31 chars) + null char + # H: uint16, number of bytes used in name buffer, including null = (len+1)*2 + # B: uint8, dir entry type (between 0 and 5) + # B: uint8, color: 0=black, 1=red + # I: uint32, index of left child node in the red-black tree, NOSTREAM if none + # I: uint32, index of right child node in the red-black tree, NOSTREAM if none + # I: uint32, index of child root node if it is a storage, else NOSTREAM + # 16s: CLSID, unique identifier (only used if it is a storage) + # I: uint32, user flags + # Q (was 8s): uint64, creation timestamp or zero + # Q (was 8s): uint64, modification timestamp or zero + # I: uint32, SID of first sector if stream or ministream, SID of 1st sector + # of stream containing ministreams if root entry, 0 otherwise + # I: uint32, total stream size in bytes if stream (low 32 bits), 0 otherwise + # I: uint32, total stream size in bytes if stream (high 32 bits), 0 otherwise + STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' + # size of a directory entry: 128 bytes + DIRENTRY_SIZE = 128 + assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE + + def __init__(self, entry, sid, olefile): + """ + Constructor for an _OleDirectoryEntry object. + Parses a 128-bytes entry from the OLE Directory stream. 
+ + :param entry : string (must be 128 bytes long) + :param sid : index of this directory entry in the OLE file directory + :param olefile: OleFileIO containing this directory entry + """ + self.sid = sid + # ref to olefile is stored for future use + self.olefile = olefile + # kids is a list of children entries, if this entry is a storage: + # (list of _OleDirectoryEntry objects) + self.kids = [] + # kids_dict is a dictionary of children entries, indexed by their + # name in lowercase: used to quickly find an entry, and to detect + # duplicates + self.kids_dict = {} + # flag used to detect if the entry is referenced more than once in + # directory: + self.used = False + # decode DirEntry + ( + name, + namelength, + self.entry_type, + self.color, + self.sid_left, + self.sid_right, + self.sid_child, + clsid, + self.dwUserFlags, + self.createTime, + self.modifyTime, + self.isectStart, + sizeLow, + sizeHigh + ) = struct.unpack(_OleDirectoryEntry.STRUCT_DIRENTRY, entry) + if self.entry_type not in [STGTY_ROOT, STGTY_STORAGE, STGTY_STREAM, STGTY_EMPTY]: + olefile.raise_defect(DEFECT_INCORRECT, 'unhandled OLE storage type') + # only first directory entry can (and should) be root: + if self.entry_type == STGTY_ROOT and sid != 0: + olefile.raise_defect(DEFECT_INCORRECT, 'duplicate OLE root entry') + if sid == 0 and self.entry_type != STGTY_ROOT: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect OLE root entry') + #debug (struct.unpack(fmt_entry, entry[:len_entry])) + # name should be at most 31 unicode characters + null character, + # so 64 bytes in total (31*2 + 2): + if namelength > 64: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect DirEntry name length') + # if exception not raised, namelength is set to the maximum value: + namelength = 64 + # only characters without ending null char are kept: + name = name[:(namelength-2)] + #TODO: check if the name is actually followed by a null unicode character ([MS-CFB] 2.6.1) + #TODO: check if the name does not contain forbidden characters: + # [MS-CFB] 2.6.1: "The following characters are illegal and MUST NOT be part of the name: '/', '\', ':', '!'." 
+ # name is converted from UTF-16LE to the path encoding specified in the OleFileIO: + self.name = olefile._decode_utf16_str(name) + + debug('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) + debug(' - type: %d' % self.entry_type) + debug(' - sect: %d' % self.isectStart) + debug(' - SID left: %d, right: %d, child: %d' % (self.sid_left, + self.sid_right, self.sid_child)) + + # sizeHigh is only used for 4K sectors, it should be zero for 512 bytes + # sectors, BUT apparently some implementations set it as 0xFFFFFFFF, 1 + # or some other value so it cannot be raised as a defect in general: + if olefile.sectorsize == 512: + if sizeHigh != 0 and sizeHigh != 0xFFFFFFFF: + debug('sectorsize=%d, sizeLow=%d, sizeHigh=%d (%X)' % + (olefile.sectorsize, sizeLow, sizeHigh, sizeHigh)) + olefile.raise_defect(DEFECT_UNSURE, 'incorrect OLE stream size') + self.size = sizeLow + else: + self.size = sizeLow + (long(sizeHigh) << 32) + debug(' - size: %d (sizeLow=%d, sizeHigh=%d)' % (self.size, sizeLow, sizeHigh)) + + self.clsid = _clsid(clsid) + # a storage should have a null size, BUT some implementations such as + # Word 8 for Mac seem to allow non-null values => Potential defect: + if self.entry_type == STGTY_STORAGE and self.size != 0: + olefile.raise_defect(DEFECT_POTENTIAL, 'OLE storage with size>0') + # check if stream is not already referenced elsewhere: + if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: + if self.size < olefile.minisectorcutoff \ + and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT + # ministream object + minifat = True + else: + minifat = False + olefile._check_duplicate_stream(self.isectStart, minifat) + + def build_storage_tree(self): + """ + Read and build the red-black tree attached to this _OleDirectoryEntry + object, if it is a storage. + Note that this method builds a tree of all subentries, so it should + only be called for the root object once. + """ + debug('build_storage_tree: SID=%d - %s - sid_child=%d' + % (self.sid, repr(self.name), self.sid_child)) + if self.sid_child != NOSTREAM: + # if child SID is not NOSTREAM, then this entry is a storage. + # Let's walk through the tree of children to fill the kids list: + self.append_kids(self.sid_child) + + # Note from OpenOffice documentation: the safest way is to + # recreate the tree because some implementations may store broken + # red-black trees... + + # in the OLE file, entries are sorted on (length, name). + # for convenience, we sort them on name instead: + # (see rich comparison methods in this class) + self.kids.sort() + + def append_kids(self, child_sid): + """ + Walk through red-black tree of children of this directory entry to add + all of them to the kids list. (recursive method) + + :param child_sid : index of child directory entry to use, or None when called + first time for the root. (only used during recursion) + """ + # [PL] this method was added to use simple recursion instead of a complex + # algorithm. 
+ # if this is not a storage or a leaf of the tree, nothing to do: + if child_sid == NOSTREAM: + return + # check if child SID is in the proper range: + if child_sid < 0 or child_sid >= len(self.olefile.direntries): + self.olefile.raise_defect(DEFECT_FATAL, 'OLE DirEntry index out of range') + # get child direntry: + child = self.olefile._load_direntry(child_sid) #direntries[child_sid] + debug('append_kids: child_sid=%d - %s - sid_left=%d, sid_right=%d, sid_child=%d' + % (child.sid, repr(child.name), child.sid_left, child.sid_right, child.sid_child)) + # the directory entries are organized as a red-black tree. + # (cf. Wikipedia for details) + # First walk through left side of the tree: + self.append_kids(child.sid_left) + # Check if its name is not already used (case-insensitive): + name_lower = child.name.lower() + if name_lower in self.kids_dict: + self.olefile.raise_defect(DEFECT_INCORRECT, + "Duplicate filename in OLE storage") + # Then the child_sid _OleDirectoryEntry object is appended to the + # kids list and dictionary: + self.kids.append(child) + self.kids_dict[name_lower] = child + # Check if kid was not already referenced in a storage: + if child.used: + self.olefile.raise_defect(DEFECT_INCORRECT, + 'OLE Entry referenced more than once') + child.used = True + # Finally walk through right side of the tree: + self.append_kids(child.sid_right) + # Afterwards build kid's own tree if it's also a storage: + child.build_storage_tree() + + def __eq__(self, other): + "Compare entries by name" + return self.name == other.name + + def __lt__(self, other): + "Compare entries by name" + return self.name < other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __le__(self, other): + return self.__eq__(other) or self.__lt__(other) + + # Reflected __lt__() and __le__() will be used for __gt__() and __ge__() + + #TODO: replace by the same function as MS implementation ? + # (order by name length first, then case-insensitive order) + + def dump(self, tab = 0): + "Dump this entry, and all its subentries (for debug purposes only)" + TYPES = ["(invalid)", "(storage)", "(stream)", "(lockbytes)", + "(property)", "(root)"] + print(" "*tab + repr(self.name), TYPES[self.entry_type], end=' ') + if self.entry_type in (STGTY_STREAM, STGTY_ROOT): + print(self.size, "bytes", end=' ') + print() + if self.entry_type in (STGTY_STORAGE, STGTY_ROOT) and self.clsid: + print(" "*tab + "{%s}" % self.clsid) + + for kid in self.kids: + kid.dump(tab + 2) + + def getmtime(self): + """ + Return modification time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.modifyTime == 0: + return None + return filetime2datetime(self.modifyTime) + + def getctime(self): + """ + Return creation time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.createTime == 0: + return None + return filetime2datetime(self.createTime) + + +#--- OleFileIO ---------------------------------------------------------------- + +class OleFileIO(object): + """ + OLE container object + + This class encapsulates the interface to an OLE 2 structured + storage file. Use the :py:meth:`~PIL.OleFileIO.OleFileIO.listdir` and + :py:meth:`~PIL.OleFileIO.OleFileIO.openstream` methods to + access the contents of this file. + + Object names are given as a list of strings, one for each subentry + level. 
The root entry should be omitted. For example, the following + code extracts all image streams from a Microsoft Image Composer file:: + + ole = OleFileIO("fan.mic") + + for entry in ole.listdir(): + if entry[1:2] == "Image": + fin = ole.openstream(entry) + fout = open(entry[0:1], "wb") + while True: + s = fin.read(8192) + if not s: + break + fout.write(s) + + You can use the viewer application provided with the Python Imaging + Library to view the resulting files (which happens to be standard + TIFF files). + """ + + def __init__(self, filename=None, raise_defects=DEFECT_FATAL, + write_mode=False, debug=False, path_encoding=DEFAULT_PATH_ENCODING): + """ + Constructor for the OleFileIO class. + + :param filename: file to open. + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param raise_defects: minimal level for defects to be raised as exceptions. + (use DEFECT_FATAL for a typical application, DEFECT_INCORRECT for a + security-oriented application, see source code for details) + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. + + :param debug: bool, set debug mode + + :param path_encoding: None or str, name of the codec to use for path + names (streams and storages), or None for Unicode. + Unicode by default on Python 3+, UTF-8 on Python 2.x. + (new in olefile 0.42, was hardcoded to Latin-1 until olefile v0.41) + """ + set_debug_mode(debug) + # minimal level for defects to be raised as exceptions: + self._raise_defects_level = raise_defects + # list of defects/issues not raised as exceptions: + # tuples of (exception type, message) + self.parsing_issues = [] + self.write_mode = write_mode + self.path_encoding = path_encoding + self._filesize = None + self.fp = None + if filename: + self.open(filename, write_mode=write_mode) + + def raise_defect(self, defect_level, message, exception_type=IOError): + """ + This method should be called for any defect found during file parsing. + It may raise an IOError exception according to the minimal level chosen + for the OleFileIO object. + + :param defect_level: defect level, possible values are: + + - DEFECT_UNSURE : a case which looks weird, but not sure it's a defect + - DEFECT_POTENTIAL : a potential defect + - DEFECT_INCORRECT : an error according to specifications, but parsing can go on + - DEFECT_FATAL : an error which cannot be ignored, parsing is impossible + + :param message: string describing the defect, used with raised exception. + :param exception_type: exception class to be raised, IOError by default + """ + # added by [PL] + if defect_level >= self._raise_defects_level: + raise exception_type(message) + else: + # just record the issue, no exception raised: + self.parsing_issues.append((exception_type, message)) + + def _decode_utf16_str(self, utf16_str, errors='replace'): + """ + Decode a string encoded in UTF-16 LE format, as found in the OLE + directory or in property streams. Return a string encoded + according to the path_encoding specified for the OleFileIO object. 
+ + :param utf16_str: bytes string encoded in UTF-16 LE format + :param errors: str, see python documentation for str.decode() + :return: str, encoded according to path_encoding + """ + unicode_str = utf16_str.decode('UTF-16LE', errors) + if self.path_encoding: + # an encoding has been specified for path names: + return unicode_str.encode(self.path_encoding, errors) + else: + # path_encoding=None, return the Unicode string as-is: + return unicode_str + + def open(self, filename, write_mode=False): + """ + Open an OLE2 file in read-only or read/write mode. + Read and parse the header, FAT and directory. + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. (ignored if filename is not a path) + """ + self.write_mode = write_mode + # [PL] check if filename is a string-like or file-like object: + # (it is better to check for a read() method) + if hasattr(filename, 'read'): + #TODO: also check seek and tell methods? + # file-like object: use it directly + self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + # convert it to BytesIO + self.fp = io.BytesIO(filename) + else: + # string-like object: filename of file on disk + if self.write_mode: + # open file in mode 'read with update, binary' + # According to https://docs.python.org/2/library/functions.html#open + # 'w' would truncate the file, 'a' may only append on some Unixes + mode = 'r+b' + else: + # read-only mode by default + mode = 'rb' + self.fp = open(filename, mode) + # obtain the filesize by using seek and tell, which should work on most + # file-like objects: + #TODO: do it above, using getsize with filename when possible? 
+ #TODO: fix code to fail with clear exception when filesize cannot be obtained + filesize = 0 + self.fp.seek(0, os.SEEK_END) + try: + filesize = self.fp.tell() + finally: + self.fp.seek(0) + self._filesize = filesize + + # lists of streams in FAT and MiniFAT, to detect duplicate references + # (list of indexes of first sectors of each stream) + self._used_streams_fat = [] + self._used_streams_minifat = [] + + header = self.fp.read(512) + + if len(header) != 512 or header[:8] != MAGIC: + self.raise_defect(DEFECT_FATAL, "not an OLE2 structured storage file") + + # [PL] header structure according to AAF specifications: + ##Header + ##struct StructuredStorageHeader { // [offset from start (bytes), length (bytes)] + ##BYTE _abSig[8]; // [00H,08] {0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, + ## // 0x1a, 0xe1} for current version + ##CLSID _clsid; // [08H,16] reserved must be zero (WriteClassStg/ + ## // GetClassFile uses root directory class id) + ##USHORT _uMinorVersion; // [18H,02] minor version of the format: 33 is + ## // written by reference implementation + ##USHORT _uDllVersion; // [1AH,02] major version of the dll/format: 3 for + ## // 512-byte sectors, 4 for 4 KB sectors + ##USHORT _uByteOrder; // [1CH,02] 0xFFFE: indicates Intel byte-ordering + ##USHORT _uSectorShift; // [1EH,02] size of sectors in power-of-two; + ## // typically 9 indicating 512-byte sectors + ##USHORT _uMiniSectorShift; // [20H,02] size of mini-sectors in power-of-two; + ## // typically 6 indicating 64-byte mini-sectors + ##USHORT _usReserved; // [22H,02] reserved, must be zero + ##ULONG _ulReserved1; // [24H,04] reserved, must be zero + ##FSINDEX _csectDir; // [28H,04] must be zero for 512-byte sectors, + ## // number of SECTs in directory chain for 4 KB + ## // sectors + ##FSINDEX _csectFat; // [2CH,04] number of SECTs in the FAT chain + ##SECT _sectDirStart; // [30H,04] first SECT in the directory chain + ##DFSIGNATURE _signature; // [34H,04] signature used for transactions; must + ## // be zero. The reference implementation + ## // does not support transactions + ##ULONG _ulMiniSectorCutoff; // [38H,04] maximum size for a mini stream; + ## // typically 4096 bytes + ##SECT _sectMiniFatStart; // [3CH,04] first SECT in the MiniFAT chain + ##FSINDEX _csectMiniFat; // [40H,04] number of SECTs in the MiniFAT chain + ##SECT _sectDifStart; // [44H,04] first SECT in the DIFAT chain + ##FSINDEX _csectDif; // [48H,04] number of SECTs in the DIFAT chain + ##SECT _sectFat[109]; // [4CH,436] the SECTs of first 109 FAT sectors + ##}; + + # [PL] header decoding: + # '<' indicates little-endian byte ordering for Intel (cf. 
struct module help) + fmt_header = '<8s16sHHHHHHLLLLLLLLLL' + header_size = struct.calcsize(fmt_header) + debug("fmt_header size = %d, +FAT = %d" % (header_size, header_size + 109*4)) + header1 = header[:header_size] + ( + self.Sig, + self.clsid, + self.MinorVersion, + self.DllVersion, + self.ByteOrder, + self.SectorShift, + self.MiniSectorShift, + self.Reserved, self.Reserved1, + self.csectDir, + self.csectFat, + self.sectDirStart, + self.signature, + self.MiniSectorCutoff, + self.MiniFatStart, + self.csectMiniFat, + self.sectDifStart, + self.csectDif + ) = struct.unpack(fmt_header, header1) + debug(struct.unpack(fmt_header, header1)) + + if self.Sig != MAGIC: + # OLE signature should always be present + self.raise_defect(DEFECT_FATAL, "incorrect OLE signature") + if self.clsid != bytearray(16): + # according to AAF specs, CLSID should always be zero + self.raise_defect(DEFECT_INCORRECT, "incorrect CLSID in OLE header") + debug("MinorVersion = %d" % self.MinorVersion) + debug("DllVersion = %d" % self.DllVersion) + if self.DllVersion not in [3, 4]: + # version 3: usual format, 512 bytes per sector + # version 4: large format, 4K per sector + self.raise_defect(DEFECT_INCORRECT, "incorrect DllVersion in OLE header") + debug("ByteOrder = %X" % self.ByteOrder) + if self.ByteOrder != 0xFFFE: + # For now only common little-endian documents are handled correctly + self.raise_defect(DEFECT_FATAL, "incorrect ByteOrder in OLE header") + # TODO: add big-endian support for documents created on Mac ? + # But according to [MS-CFB] ? v20140502, ByteOrder MUST be 0xFFFE. + self.SectorSize = 2**self.SectorShift + debug("SectorSize = %d" % self.SectorSize) + if self.SectorSize not in [512, 4096]: + self.raise_defect(DEFECT_INCORRECT, "incorrect SectorSize in OLE header") + if (self.DllVersion == 3 and self.SectorSize != 512) \ + or (self.DllVersion == 4 and self.SectorSize != 4096): + self.raise_defect(DEFECT_INCORRECT, "SectorSize does not match DllVersion in OLE header") + self.MiniSectorSize = 2**self.MiniSectorShift + debug("MiniSectorSize = %d" % self.MiniSectorSize) + if self.MiniSectorSize not in [64]: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorSize in OLE header") + if self.Reserved != 0 or self.Reserved1 != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect OLE header (non-null reserved bytes)") + debug("csectDir = %d" % self.csectDir) + # Number of directory sectors (only allowed if DllVersion != 3) + if self.SectorSize == 512 and self.csectDir != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect csectDir in OLE header") + debug("csectFat = %d" % self.csectFat) + # csectFat = number of FAT sectors in the file + debug("sectDirStart = %X" % self.sectDirStart) + # sectDirStart = 1st sector containing the directory + debug("signature = %d" % self.signature) + # Signature should be zero, BUT some implementations do not follow this + # rule => only a potential defect: + # (according to MS-CFB, may be != 0 for applications supporting file + # transactions) + if self.signature != 0: + self.raise_defect(DEFECT_POTENTIAL, "incorrect OLE header (signature>0)") + debug("MiniSectorCutoff = %d" % self.MiniSectorCutoff) + # MS-CFB: This integer field MUST be set to 0x00001000. This field + # specifies the maximum size of a user-defined data stream allocated + # from the mini FAT and mini stream, and that cutoff is 4096 bytes. + # Any user-defined data stream larger than or equal to this cutoff size + # must be allocated as normal sectors from the FAT. 
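+        # (in practice: streams strictly smaller than 4096 bytes are stored in
+        # the MiniStream; see _open() below, which applies this cutoff)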
+ if self.MiniSectorCutoff != 0x1000: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorCutoff in OLE header") + debug("MiniFatStart = %X" % self.MiniFatStart) + debug("csectMiniFat = %d" % self.csectMiniFat) + debug("sectDifStart = %X" % self.sectDifStart) + debug("csectDif = %d" % self.csectDif) + + # calculate the number of sectors in the file + # (-1 because header doesn't count) + self.nb_sect = ((filesize + self.SectorSize-1) // self.SectorSize) - 1 + debug("Number of sectors in the file: %d" % self.nb_sect) + #TODO: change this test, because an OLE file MAY contain other data + # after the last sector. + + # file clsid + self.clsid = _clsid(header[8:24]) + + #TODO: remove redundant attributes, and fix the code which uses them? + self.sectorsize = self.SectorSize #1 << i16(header, 30) + self.minisectorsize = self.MiniSectorSize #1 << i16(header, 32) + self.minisectorcutoff = self.MiniSectorCutoff # i32(header, 56) + + # check known streams for duplicate references (these are always in FAT, + # never in MiniFAT): + self._check_duplicate_stream(self.sectDirStart) + # check MiniFAT only if it is not empty: + if self.csectMiniFat: + self._check_duplicate_stream(self.MiniFatStart) + # check DIFAT only if it is not empty: + if self.csectDif: + self._check_duplicate_stream(self.sectDifStart) + + # Load file allocation tables + self.loadfat(header) + # Load directory. This sets both the direntries list (ordered by sid) + # and the root (ordered by hierarchy) members. + self.loaddirectory(self.sectDirStart)#i32(header, 48)) + self.ministream = None + self.minifatsect = self.MiniFatStart #i32(header, 60) + + def close(self): + """ + close the OLE file, to release the file object + """ + self.fp.close() + + def _check_duplicate_stream(self, first_sect, minifat=False): + """ + Checks if a stream has not been already referenced elsewhere. + This method should only be called once for each known stream, and only + if stream size is not null. + + :param first_sect: int, index of first sector of the stream in FAT + :param minifat: bool, if True, stream is located in the MiniFAT, else in the FAT + """ + if minifat: + debug('_check_duplicate_stream: sect=%d in MiniFAT' % first_sect) + used_streams = self._used_streams_minifat + else: + debug('_check_duplicate_stream: sect=%d in FAT' % first_sect) + # some values can be safely ignored (not a real stream): + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): + return + used_streams = self._used_streams_fat + #TODO: would it be more efficient using a dict or hash values, instead + # of a list of long ? + if first_sect in used_streams: + self.raise_defect(DEFECT_INCORRECT, 'Stream referenced twice') + else: + used_streams.append(first_sect) + + def dumpfat(self, fat, firstindex=0): + "Displays a part of FAT in human-readable form for debugging purpose" + # [PL] added only for debug + if not DEBUG_MODE: + return + # dictionary to convert special FAT values in human-readable strings + VPL = 8 # values per line (8+1 * 8+1 = 81) + fatnames = { + FREESECT: "..free..", + ENDOFCHAIN: "[ END. 
]", + FATSECT: "FATSECT ", + DIFSECT: "DIFSECT " + } + nbsect = len(fat) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = fat[i] + aux = sect & 0xFFFFFFFF # JYTHON-WORKAROUND + if aux in fatnames: + name = fatnames[aux] + else: + if sect == i+1: + name = " --->" + else: + name = "%8X" % sect + print(name, end=" ") + print() + + def dumpsect(self, sector, firstindex=0): + "Displays a sector in a human-readable form, for debugging purpose." + if not DEBUG_MODE: + return + VPL = 8 # number of values per line (8+1 * 8+1 = 81) + tab = array.array(UINT32, sector) + if sys.byteorder == 'big': + tab.byteswap() + nbsect = len(tab) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = tab[i] + name = "%8X" % sect + print(name, end=" ") + print() + + def sect2array(self, sect): + """ + convert a sector to an array of 32 bits unsigned integers, + swapping bytes on big endian CPUs such as PowerPC (old Macs) + """ + a = array.array(UINT32, sect) + # if CPU is big endian, swap bytes: + if sys.byteorder == 'big': + a.byteswap() + return a + + def loadfat_sect(self, sect): + """ + Adds the indexes of the given sector to the FAT + + :param sect: string containing the first FAT sector, or array of long integers + :returns: index of last FAT sector. + """ + # a FAT sector is an array of ulong integers. + if isinstance(sect, array.array): + # if sect is already an array it is directly used + fat1 = sect + else: + # if it's a raw sector, it is parsed in an array + fat1 = self.sect2array(sect) + self.dumpsect(sect) + # The FAT is a sector chain starting at the first index of itself. + for isect in fat1: + isect = isect & 0xFFFFFFFF # JYTHON-WORKAROUND + debug("isect = %X" % isect) + if isect == ENDOFCHAIN or isect == FREESECT: + # the end of the sector chain has been reached + debug("found end of sector chain") + break + # read the FAT sector + s = self.getsect(isect) + # parse it as an array of 32 bits integers, and add it to the + # global FAT array + nextfat = self.sect2array(s) + self.fat = self.fat + nextfat + return isect + + def loadfat(self, header): + """ + Load the FAT table. + """ + # The 1st sector of the file contains sector numbers for the first 109 + # FAT sectors, right after the header which is 76 bytes long. + # (always 109, whatever the sector size: 512 bytes = 76+4*109) + # Additional sectors are described by DIF blocks + + sect = header[76:512] + debug("len(sect)=%d, so %d integers" % (len(sect), len(sect)//4)) + #fat = [] + # [PL] FAT is an array of 32 bits unsigned ints, it's more effective + # to use an array than a list in Python. 
+ # It's initialized as empty first: + self.fat = array.array(UINT32) + self.loadfat_sect(sect) + #self.dumpfat(self.fat) +## for i in range(0, len(sect), 4): +## ix = i32(sect, i) +## # [PL] if ix == -2 or ix == -1: # ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## if ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## break +## s = self.getsect(ix) +## #fat = fat + [i32(s, i) for i in range(0, len(s), 4)] +## fat = fat + array.array(UINT32, s) + if self.csectDif != 0: + # [PL] There's a DIFAT because file is larger than 6.8MB + # some checks just in case: + if self.csectFat <= 109: + # there must be at least 109 blocks in header and the rest in + # DIFAT, so number of sectors must be >109. + self.raise_defect(DEFECT_INCORRECT, 'incorrect DIFAT, not enough sectors') + if self.sectDifStart >= self.nb_sect: + # initial DIFAT block index must be valid + self.raise_defect(DEFECT_FATAL, 'incorrect DIFAT, first index out of range') + debug("DIFAT analysis...") + # We compute the necessary number of DIFAT sectors : + # Number of pointers per DIFAT sector = (sectorsize/4)-1 + # (-1 because the last pointer is the next DIFAT sector number) + nb_difat_sectors = (self.sectorsize//4)-1 + # (if 512 bytes: each DIFAT sector = 127 pointers + 1 towards next DIFAT sector) + nb_difat = (self.csectFat-109 + nb_difat_sectors-1)//nb_difat_sectors + debug("nb_difat = %d" % nb_difat) + if self.csectDif != nb_difat: + raise IOError('incorrect DIFAT') + isect_difat = self.sectDifStart + for i in iterrange(nb_difat): + debug("DIFAT block %d, sector %X" % (i, isect_difat)) + #TODO: check if corresponding FAT SID = DIFSECT + sector_difat = self.getsect(isect_difat) + difat = self.sect2array(sector_difat) + self.dumpsect(sector_difat) + self.loadfat_sect(difat[:nb_difat_sectors]) + # last DIFAT pointer is next DIFAT sector: + isect_difat = difat[nb_difat_sectors] + debug("next DIFAT sector: %X" % isect_difat) + # checks: + if isect_difat not in [ENDOFCHAIN, FREESECT]: + # last DIFAT pointer value must be ENDOFCHAIN or FREESECT + raise IOError('incorrect end of DIFAT') +## if len(self.fat) != self.csectFat: +## # FAT should contain csectFat blocks +## print("FAT length: %d instead of %d" % (len(self.fat), self.csectFat)) +## raise IOError('incorrect DIFAT') + # since FAT is read from fixed-size sectors, it may contain more values + # than the actual number of sectors in the file. + # Keep only the relevant sector indexes: + if len(self.fat) > self.nb_sect: + debug('len(fat)=%d, shrunk to nb_sect=%d' % (len(self.fat), self.nb_sect)) + self.fat = self.fat[:self.nb_sect] + debug('\nFAT:') + self.dumpfat(self.fat) + + def loadminifat(self): + """ + Load the MiniFAT table. + """ + # MiniFAT is stored in a standard sub-stream, pointed to by a header + # field. + # NOTE: there are two sizes to take into account for this stream: + # 1) Stream size is calculated according to the number of sectors + # declared in the OLE header. This allocated stream may be more than + # needed to store the actual sector indexes. 
+        # (self.csectMiniFat is the number of sectors of size self.SectorSize)
+        stream_size = self.csectMiniFat * self.SectorSize
+        # 2) Actually used size is calculated by dividing the MiniStream size
+        #    (given by root entry size) by the size of mini sectors, *4 for
+        #    32 bits indexes:
+        nb_minisectors = (self.root.size + self.MiniSectorSize-1) // self.MiniSectorSize
+        used_size = nb_minisectors * 4
+        debug('loadminifat(): minifatsect=%d, nb FAT sectors=%d, used_size=%d, stream_size=%d, nb MiniSectors=%d' %
+              (self.minifatsect, self.csectMiniFat, used_size, stream_size, nb_minisectors))
+        if used_size > stream_size:
+            # This is not really a problem, but may indicate a wrong implementation:
+            self.raise_defect(DEFECT_INCORRECT, 'OLE MiniStream is larger than MiniFAT')
+        # In any case, first read stream_size:
+        s = self._open(self.minifatsect, stream_size, force_FAT=True).read()
+        # [PL] Old code replaced by an array:
+        # self.minifat = [i32(s, i) for i in range(0, len(s), 4)]
+        self.minifat = self.sect2array(s)
+        # Then shrink the array to used size, to avoid indexes out of MiniStream:
+        debug('MiniFAT shrunk from %d to %d sectors' % (len(self.minifat), nb_minisectors))
+        self.minifat = self.minifat[:nb_minisectors]
+        debug('loadminifat(): len=%d' % len(self.minifat))
+        debug('\nMiniFAT:')
+        self.dumpfat(self.minifat)
+
+    def getsect(self, sect):
+        """
+        Read given sector from file on disk.
+
+        :param sect: int, sector index
+        :returns: a string containing the sector data.
+        """
+        # From [MS-CFB]: A sector number can be converted into a byte offset
+        # into the file by using the following formula:
+        # (sector number + 1) x Sector Size.
+        # This implies that sector #0 of the file begins at byte offset Sector
+        # Size, not at 0.
+
+        # [PL] the original code in PIL was wrong when sectors are 4KB instead of
+        # 512 bytes:
+        # self.fp.seek(512 + self.sectorsize * sect)
+        # [PL]: added safety checks:
+        # print("getsect(%X)" % sect)
+        try:
+            self.fp.seek(self.sectorsize * (sect+1))
+        except:
+            debug('getsect(): sect=%X, seek=%d, filesize=%d' %
+                  (sect, self.sectorsize*(sect+1), self._filesize))
+            self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range')
+        sector = self.fp.read(self.sectorsize)
+        if len(sector) != self.sectorsize:
+            debug('getsect(): sect=%X, read=%d, sectorsize=%d' %
+                  (sect, len(sector), self.sectorsize))
+            self.raise_defect(DEFECT_FATAL, 'incomplete OLE sector')
+        return sector
+
+    def write_sect(self, sect, data, padding=b'\x00'):
+        """
+        Write given sector to file on disk.
+
+        :param sect: int, sector index
+        :param data: bytes, sector data
+        :param padding: single byte, padding character if data < sector size
+        """
+        if not isinstance(data, bytes):
+            raise TypeError("write_sect: data must be a bytes string")
+        if not isinstance(padding, bytes) or len(padding) != 1:
+            raise TypeError("write_sect: padding must be a bytes string of 1 char")
+        #TODO: we could allow padding=None for no padding at all
+        try:
+            self.fp.seek(self.sectorsize * (sect+1))
+        except:
+            debug('write_sect(): sect=%X, seek=%d, filesize=%d' %
+                  (sect, self.sectorsize*(sect+1), self._filesize))
+            self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range')
+        if len(data) < self.sectorsize:
+            # add padding
+            data += padding * (self.sectorsize - len(data))
+        elif len(data) > self.sectorsize:
+            raise ValueError("Data is larger than sector size")
+        self.fp.write(data)
+
+    def loaddirectory(self, sect):
+        """
+        Load the directory.
+
+        :param sect: sector index of directory stream.
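+            (in practice, open() passes self.sectDirStart, as read from the
+            OLE header)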
+ """ + # The directory is stored in a standard + # substream, independent of its size. + + # open directory stream as a read-only file: + # (stream size is not known in advance) + self.directory_fp = self._open(sect) + + # [PL] to detect malformed documents and avoid DoS attacks, the maximum + # number of directory entries can be calculated: + max_entries = self.directory_fp.size // 128 + debug('loaddirectory: size=%d, max_entries=%d' % + (self.directory_fp.size, max_entries)) + + # Create list of directory entries + # self.direntries = [] + # We start with a list of "None" object + self.direntries = [None] * max_entries +## for sid in iterrange(max_entries): +## entry = fp.read(128) +## if not entry: +## break +## self.direntries.append(_OleDirectoryEntry(entry, sid, self)) + # load root entry: + root_entry = self._load_direntry(0) + # Root entry is the first entry: + self.root = self.direntries[0] + # read and build all storage trees, starting from the root: + self.root.build_storage_tree() + + def _load_direntry(self, sid): + """ + Load a directory entry from the directory. + This method should only be called once for each storage/stream when + loading the directory. + + :param sid: index of storage/stream in the directory. + :returns: a _OleDirectoryEntry object + + :exception IOError: if the entry has always been referenced. + """ + # check if SID is OK: + if sid < 0 or sid >= len(self.direntries): + self.raise_defect(DEFECT_FATAL, "OLE directory index out of range") + # check if entry was already referenced: + if self.direntries[sid] is not None: + self.raise_defect(DEFECT_INCORRECT, + "double reference for OLE stream/storage") + # if exception not raised, return the object + return self.direntries[sid] + self.directory_fp.seek(sid * 128) + entry = self.directory_fp.read(128) + self.direntries[sid] = _OleDirectoryEntry(entry, sid, self) + return self.direntries[sid] + + def dumpdirectory(self): + """ + Dump directory (for debugging only) + """ + self.root.dump() + + def _open(self, start, size = 0x7FFFFFFF, force_FAT=False): + """ + Open a stream, either in FAT or MiniFAT according to its size. + (openstream helper) + + :param start: index of first sector + :param size: size of stream (or nothing if size is unknown) + :param force_FAT: if False (default), stream will be opened in FAT or MiniFAT + according to size. If True, it will always be opened in FAT. 
+ """ + debug('OleFileIO.open(): sect=%d, size=%d, force_FAT=%s' % + (start, size, str(force_FAT))) + # stream size is compared to the MiniSectorCutoff threshold: + if size < self.minisectorcutoff and not force_FAT: + # ministream object + if not self.ministream: + # load MiniFAT if it wasn't already done: + self.loadminifat() + # The first sector index of the miniFAT stream is stored in the + # root directory entry: + size_ministream = self.root.size + debug('Opening MiniStream: sect=%d, size=%d' % + (self.root.isectStart, size_ministream)) + self.ministream = self._open(self.root.isectStart, + size_ministream, force_FAT=True) + return _OleStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) + else: + # standard stream + return _OleStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, + sectorsize=self.sectorsize, fat=self.fat, + filesize=self._filesize) + + def _list(self, files, prefix, node, streams=True, storages=False): + """ + listdir helper + + :param files: list of files to fill in + :param prefix: current location in storage tree (list of names) + :param node: current node (_OleDirectoryEntry object) + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + """ + prefix = prefix + [node.name] + for entry in node.kids: + if entry.entry_type == STGTY_STORAGE: + # this is a storage + if storages: + # add it to the list + files.append(prefix[1:] + [entry.name]) + # check its kids + self._list(files, prefix, entry, streams, storages) + elif entry.entry_type == STGTY_STREAM: + # this is a stream + if streams: + # add it to the list + files.append(prefix[1:] + [entry.name]) + else: + self.raise_defect(DEFECT_INCORRECT, 'The directory tree contains an entry which is not a stream nor a storage.') + + def listdir(self, streams=True, storages=False): + """ + Return a list of streams and/or storages stored in this file + + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + :returns: list of stream and/or storage paths + """ + files = [] + self._list(files, [], self.root, streams, storages) + return files + + def _find(self, filename): + """ + Returns directory entry of given filename. (openstream helper) + Note: this method is case-insensitive. + + :param filename: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :returns: sid of requested filename + :exception IOError: if file not found + """ + + # if filename is a string instead of a list, split it on slashes to + # convert to a list: + if isinstance(filename, basestring): + filename = filename.split('/') + # walk across storage tree, following given path: + node = self.root + for name in filename: + for kid in node.kids: + if kid.name.lower() == name.lower(): + break + else: + raise IOError("file not found") + node = kid + return node.sid + + def openstream(self, filename): + """ + Open a stream as a read-only file object (BytesIO). + Note: filename is case-insensitive. 
+ + :param filename: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :returns: file object (read-only) + :exception IOError: if filename not found, or if this is not a stream. + """ + sid = self._find(filename) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + raise IOError("this file is not a stream") + return self._open(entry.isectStart, entry.size) + + def write_stream(self, stream_name, data): + """ + Write a stream to disk. For now, it is only possible to replace an + existing stream by data of the same size. + + :param stream_name: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :param data: bytes, data to be written, must be the same size as the original + stream. + """ + if not isinstance(data, bytes): + raise TypeError("write_stream: data must be a bytes string") + sid = self._find(stream_name) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + raise IOError("this is not a stream") + size = entry.size + if size != len(data): + raise ValueError("write_stream: data must be the same size as the existing stream") + if size < self.minisectorcutoff: + raise NotImplementedError("Writing a stream in MiniFAT is not implemented yet") + sect = entry.isectStart + # number of sectors to write + nb_sectors = (size + (self.sectorsize-1)) // self.sectorsize + debug('nb_sectors = %d' % nb_sectors) + for i in range(nb_sectors): + # try: + # self.fp.seek(offset + self.sectorsize * sect) + # except: + # debug('sect=%d, seek=%d' % + # (sect, offset+self.sectorsize*sect)) + # raise IOError('OLE sector index out of range') + # extract one sector from data, the last one being smaller: + if i < (nb_sectors-1): + data_sector = data[i*self.sectorsize:(i+1)*self.sectorsize] + #TODO: comment this if it works + assert(len(data_sector) == self.sectorsize) + else: + data_sector = data[i*self.sectorsize:] + # TODO: comment this if it works + debug('write_stream: size=%d sectorsize=%d data_sector=%d size%%sectorsize=%d' + % (size, self.sectorsize, len(data_sector), size % self.sectorsize)) + assert(len(data_sector) % self.sectorsize == size % self.sectorsize) + self.write_sect(sect, data_sector) +# self.fp.write(data_sector) + # jump to next sector in the FAT: + try: + sect = self.fat[sect] + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') + + def get_type(self, filename): + """ + Test if given filename exists as a stream or a storage in the OLE + container, and return its type. + + :param filename: path of stream in storage tree. 
(see openstream for syntax) + :returns: False if object does not exist, its entry type (>0) otherwise: + + - STGTY_STREAM: a stream + - STGTY_STORAGE: a storage + - STGTY_ROOT: the root entry + """ + try: + sid = self._find(filename) + entry = self.direntries[sid] + return entry.entry_type + except: + return False + + def getmtime(self, filename): + """ + Return modification time of a stream/storage. + + :param filename: path of stream/storage in storage tree. (see openstream for + syntax) + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + sid = self._find(filename) + entry = self.direntries[sid] + return entry.getmtime() + + def getctime(self, filename): + """ + Return creation time of a stream/storage. + + :param filename: path of stream/storage in storage tree. (see openstream for + syntax) + :returns: None if creation time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + sid = self._find(filename) + entry = self.direntries[sid] + return entry.getctime() + + def exists(self, filename): + """ + Test if given filename exists as a stream or a storage in the OLE + container. + Note: filename is case-insensitive. + + :param filename: path of stream in storage tree. (see openstream for syntax) + :returns: True if object exist, else False. + """ + try: + sid = self._find(filename) + return True + except: + return False + + def get_size(self, filename): + """ + Return size of a stream in the OLE container, in bytes. + + :param filename: path of stream in storage tree (see openstream for syntax) + :returns: size in bytes (long integer) + :exception IOError: if file not found + :exception TypeError: if this is not a stream. + """ + sid = self._find(filename) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + #TODO: Should it return zero instead of raising an exception ? + raise TypeError('object is not an OLE stream') + return entry.size + + def get_rootentry_name(self): + """ + Return root entry name. Should usually be 'Root Entry' or 'R' in most + implementations. + """ + return self.root.name + + def getproperties(self, filename, convert_time=False, no_conversion=None): + """ + Return properties described in substream. 
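+        For example (a sketch; the standard SummaryInformation property
+        stream is conventionally prefixed with the character chr(5))::
+
+            props = ole.getproperties(chr(5) + 'SummaryInformation',
+                                      convert_time=True)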
+ + :param filename: path of stream in storage tree (see openstream for syntax) + :param convert_time: bool, if True timestamps will be converted to Python datetime + :param no_conversion: None or list of int, timestamps not to be converted + (for example total editing time is not a real timestamp) + + :returns: a dictionary of values indexed by id (integer) + """ + # REFERENCE: [MS-OLEPS] https://msdn.microsoft.com/en-us/library/dd942421.aspx + # make sure no_conversion is a list, just to simplify code below: + if no_conversion is None: + no_conversion = [] + # stream path as a string to report exceptions: + streampath = filename + if not isinstance(streampath, str): + streampath = '/'.join(streampath) + + fp = self.openstream(filename) + + data = {} + + try: + # header + s = fp.read(28) + clsid = _clsid(s[8:24]) + + # format id + s = fp.read(20) + fmtid = _clsid(s[:16]) + fp.seek(i32(s, 16)) + + # get section + s = b"****" + fp.read(i32(fp.read(4))-4) + # number of properties: + num_props = i32(s, 4) + except BaseException as exc: + # catch exception while parsing property header, and only raise + # a DEFECT_INCORRECT then return an empty dict, because this is not + # a fatal error when parsing the whole file + msg = 'Error while parsing properties header in stream %s: %s' % ( + repr(streampath), exc) + self.raise_defect(DEFECT_INCORRECT, msg, type(exc)) + return data + + for i in range(num_props): + try: + id = 0 # just in case of an exception + id = i32(s, 8+i*8) + offset = i32(s, 12+i*8) + type = i32(s, offset) + + debug('property id=%d: type=%d offset=%X' % (id, type, offset)) + + # test for common types first (should perhaps use + # a dictionary instead?) + + if type == VT_I2: # 16-bit signed integer + value = i16(s, offset+4) + if value >= 32768: + value = value - 65536 + elif type == VT_UI2: # 2-byte unsigned integer + value = i16(s, offset+4) + elif type in (VT_I4, VT_INT, VT_ERROR): + # VT_I4: 32-bit signed integer + # VT_ERROR: HRESULT, similar to 32-bit signed integer, + # see http://msdn.microsoft.com/en-us/library/cc230330.aspx + value = i32(s, offset+4) + elif type in (VT_UI4, VT_UINT): # 4-byte unsigned integer + value = i32(s, offset+4) # FIXME + elif type in (VT_BSTR, VT_LPSTR): + # CodePageString, see http://msdn.microsoft.com/en-us/library/dd942354.aspx + # size is a 32 bits integer, including the null terminator, and + # possibly trailing or embedded null chars + #TODO: if codepage is unicode, the string should be converted as such + count = i32(s, offset+4) + value = s[offset+8:offset+8+count-1] + # remove all null chars: + value = value.replace(b'\x00', b'') + elif type == VT_BLOB: + # binary large object (BLOB) + # see http://msdn.microsoft.com/en-us/library/dd942282.aspx + count = i32(s, offset+4) + value = s[offset+8:offset+8+count] + elif type == VT_LPWSTR: + # UnicodeString + # see http://msdn.microsoft.com/en-us/library/dd942313.aspx + # "the string should NOT contain embedded or additional trailing + # null characters." + count = i32(s, offset+4) + value = self._decode_utf16_str(s[offset+8:offset+8+count*2]) + elif type == VT_FILETIME: + value = long(i32(s, offset+4)) + (long(i32(s, offset+8)) << 32) + # FILETIME is a 64-bit int: "number of 100ns periods + # since Jan 1,1601". 
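+                    # (e.g. 116444736000000000 corresponds to 1970-01-01,
+                    # since there are 11644473600 seconds from 1601 to 1970)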
+                    if convert_time and id not in no_conversion:
+                        debug('Converting property #%d to python datetime, value=%d=%fs'
+                              % (id, value, float(value) / 10000000))
+                        # convert FILETIME to Python datetime.datetime
+                        # inspired from http://code.activestate.com/recipes/511425-filetime-to-datetime/
+                        _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0)
+                        debug('timedelta days=%d' % (value//(10*1000000*3600*24)))
+                        value = _FILETIME_null_date + datetime.timedelta(microseconds=value//10)
+                    else:
+                        # legacy code kept for backward compatibility: returns a
+                        # number of seconds since Jan 1,1601
+                        value = value // 10000000  # seconds
+                elif type == VT_UI1:  # 1-byte unsigned integer
+                    value = i8(s[offset+4])
+                elif type == VT_CLSID:
+                    value = _clsid(s[offset+4:offset+20])
+                elif type == VT_CF:
+                    # PropertyIdentifier or ClipboardData??
+                    # see http://msdn.microsoft.com/en-us/library/dd941945.aspx
+                    count = i32(s, offset+4)
+                    value = s[offset+8:offset+8+count]
+                elif type == VT_BOOL:
+                    # VARIANT_BOOL, 16 bits bool, 0x0000=False, 0xFFFF=True
+                    # see http://msdn.microsoft.com/en-us/library/cc237864.aspx
+                    value = bool(i16(s, offset+4))
+                else:
+                    value = None  # everything else yields "None"
+                    debug('property id=%d: type=%d not implemented in parser yet' % (id, type))
+
+                # missing: VT_EMPTY, VT_NULL, VT_R4, VT_R8, VT_CY, VT_DATE,
+                # VT_DECIMAL, VT_I1, VT_I8, VT_UI8,
+                # see http://msdn.microsoft.com/en-us/library/dd942033.aspx
+
+                # FIXME: add support for VT_VECTOR
+                # VT_VECTOR is a 32 uint giving the number of items, followed by
+                # the items in sequence. The VT_VECTOR value is combined with the
+                # type of items, e.g. VT_VECTOR|VT_BSTR
+                # see http://msdn.microsoft.com/en-us/library/dd942011.aspx
+
+                # print("%08x" % id, repr(value), end=" ")
+                # print("(%s)" % VT[i32(s, offset) & 0xFFF])
+
+                data[id] = value
+            except BaseException as exc:
+                # catch exception while parsing each property, and only raise
+                # a DEFECT_INCORRECT, because parsing can go on
+                msg = 'Error while parsing property id %d in stream %s: %s' % (
+                    id, repr(streampath), exc)
+                self.raise_defect(DEFECT_INCORRECT, msg, type(exc))
+
+        return data
+
+    def get_metadata(self):
+        """
+        Parse standard properties streams, return an OleMetadata object
+        containing all the available metadata.
+        (also stored in the metadata attribute of the OleFileIO object)
+
+        new in version 0.25
+        """
+        self.metadata = OleMetadata()
+        self.metadata.parse_properties(self)
+        return self.metadata
+
+#
+# --------------------------------------------------------------------
+# This script can be used to dump the directory of any OLE2 structured
+# storage file.
+
+if __name__ == "__main__":
+
+    # [PL] display quick usage info if launched from command-line
+    if len(sys.argv) <= 1:
+        print('olefile version %s %s - %s' % (__version__, __date__, __author__))
+        print(
+"""
+Launched from the command line, this script parses OLE files and prints info.
+
+Usage: olefile.py [-d] [-c] <file> [file2 ...]
+ +Options: +-d : debug mode (displays a lot of debug information, for developers only) +-c : check all streams (for debugging purposes) + +For more information, see http://www.decalage.info/olefile +""") + sys.exit() + + check_streams = False + for filename in sys.argv[1:]: + # try: + # OPTIONS: + if filename == '-d': + # option to switch debug mode on: + set_debug_mode(True) + continue + if filename == '-c': + # option to switch check streams mode on: + check_streams = True + continue + + ole = OleFileIO(filename)#, raise_defects=DEFECT_INCORRECT) + print("-" * 68) + print(filename) + print("-" * 68) + ole.dumpdirectory() + for streamname in ole.listdir(): + if streamname[-1][0] == "\005": + print(streamname, ": properties") + props = ole.getproperties(streamname, convert_time=True) + props = sorted(props.items()) + for k, v in props: + # [PL]: avoid to display too large or binary values: + if isinstance(v, (basestring, bytes)): + if len(v) > 50: + v = v[:50] + if isinstance(v, bytes): + # quick and dirty binary check: + for c in (1, 2, 3, 4, 5, 6, 7, 11, 12, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31): + if c in bytearray(v): + v = '(binary data)' + break + print(" ", k, v) + + if check_streams: + # Read all streams to check if there are errors: + print('\nChecking streams...') + for streamname in ole.listdir(): + # print name using repr() to convert binary chars to \xNN: + print('-', repr('/'.join(streamname)), '-', end=' ') + st_type = ole.get_type(streamname) + if st_type == STGTY_STREAM: + print('size %d' % ole.get_size(streamname)) + # just try to read stream in memory: + ole.openstream(streamname) + else: + print('NOT a stream : type=%d' % st_type) + print() + +# for streamname in ole.listdir(): +# # print name using repr() to convert binary chars to \xNN: +# print('-', repr('/'.join(streamname)),'-', end=' ') +# print(ole.getmtime(streamname)) +# print() + + print('Modification/Creation times of all directory entries:') + for entry in ole.direntries: + if entry is not None: + print('- %s: mtime=%s ctime=%s' % (entry.name, + entry.getmtime(), entry.getctime())) + print() + + # parse and display metadata: + meta = ole.get_metadata() + meta.dump() + print() + # [PL] Test a few new methods: + root = ole.get_rootentry_name() + print('Root entry name: "%s"' % root) + if ole.exists('worddocument'): + print("This is a Word document.") + print("type of stream 'WordDocument':", ole.get_type('worddocument')) + print("size :", ole.get_size('worddocument')) + if ole.exists('macros/vba'): + print("This document may contain VBA macros.") + + # print parsing issues: + print('\nNon-fatal issues raised during parsing:') + if ole.parsing_issues: + for exctype, msg in ole.parsing_issues: + print('- %s: %s' % (exctype.__name__, msg)) + else: + print('None') +## except IOError as v: +## print("***", "cannot read", file, "-", v) + +# this code was developed while listening to The Wedding Present "Sea Monsters" diff --git a/server/www/packages/packages-linux/x64/PIL/PSDraw.py b/server/www/packages/packages-linux/x64/PIL/PSDraw.py new file mode 100644 index 0000000..d4e7b18 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PSDraw.py @@ -0,0 +1,235 @@ +# +# The Python Imaging Library +# $Id$ +# +# simple postscript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. 
All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import EpsImagePlugin +import sys + +## +# Simple Postscript graphics interface. + + +class PSDraw(object): + """ + Sets up printing to the given file. If **file** is omitted, + :py:attr:`sys.stdout` is assumed. + """ + + def __init__(self, fp=None): + if not fp: + fp = sys.stdout + self.fp = fp + + def _fp_write(self, to_write): + if bytes is str or self.fp == sys.stdout: + self.fp.write(to_write) + else: + self.fp.write(bytes(to_write, 'UTF-8')) + + def begin_document(self, id=None): + """Set up printing of a document. (Write Postscript DSC header.)""" + # FIXME: incomplete + self._fp_write("%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n") + # self._fp_write(ERROR_PS) # debugging! + self._fp_write(EDROFF_PS) + self._fp_write(VDI_PS) + self._fp_write("%%EndProlog\n") + self.isofont = {} + + def end_document(self): + """Ends printing. (Write Postscript DSC footer.)""" + self._fp_write("%%EndDocument\n" + "restore showpage\n" + "%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font, size): + """ + Selects which font to use. + + :param font: A Postscript font name + :param size: Size in points. + """ + if font not in self.isofont: + # reencode font + self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" % + (font, font)) + self.isofont[font] = 1 + # rough + self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font)) + + def line(self, xy0, xy1): + """ + Draws a line between the two points. Coordinates are given in + Postscript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + xy = xy0 + xy1 + self._fp_write("%d %d %d %d Vl\n" % xy) + + def rectangle(self, box): + """ + Draws a rectangle. + + :param box: A 4-tuple of integers whose order and function is currently + undocumented. + + Hint: the tuple is passed into this format string: + + .. code-block:: python + + %d %d M %d %d 0 Vr\n + """ + self._fp_write("%d %d M %d %d 0 Vr\n" % box) + + def text(self, xy, text): + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. + """ + text = "\\(".join(text.split("(")) + text = "\\)".join(text.split(")")) + xy = xy + (text,) + self._fp_write("%d %d M (%s) S\n" % xy) + + def image(self, box, im, dpi=None): + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # greyscale + # image size (on paper) + x = float(im.size[0] * 72) / dpi + y = float(im.size[1] * 72) / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self._fp_write("gsave\n%f %f translate\n" % (dx, dy)) + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self._fp_write("%f %f scale\n" % (sx, sy)) + EpsImagePlugin._save(im, self.fp, None, 0) + self._fp_write("\ngrestore\n") + +# -------------------------------------------------------------------- +# Postscript driver + +# +# EDROFF.PS -- Postscript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
+#
+
+EDROFF_PS = """\
+/S { show } bind def
+/P { moveto show } bind def
+/M { moveto } bind def
+/X { 0 rmoveto } bind def
+/Y { 0 exch rmoveto } bind def
+/E { findfont
+     dup maxlength dict begin
+         {
+             1 index /FID ne { def } { pop pop } ifelse
+         } forall
+         /Encoding exch def
+         dup /FontName exch def
+         currentdict end definefont pop
+} bind def
+/F { findfont exch scalefont dup setfont
+     [ exch /setfont cvx ] cvx bind def
+} bind def
+"""
+
+#
+# VDI.PS -- Postscript driver for VDI meta commands
+#
+# History:
+# 94-01-25 fl: created (edroff 2.04)
+#
+# Copyright (c) Fredrik Lundh 1994.
+#
+
+VDI_PS = """\
+/Vm { moveto } bind def
+/Va { newpath arcn stroke } bind def
+/Vl { moveto lineto stroke } bind def
+/Vc { newpath 0 360 arc closepath } bind def
+/Vr { exch dup 0 rlineto
+      exch dup neg 0 exch rlineto
+      exch neg 0 rlineto
+      0 exch rlineto
+      100 div setgray fill 0 setgray } bind def
+/Tm matrix def
+/Ve { Tm currentmatrix pop
+      translate scale newpath 0 0 .5 0 360 arc closepath
+      Tm setmatrix
+} bind def
+/Vf { currentgray exch setgray fill setgray } bind def
+"""
+
+#
+# ERROR.PS -- Error handler
+#
+# History:
+# 89-11-21 fl: created (pslist 1.10)
+#
+
+ERROR_PS = """\
+/landscape false def
+/errorBUF 200 string def
+/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def
+errordict begin /handleerror {
+    initmatrix /Courier findfont 10 scalefont setfont
+    newpath 72 720 moveto $error begin /newerror false def
+    (PostScript Error) show errorNL errorNL
+    (Error: ) show
+        /errorname load errorBUF cvs show errorNL errorNL
+    (Command: ) show
+        /command load dup type /stringtype ne { errorBUF cvs } if show
+        errorNL errorNL
+    (VMstatus: ) show
+        vmstatus errorBUF cvs show ( bytes available, ) show
+        errorBUF cvs show ( bytes used at level ) show
+        errorBUF cvs show errorNL errorNL
+    (Operand stack: ) show errorNL /ostack load {
+        dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+    } forall errorNL
+    (Execution stack: ) show errorNL /estack load {
+        dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+    } forall
+    end showpage
+} def end
+"""
diff --git a/server/www/packages/packages-linux/x64/PIL/PaletteFile.py b/server/www/packages/packages-linux/x64/PIL/PaletteFile.py
new file mode 100644
index 0000000..ef50fee
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/PaletteFile.py
@@ -0,0 +1,55 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read simple, teragon-style palette files
+#
+# History:
+# 97-08-23 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL._binary import o8
+
+
+##
+# File handler for Teragon-style palette files.
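+#
+# A minimal usage sketch (the palette file name is hypothetical); the parser
+# reads raw bytes, so the file should be opened in binary mode:
+#
+#     with open("custom.pal", "rb") as fp:
+#         palette, rawmode = PaletteFile(fp).getpalette()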
+ +class PaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [(i, i, i) for i in range(256)] + + while True: + + s = fp.readline() + + if not s: + break + if s[0:1] == b"#": + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = [int(x) for x in s.split()] + try: + [i, r, g, b] = v + except ValueError: + [i, r] = v + g = b = r + + if 0 <= i <= 255: + self.palette[i] = o8(r) + o8(g) + o8(b) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/server/www/packages/packages-linux/x64/PIL/PalmImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PalmImagePlugin.py new file mode 100644 index 0000000..4f415ff --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PalmImagePlugin.py @@ -0,0 +1,241 @@ +# +# The Python Imaging Library. +# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## + +from PIL import Image, ImageFile, _binary + +__version__ = "1.0" + +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + (153, 153, 
51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) + + +# so build a prototype image to be used for palette resampling +def build_prototype_image(): + image = Image.new("L", (1, len(_Palm8BitColormapValues),)) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata = () + for i in range(len(_Palm8BitColormapValues)): + palettedata = palettedata + _Palm8BitColormapValues[i] + for i in range(256 - len(_Palm8BitColormapValues)): + palettedata = palettedata + (0, 0, 0) + image.putpalette(palettedata) + return image + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = { + "custom-colormap": 0x4000, + "is-compressed": 0x8000, + "has-transparent": 0x2000, + } + +_COMPRESSION_TYPES = { + "none": 0xFF, + "rle": 0x01, + "scanline": 0x00, + } + +o8 = _binary.o8 +o16b = _binary.o16be + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + +def _save(im, fp, filename, check=0): + + if im.mode == "P": + + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif (im.mode == "L" and + "bpp" in im.encoderinfo and + im.encoderinfo["bpp"] in (1, 2, 4)): + + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4): + + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
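+        # (for example, with bpp=2 the maxval is 3, so a stored pixel value
+        # of 1 becomes 3 - (1 & 3) = 2 after the point() transform below)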
+        bpp = im.info["bpp"]
+        im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval))
+        # we ignore the palette here
+        im.mode = "P"
+        rawmode = "P;" + str(bpp)
+        version = 1
+
+    elif im.mode == "1":
+
+        # monochrome -- write it inverted, as is the Palm standard
+        rawmode = "1;I"
+        bpp = 1
+        version = 0
+
+    else:
+
+        raise IOError("cannot write mode %s as Palm" % im.mode)
+
+    if check:
+        return check
+
+    #
+    # make sure image data is available
+    im.load()
+
+    # write header
+
+    cols = im.size[0]
+    rows = im.size[1]
+
+    rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2
+    transparent_index = 0
+    compression_type = _COMPRESSION_TYPES["none"]
+
+    flags = 0
+    if im.mode == "P" and "custom-colormap" in im.info:
+        flags = flags | _FLAGS["custom-colormap"]
+        colormapsize = 4 * 256 + 2
+        colormapmode = im.palette.mode
+        colormap = im.getdata().getpalette()
+    else:
+        colormapsize = 0
+
+    if "offset" in im.info:
+        offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4
+    else:
+        offset = 0
+
+    fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags))
+    fp.write(o8(bpp))
+    fp.write(o8(version))
+    fp.write(o16b(offset))
+    fp.write(o8(transparent_index))
+    fp.write(o8(compression_type))
+    fp.write(o16b(0))  # reserved by Palm
+
+    # now write colormap if necessary
+
+    if colormapsize > 0:
+        fp.write(o16b(256))
+        for i in range(256):
+            fp.write(o8(i))
+            if colormapmode == 'RGB':
+                fp.write(
+                    o8(colormap[3 * i]) +
+                    o8(colormap[3 * i + 1]) +
+                    o8(colormap[3 * i + 2]))
+            elif colormapmode == 'RGBA':
+                fp.write(
+                    o8(colormap[4 * i]) +
+                    o8(colormap[4 * i + 1]) +
+                    o8(colormap[4 * i + 2]))
+
+    # now convert data to raw form
+    ImageFile._save(
+        im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))])
+
+    if hasattr(fp, "flush"):
+        fp.flush()
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_save("Palm", _save)
+
+Image.register_extension("Palm", ".palm")
+
+Image.register_mime("Palm", "image/palm")
diff --git a/server/www/packages/packages-linux/x64/PIL/PcdImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PcdImagePlugin.py
new file mode 100644
index 0000000..b53635a
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/PcdImagePlugin.py
@@ -0,0 +1,59 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PCD file handling
+#
+# History:
+# 96-05-10 fl Created
+# 96-05-27 fl Added draft mode (128x192, 256x384)
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+from PIL import Image, ImageFile, _binary
+
+__version__ = "0.1"
+
+i8 = _binary.i8
+
+
+##
+# Image plugin for PhotoCD images. This plugin only reads the 768x512
+# image from the file; higher resolutions are encoded in a proprietary
+# encoding.
+
+class PcdImageFile(ImageFile.ImageFile):
+
+    format = "PCD"
+    format_description = "Kodak PhotoCD"
+
+    def _open(self):
+
+        # rough
+        self.fp.seek(2048)
+        s = self.fp.read(2048)
+
+        if s[:4] != b"PCD_":
+            raise SyntaxError("not a PCD file")
+
+        orientation = i8(s[1538]) & 3
+        if orientation == 1:
+            self.tile_post_rotate = 90  # hack
+        elif orientation == 3:
+            self.tile_post_rotate = -90
+
+        self.mode = "RGB"
+        self.size = 768, 512  # FIXME: not correct for rotated images!
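+        # a single tile covering the whole image: the raw data of the 768x512
+        # base picture starts at byte offset 96*2048 in the file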
+ self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)] + +# +# registry + +Image.register_open(PcdImageFile.format, PcdImageFile) + +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/server/www/packages/packages-linux/x64/PIL/PcfFontFile.py b/server/www/packages/packages-linux/x64/PIL/PcfFontFile.py new file mode 100644 index 0000000..c200690 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PcfFontFile.py @@ -0,0 +1,252 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library +# $Id$ +# +# portable compiled font file parser +# +# history: +# 1997-08-19 fl created +# 2003-09-13 fl fixed loading of unicode fonts +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import FontFile +from PIL import _binary + +# -------------------------------------------------------------------- +# declarations + +PCF_MAGIC = 0x70636601 # "\x01fcp" + +PCF_PROPERTIES = (1 << 0) +PCF_ACCELERATORS = (1 << 1) +PCF_METRICS = (1 << 2) +PCF_BITMAPS = (1 << 3) +PCF_INK_METRICS = (1 << 4) +PCF_BDF_ENCODINGS = (1 << 5) +PCF_SWIDTHS = (1 << 6) +PCF_GLYPH_NAMES = (1 << 7) +PCF_BDF_ACCELERATORS = (1 << 8) + +BYTES_PER_ROW = [ + lambda bits: ((bits+7) >> 3), + lambda bits: ((bits+15) >> 3) & ~1, + lambda bits: ((bits+31) >> 3) & ~3, + lambda bits: ((bits+63) >> 3) & ~7, +] + +i8 = _binary.i8 +l16 = _binary.i16le +l32 = _binary.i32le +b16 = _binary.i16be +b32 = _binary.i32be + + +def sz(s, o): + return s[o:s.index(b"\0", o)] + + +## +# Font file plugin for the X11 PCF format. + +class PcfFontFile(FontFile.FontFile): + + name = "name" + + def __init__(self, fp): + + magic = l32(fp.read(4)) + if magic != PCF_MAGIC: + raise SyntaxError("not a PCF file") + + FontFile.FontFile.__init__(self) + + count = l32(fp.read(4)) + self.toc = {} + for i in range(count): + type = l32(fp.read(4)) + self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) + + self.fp = fp + + self.info = self._load_properties() + + metrics = self._load_metrics() + bitmaps = self._load_bitmaps(metrics) + encoding = self._load_encoding() + + # + # create glyph structure + + for ch in range(256): + ix = encoding[ch] + if ix is not None: + x, y, l, r, w, a, d, f = metrics[ix] + glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix] + self.glyph[ch] = glyph + + def _getformat(self, tag): + + format, size, offset = self.toc[tag] + + fp = self.fp + fp.seek(offset) + + format = l32(fp.read(4)) + + if format & 4: + i16, i32 = b16, b32 + else: + i16, i32 = l16, l32 + + return fp, format, i16, i32 + + def _load_properties(self): + + # + # font properties + + properties = {} + + fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) + + nprops = i32(fp.read(4)) + + # read property description + p = [] + for i in range(nprops): + p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) + if nprops & 3: + fp.seek(4 - (nprops & 3), 1) # pad + + data = fp.read(i32(fp.read(4))) + + for k, s, v in p: + k = sz(data, k) + if s: + v = sz(data, v) + properties[k] = v + + return properties + + def _load_metrics(self): + + # + # font metrics + + metrics = [] + + fp, format, i16, i32 = self._getformat(PCF_METRICS) + + append = metrics.append + + if (format & 0xff00) == 0x100: + + # "compressed" metrics + for i in range(i16(fp.read(2))): + left = i8(fp.read(1)) - 128 + right = i8(fp.read(1)) - 128 + width = i8(fp.read(1)) - 128 + ascent = i8(fp.read(1)) - 128 + descent = 
i8(fp.read(1)) - 128 + xsize = right - left + ysize = ascent + descent + append( + (xsize, ysize, left, right, width, + ascent, descent, 0) + ) + + else: + + # "jumbo" metrics + for i in range(i32(fp.read(4))): + left = i16(fp.read(2)) + right = i16(fp.read(2)) + width = i16(fp.read(2)) + ascent = i16(fp.read(2)) + descent = i16(fp.read(2)) + attributes = i16(fp.read(2)) + xsize = right - left + ysize = ascent + descent + append( + (xsize, ysize, left, right, width, + ascent, descent, attributes) + ) + + return metrics + + def _load_bitmaps(self, metrics): + + # + # bitmap data + + bitmaps = [] + + fp, format, i16, i32 = self._getformat(PCF_BITMAPS) + + nbitmaps = i32(fp.read(4)) + + if nbitmaps != len(metrics): + raise IOError("Wrong number of bitmaps") + + offsets = [] + for i in range(nbitmaps): + offsets.append(i32(fp.read(4))) + + bitmapSizes = [] + for i in range(4): + bitmapSizes.append(i32(fp.read(4))) + + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 + + bitmapsize = bitmapSizes[padindex] + offsets.append(bitmapsize) + + data = fp.read(bitmapsize) + + pad = BYTES_PER_ROW[padindex] + mode = "1;R" + if bitorder: + mode = "1" + + for i in range(nbitmaps): + x, y, l, r, w, a, d, f = metrics[i] + b, e = offsets[i], offsets[i+1] + bitmaps.append( + Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x)) + ) + + return bitmaps + + def _load_encoding(self): + + # map character code to bitmap index + encoding = [None] * 256 + + fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) + + firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2)) + firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2)) + + default = i16(fp.read(2)) + + nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1) + + for i in range(nencoding): + encodingOffset = i16(fp.read(2)) + if encodingOffset != 0xFFFF: + try: + encoding[i+firstCol] = encodingOffset + except IndexError: + break # only load ISO-8859-1 glyphs + + return encoding diff --git a/server/www/packages/packages-linux/x64/PIL/PcxImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PcxImagePlugin.py new file mode 100644 index 0000000..9440d53 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PcxImagePlugin.py @@ -0,0 +1,187 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCX file handling +# +# This format was originally used by ZSoft's popular PaintBrush +# program for the IBM PC. It is also supported by many MS-DOS and +# Windows applications, including the Windows PaintBrush program in +# Windows 3. +# +# history: +# 1995-09-01 fl Created +# 1996-05-20 fl Fixed RGB support +# 1997-01-03 fl Fixed 2-bit and 4-bit support +# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) +# 1999-02-07 fl Added write support +# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust +# 2002-07-30 fl Seek from to current position, not beginning of file +# 2003-06-03 fl Extract DPI settings (info["dpi"]) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +from PIL import Image, ImageFile, ImagePalette, _binary + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 + +__version__ = "0.6" + + +def _accept(prefix): + return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5] + + +## +# Image plugin for Paintbrush images. 
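+#
+# Minimal usage sketch (file names are hypothetical; the format is
+# registered with Image at the bottom of this module, so the generic
+# entry points are all that is needed):
+#
+#     from PIL import Image
+#     im = Image.open("drawing.pcx")   # dispatched to PcxImageFile
+#     im.save("copy.pcx")              # dispatched to _save below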
+ +class PcxImageFile(ImageFile.ImageFile): + + format = "PCX" + format_description = "Paintbrush" + + def _open(self): + + # header + s = self.fp.read(128) + if not _accept(s): + raise SyntaxError("not a PCX file") + + # image + bbox = i16(s, 4), i16(s, 6), i16(s, 8)+1, i16(s, 10)+1 + if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: + raise SyntaxError("bad PCX image size") + logger.debug("BBox: %s %s %s %s", *bbox) + + # format + version = i8(s[1]) + bits = i8(s[3]) + planes = i8(s[65]) + stride = i16(s, 66) + logger.debug("PCX version %s, bits %s, planes %s, stride %s", + version, bits, planes, stride) + + self.info["dpi"] = i16(s, 12), i16(s, 14) + + if bits == 1 and planes == 1: + mode = rawmode = "1" + + elif bits == 1 and planes in (2, 4): + mode = "P" + rawmode = "P;%dL" % planes + self.palette = ImagePalette.raw("RGB", s[16:64]) + + elif version == 5 and bits == 8 and planes == 1: + mode = rawmode = "L" + # FIXME: hey, this doesn't work with the incremental loader !!! + self.fp.seek(-769, 2) + s = self.fp.read(769) + if len(s) == 769 and i8(s[0]) == 12: + # check if the palette is linear greyscale + for i in range(256): + if s[i*3+1:i*3+4] != o8(i)*3: + mode = rawmode = "P" + break + if mode == "P": + self.palette = ImagePalette.raw("RGB", s[1:]) + self.fp.seek(128) + + elif version == 5 and bits == 8 and planes == 3: + mode = "RGB" + rawmode = "RGB;L" + + else: + raise IOError("unknown PCX mode") + + self.mode = mode + self.size = bbox[2]-bbox[0], bbox[3]-bbox[1] + + bbox = (0, 0) + self.size + logger.debug("size: %sx%s", *self.size) + + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + +# -------------------------------------------------------------------- +# save PCX files + +SAVE = { + # mode: (version, bits, planes, raw mode) + "1": (2, 1, 1, "1"), + "L": (5, 8, 1, "L"), + "P": (5, 8, 1, "P"), + "RGB": (5, 8, 3, "RGB;L"), +} + +o16 = _binary.o16le + + +def _save(im, fp, filename, check=0): + + try: + version, bits, planes, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as PCX" % im.mode) + + if check: + return check + + # bytes per plane + stride = (im.size[0] * bits + 7) // 8 + # stride should be even + stride += stride % 2 + # Stride needs to be kept in sync with the PcxEncode.c version. + # Ideally it should be passed in in the state, but the bytes value + # gets overwritten. + + logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], bits, stride) + + # under windows, we could determine the current screen size with + # "Image.core.display_mode()[1]", but I think that's overkill... 
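+    # The 128-byte header written below is, in order: manufacturer (10),
+    # version, encoding (1 = RLE), bits per pixel, the image window
+    # (xmin, ymin, xmax, ymax), DPI, a 48-byte EGA palette slot, one
+    # reserved byte, the plane count, bytes per plane line, palette
+    # info, the screen size and 54 bytes of padding -- 128 bytes in
+    # total, as the assert below checks.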
+ + screen = im.size + + dpi = 100, 100 + + # PCX header + fp.write( + o8(10) + o8(version) + o8(1) + o8(bits) + o16(0) + + o16(0) + o16(im.size[0]-1) + o16(im.size[1]-1) + o16(dpi[0]) + + o16(dpi[1]) + b"\0"*24 + b"\xFF"*24 + b"\0" + o8(planes) + + o16(stride) + o16(1) + o16(screen[0]) + o16(screen[1]) + + b"\0"*54 + ) + + assert fp.tell() == 128 + + ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0, + (rawmode, bits*planes))]) + + if im.mode == "P": + # colour palette + fp.write(o8(12)) + fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes + elif im.mode == "L": + # greyscale palette + fp.write(o8(12)) + for i in range(256): + fp.write(o8(i)*3) + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) + +Image.register_extension(PcxImageFile.format, ".pcx") diff --git a/server/www/packages/packages-linux/x64/PIL/PdfImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PdfImagePlugin.py new file mode 100644 index 0000000..7decf0e --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PdfImagePlugin.py @@ -0,0 +1,258 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PDF (Acrobat) file handling +# +# History: +# 1996-07-16 fl Created +# 1997-01-18 fl Fixed header +# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. +# 2004-02-24 fl Fixes for 1 and P images. +# +# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996-1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +# Image plugin for PDF images (output only). +## + +from PIL import Image, ImageFile +from PIL._binary import i8 +import io + +__version__ = "0.4" + + +# +# -------------------------------------------------------------------- + +# object ids: +# 1. catalogue +# 2. pages +# 3. image +# 4. page +# 5. page contents + +def _obj(fp, obj, **dict): + fp.write("%d 0 obj\n" % obj) + if dict: + fp.write("<<\n") + for k, v in dict.items(): + if v is not None: + fp.write("/%s %s\n" % (k, v)) + fp.write(">>\n") + + +def _endobj(fp): + fp.write("endobj\n") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +## +# (Internal) Image save plugin for the PDF format. + +def _save(im, fp, filename, save_all=False): + resolution = im.encoderinfo.get("resolution", 72.0) + + # + # make sure image data is available + im.load() + + xref = [0] + + class TextWriter(object): + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, name): + return getattr(self.fp, name) + + def write(self, value): + self.fp.write(value.encode('latin-1')) + + fp = TextWriter(fp) + + fp.write("%PDF-1.2\n") + fp.write("% created by PIL PDF driver " + __version__ + "\n") + + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits) + # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports + # Flatedecode (zip compression). 
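+    # Each supported mode below maps to a stream filter and colour
+    # space: "1" and "P" data is hex-encoded via /ASCIIHexDecode (with
+    # an /Indexed palette for "P"), while "L", "RGB" and "CMYK" data
+    # is embedded as JPEG via /DCTDecode.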
+ + bits = 8 + params = None + + if im.mode == "1": + filter = "/ASCIIHexDecode" + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + bits = 1 + elif im.mode == "L": + filter = "/DCTDecode" + # params = "<< /Predictor 15 /Columns %d >>" % (width-2) + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + elif im.mode == "P": + filter = "/ASCIIHexDecode" + colorspace = "[ /Indexed /DeviceRGB 255 <" + palette = im.im.getpalette("RGB") + for i in range(256): + r = i8(palette[i*3]) + g = i8(palette[i*3+1]) + b = i8(palette[i*3+2]) + colorspace += "%02x%02x%02x " % (r, g, b) + colorspace += "> ]" + procset = "/ImageI" # indexed color + elif im.mode == "RGB": + filter = "/DCTDecode" + colorspace = "/DeviceRGB" + procset = "/ImageC" # color images + elif im.mode == "CMYK": + filter = "/DCTDecode" + colorspace = "/DeviceCMYK" + procset = "/ImageC" # color images + else: + raise ValueError("cannot save mode %s" % im.mode) + + # + # catalogue + + xref.append(fp.tell()) + _obj( + fp, 1, + Type="/Catalog", + Pages="2 0 R") + _endobj(fp) + + # + # pages + numberOfPages = 1 + if save_all: + try: + numberOfPages = im.n_frames + except AttributeError: + # Image format does not have n_frames. It is a single frame image + pass + pages = [str(pageNumber*3+4)+" 0 R" + for pageNumber in range(0, numberOfPages)] + + xref.append(fp.tell()) + _obj( + fp, 2, + Type="/Pages", + Count=len(pages), + Kids="["+"\n".join(pages)+"]") + _endobj(fp) + + for pageNumber in range(0, numberOfPages): + im.seek(pageNumber) + + # + # image + + op = io.BytesIO() + + if filter == "/ASCIIHexDecode": + if bits == 1: + # FIXME: the hex encoder doesn't support packed 1-bit + # images; do things the hard way... + data = im.tobytes("raw", "1") + im = Image.new("L", (len(data), 1), None) + im.putdata(data) + ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "/FlateDecode": + ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/RunLengthDecode": + ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)]) + else: + raise ValueError("unsupported PDF filter (%s)" % filter) + + # + # Get image characteristics + + width, height = im.size + + xref.append(fp.tell()) + _obj( + fp, pageNumber*3+3, + Type="/XObject", + Subtype="/Image", + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Length=len(op.getvalue()), + Filter=filter, + BitsPerComponent=bits, + DecodeParams=params, + ColorSpace=colorspace) + + fp.write("stream\n") + fp.fp.write(op.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # page + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+4) + fp.write( + "<<\n/Type /Page\n/Parent 2 0 R\n" + "/Resources <<\n/ProcSet [ /PDF %s ]\n" + "/XObject << /image %d 0 R >>\n>>\n" + "/MediaBox [ 0 0 %d %d ]\n/Contents %d 0 R\n>>\n" % ( + procset, + pageNumber*3+3, + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + pageNumber*3+5)) + _endobj(fp) + + # + # page contents + + op = TextWriter(io.BytesIO()) + + op.write( + "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( + int(width * 72.0 / resolution), + int(height * 72.0 / resolution))) + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+5, Length=len(op.fp.getvalue())) + + fp.write("stream\n") + fp.fp.write(op.fp.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # trailer + startxref = fp.tell() + fp.write("xref\n0 %d\n0000000000 65535 f \n" % len(xref)) + for x in xref[1:]: + 
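        # each cross-reference entry is exactly 20 bytes: a 10-digit
+        # byte offset, a space, a 5-digit generation number, the
+        # in-use marker "n", and a space-plus-newline terminator
+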
fp.write("%010d 00000 n \n" % x) + fp.write("trailer\n<<\n/Size %d\n/Root 1 0 R\n>>\n" % len(xref)) + fp.write("startxref\n%d\n%%%%EOF\n" % startxref) + if hasattr(fp, "flush"): + fp.flush() + +# +# -------------------------------------------------------------------- + +Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) + +Image.register_extension("PDF", ".pdf") + +Image.register_mime("PDF", "application/pdf") diff --git a/server/www/packages/packages-linux/x64/PIL/PixarImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PixarImagePlugin.py new file mode 100644 index 0000000..fd002d9 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PixarImagePlugin.py @@ -0,0 +1,71 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + +# +# helpers + +i16 = _binary.i16le + + +def _accept(prefix): + return prefix[:4] == b"\200\350\000\000" + + +## +# Image plugin for PIXAR raster images. + +class PixarImageFile(ImageFile.ImageFile): + + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self): + + # assuming a 4-byte magic label + s = self.fp.read(4) + if s != b"\200\350\000\000": + raise SyntaxError("not a PIXAR file") + + # read rest of header + s = s + self.fp.read(508) + + self.size = i16(s[418:420]), i16(s[416:418]) + + # get channel/depth descriptions + mode = i16(s[424:426]), i16(s[426:428]) + + if mode == (14, 2): + self.mode = "RGB" + # FIXME: to be continued... + + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))] + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile, _accept) + +Image.register_extension(PixarImageFile.format, ".pxr") diff --git a/server/www/packages/packages-linux/x64/PIL/PngImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PngImagePlugin.py new file mode 100644 index 0000000..18deec5 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PngImagePlugin.py @@ -0,0 +1,840 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). 
+# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +import re +import zlib +import struct + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.9" + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + +is_cid = re.compile(b"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b'^\xff*\x00\xff*$') + +# Maximum decompressed size for a iTXt or zTXt chunk. +# Eliminates decompression bombs where compressed chunks can expand 1000x +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +# Set the maximum total text chunk size. +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + +class ChunkStream(object): + + def __init__(self, fp): + + self.fp = fp + self.queue = [] + + if not hasattr(Image.core, "crc32"): + self.crc = self.crc_skip + + def read(self): + "Fetch a new chunk. Returns header information." 
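+        # On the wire a PNG chunk is: a 4-byte big-endian data length,
+        # a 4-byte chunk id, the data itself, then a 4-byte CRC.  Only
+        # the 8-byte prefix is consumed here; the caller reads the data
+        # and verifies the trailing CRC via crc() separately.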
+ cid = None + + if self.queue: + cid, pos, length = self.queue.pop() + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + raise SyntaxError("broken PNG file (chunk %s)" % repr(cid)) + + return cid, pos, length + + def close(self): + self.queue = self.crc = self.fp = None + + def push(self, cid, pos, length): + + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + "Call the appropriate chunk handler" + + logger.debug("STREAM %s %s %s", cid, pos, length) + return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length) + + def crc(self, cid, data): + "Read and verify checksum" + + # Skip CRC checks for ancillary chunks if allowed to load truncated images + # 5th byte of first char is 1 [specs, section 5.4] + if ImageFile.LOAD_TRUNCATED_IMAGES and (i8(cid[0]) >> 5 & 1): + self.crc_skip(cid, data) + return + + try: + crc1 = Image.core.crc32(data, Image.core.crc32(cid)) + crc2 = i16(self.fp.read(2)), i16(self.fp.read(2)) + if crc1 != crc2: + raise SyntaxError("broken PNG file (bad header checksum in %s)" + % cid) + except struct.error: + raise SyntaxError("broken PNG file (incomplete checksum in %s)" + % cid) + + def crc_skip(self, cid, data): + "Read checksum. Used if the C module is not present" + + self.fp.read(4) + + def verify(self, endchunk=b"IEND"): + + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. + + cids = [] + + while True: + try: + cid, pos, length = self.read() + except struct.error: + raise IOError("truncated PNG file") + + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + @staticmethod + def __new__(cls, text, lang, tkey): + """ + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo(object): + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self): + self.chunks = [] + + def add(self, cid, data): + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + + """ + + self.chunks.append((cid, data)) + + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + + zlib.compress(value)) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + + value) + + def add_text(self, key, value, zip=0): + """Appends a text chunk. 
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, bool(zip)) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode('latin-1', 'strict') + except UnicodeError: + return self.add_itxt(key, value, zip=bool(zip)) + + if not isinstance(key, bytes): + key = key.encode('latin-1', 'strict') + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + +class PngStream(ChunkStream): + + def __init__(self, fp): + + ChunkStream.__init__(self, fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError("Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" % + self.text_memory) + + def chunk_iCCP(self, pos, length): + + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %s", s[:i]) + logger.debug("Compression method %s", i8(s[i])) + comp_method = i8(s[i]) + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in iCCP chunk" % + comp_method) + try: + icc_profile = _safe_zlib_decompress(s[i+2:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + icc_profile = None + else: + raise + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos, length): + + # image header + s = ImageFile._safe_read(self.fp, length) + self.im_size = i32(s), i32(s[4:]) + try: + self.im_mode, self.im_rawmode = _MODES[(i8(s[8]), i8(s[9]))] + except: + pass + if i8(s[12]): + self.im_info["interlace"] = 1 + if i8(s[11]): + raise SyntaxError("unknown filter category") + return s + + def chunk_IDAT(self, pos, length): + + # image data + self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)] + self.im_idat = length + raise EOFError + + def chunk_IEND(self, pos, length): + + # end of PNG image + raise EOFError + + def chunk_PLTE(self, pos, length): + + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos, length): + + # transparency + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + if _simple_palette.match(s): + # tRNS contains only one full-transparent entry, + # other entries are full opaque + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + else: + # otherwise, we have a byte string with one alpha value + # for each palette entry + self.im_info["transparency"] = s + elif self.im_mode == "L": + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:]) + return s + + def chunk_gAMA(self, pos, length): + + # gamma 
setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_pHYs(self, pos, length): + + # pixels per unit + s = ImageFile._safe_read(self.fp, length) + px, py = i32(s), i32(s[4:]) + unit = i8(s[8]) + if unit == 1: # meter + dpi = int(px * 0.0254 + 0.5), int(py * 0.0254 + 0.5) + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos, length): + + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_zTXt(self, pos, length): + + # compressed text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = i8(v[0]) + else: + comp_method = 0 + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in zTXt chunk" % + comp_method) + try: + v = _safe_zlib_decompress(v[1:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + v = b"" + else: + raise + except zlib.error: + v = b"" + + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = i8(r[0]), i8(r[1]), r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + else: + raise + except zlib.error: + return s + else: + return s + if bytes is not str: + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + +# -------------------------------------------------------------------- +# PNG reader + +def _accept(prefix): + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. + +class PngImageFile(ImageFile.ImageFile): + + format = "PNG" + format_description = "Portable network graphics" + + def _open(self): + + if self.fp.read(8) != _MAGIC: + raise SyntaxError("not a PNG file") + + # + # Parse headers up to the first IDAT chunk + + self.png = PngStream(self.fp) + + while True: + + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%s %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... 
+ # (believe me, I've tried ;-) + + self.mode = self.png.im_mode + self.size = self.png.im_size + self.info = self.png.im_info + self.text = self.png.im_text # experimental + self.tile = self.png.im_tile + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + self.__idat = length # used by load_read() + + def verify(self): + "Verify PNG file" + + if self.fp is None: + raise RuntimeError("verify must be called directly after open") + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + self.png.verify() + self.png.close() + + self.fp = None + + def load_prepare(self): + "internal: prepare to read PNG file" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes): + "internal: read more image data" + + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT"]: + self.png.push(cid, pos, length) + return b"" + + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self): + "internal: finished reading image data" + + self.png.close() + self.png = None + + +# -------------------------------------------------------------------- +# PNG writer + +o8 = _binary.o8 +o16 = _binary.o16be +o32 = _binary.o32be + +_OUTMODES = { + # supported PIL modes, and corresponding rawmodes/bits/color combinations + "1": ("1", b'\x01\x00'), + "L;1": ("L;1", b'\x01\x00'), + "L;2": ("L;2", b'\x02\x00'), + "L;4": ("L;4", b'\x04\x00'), + "L": ("L", b'\x08\x00'), + "LA": ("LA", b'\x08\x04'), + "I": ("I;16B", b'\x10\x00'), + "P;1": ("P;1", b'\x01\x03'), + "P;2": ("P;2", b'\x02\x03'), + "P;4": ("P;4", b'\x04\x03'), + "P": ("P", b'\x08\x03'), + "RGB": ("RGB", b'\x08\x02'), + "RGBA": ("RGBA", b'\x08\x06'), +} + + +def putchunk(fp, cid, *data): + "Write a PNG chunk (including CRC field)" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + fp.write(o16(hi) + o16(lo)) + + +class _idat(object): + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data): + self.chunk(self.fp, b"IDAT", data) + + +def _save(im, fp, filename, chunk=putchunk, check=0): + # save an image to disk (called by the save method) + + mode = im.mode + + if mode == "P": + + # + # attempt to minimize storage requirements for palette images + if "bits" in im.encoderinfo: + # number of bits specified by user + colors = 1 << im.encoderinfo["bits"] + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1])//3, 256), 2) + else: + colors = 256 + + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + elif colors <= 16: + bits = 4 + else: + bits = 8 + if bits != 8: + mode = "%s;%d" % (mode, bits) + + # encoder options + if "dictionary" in im.encoderinfo: + dictionary = im.encoderinfo["dictionary"] + else: + dictionary = b"" + + im.encoderconfig = ("optimize" in im.encoderinfo, + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + dictionary) + + # get the corresponding PNG mode + try: + rawmode, 
mode = _OUTMODES[mode] + except KeyError: + raise IOError("cannot write mode %s as PNG" % mode) + + if check: + return check + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk(fp, b"IHDR", + o32(im.size[0]), o32(im.size[1]), # 0: size + mode, # 8: depth/type + b'\0', # 10: compression + b'\0', # 11: filter category + b'\0') # 12: interlace flag + + if im.mode == "P": + palette_byte_number = (2 ** bits) * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b'\0' + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get('transparency', + im.info.get('transparency', None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = 2**bits + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b'\xFF' * transparency + b'\0' + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode == "L": + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. + raise IOError("cannot use transparency for this mode") + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = 2**bits + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk(fp, b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b'\x01') + + info = im.encoderinfo.get("pnginfo") + if info: + for cid, data in info.chunks: + chunk(fp, cid, data) + + icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(icc) + chunk(fp, b"iCCP", data) + + ImageFile._save(im, _idat(fp, chunk), + [("zip", (0, 0)+im.size, 0, rawmode)]) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector(object): + data = [] + + def write(self, data): + pass + + def append(self, chunk): + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + crc = o16(hi) + o16(lo) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) + +Image.register_extension(PngImageFile.format, ".png") + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/server/www/packages/packages-linux/x64/PIL/PpmImagePlugin.py 
b/server/www/packages/packages-linux/x64/PIL/PpmImagePlugin.py new file mode 100644 index 0000000..68073ca --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PpmImagePlugin.py @@ -0,0 +1,174 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +import string + +from PIL import Image, ImageFile + +__version__ = "0.2" + +# +# -------------------------------------------------------------------- + +b_whitespace = string.whitespace +try: + import locale + locale_lang, locale_enc = locale.getlocale() + if locale_enc is None: + locale_lang, locale_enc = locale.getdefaultlocale() + b_whitespace = b_whitespace.decode(locale_enc) +except: + pass +b_whitespace = b_whitespace.encode('ascii', 'ignore') + +MODES = { + # standard + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK" +} + + +def _accept(prefix): + return prefix[0:1] == b"P" and prefix[1] in b"0456y" + + +## +# Image plugin for PBM, PGM, and PPM images. + +class PpmImageFile(ImageFile.ImageFile): + + format = "PPM" + format_description = "Pbmplus image" + + def _token(self, s=b""): + while True: # read until next whitespace + c = self.fp.read(1) + if not c or c in b_whitespace: + break + if c > b'\x79': + raise ValueError("Expected ASCII value, found binary") + s = s + c + if (len(s) > 9): + raise ValueError("Expected int, got > 9 digits") + return s + + def _open(self): + + # check magic + s = self.fp.read(1) + if s != b"P": + raise SyntaxError("not a PPM file") + mode = MODES[self._token(s)] + + if mode == "1": + self.mode = "1" + rawmode = "1;I" + else: + self.mode = rawmode = mode + + for ix in range(3): + while True: + while True: + s = self.fp.read(1) + if s not in b_whitespace: + break + if s == b"": + raise ValueError("File does not extend beyond magic number") + if s != b"#": + break + s = self.fp.readline() + s = int(self._token(s)) + if ix == 0: + xsize = s + elif ix == 1: + ysize = s + if mode == "1": + break + elif ix == 2: + # maxgrey + if s > 255: + if not mode == 'L': + raise ValueError("Too many colors for band: %s" % s) + if s < 2**16: + self.mode = 'I' + rawmode = 'I;16B' + else: + self.mode = 'I' + rawmode = 'I;32B' + + self.size = xsize, ysize + self.tile = [("raw", + (0, 0, xsize, ysize), + self.fp.tell(), + (rawmode, 0, 1))] + + # ALTERNATIVE: load via builtin debug function + # self.im = Image.core.open_ppm(self.filename) + # self.mode = self.im.mode + # self.size = self.im.size + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename): + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + if im.getextrema()[1] < 2**16: + rawmode, head = "I;16B", b"P5" + else: + rawmode, head = "I;32B", b"P5" + elif im.mode == "RGB": + rawmode, head = "RGB", b"P6" + elif im.mode == "RGBA": + rawmode, head = "RGB", b"P6" + else: + raise IOError("cannot write mode %s as PPM" % im.mode) + fp.write(head + ("\n%d %d\n" % im.size).encode('ascii')) + if head == b"P6": + fp.write(b"255\n") + if head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + elif rawmode == "I;16B": + fp.write(b"65535\n") + elif rawmode == 
"I;32B": + fp.write(b"2147483648\n") + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + # ALTERNATIVE: save via builtin debug function + # im._dump(filename) + +# +# -------------------------------------------------------------------- + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_extension(PpmImageFile.format, ".pbm") +Image.register_extension(PpmImageFile.format, ".pgm") +Image.register_extension(PpmImageFile.format, ".ppm") diff --git a/server/www/packages/packages-linux/x64/PIL/PsdImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/PsdImagePlugin.py new file mode 100644 index 0000000..d06e320 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PsdImagePlugin.py @@ -0,0 +1,312 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +__version__ = "0.4" + +from PIL import Image, ImageFile, ImagePalette, _binary + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3) +} + +# +# helpers + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + + +# --------------------------------------------------------------------. +# read PSD images + +def _accept(prefix): + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. 
+ +class PsdImageFile(ImageFile.ImageFile): + + format = "PSD" + format_description = "Adobe Photoshop" + + def _open(self): + + read = self.fp.read + + # + # header + + s = read(26) + if s[:4] != b"8BPS" or i16(s[4:]) != 1: + raise SyntaxError("not a PSD file") + + psd_bits = i16(s[22:]) + psd_channels = i16(s[12:]) + psd_mode = i16(s[24:]) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + raise IOError("not enough channels") + + self.mode = mode + self.size = i32(s[18:]), i32(s[14:]) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + signature = read(4) + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if (len(data) & 1): + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self.layers = [] + + size = i32(read(4)) + if size: + end = self.fp.tell() + size + size = i32(read(4)) + if size: + self.layers = _layerinfo(self.fp) + self.fp.seek(end) + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self._fp = self.fp + self.frame = 0 + + @property + def n_frames(self): + return len(self.layers) + + @property + def is_animated(self): + return len(self.layers) > 1 + + def seek(self, layer): + # seek to given layer (1..max) + if layer == self.frame: + return + try: + if layer <= 0: + raise IndexError + name, mode, bbox, tile = self.layers[layer-1] + self.mode = mode + self.tile = tile + self.frame = layer + self.fp = self._fp + return name, bbox + except IndexError: + raise EOFError("no such layer") + + def tell(self): + # return layer number (0=image, 1..max=layers) + return self.frame + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.fill(self.mode, self.size, 0) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + +def _layerinfo(file): + # read layerinfo block + layers = [] + read = file.read + for i in range(abs(i16(read(2)))): + + # bounding box + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) + + # image info + info = [] + mode = [] + types = list(range(i16(read(2)))) + if len(types) > 4: + continue + + for i in types: + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + size = i32(read(4)) + info.append((m, size)) + + # figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + filler = read(12) + name = "" + size = i32(read(4)) + combined = 0 + if size: + length = i32(read(4)) + if length: + mask_y = i32(read(4)) + mask_x = i32(read(4)) + mask_h = i32(read(4)) - mask_y + mask_w = i32(read(4)) - mask_x + file.seek(length - 16, 1) + combined += length + 4 + + length = i32(read(4)) + if length: + file.seek(length, 1) + combined += length + 4 + + length = i8(read(1)) + if length: + # Don't know the proper 
encoding, + # Latin-1 should be a good guess + name = read(length).decode('latin-1', 'replace') + combined += length + 1 + + file.seek(size - combined, 1) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + i = 0 + for name, mode, bbox in layers: + tile = [] + for m in mode: + t = _maketile(file, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + i += 1 + + return layers + + +def _maketile(file, mode, bbox, channels): + + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize*ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append( + ("packbits", bbox, offset, layer) + ) + for y in range(ysize): + offset = offset + i16(bytecount[i:i+2]) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") diff --git a/server/www/packages/packages-linux/x64/PIL/PyAccess.py b/server/www/packages/packages-linux/x64/PIL/PyAccess.py new file mode 100644 index 0000000..c9cbd70 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/PyAccess.py @@ -0,0 +1,318 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access. +# * Does not implement the line functions, as they don't appear to be used +# * Taking only the tuple form, which is used from python. +# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# + +from __future__ import print_function + +import logging +import sys + +from cffi import FFI + + +logger = logging.getLogger(__name__) + + +defs = """ +struct Pixel_RGBA { + unsigned char r,g,b,a; +}; +struct Pixel_I16 { + unsigned char l,r; +}; +""" +ffi = FFI() +ffi.cdef(defs) + + +class PyAccess(object): + + def __init__(self, img, readonly=False): + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast('unsigned char **', vals['image8']) + self.image32 = ffi.cast('int **', vals['image32']) + self.image = ffi.cast('unsigned char **', vals['image']) + self.xsize = vals['xsize'] + self.ysize = vals['ysize'] + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self): + pass + + def __setitem__(self, xy, color): + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images + + :param xy: The pixel coordinate, given as (x, y). + :param value: The pixel value. 
+ """ + if self.readonly: + raise ValueError('Attempt to putpixel a read only image') + (x, y) = self.check_xy(xy) + return self.set_pixel(x, y, color) + + def __getitem__(self, xy): + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. + """ + + (x, y) = self.check_xy(xy) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy): + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + raise ValueError('pixel location out of range') + return xy + + +class _PyAccess32_2(PyAccess): + """ PA, LA, stored in first and last bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """ RGB and friends, stored in the first three bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + + +class _PyAccess32_4(PyAccess): + """ RGBA etc, all 4 bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """ 1, L, P, 8 bit images stored as uint8 """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """ I;16 access, native bitendian without conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('unsigned short **', self.image) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """ I;16L access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """ I;16B access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = 
ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except: + color = min(color[0], 65535) + + pixel.l = color >> 8 + pixel.r = color & 0xFF + + +class _PyAccessI32_N(PyAccess): + """ Signed Int32 access, native endian """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """ I;32L/B access, with byteswapping conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new('int *', i) + chars = ffi.cast('unsigned char *', orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \ + chars[1], chars[0] + return ffi.cast('int *', chars)[0] + + def get_pixel(self, x, y): + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """ 32 bit float access """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('float **', self.image32) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = {'1': _PyAccess8, + 'L': _PyAccess8, + 'P': _PyAccess8, + 'LA': _PyAccess32_2, + 'La': _PyAccess32_2, + 'PA': _PyAccess32_2, + 'RGB': _PyAccess32_3, + 'LAB': _PyAccess32_3, + 'HSV': _PyAccess32_3, + 'YCbCr': _PyAccess32_3, + 'RGBA': _PyAccess32_4, + 'RGBa': _PyAccess32_4, + 'RGBX': _PyAccess32_4, + 'CMYK': _PyAccess32_4, + 'F': _PyAccessF, + 'I': _PyAccessI32_N, + } + +if sys.byteorder == 'little': + mode_map['I;16'] = _PyAccessI16_N + mode_map['I;16L'] = _PyAccessI16_N + mode_map['I;16B'] = _PyAccessI16_B + + mode_map['I;32L'] = _PyAccessI32_N + mode_map['I;32B'] = _PyAccessI32_Swap +else: + mode_map['I;16'] = _PyAccessI16_L + mode_map['I;16L'] = _PyAccessI16_L + mode_map['I;16B'] = _PyAccessI16_N + + mode_map['I;32L'] = _PyAccessI32_Swap + mode_map['I;32B'] = _PyAccessI32_N + + +def new(img, readonly=False): + access_type = mode_map.get(img.mode, None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/SgiImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/SgiImagePlugin.py new file mode 100644 index 0000000..f890c7e --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/SgiImagePlugin.py @@ -0,0 +1,89 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. +# +# +# History: +# 1995-09-10 fl Created +# +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.2" + +i8 = _binary.i8 +i16 = _binary.i16be + + +def _accept(prefix): + return len(prefix) >= 2 and i16(prefix) == 474 + + +## +# Image plugin for SGI images. 
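+#
+# Minimal usage sketch (file name is hypothetical; note that only
+# uncompressed files are handled -- RLE-compressed files raise a
+# ValueError in _open below):
+#
+#     from PIL import Image
+#     im = Image.open("texture.rgb")   # also .bw, .rgba and .sgi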
+ +class SgiImageFile(ImageFile.ImageFile): + + format = "SGI" + format_description = "SGI Image File Format" + + def _open(self): + + # HEAD + s = self.fp.read(512) + if i16(s) != 474: + raise ValueError("Not an SGI image file") + + # relevant header entries + compression = i8(s[2]) + + # bytes, dimension, zsize + layout = i8(s[3]), i16(s[4:]), i16(s[10:]) + + # determine mode from bytes/zsize + if layout == (1, 2, 1) or layout == (1, 1, 1): + self.mode = "L" + elif layout == (1, 3, 3): + self.mode = "RGB" + elif layout == (1, 3, 4): + self.mode = "RGBA" + else: + raise ValueError("Unsupported SGI image mode") + + # size + self.size = i16(s[6:]), i16(s[8:]) + + # decoder info + if compression == 0: + offset = 512 + pagesize = self.size[0]*self.size[1]*layout[0] + self.tile = [] + for layer in self.mode: + self.tile.append( + ("raw", (0, 0)+self.size, offset, (layer, 0, -1))) + offset = offset + pagesize + elif compression == 1: + raise ValueError("SGI RLE encoding not supported") + +# +# registry + +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) + +Image.register_extension(SgiImageFile.format, ".bw") +Image.register_extension(SgiImageFile.format, ".rgb") +Image.register_extension(SgiImageFile.format, ".rgba") +Image.register_extension(SgiImageFile.format, ".sgi") + +# End of file diff --git a/server/www/packages/packages-linux/x64/PIL/SpiderImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/SpiderImagePlugin.py new file mode 100644 index 0000000..07f623c --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/SpiderImagePlugin.py @@ -0,0 +1,322 @@ +# +# The Python Imaging Library. +# +# SPIDER image file handling +# +# History: +# 2004-08-02 Created BB +# 2006-03-02 added save method +# 2006-03-13 added support for stack images +# +# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. +# Copyright (c) 2004 by William Baxter. +# Copyright (c) 2004 by Secret Labs AB. +# Copyright (c) 2004 by Fredrik Lundh. +# + +## +# Image plugin for the Spider image format. This format is is used +# by the SPIDER software, in processing image data from electron +# microscopy and tomography. +## + +# +# SpiderImagePlugin.py +# +# The Spider image format is used by SPIDER software, in processing +# image data from electron microscopy and tomography. +# +# Spider home page: +# http://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# http://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# + +from __future__ import print_function + +from PIL import Image, ImageFile +import os +import struct +import sys + + +def isInt(f): + try: + i = int(f) + if f-i == 0: + return 1 + else: + return 0 + except ValueError: + return 0 + except OverflowError: + return 0 + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no.of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + +def isSpiderHeader(t): + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. 
of bytes in header + lenbyt = int(h[23]) # record length in bytes + # print "labrec = %d, labbyt = %d, lenbyt = %d" % (labrec,labbyt,lenbyt) + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename): + fp = open(filename, 'rb') + f = fp.read(92) # read 23 * 4 bytes + fp.close() + t = struct.unpack('>23f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack('<23f', f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + + format = "SPIDER" + format_description = "Spider 2D image" + + def _open(self): + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack('>27f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack('<27f', f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + raise SyntaxError("not a valid Spider file") + except struct.error: + raise SyntaxError("not a valid Spider file") + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + raise SyntaxError("not a Spider 2D image") + + self.size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + raise SyntaxError("inconsistent stack header values") + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self.mode = "F" + + self.tile = [ + ("raw", (0, 0) + self.size, offset, + (self.rawmode, 0, 1))] + self.__fp = self.fp # FIXME: hack + + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self): + if self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame): + if self.istack == 0: + raise EOFError("attempt to seek in a non-stack file") + if frame >= self._nimages: + raise EOFError("attempt to seek past end of file") + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self.__fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth=255): + (minimum, maximum) = self.getextrema() + m = 1 + if maximum != minimum: + m = depth / (maximum-minimum) + b = -m * minimum + return self.point(lambda i, m=m, b=b: i * m + b).convert("L") + + # returns a ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self): + from PIL import ImageTk + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + +# -------------------------------------------------------------------- +# Image series + +# given a list of filenames, return a list of images +def loadImageSeries(filelist=None): + " create a list of Image.images for 
use in montage "
+    if filelist is None or len(filelist) < 1:
+        return
+
+    imglist = []
+    for img in filelist:
+        if not os.path.exists(img):
+            print("unable to find %s" % img)
+            continue
+        try:
+            im = Image.open(img).convert2byte()
+        except Exception:
+            if not isSpiderImage(img):
+                print(img + " is not a Spider image file")
+            continue
+        im.info['filename'] = img
+        imglist.append(im)
+    return imglist
+
+
+# --------------------------------------------------------------------
+# For saving images in Spider format
+
+def makeSpiderHeader(im):
+    nsam, nrow = im.size
+    lenbyt = nsam * 4  # There are labrec records in the header
+    labrec = int(1024 / lenbyt)  # int(): on Python 3, / is true division
+    if 1024 % lenbyt != 0:
+        labrec += 1
+    labbyt = labrec * lenbyt
+    hdr = []
+    nvalues = int(labbyt / 4)
+    for i in range(nvalues):
+        hdr.append(0.0)
+
+    if len(hdr) < 23:
+        return []
+
+    # NB these are Fortran indices
+    hdr[1] = 1.0  # nslice (=1 for an image)
+    hdr[2] = float(nrow)  # number of rows per slice
+    hdr[5] = 1.0  # iform for 2D image
+    hdr[12] = float(nsam)  # number of pixels per line
+    hdr[13] = float(labrec)  # number of records in file header
+    hdr[22] = float(labbyt)  # total number of bytes in header
+    hdr[23] = float(lenbyt)  # record length in bytes
+
+    # adjust for Fortran indexing
+    hdr = hdr[1:]
+    hdr.append(0.0)
+    # pack binary data into a string
+    hdrstr = []
+    for v in hdr:
+        hdrstr.append(struct.pack('f', v))
+    return hdrstr
+
+
+def _save(im, fp, filename):
+    if im.mode[0] != "F":
+        im = im.convert('F')
+
+    hdr = makeSpiderHeader(im)
+    if len(hdr) < 256:
+        raise IOError("Error creating Spider header")
+
+    # write the SPIDER header
+    try:
+        fp = open(filename, 'wb')
+    except IOError:
+        raise IOError("Unable to open %s for writing" % filename)
+    fp.writelines(hdr)
+
+    rawmode = "F;32NF"  # 32-bit native floating point
+    ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))])
+
+    fp.close()
+
+
+def _save_spider(im, fp, filename):
+    # get the filename extension and register it with Image
+    ext = os.path.splitext(filename)[1]
+    Image.register_extension(SpiderImageFile.format, ext)
+    _save(im, fp, filename)
+
+# --------------------------------------------------------------------
+
+Image.register_open(SpiderImageFile.format, SpiderImageFile)
+Image.register_save(SpiderImageFile.format, _save_spider)
+
+if __name__ == "__main__":
+
+    if not sys.argv[1:]:
+        print("Syntax: python SpiderImagePlugin.py [infile] [outfile]")
+        sys.exit()
+
+    filename = sys.argv[1]
+    if not isSpiderImage(filename):
+        print("input image must be in Spider format")
+        sys.exit()
+
+    outfile = ""
+    if len(sys.argv[1:]) > 1:
+        outfile = sys.argv[2]
+
+    im = Image.open(filename)
+    print("image: " + str(im))
+    print("format: " + str(im.format))
+    print("size: " + str(im.size))
+    print("mode: " + str(im.mode))
+    print("max, min: ", end=' ')
+    print(im.getextrema())
+
+    if outfile != "":
+        # perform some image operation
+        im = im.transpose(Image.FLIP_LEFT_RIGHT)
+        print(
+            "saving a flipped version of %s as %s " %
+            (os.path.basename(filename), outfile))
+        im.save(outfile, SpiderImageFile.format)
diff --git a/server/www/packages/packages-linux/x64/PIL/SunImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/SunImagePlugin.py
new file mode 100644
index 0000000..af63144
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/SunImagePlugin.py
@@ -0,0 +1,81 @@
+#
+# The Python Imaging Library.
+# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + +i32 = _binary.i32be + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == 0x59a66a95 + + +## +# Image plugin for Sun raster files. + +class SunImageFile(ImageFile.ImageFile): + + format = "SUN" + format_description = "Sun Raster File" + + def _open(self): + + # HEAD + s = self.fp.read(32) + if i32(s) != 0x59a66a95: + raise SyntaxError("not an SUN raster file") + + offset = 32 + + self.size = i32(s[4:8]), i32(s[8:12]) + + depth = i32(s[12:16]) + if depth == 1: + self.mode, rawmode = "1", "1;I" + elif depth == 8: + self.mode = rawmode = "L" + elif depth == 24: + self.mode, rawmode = "RGB", "BGR" + else: + raise SyntaxError("unsupported mode") + + compression = i32(s[20:24]) + + if i32(s[24:28]) != 0: + length = i32(s[28:32]) + offset = offset + length + self.palette = ImagePalette.raw("RGB;L", self.fp.read(length)) + if self.mode == "L": + self.mode = rawmode = "P" + + stride = (((self.size[0] * depth + 7) // 8) + 3) & (~3) + + if compression == 1: + self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))] + elif compression == 2: + self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)] + +# +# registry + +Image.register_open(SunImageFile.format, SunImageFile, _accept) + +Image.register_extension(SunImageFile.format, ".ras") diff --git a/server/www/packages/packages-linux/x64/PIL/TarIO.py b/server/www/packages/packages-linux/x64/PIL/TarIO.py new file mode 100644 index 0000000..4e5115b --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/TarIO.py @@ -0,0 +1,57 @@ +# +# The Python Imaging Library. +# $Id$ +# +# read files from within a tar file +# +# History: +# 95-06-18 fl Created +# 96-05-28 fl Open files in binary mode +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-96. +# +# See the README file for information on usage and redistribution. +# + +from PIL import ContainerIO + + +## +# A file object that provides read access to a given member of a TAR +# file. + +class TarIO(ContainerIO.ContainerIO): + + ## + # Create file object. + # + # @param tarfile Name of TAR file. + # @param file Name of member file. + + def __init__(self, tarfile, file): + + fh = open(tarfile, "rb") + + while True: + + s = fh.read(512) + if len(s) != 512: + raise IOError("unexpected end of tar file") + + name = s[:100].decode('utf-8') + i = name.find('\0') + if i == 0: + raise IOError("cannot find subfile") + if i > 0: + name = name[:i] + + size = int(s[124:135], 8) + + if file == name: + break + + fh.seek((size + 511) & (~511), 1) + + # Open region + ContainerIO.ContainerIO.__init__(self, fh, fh.tell(), size) diff --git a/server/www/packages/packages-linux/x64/PIL/TgaImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/TgaImagePlugin.py new file mode 100644 index 0000000..a75ce29 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/TgaImagePlugin.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# TGA file handling +# +# History: +# 95-09-01 fl created (reads 24-bit files only) +# 97-01-04 fl support more TGA versions, including compressed images +# 98-07-04 fl fixed orientation and alpha layer bugs +# 98-09-11 fl fixed orientation for runlength decoder +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + + +# +# -------------------------------------------------------------------- +# Read RGA file + +i8 = _binary.i8 +i16 = _binary.i16le + + +MODES = { + # map imagetype/depth to rawmode + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", + (2, 16): "BGR;5", + (2, 24): "BGR", + (2, 32): "BGRA", +} + + +## +# Image plugin for Targa files. + +class TgaImageFile(ImageFile.ImageFile): + + format = "TGA" + format_description = "Targa" + + def _open(self): + + # process header + s = self.fp.read(18) + + idlen = i8(s[0]) + + colormaptype = i8(s[1]) + imagetype = i8(s[2]) + + depth = i8(s[16]) + + flags = i8(s[17]) + + self.size = i16(s[12:]), i16(s[14:]) + + # validate header fields + if colormaptype not in (0, 1) or\ + self.size[0] <= 0 or self.size[1] <= 0 or\ + depth not in (1, 8, 16, 24, 32): + raise SyntaxError("not a TGA file") + + # image mode + if imagetype in (3, 11): + self.mode = "L" + if depth == 1: + self.mode = "1" # ??? + elif imagetype in (1, 9): + self.mode = "P" + elif imagetype in (2, 10): + self.mode = "RGB" + if depth == 32: + self.mode = "RGBA" + else: + raise SyntaxError("unknown TGA mode") + + # orientation + orientation = flags & 0x30 + if orientation == 0x20: + orientation = 1 + elif not orientation: + orientation = -1 + else: + raise SyntaxError("unknown TGA orientation") + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if idlen: + self.info["id_section"] = self.fp.read(idlen) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:]) + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGR;16", b"\0"*2*start + self.fp.read(2*size)) + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", b"\0"*3*start + self.fp.read(3*size)) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", b"\0"*4*start + self.fp.read(4*size)) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [("tga_rle", (0, 0)+self.size, + self.fp.tell(), (rawmode, orientation, depth))] + else: + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), (rawmode, 0, orientation))] + except KeyError: + pass # cannot decode + +# +# -------------------------------------------------------------------- +# Write TGA file + +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im, fp, filename, check=0): + + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TGA" % im.mode) + + if check: + return check + + if colormaptype: + colormapfirst, colormaplength, colormapentry = 0, 256, 24 + else: + colormapfirst, colormaplength, colormapentry = 0, 0, 0 + + if im.mode == "RGBA": + flags = 8 + else: + flags = 0 + + orientation = im.info.get("orientation", -1) + if orientation > 0: + flags = flags | 
0x20 + + fp.write(b"\000" + + o8(colormaptype) + + o8(imagetype) + + o16(colormapfirst) + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags)) + + if colormaptype: + fp.write(im.im.getpalette("RGB", "BGR")) + + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extension(TgaImageFile.format, ".tga") diff --git a/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..524d42a --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py @@ -0,0 +1,1504 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import division, print_function + +from PIL import Image, ImageFile +from PIL import ImagePalette +from PIL import _binary +from PIL import TiffTags + +import collections +from fractions import Fraction +from numbers import Number, Rational + +import io +import itertools +import os +import struct +import sys +import warnings + +from .TiffTags import TYPES + + +__version__ = "1.3.5" +DEBUG = False # Needs to be merged with the new logging approach. + +# Set these to true to force use of libtiff for reading or writing. 
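+# A sketch of how these might be flipped from calling code (they are plain
+# module globals, not a public API):
+#
+#     from PIL import TiffImagePlugin
+#     TiffImagePlugin.READ_LIBTIFF = True    # exercise the libtiff read path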
+READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +i8 = _binary.i8 +o8 = _binary.o8 + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEOFFSETS = 324 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", +} + +COMPRESSION_INFO_REV = dict([(v, k) for (k, v) in COMPRESSION_INFO.items()]) + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + + (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (II, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + + (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (2,), 1, (16,), ()): ("I;16S", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I;16BS", "I;16BS"), + + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (32,), 
()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I;32BS", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + + (II, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +PREFIXES = [b"MM\000\052", b"II\052\000", b"II\xBC\000"] + + +def _accept(prefix): + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + + +class IFDRational(Rational): + """ Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). 
Delegate as appropriate + + """ + + __slots__ = ('_numerator', '_denominator', '_val') + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + self._denominator = denominator + self._numerator = value + self._val = float(1) + + if type(value) == Fraction: + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value + + if type(value) == IFDRational: + self._denominator = value.denominator + self._numerator = value.numerator + self._val = value._val + return + + if denominator == 0: + self._val = float('nan') + return + + elif denominator == 1: + if sys.hexversion < 0x2070000 and type(value) == float: + # python 2.6 is different. + self._val = Fraction.from_float(value) + else: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(a): + return a._numerator + + @property + def denominator(a): + return a._denominator + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return (self.numerator, self.denominator) + + f = self._val.limit_denominator(max_denominator) + return (f.numerator, f.denominator) + + def __repr__(self): + return str(float(self._val)) + + def __hash__(self): + return self._val.__hash__() + + def __eq__(self, other): + return self._val == other + + def _delegate(op): + def delegate(self, *args): + return getattr(self._val, op)(*args) + return delegate + + """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', + 'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero', + 'ceil', 'floor', 'round'] + print "\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a) + """ + + __add__ = _delegate('__add__') + __radd__ = _delegate('__radd__') + __sub__ = _delegate('__sub__') + __rsub__ = _delegate('__rsub__') + __div__ = _delegate('__div__') + __rdiv__ = _delegate('__rdiv__') + __mul__ = _delegate('__mul__') + __rmul__ = _delegate('__rmul__') + __truediv__ = _delegate('__truediv__') + __rtruediv__ = _delegate('__rtruediv__') + __floordiv__ = _delegate('__floordiv__') + __rfloordiv__ = _delegate('__rfloordiv__') + __mod__ = _delegate('__mod__') + __rmod__ = _delegate('__rmod__') + __pow__ = _delegate('__pow__') + __rpow__ = _delegate('__rpow__') + __pos__ = _delegate('__pos__') + __neg__ = _delegate('__neg__') + __abs__ = _delegate('__abs__') + __trunc__ = _delegate('__trunc__') + __lt__ = _delegate('__lt__') + __gt__ = _delegate('__gt__') + __le__ = _delegate('__le__') + __ge__ = _delegate('__ge__') + __nonzero__ = _delegate('__nonzero__') + __ceil__ = _delegate('__ceil__') + __floor__ = _delegate('__floor__') + __round__ = _delegate('__round__') + + +class ImageFileDirectory_v2(collections.MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. 
+ + The tiff metadata type of each item is stored in a dictionary of + tag types in + `~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * self.tagtype = {} + + * Key: numerical tiff tag number + * Value: integer corresponding to the data type from `~PIL.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + """ + """ + Documentation: + + 'internal' data structures: + * self._tags_v2 = {} Key: numerical tiff tag number + Value: decoded data, as tuple for multiple values + * self._tagdata = {} Key: numerical tiff tag number + Value: undecoded byte string from file + * self._tags_v1 = {} Key: numerical tiff tag number + Value: decoded data in the v1 format + + Tags will be found in the private attributes self._tagdata, and in + self._tags_v2 once decoded. + + Self.legacy_api is a value for internal use, and shouldn't be + changed from outside code. In cooperation with the + ImageFileDirectory_v1 class, if legacy_api is true, then decoded + tags will be populated into both _tags_v1 and _tags_v2. _Tags_v2 + will be used if this IFD is used in the TIFF save routine. Tags + should be read from tags_v1 if legacy_api == true. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. + """ + if ifh[:4] not in PREFIXES: + raise SyntaxError("not a TIFF file (header %r not valid)" % ifh) + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self.reset() + self.next, = self._unpack("L", ifh[4:]) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def as_dict(self): + """Return a dictionary of the image's tags. + + .. deprecated:: 3.0.0 + """ + warnings.warn("as_dict() is deprecated. " + + "Please use dict(ifd) instead.", DeprecationWarning) + return dict(self) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. 
+ """ + return dict((TiffTags.lookup(code).name, value) + for code, value in self.items()) + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = val, + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + if bytes is str: + def has_key(self, tag): + return tag in self + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + if bytes is str: + basetypes += unicode, + + info = TiffTags.lookup(tag) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = 7 + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = 5 + elif all(isinstance(v, int) for v in values): + if all(v < 2 ** 16 for v in values): + self.tagtype[tag] = 3 + else: + self.tagtype[tag] = 4 + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = 12 + else: + if bytes is str: + # Never treat data as binary by default on Python 2. + self.tagtype[tag] = 2 + else: + if all(isinstance(v, str) for v in values): + self.tagtype[tag] = 2 + + if self.tagtype[tag] == 7 and bytes is not str: + values = [value.encode("ascii", 'replace') if isinstance(value, str) else value] + + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + if info.length == 1: + if legacy_api and self.tagtype[tag] in [5, 10]: + values = values, + dest[tag], = values + else: + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from PIL.TiffTags import TYPES + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func + return func + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func + return func + return decorator + + def _register_basic(idx_fmt_name): + from PIL.TiffTags import TYPES + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = size, lambda self, data, legacy_api=True: ( + self._unpack("{0}{1}".format(len(data) // size, fmt), data)) + _write_dispatch[idx] = lambda self, *values: ( + b"".join(self._pack(fmt, value) for value in values)) + + list(map(_register_basic, + [(3, "H", "short"), (4, "L", "long"), + (6, "b", "signed byte"), (8, "h", "signed short"), + (9, "l", "signed long"), (11, "f", "float"), (12, "d", "double")])) + + @_register_loader(1, 1) # Basic type, except for the legacy API. + def load_byte(self, data, legacy_api=True): + return data + + @_register_writer(1) # Basic type, except for the legacy API. 
+    def write_byte(self, data):
+        return data
+
+    @_register_loader(2, 1)
+    def load_string(self, data, legacy_api=True):
+        if data.endswith(b"\0"):
+            data = data[:-1]
+        return data.decode("latin-1", "replace")
+
+    @_register_writer(2)
+    def write_string(self, value):
+        # remerge of https://github.com/python-pillow/Pillow/pull/1416
+        if sys.version_info[0] == 2:
+            value = value.decode('ascii', 'replace')
+        return b"" + value.encode('ascii', 'replace') + b"\0"
+
+    @_register_loader(5, 8)
+    def load_rational(self, data, legacy_api=True):
+        vals = self._unpack("{0}L".format(len(data) // 4), data)
+        combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b)
+        return tuple(combine(num, denom)
+                     for num, denom in zip(vals[::2], vals[1::2]))
+
+    @_register_writer(5)
+    def write_rational(self, *values):
+        return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31))
+                        for frac in values)
+
+    @_register_loader(7, 1)
+    def load_undefined(self, data, legacy_api=True):
+        return data
+
+    @_register_writer(7)
+    def write_undefined(self, value):
+        return value
+
+    @_register_loader(10, 8)
+    def load_signed_rational(self, data, legacy_api=True):
+        vals = self._unpack("{0}l".format(len(data) // 4), data)
+        combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b)
+        return tuple(combine(num, denom)
+                     for num, denom in zip(vals[::2], vals[1::2]))
+
+    @_register_writer(10)
+    def write_signed_rational(self, *values):
+        return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30))
+                        for frac in values)
+
+    def _ensure_read(self, fp, size):
+        ret = fp.read(size)
+        if len(ret) != size:
+            raise IOError("Corrupt EXIF data. " +
+                          "Expecting to read %d bytes but only got %d. " %
+                          (size, len(ret)))
+        return ret
+
+    def load(self, fp):
+
+        self.reset()
+        self._offset = fp.tell()
+
+        try:
+            for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]):
+                tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12))
+                if DEBUG:
+                    tagname = TiffTags.lookup(tag).name
+                    typname = TYPES.get(typ, "unknown")
+                    print("tag: %s (%d) - type: %s (%d)" %
+                          (tagname, tag, typname, typ), end=" ")
+
+                try:
+                    unit_size, handler = self._load_dispatch[typ]
+                except KeyError:
+                    if DEBUG:
+                        print("- unsupported type", typ)
+                    continue  # ignore unsupported type
+                size = count * unit_size
+                if size > 4:
+                    here = fp.tell()
+                    offset, = self._unpack("L", data)
+                    if DEBUG:
+                        print("Tag Location: %s - Data Location: %s" %
+                              (here, offset), end=" ")
+                    fp.seek(offset)
+                    data = ImageFile._safe_read(fp, size)
+                    fp.seek(here)
+                else:
+                    data = data[:size]
+
+                if len(data) != size:
+                    warnings.warn("Possibly corrupt EXIF data. "
+                                  "Expecting to read %d bytes but only got %d. "
+                                  "Skipping tag %s" % (size, len(data), tag))
+                    continue
+
+                self._tagdata[tag] = data
+                self.tagtype[tag] = typ
+
+                if DEBUG:
+                    if size > 32:
+                        print("- value: <table: %d bytes>" % size)
+                    else:
+                        print("- value:", self[tag])
+
+            self.next, = self._unpack("L", self._ensure_read(fp, 4))
+        except IOError as msg:
+            warnings.warn(str(msg))
+            return
+
+    def save(self, fp):
+
+        if fp.tell() == 0:  # skip TIFF header on subsequent pages
+            # tiff header -- PIL always starts the first IFD at offset 8
+            fp.write(self._prefix + self._pack("HL", 42, 8))
+
+        # FIXME What about tagdata?
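+        # On-disk layout written below (classic TIFF IFD):
+        #     2 bytes    entry count
+        #     12 bytes   per entry: tag (H), type (H), count (L), value/offset (4s)
+        #     4 bytes    offset of the next IFD (emitted as zeros here)
+        # Values longer than 4 bytes cannot live in an entry itself, so
+        # `offset` is seeded just past the table and pass 1 points such
+        # entries at data that pass 3 appends after the table.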
+        fp.write(self._pack("H", len(self._tags_v2)))
+
+        entries = []
+        offset = fp.tell() + len(self._tags_v2) * 12 + 4
+        stripoffsets = None
+
+        # pass 1: convert tags to binary format
+        # always write tags in ascending order
+        for tag, value in sorted(self._tags_v2.items()):
+            if tag == STRIPOFFSETS:
+                stripoffsets = len(entries)
+            typ = self.tagtype.get(tag)
+            if DEBUG:
+                print("Tag %s, Type: %s, Value: %s" % (tag, typ, value))
+            values = value if isinstance(value, tuple) else (value,)
+            data = self._write_dispatch[typ](self, *values)
+            if DEBUG:
+                tagname = TiffTags.lookup(tag).name
+                typname = TYPES.get(typ, "unknown")
+                print("save: %s (%d) - type: %s (%d)" %
+                      (tagname, tag, typname, typ), end=" ")
+                if len(data) >= 16:
+                    print("- value: <table: %d bytes>" % len(data))
+                else:
+                    print("- value:", values)
+
+            # count is sum of lengths for string and arbitrary data
+            count = len(data) if typ in [2, 7] else len(values)
+            # figure out if data fits into the entry
+            if len(data) <= 4:
+                entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
+            else:
+                entries.append((tag, typ, count, self._pack("L", offset), data))
+                offset += (len(data) + 1) // 2 * 2  # pad to word
+
+        # update strip offset data to point beyond auxiliary data
+        if stripoffsets is not None:
+            tag, typ, count, value, data = entries[stripoffsets]
+            if data:
+                raise NotImplementedError(
+                    "multistrip support not yet implemented")
+            value = self._pack("L", self._unpack("L", value)[0] + offset)
+            entries[stripoffsets] = tag, typ, count, value, data
+
+        # pass 2: write entries to file
+        for tag, typ, count, value, data in entries:
+            if DEBUG > 1:
+                print(tag, typ, count, repr(value), repr(data))
+            fp.write(self._pack("HHL4s", tag, typ, count, value))
+
+        # -- overwrite here for multi-page --
+        fp.write(b"\0\0\0\0")  # end of entries
+
+        # pass 3: write auxiliary data to file
+        for tag, typ, count, value, data in entries:
+            fp.write(data)
+            if len(data) & 1:
+                fp.write(b"\0")
+
+        return offset
+
+ImageFileDirectory_v2._load_dispatch = _load_dispatch
+ImageFileDirectory_v2._write_dispatch = _write_dispatch
+for idx, name in TYPES.items():
+    name = name.replace(" ", "_")
+    setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1])
+    setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx])
+del _load_dispatch, _write_dispatch, idx, name
+
+
+# Legacy ImageFileDirectory support.
+class ImageFileDirectory_v1(ImageFileDirectory_v2):
+    """This class represents the **legacy** interface to a TIFF tag directory.
+
+    Exposes a dictionary interface of the tags in the directory::
+
+        ifd = ImageFileDirectory_v1()
+        ifd[key] = 'Some Data'
+        ifd.tagtype[key] = 2
+        print(ifd[key])
+        ('Some Data',)
+
+    Also contains a dictionary of tag types as read from the tiff image file,
+    `~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.
+
+    Values are returned as a tuple.
+
+    .. deprecated:: 3.0.0
+    """
+    def __init__(self, *args, **kwargs):
+        ImageFileDirectory_v2.__init__(self, *args, **kwargs)
+        self._legacy_api = True
+
+    tags = property(lambda self: self._tags_v1)
+    tagdata = property(lambda self: self._tagdata)
+
+    @classmethod
+    def from_v2(cls, original):
+        """ Returns an
+        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+        instance with the same data as is contained in the original
+        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+        instance.
+ + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = val, + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. + +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + + def _open(self): + "Open the first image in a TIFF file" + + # Header + ifh = self.fp.read(8) + + # image file directory (tag dictionary) + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy tag/ifd entries will be filled in later + self.tag = self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self.__fp = self.fp + self._frame_pos = [] + self._n_frames = None + self._is_animated = None + + if DEBUG: + print("*** TiffImageFile._open ***") + print("- __first:", self.__first) + print("- ifh: ", ifh) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + "Select a given frame as current image" + self._seek(max(frame, 0)) # Questionable backwards compatibility. + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. 
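+        # (Frame offsets are discovered lazily: _seek() walks the next-IFD
+        # chain once and caches each position in self._frame_pos, so seeking
+        # back to an earlier frame is just a cached fp.seek().)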
+ Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self.__fp + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + if DEBUG: + print("Seeking to frame %s, on frame %s, " + "__next %s, location: %s" % + (frame, self.__frame, self.__next, self.fp.tell())) + # reset python3 buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + if DEBUG: + print("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + "Return the current frame number" + return self.__frame + + def _decoder(self, rawmode, layer, tile=None): + "Setup decoder contexts" + + args = None + if rawmode == "RGB" and self._planar_configuration == 2: + rawmode = rawmode[layer] + compression = self._compression + if compression == "raw": + args = (rawmode, 0, 1) + elif compression == "jpeg": + args = rawmode, "" + if JPEGTABLES in self.tag_v2: + # Hack to handle abbreviated JPEG headers + # FIXME This will fail with more than one value + self.tile_prefix, = self.tag_v2[JPEGTABLES] + elif compression == "packbits": + args = rawmode + elif compression == "tiff_lzw": + args = rawmode + if PREDICTOR in self.tag_v2: + # Section 14: Differencing Predictor + self.decoderconfig = (self.tag_v2[PREDICTOR],) + + if ICCPROFILE in self.tag_v2: + self.info['icc_profile'] = self.tag_v2[ICCPROFILE] + + return args + + def load(self): + if self.use_load_libtiff: + return self._load_libtiff() + return super(TiffImageFile, self).load() + + def _load_libtiff(self): + """ Overload method triggered when we detect a compressed tiff + Calls out to libtiff """ + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.load_prepare() + + if not len(self.tile) == 1: + raise IOError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = self.tile[0][3] + (self.tag_v2.offset,) + decoder = Image._getdecoder(self.mode, 'libtiff', args, + self.decoderconfig) + try: + decoder.setimage(self.im, extents) + except ValueError: + raise IOError("Couldn't set the image") + + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an IOError if there's no underlying fp. Easier to + # deal with here by reordering. + if DEBUG: + print("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif hasattr(self.fp, "fileno"): + # we've got a actual file on disk, pass in the fp. 
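+            # (the decoder already holds a dup()ed OS-level descriptor,
+            # passed through the tile args in _setup, so libtiff reads
+            # straight from the file)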
+ if DEBUG: + print("have fileno, calling fileno version of the decoder.") + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + if DEBUG: + print("don't have fileno or getvalue. just reading") + # UNDONE -- so much for that buffer size thing. + n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + # libtiff closed the fp in a, we need to close self.fp, if possible + if hasattr(self.fp, 'close'): + if not self.__next: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise IOError(err) + + self.load_end() + + return Image.Image.load(self) + + def _setup(self): + "Setup this image object based on current tags" + + if 0xBC01 in self.tag_v2: + raise IOError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + fillorder = self.tag_v2.get(FILLORDER, 1) + + if DEBUG: + print("*** Summary ***") + print("- compression:", self._compression) + print("- photometric_interpretation:", photo) + print("- planar_configuration:", self._planar_configuration) + print("- fill_order:", fillorder) + + # size + xsize = self.tag_v2.get(IMAGEWIDTH) + ysize = self.tag_v2.get(IMAGELENGTH) + self.size = xsize, ysize + + if DEBUG: + print("- size:", self.size) + + sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if (len(sampleFormat) > 1 + and max(sampleFormat) == min(sampleFormat) == 1): + # SAMPLEFORMAT is properly per band, so an RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + sampleFormat = (1,) + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, photo, sampleFormat, fillorder, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError: + if DEBUG: + print("- unsupported format") + raise SyntaxError("unknown pixel mode") + + if DEBUG: + print("- raw mode:", rawmode) + print("- pil mode:", self.mode) + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION, 1) + yres = self.tag_v2.get(Y_RESOLUTION, 1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT, 1) + if resunit == 2: # dots per inch + self.info["dpi"] = xres, yres + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = xres * 2.54, yres * 2.54 + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = l = 0 + self.tile = [] + self.use_load_libtiff = False + if STRIPOFFSETS in self.tag_v2: + # striped image + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + if READ_LIBTIFF or self._compression in ["tiff_ccitt", "group3", + "group4", "tiff_jpeg", + "tiff_adobe_deflate", + "tiff_thunderscan", + "tiff_deflate", + "tiff_sgilog", + "tiff_sgilog24", + "tiff_raw_16"]: + # if DEBUG: + # print "Activating g4 compression for whole file" + + # Decoder expects entire file as one tile. 
+ # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # use the _load_libtiff function. + + self.use_load_libtiff = True + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + + # libtiff closes the file descriptor, so pass in a dup. + try: + fp = hasattr(self.fp, "fileno") and \ + os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell for py3 + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except IOError: + # io.BytesIO have a fileno, but returns an IOError if + # it doesn't use a file descriptor. + fp = False + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + key = ( + self.tag_v2.prefix, photo, sampleFormat, 1, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if self.mode in ('I;16B', 'I;16') and 'I;16' in rawmode: + rawmode = 'I;16N' + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, fp) + self.tile.append( + (self._compression, + (0, 0, w, ysize), + 0, a)) + a = None + + else: + for i in range(len(offsets)): + a = self._decoder(rawmode, l, i) + self.tile.append( + (self._compression, + (0, min(y, ysize), w, min(y+h, ysize)), + offsets[i], a)) + if DEBUG: + print("tiles: ", self.tile) + y = y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + elif TILEOFFSETS in self.tag_v2: + # tiled image + w = self.tag_v2.get(322) + h = self.tag_v2.get(323) + a = None + for o in self.tag_v2[TILEOFFSETS]: + if not a: + a = self._decoder(rawmode, l) + # FIXME: this doesn't work if the image size + # is not a multiple of the tile size... 
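+                # tiles are stored left-to-right, top-to-bottom; x and y
+                # advance below, and l moves to the next plane once a full
+                # pass over the image has been appended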
+ self.tile.append( + (self._compression, + (x, y, x+w, y+h), + o, a)) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + else: + if DEBUG: + print("- unsupported data organization") + raise SyntaxError("unknown data organization") + + # fixup palette descriptor + + if self.mode == "P": + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TIFF" % im.mode) + + ifd = ImageFileDirectory_v2(prefix=prefix) + + compression = im.encoderinfo.get('compression', + im.info.get('compression', 'raw')) + + libtiff = WRITE_LIBTIFF or compression != 'raw' + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + info = im.encoderinfo.get("tiffinfo", {}) + if DEBUG: + print("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except: + pass # might not be an IFD, Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, 'tag_v2'): + # preserve tags from original TIFF image file + for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, + IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype.get(key, None) + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + if "icc_profile" in im.info: + ifd[ICCPROFILE] = im.info["icc_profile"] + + for key, name in [(IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright")]: + name_with_spaces = name.replace("_", " ") + if "_" in name and 
name_with_spaces in im.encoderinfo: + warnings.warn("%r is deprecated; use %r instead" % + (name_with_spaces, name), DeprecationWarning) + ifd[key] = im.encoderinfo[name.replace("_", " ")] + if name in im.encoderinfo: + ifd[key] = im.encoderinfo[name] + + dpi = im.encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + ifd[PHOTOMETRIC_INTERPRETATION] = photo + + if im.mode == "P": + lut = im.im.getpalette("RGB", "RGB;L") + ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) + + # data orientation + stride = len(bits) * ((im.size[0]*bits[0]+7)//8) + ifd[ROWSPERSTRIP] = im.size[1] + ifd[STRIPBYTECOUNTS] = stride * im.size[1] + ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if libtiff: + if DEBUG: + print("Saving using libtiff encoder") + print("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS] + atts = {} + # bits per sample is a single short in the tiff directory, not a list. + atts[BITSPERSAMPLE] = bits[0] + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + legacy_ifd = {} + if hasattr(im, 'tag'): + legacy_ifd = im.tag.to_v2() + for tag, value in itertools.chain(ifd.items(), + getattr(im, 'tag_v2', {}).items(), + legacy_ifd.items()): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. It will segfault if it attempts + # to add a custom tag without the dictionary entry + # + # UNDONE -- add code for the custom dictionary + if tag not in TiffTags.LIBTIFF_CORE: + continue + if tag not in atts and tag not in blocklist: + if isinstance(value, unicode if bytes is str else str): + atts[tag] = value.encode('ascii', 'replace') + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + if DEBUG: + print("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. 
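+        # (the read path in _setup makes the same I;16 -> I;16N switch
+        # before handing work to libtiff, for the mirror-image reason)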
+ if im.mode in ('I;16B', 'I;16'): + rawmode = 'I;16N' + + a = (rawmode, compression, _fp, filename, atts) + # print(im.mode, compression, a, im.encoderconfig) + e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) + e.setimage(im.im, (0, 0)+im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16*1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + + else: + offset = ifd.save(fp) + + ImageFile._save(im, fp, [ + ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1)) + ]) + + # -- helper for multi-page save -- + if "_debug_multipage" in im.encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) + +Image.register_extension(TiffImageFile.format, ".tif") +Image.register_extension(TiffImageFile.format, ".tiff") + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/server/www/packages/packages-linux/x64/PIL/TiffTags.py b/server/www/packages/packages-linux/x64/PIL/TiffTags.py new file mode 100644 index 0000000..ecc63ba --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/TiffTags.py @@ -0,0 +1,442 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. +## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", type=None, length=0, enum=None): + return super(TagInfo, cls).__new__( + cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + return self.enum.get(value, value) + + +def lookup(tag): + """ + :param tag: Integer tag number + :returns: Taginfo namedtuple, From the TAGS_V2 info if possible, + otherwise just populating the value and name from TAGS. + If the tag is not recognized, "unknown" is returned for the name + + """ + + return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, 'unknown'))) + + +## +# Map tag numbers to tag info. +# +# id: (Name, Type, Length, enum_values) +# +# The length here differs from the length in the tiff spec. For +# numbers, the tiff spec is for the number of fields returned. We +# agree here. For string-like types, the tiff spec uses the length of +# field in bytes. In Pillow, we are using the number of expected +# fields, in general 1 for string-like types. 
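+#
+# A sketch of how this table is typically consumed (the values follow
+# from the entries below; tag 262 is PhotometricInterpretation):
+#
+#     from PIL import TiffTags
+#     tag = TiffTags.lookup(262)
+#     tag.name    # "PhotometricInterpretation"
+#     tag.type    # 3, i.e. SHORT
+#     tag.length  # 1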
+ + +BYTE = 1 +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 +UNDEFINED = 7 +SIGNED_RATIONAL = 10 +DOUBLE = 12 + +TAGS_V2 = { + + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ("Compression", SHORT, 1, + {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4, + "LZW": 5, "JPEG": 6, "PackBits": 32773}), + + 262: ("PhotometricInterpretation", SHORT, 1, + {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RGB Palette": 3, + "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892}), # Adobe DNG + 263: ("Threshholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellLength", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + + 280: ("MinSampleValue", LONG, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), + 297: ("PageNumber", SHORT, 2), + + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticities", SHORT, 6), + + 320: ("ColorMap", SHORT, 0), + 321: ("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + + 340: ("SMinSampleValue", DOUBLE, 0), + 341: ("SMaxSampleValue", DOUBLE, 0), + 342: ("TransferRange", SHORT, 6), + + # obsolete JPEG tags + 512: ("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", LONG, 0), + + 33432: ("Copyright", ASCII, 1), + + # FIXME add more tags here + 34665: ("ExifIFD", SHORT, 1), + 34675: ('ICCProfile', UNDEFINED, 1), + 34853: ('GPSInfoIFD', BYTE, 1), + + # MPInfo + 45056: ("MPFVersion", UNDEFINED, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", UNDEFINED, 1), + 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check + 
45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: ("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1), + 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1), + 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1), + 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1), + 45579: ("YawAngle", SIGNED_RATIONAL, 1), + 45580: ("PitchAngle", SIGNED_RATIONAL, 1), + 45581: ("RollAngle", SIGNED_RATIONAL, 1), + + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 1), + 50839: ("ImageJMetaData", UNDEFINED, 1) +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = {347: 'JPEGTables', + 700: 'XMP', + + # Additional Exif Info + 32932: 'Wang Annotation', + 33434: 'ExposureTime', + 33437: 'FNumber', + 33445: 'MD FileTag', + 33446: 'MD ScalePixel', + 33447: 'MD ColorTable', + 33448: 'MD LabName', + 33449: 'MD SampleInfo', + 33450: 'MD PrepDate', + 33451: 'MD PrepTime', + 33452: 'MD FileUnits', + 33550: 'ModelPixelScaleTag', + 33723: 'IptcNaaInfo', + 33918: 'INGR Packet Data Tag', + 33919: 'INGR Flag Registers', + 33920: 'IrasB Transformation Matrix', + 33922: 'ModelTiepointTag', + 34264: 'ModelTransformationTag', + 34377: 'PhotoshopInfo', + 34735: 'GeoKeyDirectoryTag', + 34736: 'GeoDoubleParamsTag', + 34737: 'GeoAsciiParamsTag', + 34850: 'ExposureProgram', + 34852: 'SpectralSensitivity', + 34855: 'ISOSpeedRatings', + 34856: 'OECF', + 34864: 'SensitivityType', + 34865: 'StandardOutputSensitivity', + 34866: 'RecommendedExposureIndex', + 34867: 'ISOSpeed', + 34868: 'ISOSpeedLatitudeyyy', + 34869: 'ISOSpeedLatitudezzz', + 34908: 'HylaFAX FaxRecvParams', + 34909: 'HylaFAX FaxSubAddress', + 34910: 'HylaFAX FaxRecvTime', + 36864: 'ExifVersion', + 36867: 'DateTimeOriginal', + 36868: 'DateTImeDigitized', + 37121: 'ComponentsConfiguration', + 37122: 'CompressedBitsPerPixel', + 37724: 'ImageSourceData', + 37377: 'ShutterSpeedValue', + 37378: 'ApertureValue', + 37379: 'BrightnessValue', + 37380: 'ExposureBiasValue', + 37381: 'MaxApertureValue', + 37382: 'SubjectDistance', + 37383: 'MeteringMode', + 37384: 'LightSource', + 37385: 'Flash', + 37386: 'FocalLength', + 37396: 'SubjectArea', + 37500: 'MakerNote', + 37510: 'UserComment', + 37520: 'SubSec', + 37521: 'SubSecTimeOriginal', + 37522: 'SubsecTimeDigitized', + 40960: 'FlashPixVersion', + 40961: 'ColorSpace', + 40962: 'PixelXDimension', + 40963: 'PixelYDimension', + 40964: 'RelatedSoundFile', + 40965: 'InteroperabilityIFD', + 41483: 'FlashEnergy', + 41484: 'SpatialFrequencyResponse', + 41486: 'FocalPlaneXResolution', + 41487: 'FocalPlaneYResolution', + 41488: 'FocalPlaneResolutionUnit', + 41492: 'SubjectLocation', + 41493: 'ExposureIndex', + 41495: 'SensingMethod', + 41728: 'FileSource', + 41729: 'SceneType', + 41730: 'CFAPattern', + 41985: 'CustomRendered', + 41986: 'ExposureMode', + 41987: 'WhiteBalance', + 41988: 'DigitalZoomRatio', + 41989: 'FocalLengthIn35mmFilm', + 41990: 'SceneCaptureType', + 41991: 'GainControl', + 41992: 'Contrast', + 41993: 'Saturation', + 41994: 'Sharpness', + 41995: 'DeviceSettingDescription', + 41996: 'SubjectDistanceRange', + 42016: 'ImageUniqueID', + 42032: 'CameraOwnerName', + 42033: 'BodySerialNumber', + 42034: 'LensSpecification', + 
42035: 'LensMake', + 42036: 'LensModel', + 42037: 'LensSerialNumber', + 42112: 'GDAL_METADATA', + 42113: 'GDAL_NODATA', + 42240: 'Gamma', + 50215: 'Oce Scanjob Description', + 50216: 'Oce Application Selector', + 50217: 'Oce Identification Number', + 50218: 'Oce ImageLogic Characteristics', + + # Adobe DNG + 50706: 'DNGVersion', + 50707: 'DNGBackwardVersion', + 50708: 'UniqueCameraModel', + 50709: 'LocalizedCameraModel', + 50710: 'CFAPlaneColor', + 50711: 'CFALayout', + 50712: 'LinearizationTable', + 50713: 'BlackLevelRepeatDim', + 50714: 'BlackLevel', + 50715: 'BlackLevelDeltaH', + 50716: 'BlackLevelDeltaV', + 50717: 'WhiteLevel', + 50718: 'DefaultScale', + 50719: 'DefaultCropOrigin', + 50720: 'DefaultCropSize', + 50721: 'ColorMatrix1', + 50722: 'ColorMatrix2', + 50723: 'CameraCalibration1', + 50724: 'CameraCalibration2', + 50725: 'ReductionMatrix1', + 50726: 'ReductionMatrix2', + 50727: 'AnalogBalance', + 50728: 'AsShotNeutral', + 50729: 'AsShotWhiteXY', + 50730: 'BaselineExposure', + 50731: 'BaselineNoise', + 50732: 'BaselineSharpness', + 50733: 'BayerGreenSplit', + 50734: 'LinearResponseLimit', + 50735: 'CameraSerialNumber', + 50736: 'LensInfo', + 50737: 'ChromaBlurRadius', + 50738: 'AntiAliasStrength', + 50740: 'DNGPrivateData', + 50778: 'CalibrationIlluminant1', + 50779: 'CalibrationIlluminant2', + 50784: 'Alias Layer Metadata' + } + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. 
+# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +LIBTIFF_CORE = set([255, 256, 257, 258, 259, 262, 263, 266, 274, 277, + 278, 280, 281, 340, 341, 282, 283, 284, 286, 287, + 296, 297, 321, 320, 338, 32995, 322, 323, 32998, + 32996, 339, 32997, 330, 531, 530, 301, 532, 333, + # as above + 269 # this has been in our tests forever, and works + ]) + +LIBTIFF_CORE.remove(320) # Array of short, crashes +LIBTIFF_CORE.remove(301) # Array of short, crashes +LIBTIFF_CORE.remove(532) # Array of long, crashes + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for tiled images in libtiff +LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. diff --git a/server/www/packages/packages-linux/x64/PIL/WalImageFile.py b/server/www/packages/packages-linux/x64/PIL/WalImageFile.py new file mode 100644 index 0000000..0cbd1ca --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/WalImageFile.py @@ -0,0 +1,128 @@ +# encoding: utf-8 +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# NOTE: This format cannot be automatically recognized, so the reader +# is not registered for use with Image.open(). To open a WAL file, use +# the WalImageFile.open() function instead. + +# This reader is based on the specification available from: +# http://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +# and has been tested with a few sample files found using google. + +from __future__ import print_function + +from PIL import Image, _binary + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +i32 = _binary.i32le + + +## +# Load texture from a Quake2 WAL texture file. +#
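+# A minimal usage sketch (the file name here is hypothetical):
+#
+#     from PIL import WalImageFile
+#     im = WalImageFile.open("sample.wal")
+#     im.save("sample.png")
+#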

+# By default, a Quake2 standard palette is attached to the texture.
+# To override the palette, use the putpalette method.
+#
+# @param filename WAL file name, or an opened file handle.
+# @return An image instance.
+
+def open(filename):
+    # FIXME: modify to return a WalImageFile instance instead of
+    # plain Image object ?
+
+    if hasattr(filename, "read"):
+        fp = filename
+    else:
+        fp = builtins.open(filename, "rb")
+
+    # read header fields
+    header = fp.read(32+24+32+12)
+    size = i32(header, 32), i32(header, 36)
+    offset = i32(header, 40)
+
+    # load pixel data
+    fp.seek(offset)
+
+    im = Image.frombytes("P", size, fp.read(size[0] * size[1]))
+    im.putpalette(quake2palette)
+
+    im.format = "WAL"
+    im.format_description = "Quake2 Texture"
+
+    # strings are null-terminated
+    im.info["name"] = header[:32].split(b"\0", 1)[0]
+    next_name = header[56:56+32].split(b"\0", 1)[0]
+    if next_name:
+        im.info["next_name"] = next_name
+
+    return im
+
+
+quake2palette = (
+    # default palette taken from piffo 0.93 by Hans Häggström
+    b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e"
+    b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f"
+    b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c"
+    b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b"
+    b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10"
+    b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07"
+    b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f"
+    b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16"
+    b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d"
+    b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31"
+    b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28"
+    b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07"
+    b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27"
+    b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b"
+    b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01"
+    b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21"
+    b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14"
+    b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07"
+    b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14"
+    b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f"
+    b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34"
+    b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d"
+    b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14"
+    b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01"
+    b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24"
+    b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10"
+    b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01"
+    b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27"
+    b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c"
+    b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a"
+    b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26"
+    b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d"
+    b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01"
+    b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20"
+    b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17"
+    
b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/server/www/packages/packages-linux/x64/PIL/WebPImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/WebPImagePlugin.py new file mode 100644 index 0000000..6837b53 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/WebPImagePlugin.py @@ -0,0 +1,80 @@ +from PIL import Image +from PIL import ImageFile +from io import BytesIO +from PIL import _webp + + +_VALID_WEBP_MODES = { + "RGB": True, + "RGBA": True, + } + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless + } + + +def _accept(prefix): + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + return is_riff_file_format and is_webp_file and is_valid_vp8_mode + + +class WebPImageFile(ImageFile.ImageFile): + + format = "WEBP" + format_description = "WebP image" + + def _open(self): + data, width, height, self.mode, icc_profile, exif = \ + _webp.WebPDecode(self.fp.read()) + + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + + self.size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + + def _getexif(self): + from PIL.JpegImagePlugin import _getexif + return _getexif(self) + + +def _save(im, fp, filename): + image_mode = im.mode + if im.mode not in _VALID_WEBP_MODES: + raise IOError("cannot write mode %s as WEBP" % image_mode) + + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + icc_profile = im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + float(quality), + im.mode, + icc_profile, + exif + ) + if data is None: + raise IOError("cannot write file as WEBP (encoder returned None)") + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +Image.register_save(WebPImageFile.format, _save) + +Image.register_extension(WebPImageFile.format, ".webp") +Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/server/www/packages/packages-linux/x64/PIL/WmfImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/WmfImagePlugin.py new file mode 100644 index 0000000..3163210 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/WmfImagePlugin.py @@ -0,0 +1,173 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret Labs AB 
1997-2004. All rights reserved.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image, ImageFile, _binary
+
+__version__ = "0.2"
+
+_handler = None
+
+if str != bytes:
+    long = int
+
+
+##
+# Install application-specific WMF image handler.
+#
+# @param handler Handler object.
+
+def register_handler(handler):
+    global _handler
+    _handler = handler
+
+if hasattr(Image.core, "drawwmf"):
+    # install default handler (windows only)
+
+    class WmfHandler(object):
+
+        def open(self, im):
+            im.mode = "RGB"
+            self.bbox = im.info["wmf_bbox"]
+
+        def load(self, im):
+            im.fp.seek(0)  # rewind
+            return Image.frombytes(
+                "RGB", im.size,
+                Image.core.drawwmf(im.fp.read(), im.size, self.bbox),
+                "raw", "BGR", (im.size[0]*3 + 3) & -4, -1
+                )
+
+    register_handler(WmfHandler())
+
+# --------------------------------------------------------------------
+
+word = _binary.i16le
+
+
+def short(c, o=0):
+    v = word(c, o)
+    if v >= 32768:
+        v -= 65536
+    return v
+
+dword = _binary.i32le
+
+
+#
+# --------------------------------------------------------------------
+# Read WMF file
+
+def _accept(prefix):
+    return (
+        prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or
+        prefix[:4] == b"\x01\x00\x00\x00"
+    )
+
+
+##
+# Image plugin for Windows metafiles.
+
+class WmfStubImageFile(ImageFile.StubImageFile):
+
+    format = "WMF"
+    format_description = "Windows Metafile"
+
+    def _open(self):
+
+        # check placable header
+        s = self.fp.read(80)
+
+        if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00":
+
+            # placeable windows metafile
+
+            # get units per inch
+            inch = word(s, 14)
+
+            # get bounding box
+            x0 = short(s, 6)
+            y0 = short(s, 8)
+            x1 = short(s, 10)
+            y1 = short(s, 12)
+
+            # normalize size to 72 dots per inch
+            size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch
+
+            self.info["wmf_bbox"] = x0, y0, x1, y1
+
+            self.info["dpi"] = 72
+
+            # print self.mode, self.size, self.info
+
+            # sanity check (standard metafile header)
+            if s[22:26] != b"\x01\x00\t\x00":
+                raise SyntaxError("Unsupported WMF file format")
+
+        elif dword(s) == 1 and s[40:44] == b" EMF":
+            # enhanced metafile
+
+            # get bounding box
+            x0 = dword(s, 8)
+            y0 = dword(s, 12)
+            x1 = dword(s, 16)
+            y1 = dword(s, 20)
+
+            # get frame (in 0.01 millimeter units)
+            frame = dword(s, 24), dword(s, 28), dword(s, 32), dword(s, 36)
+
+            # normalize size to 72 dots per inch
+            size = x1 - x0, y1 - y0
+
+            # calculate dots per inch from bbox and frame
+            xdpi = 2540 * (x1 - x0) // (frame[2] - frame[0])
+            ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1])
+
+            self.info["wmf_bbox"] = x0, y0, x1, y1
+
+            if xdpi == ydpi:
+                self.info["dpi"] = xdpi
+            else:
+                self.info["dpi"] = xdpi, ydpi
+
+        else:
+            raise SyntaxError("Unsupported file format")
+
+        self.mode = "RGB"
+        self.size = size
+
+        loader = self._load()
+        if loader:
+            loader.open(self)
+
+    def _load(self):
+        return _handler
+
+
+def _save(im, fp, filename):
+    if _handler is None or not hasattr(_handler, "save"):
+        raise IOError("WMF save handler not installed")
+    _handler.save(im, fp, filename)
+
+#
+# --------------------------------------------------------------------
+# Registry stuff
+
+Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept)
+Image.register_save(WmfStubImageFile.format, _save)
+
+Image.register_extension(WmfStubImageFile.format, ".wmf")
+Image.register_extension(WmfStubImageFile.format, ".emf")
diff --git a/server/www/packages/packages-linux/x64/PIL/XVThumbImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/XVThumbImagePlugin.py
new file mode 100644
index 0000000..9fe9ca1
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/XVThumbImagePlugin.py
@@ -0,0 +1,79 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XV Thumbnail file handler by Charles E. "Gene" Cash
+# (gcash@magicnet.net)
+#
+# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV,
+# available from ftp://ftp.cis.upenn.edu/pub/xv/
+#
+# history:
+# 98-08-15 cec created (b/w only)
+# 98-12-09 cec added color palette
+# 98-12-28 fl added to PIL (with only a few very minor modifications)
+#
+# To do:
+# FIXME: make save work (this requires quantization support)
+#
+
+from PIL import Image, ImageFile, ImagePalette, _binary
+
+__version__ = "0.1"
+
+o8 = _binary.o8
+
+_MAGIC = b"P7 332"
+
+# standard color palette for thumbnails (RGB332)
+PALETTE = b""
+for r in range(8):
+    for g in range(8):
+        for b in range(4):
+            PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3))
+
+def _accept(prefix):
+    return prefix[:6] == _MAGIC
+
+
+##
+# Image plugin for XV thumbnail images.
+
+class XVThumbImageFile(ImageFile.ImageFile):
+
+    format = "XVThumb"
+    format_description = "XV thumbnail image"
+
+    def _open(self):
+
+        # check magic
+        if self.fp.read(6) != _MAGIC:
+            raise SyntaxError("not an XV thumbnail file")
+
+        # Skip to beginning of next line
+        self.fp.readline()
+
+        # skip info comments
+        while True:
+            s = self.fp.readline()
+            if not s:
+                raise SyntaxError("Unexpected EOF reading XV thumbnail file")
+            if s[:1] != b'#':
+                break
+
+        # parse header line (already read)
+        s = s.strip().split()
+
+        self.mode = "P"
+        self.size = int(s[0]), int(s[1])
+
+        self.palette = ImagePalette.raw("RGB", PALETTE)
+
+        self.tile = [
+            ("raw", (0, 0)+self.size,
+             self.fp.tell(), (self.mode, 0, 1)
+             )]
+
+# --------------------------------------------------------------------
+
+Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept)
diff --git a/server/www/packages/packages-linux/x64/PIL/XbmImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/XbmImagePlugin.py
new file mode 100644
index 0000000..bca8828
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/XbmImagePlugin.py
@@ -0,0 +1,96 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XBM File handling
+#
+# History:
+# 1995-09-08 fl Created
+# 1996-11-01 fl Added save support
+# 1997-07-07 fl Made header parser more tolerant
+# 1997-07-22 fl Fixed yet another parser bug
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
+# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog)
+# 2004-02-24 fl Allow some whitespace before first #define
+#
+# Copyright (c) 1997-2004 by Secret Labs AB
+# Copyright (c) 1996-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import re
+from PIL import Image, ImageFile
+
+__version__ = "0.6"
+
+# XBM header
+xbm_head = re.compile(
+    b"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
+    b"#define[ \t]+.*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
+    b"(?P<hotspot>"
+    b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+"
+    b"#define[ \t]+[^_]*_y_hot[ \t]+(?P<yhot>[0-9]+)[\r\n]+"
+    b")?"
+    b"[\\000-\\377]*_bits\\[\\]"
+)


+def _accept(prefix):
+    return prefix.lstrip()[:7] == b"#define"


+##
+# Image plugin for X11 bitmaps.
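+#
+# The header this plugin parses looks like the following sketch (the
+# "im_" name prefix is arbitrary, and the hotspot defines are
+# optional):
+#
+#     #define im_width 16
+#     #define im_height 16
+#     static char im_bits[] = {
+#         0x00, 0xff, ... };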
+ +class XbmImageFile(ImageFile.ImageFile): + + format = "XBM" + format_description = "X11 Bitmap" + + def _open(self): + + m = xbm_head.match(self.fp.read(512)) + + if m: + + xsize = int(m.group("width")) + ysize = int(m.group("height")) + + if m.group("hotspot"): + self.info["hotspot"] = ( + int(m.group("xhot")), int(m.group("yhot")) + ) + + self.mode = "1" + self.size = xsize, ysize + + self.tile = [("xbm", (0, 0)+self.size, m.end(), None)] + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as XBM" % im.mode) + + fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii')) + fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii')) + + hotspot = im.encoderinfo.get("hotspot") + if hotspot: + fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii')) + fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii')) + + fp.write(b"static char im_bits[] = {\n") + + ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)]) + + fp.write(b"};\n") + + +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) + +Image.register_extension(XbmImageFile.format, ".xbm") + +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/server/www/packages/packages-linux/x64/PIL/XpmImagePlugin.py b/server/www/packages/packages-linux/x64/PIL/XpmImagePlugin.py new file mode 100644 index 0000000..556adb8 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/XpmImagePlugin.py @@ -0,0 +1,130 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XPM File handling +# +# History: +# 1996-12-29 fl Created +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re +from PIL import Image, ImageFile, ImagePalette +from PIL._binary import i8, o8 + +__version__ = "0.2" + +# XPM header +xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") + + +def _accept(prefix): + return prefix[:9] == b"/* XPM */" + + +## +# Image plugin for X11 pixel maps. 
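+#
+# The values line matched by xpm_head above looks like, for example:
+#
+#     "16 16 4 1",
+#
+# i.e. width, height, number of palette entries and characters per
+# pixel; the reader below only accepts one character per pixel and at
+# most 256 palette entries.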
+ +class XpmImageFile(ImageFile.ImageFile): + + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self): + + if not _accept(self.fp.read(9)): + raise SyntaxError("not an XPM file") + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("broken XPM file") + m = xpm_head.match(s) + if m: + break + + self.size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + raise ValueError("cannot read this XPM file") + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for i in range(pal): + + s = self.fp.readline() + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] in b'\r\n': + s = s[:-1] + + c = i8(s[1]) + s = s[2:-2].split() + + for i in range(0, len(s), 2): + + if s[i] == b"c": + + # process colour key + rgb = s[i+1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[0:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = (o8((rgb >> 16) & 255) + + o8((rgb >> 8) & 255) + + o8(rgb & 255)) + else: + # unknown colour + raise ValueError("cannot read this XPM file") + break + + else: + + # missing colour key + raise ValueError("cannot read this XPM file") + + self.mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, bytes): + + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [None] * ysize + + for i in range(ysize): + s[i] = self.fp.readline()[1:xsize+1].ljust(xsize) + + self.fp = None + + return b"".join(s) + +# +# Registry + +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/server/www/packages/packages-linux/x64/PIL/__init__.py b/server/www/packages/packages-linux/x64/PIL/__init__.py new file mode 100644 index 0000000..e5dcf43 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/__init__.py @@ -0,0 +1,60 @@ +# +# The Python Imaging Library. +# $Id$ +# +# package placeholder +# +# Copyright (c) 1999 by Secret Labs AB. +# +# See the README file for information on usage and redistribution. 
+# + +# ;-) + +VERSION = '1.1.7' # PIL version +PILLOW_VERSION = '3.3.0' # Pillow + +_plugins = ['BmpImagePlugin', + 'BufrStubImagePlugin', + 'CurImagePlugin', + 'DcxImagePlugin', + 'DdsImagePlugin', + 'EpsImagePlugin', + 'FitsStubImagePlugin', + 'FliImagePlugin', + 'FpxImagePlugin', + 'FtexImagePlugin', + 'GbrImagePlugin', + 'GifImagePlugin', + 'GribStubImagePlugin', + 'Hdf5StubImagePlugin', + 'IcnsImagePlugin', + 'IcoImagePlugin', + 'ImImagePlugin', + 'ImtImagePlugin', + 'IptcImagePlugin', + 'JpegImagePlugin', + 'Jpeg2KImagePlugin', + 'McIdasImagePlugin', + 'MicImagePlugin', + 'MpegImagePlugin', + 'MpoImagePlugin', + 'MspImagePlugin', + 'PalmImagePlugin', + 'PcdImagePlugin', + 'PcxImagePlugin', + 'PdfImagePlugin', + 'PixarImagePlugin', + 'PngImagePlugin', + 'PpmImagePlugin', + 'PsdImagePlugin', + 'SgiImagePlugin', + 'SpiderImagePlugin', + 'SunImagePlugin', + 'TgaImagePlugin', + 'TiffImagePlugin', + 'WebPImagePlugin', + 'WmfImagePlugin', + 'XbmImagePlugin', + 'XpmImagePlugin', + 'XVThumbImagePlugin'] diff --git a/server/www/packages/packages-linux/x64/PIL/_binary.py b/server/www/packages/packages-linux/x64/PIL/_binary.py new file mode 100644 index 0000000..2f5e8ff --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/_binary.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. +# + +from struct import unpack, pack + +if bytes is str: + def i8(c): + return ord(c) + + def o8(i): + return chr(i & 255) +else: + def i8(c): + return c if c.__class__ is int else c[0] + + def o8(i): + return bytes((i & 255,)) + + +# Input, le = little endian, be = big endian +# TODO: replace with more readable struct.unpack equivalent +def i16le(c, o=0): + """ + Converts a 2-bytes (16 bits) string to an integer. 
+
+    c: string containing bytes to convert
+    o: offset of bytes to convert in string
+    """
+    return unpack("<H", c[o:o+2])[0]
+
+
+def i16be(c, o=0):
+    return unpack(">H", c[o:o+2])[0]
+
+
+def i32le(c, o=0):
+    """
+    Converts a 4-bytes (32 bits) string to an integer.
+
+    c: string containing bytes to convert
+    o: offset of bytes to convert in string
+    """
+    return unpack("<I", c[o:o+4])[0]
+
+
+def i32be(c, o=0):
+    return unpack(">I", c[o:o+4])[0]
+
+
+# Output, le = little endian, be = big endian
+def o16le(i):
+    return pack("<H", i)
+
+
+def o32le(i):
+    return pack("<I", i)
+
+
+def o16be(i):
+    return pack(">H", i)
+
+
+def o32be(i):
+    return pack(">I", i)
+
+# End of file
diff --git a/server/www/packages/packages-linux/x64/PIL/_imaging.cpython-34m.so b/server/www/packages/packages-linux/x64/PIL/_imaging.cpython-34m.so
new file mode 100644
index 0000000..cc97f66
Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/_imaging.cpython-34m.so differ
diff --git a/server/www/packages/packages-linux/x64/PIL/_imagingcms.cpython-34m.so b/server/www/packages/packages-linux/x64/PIL/_imagingcms.cpython-34m.so
new file mode 100644
index 0000000..d8a09b5
Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/_imagingcms.cpython-34m.so differ
diff --git a/server/www/packages/packages-linux/x64/PIL/_imagingft.cpython-34m.so b/server/www/packages/packages-linux/x64/PIL/_imagingft.cpython-34m.so
new file mode 100644
index 0000000..9d2012d
Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/_imagingft.cpython-34m.so differ
diff --git a/server/www/packages/packages-linux/x64/PIL/_imagingmath.cpython-34m.so b/server/www/packages/packages-linux/x64/PIL/_imagingmath.cpython-34m.so
new file mode 100644
index 0000000..fdc1118
Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/_imagingmath.cpython-34m.so differ
diff --git a/server/www/packages/packages-linux/x64/PIL/_imagingmorph.cpython-34m.so b/server/www/packages/packages-linux/x64/PIL/_imagingmorph.cpython-34m.so
new file mode 100644
index 0000000..7fc8bd6
Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/_imagingmorph.cpython-34m.so differ
diff --git a/server/www/packages/packages-linux/x64/PIL/_imagingtk.cpython-34m.so b/server/www/packages/packages-linux/x64/PIL/_imagingtk.cpython-34m.so
new file mode 100644
index 0000000..c1f7e31
Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/_imagingtk.cpython-34m.so differ
diff --git a/server/www/packages/packages-linux/x64/PIL/_tkinter_finder.py b/server/www/packages/packages-linux/x64/PIL/_tkinter_finder.py
new file mode 100644
index 0000000..df41591
--- /dev/null
+++ b/server/www/packages/packages-linux/x64/PIL/_tkinter_finder.py
@@ -0,0 +1,20 @@
+""" Find compiled module linking to Tcl / Tk libraries
+"""
+import sys
+
+if sys.version_info[0] > 2:
+    from tkinter import _tkinter as tk
+else:
+    from Tkinter import tkinter as tk
+
+if hasattr(sys, 'pypy_find_executable'):
+    # Tested with packages at https://bitbucket.org/pypy/pypy/downloads.
+    # PyPies 1.6, 2.0 do not have tkinter built in. PyPy3-2.3.1 gives an
+    # OSError trying to import tkinter. 
Otherwise: + try: # PyPy 5.1, 4.0.0, 2.6.1, 2.6.0 + TKINTER_LIB = tk.tklib_cffi.__file__ + except AttributeError: + # PyPy3 2.4, 2.1-beta1; PyPy 2.5.1, 2.5.0, 2.4.0, 2.3, 2.2, 2.1 + TKINTER_LIB = tk.tkffi.verifier.modulefilename +else: + TKINTER_LIB = tk.__file__ diff --git a/server/www/packages/packages-linux/x64/PIL/_util.py b/server/www/packages/packages-linux/x64/PIL/_util.py new file mode 100644 index 0000000..51c6f68 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/_util.py @@ -0,0 +1,27 @@ +import os + +if bytes is str: + def isStringType(t): + return isinstance(t, basestring) + + def isPath(f): + return isinstance(f, basestring) +else: + def isStringType(t): + return isinstance(t, str) + + def isPath(f): + return isinstance(f, (bytes, str)) + + +# Checks if an object is a string, and that it points to a directory. +def isDirectory(f): + return isPath(f) and os.path.isdir(f) + + +class deferred_error(object): + def __init__(self, ex): + self.ex = ex + + def __getattr__(self, elt): + raise self.ex diff --git a/server/www/packages/packages-linux/x64/PIL/_webp.cpython-34m.so b/server/www/packages/packages-linux/x64/PIL/_webp.cpython-34m.so new file mode 100644 index 0000000..d269438 Binary files /dev/null and b/server/www/packages/packages-linux/x64/PIL/_webp.cpython-34m.so differ diff --git a/server/www/packages/packages-linux/x64/PIL/features.py b/server/www/packages/packages-linux/x64/PIL/features.py new file mode 100644 index 0000000..fd87f09 --- /dev/null +++ b/server/www/packages/packages-linux/x64/PIL/features.py @@ -0,0 +1,67 @@ +from PIL import Image + +modules = { + "pil": "PIL._imaging", + "tkinter": "PIL._imagingtk", + "freetype2": "PIL._imagingft", + "littlecms2": "PIL._imagingcms", + "webp": "PIL._webp", + "transp_webp": ("WEBP", "WebPDecoderBuggyAlpha") +} + + +def check_module(feature): + if feature not in modules: + raise ValueError("Unknown module %s" % feature) + + module = modules[feature] + + method_to_call = None + if type(module) is tuple: + module, method_to_call = module + + try: + imported_module = __import__(module) + except ImportError: + # If a method is being checked, None means that + # rather than the method failing, the module required for the method + # failed to be imported first + return None if method_to_call else False + + if method_to_call: + method = getattr(imported_module, method_to_call) + return method() is True + else: + return True + + +def get_supported_modules(): + supported_modules = [] + for feature in modules: + if check_module(feature): + supported_modules.append(feature) + return supported_modules + +codecs = { + "jpg": "jpeg", + "jpg_2000": "jpeg2k", + "zlib": "zip", + "libtiff": "libtiff" +} + + +def check_codec(feature): + if feature not in codecs: + raise ValueError("Unknown codec %s" % feature) + + codec = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def get_supported_codecs(): + supported_codecs = [] + for feature in codecs: + if check_codec(feature): + supported_codecs.append(feature) + return supported_codecs diff --git a/server/www/packages/packages-windows/x86/PIL/BdfFontFile.py b/server/www/packages/packages-windows/x86/PIL/BdfFontFile.py new file mode 100644 index 0000000..e6cc22f --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/BdfFontFile.py @@ -0,0 +1,132 @@ +# +# The Python Imaging Library +# $Id$ +# +# bitmap distribution font (bdf) file parser +# +# history: +# 1996-05-16 fl created (as bdf2pil) +# 1997-08-25 fl converted to FontFile driver +# 2001-05-25 fl 
removed bogus __init__ call +# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) +# 2003-04-22 fl more robustification (from Graham Dumpleton) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import FontFile + + +# -------------------------------------------------------------------- +# parse X Bitmap Distribution Format (BDF) +# -------------------------------------------------------------------- + +bdf_slant = { + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other" +} + +bdf_spacing = { + "P": "Proportional", + "M": "Monospaced", + "C": "Cell" +} + + +def bdf_char(f): + # skip to STARTCHAR + while True: + s = f.readline() + if not s: + return None + if s[:9] == b"STARTCHAR": + break + id = s[9:].strip().decode('ascii') + + # load symbol properties + props = {} + while True: + s = f.readline() + if not s or s[:6] == b"BITMAP": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + + # load bitmap + bitmap = [] + while True: + s = f.readline() + if not s or s[:7] == b"ENDCHAR": + break + bitmap.append(s[:-1]) + bitmap = b"".join(bitmap) + + [x, y, l, d] = [int(p) for p in props["BBX"].split()] + [dx, dy] = [int(p) for p in props["DWIDTH"].split()] + + bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y) + + try: + im = Image.frombytes("1", (x, y), bitmap, "hex", "1") + except ValueError: + # deal with zero-width characters + im = Image.new("1", (x, y)) + + return id, int(props["ENCODING"]), bbox, im + + +## +# Font file plugin for the X11 BDF format. + +class BdfFontFile(FontFile.FontFile): + + def __init__(self, fp): + + FontFile.FontFile.__init__(self) + + s = fp.readline() + if s[:13] != b"STARTFONT 2.1": + raise SyntaxError("not a valid BDF file") + + props = {} + comments = [] + + while True: + s = fp.readline() + if not s or s[:13] == b"ENDPROPERTIES": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + if s[:i] in [b"COMMENT", b"COPYRIGHT"]: + if s.find(b"LogicalFontDescription") < 0: + comments.append(s[i+1:-1].decode('ascii')) + + # font = props["FONT"].split("-") + + # font[4] = bdf_slant[font[4].upper()] + # font[11] = bdf_spacing[font[11].upper()] + + # ascent = int(props["FONT_ASCENT"]) + # descent = int(props["FONT_DESCENT"]) + + # fontname = ";".join(font[1:]) + + # print "#", fontname + # for i in comments: + # print "#", i + + while True: + c = bdf_char(fp) + if not c: + break + id, ch, (xy, dst, src), im = c + if 0 <= ch < len(self.glyph): + self.glyph[ch] = xy, dst, src, im diff --git a/server/www/packages/packages-windows/x86/PIL/BmpImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/BmpImagePlugin.py new file mode 100644 index 0000000..eccd299 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/BmpImagePlugin.py @@ -0,0 +1,294 @@ +# +# The Python Imaging Library. +# $Id$ +# +# BMP file handler +# +# Windows (and OS/2) native bitmap storage format. 
+# +# history: +# 1995-09-01 fl Created +# 1996-04-30 fl Added save +# 1997-08-27 fl Fixed save of 1-bit images +# 1998-03-06 fl Load P images as L where possible +# 1998-07-03 fl Load P images as 1 where possible +# 1998-12-29 fl Handle small palettes +# 2002-12-30 fl Fixed load of 1-bit palette images +# 2003-04-21 fl Fixed load of 1-bit monochrome images +# 2003-04-23 fl Added limited support for BI_BITFIELDS compression +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary +import math + +__version__ = "0.7" + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +# +# -------------------------------------------------------------------- +# Read BMP file + +BIT2MODE = { + # bits => mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix): + return prefix[:2] == b"BM" + + +# ============================================================================== +# Image plugin for the Windows BMP format. +# ============================================================================== +class BmpImageFile(ImageFile.ImageFile): + """ Image plugin for the Windows Bitmap format (BMP) """ + + # -------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + # --------------------------------------------------- BMP Compression values + COMPRESSIONS = {'RAW': 0, 'RLE8': 1, 'RLE4': 2, 'BITFIELDS': 3, 'JPEG': 4, 'PNG': 5} + RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5 + + def _bitmap(self, header=0, offset=0): + """ Read relevant info about the BMP """ + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + file_info = dict() + file_info['header_size'] = i32(read(4)) # read bmp header size @offset 14 (this is part of the header size) + file_info['direction'] = -1 + # --------------------- If requested, read header at a specific position + header_data = ImageFile._safe_read(self.fp, file_info['header_size'] - 4) # read the rest of the bmp header, without its size + # --------------------------------------------------- IBM OS/2 Bitmap v1 + # ------ This format has different offsets because of width/height types + if file_info['header_size'] == 12: + file_info['width'] = i16(header_data[0:2]) + file_info['height'] = i16(header_data[2:4]) + file_info['planes'] = i16(header_data[4:6]) + file_info['bits'] = i16(header_data[6:8]) + file_info['compression'] = self.RAW + file_info['palette_padding'] = 3 + # ---------------------------------------------- Windows Bitmap v2 to v5 + elif file_info['header_size'] in (40, 64, 108, 124): # v3, OS/2 v2, v4, v5 + if file_info['header_size'] >= 40: # v3 and OS/2 + file_info['y_flip'] = i8(header_data[7]) == 0xff + file_info['direction'] = 1 if file_info['y_flip'] else -1 + file_info['width'] = i32(header_data[0:4]) + file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8]) + file_info['planes'] = i16(header_data[8:10]) + file_info['bits'] = i16(header_data[10:12]) + file_info['compression'] = i32(header_data[12:16]) + file_info['data_size'] = i32(header_data[16:20]) # byte size of pixel data + file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28])) + 
file_info['colors'] = i32(header_data[28:32]) + file_info['palette_padding'] = 4 + self.info["dpi"] = tuple( + map(lambda x: int(math.ceil(x / 39.3701)), + file_info['pixels_per_meter'])) + if file_info['compression'] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']): + file_info[mask] = i32(header_data[36+idx*4:40+idx*4]) + else: + # 40 byte headers only have the three components in the bitfields masks, + # ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx + # See also https://github.com/python-pillow/Pillow/issues/1293 + # There is a 4th component in the RGBQuad, in the alpha location, but it + # is listed as a reserved component, and it is not generally an alpha channel + file_info['a_mask'] = 0x0 + for mask in ['r_mask', 'g_mask', 'b_mask']: + file_info[mask] = i32(read(4)) + file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask']) + file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask']) + else: + raise IOError("Unsupported BMP header type (%d)" % file_info['header_size']) + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 16 + self.size = file_info['width'], file_info['height'] + # -------- If color count was not found in the header, compute from bits + file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits']) + # -------------------------------- Check abnormal values for DOS attacks + if file_info['width'] * file_info['height'] > 2**31: + raise IOError("Unsupported BMP Size: (%dx%d)" % self.size) + # ----------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None)) + if self.mode is None: + raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits']) + # ----------------- Process BMP with Bitfields compression (not palette) + if file_info['compression'] == self.BITFIELDS: + SUPPORTED = { + 32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0)], + 24: [(0xff0000, 0xff00, 0xff)], + 16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)] + } + MASK_MODES = { + (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX", + (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA", + (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xff0000, 0xff00, 0xff)): "BGR", + (16, (0xf800, 0x7e0, 0x1f)): "BGR;16", + (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15" + } + if file_info['bits'] in SUPPORTED: + if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])] + self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode + elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])] + else: + raise IOError("Unsupported BMP bitfields layout") + else: + raise IOError("Unsupported BMP bitfields layout") + elif file_info['compression'] == self.RAW: + if file_info['bits'] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self.mode = "BGRA", "RGBA" + else: + raise IOError("Unsupported BMP compression (%d)" % file_info['compression']) + # ---------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + # 
----------------------------------------------------- 1-bit images + if not (0 < file_info['colors'] <= 65536): + raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors']) + else: + padding = file_info['palette_padding'] + palette = read(padding * file_info['colors']) + greyscale = True + indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors'])) + # ------------------ Check if greyscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind*padding:ind*padding + 3] + if rgb != o8(val) * 3: + greyscale = False + # -------- If all colors are grey, white or black, ditch palette + if greyscale: + self.mode = "1" if file_info['colors'] == 2 else "L" + raw_mode = self.mode + else: + self.mode = "P" + self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette) + + # ----------------------------- Finally set the tile data for the plugin + self.info['compression'] = file_info['compression'] + self.tile = [('raw', (0, 0, file_info['width'], file_info['height']), offset or self.fp.tell(), + (raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction']) + )] + + def _open(self): + """ Open file, check magic number and read header """ + # read 14 bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if head_data[0:2] != b"BM": + raise SyntaxError("Not a BMP file") + # read the start position of the BMP image data (u32) + offset = i32(head_data[10:14]) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +# ============================================================================== +# Image plugin for the DIB format (BMP alias) +# ============================================================================== +class DibImageFile(BmpImageFile): + + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self): + self._bitmap() + +# +# -------------------------------------------------------------------- +# Write BMP file + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _save(im, fp, filename, check=0): + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as BMP" % im.mode) + + if check: + return check + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(map(lambda x: int(x * 39.3701), dpi)) + + stride = ((im.size[0]*bits+7)//8+3) & (~3) + header = 40 # or 64 for OS/2 version 2 + offset = 14 + header + colors * 4 + image = stride * im.size[1] + + # bitmap header + fp.write(b"BM" + # file type (magic) + o32(offset+image) + # file size + o32(0) + # reserved + o32(offset)) # image data offset + + # bitmap info header + fp.write(o32(header) + # info header size + o32(im.size[0]) + # width + o32(im.size[1]) + # height + o16(1) + # planes + o16(bits) + # depth + o32(0) + # compression (0=uncompressed) + o32(image) + # size of bitmap + o32(ppm[0]) + o32(ppm[1]) + # resolution + o32(colors) + # colors used + o32(colors)) # colors important + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if im.mode == "1": + for i in (0, 255): + fp.write(o8(i) * 4) + elif im.mode == "L": + for i in range(256): + fp.write(o8(i) * 4) + elif im.mode == "P": + fp.write(im.im.getpalette("RGB", "BGRX")) + + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, + 
(rawmode, stride, -1))])
+
+#
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(BmpImageFile.format, BmpImageFile, _accept)
+Image.register_save(BmpImageFile.format, _save)
+
+Image.register_extension(BmpImageFile.format, ".bmp")
+
+Image.register_mime(BmpImageFile.format, "image/bmp")
diff --git a/server/www/packages/packages-windows/x86/PIL/BufrStubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/BufrStubImagePlugin.py
new file mode 100644
index 0000000..45ee547
--- /dev/null
+++ b/server/www/packages/packages-windows/x86/PIL/BufrStubImagePlugin.py
@@ -0,0 +1,72 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# BUFR stub adapter
+#
+# Copyright (c) 1996-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image, ImageFile
+
+_handler = None
+
+
+##
+# Install application-specific BUFR image handler.
+#
+# @param handler Handler object.
+
+def register_handler(handler):
+    global _handler
+    _handler = handler
+
+
+# --------------------------------------------------------------------
+# Image adapter
+
+def _accept(prefix):
+    return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC"
+
+
+class BufrStubImageFile(ImageFile.StubImageFile):
+
+    format = "BUFR"
+    format_description = "BUFR"
+
+    def _open(self):
+
+        offset = self.fp.tell()
+
+        if not _accept(self.fp.read(8)):
+            raise SyntaxError("Not a BUFR file")
+
+        self.fp.seek(offset)
+
+        # make something up
+        self.mode = "F"
+        self.size = 1, 1
+
+        loader = self._load()
+        if loader:
+            loader.open(self)
+
+    def _load(self):
+        return _handler
+
+
+def _save(im, fp, filename):
+    if _handler is None or not hasattr(_handler, "save"):
+        raise IOError("BUFR save handler not installed")
+    _handler.save(im, fp, filename)
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept)
+Image.register_save(BufrStubImageFile.format, _save)
+
+Image.register_extension(BufrStubImageFile.format, ".bufr")
diff --git a/server/www/packages/packages-windows/x86/PIL/ContainerIO.py b/server/www/packages/packages-windows/x86/PIL/ContainerIO.py
new file mode 100644
index 0000000..262f2af
--- /dev/null
+++ b/server/www/packages/packages-windows/x86/PIL/ContainerIO.py
@@ -0,0 +1,117 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a class to read from a container file
+#
+# History:
+# 1995-06-18 fl Created
+# 1995-09-07 fl Added readline(), readlines()
+#
+# Copyright (c) 1997-2001 by Secret Labs AB
+# Copyright (c) 1995 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+##
+# A file object that provides read access to a part of an existing
+# file (for example a TAR file).
+
+
+class ContainerIO(object):
+
+    ##
+    # Create file object.
+    #
+    # @param file Existing file.
+    # @param offset Start of region, in bytes.
+    # @param length Size of region, in bytes.
+
+    def __init__(self, file, offset, length):
+        self.fh = file
+        self.pos = 0
+        self.offset = offset
+        self.length = length
+        self.fh.seek(offset)
+
+    ##
+    # Always false.
+
+    def isatty(self):
+        return 0
+
+    ##
+    # Move file pointer.
+    #
+    # @param offset Offset in bytes.
+    # @param mode Starting position. Use 0 for beginning of region, 1
+    #    for current offset, and 2 for end of region. You cannot move
+    #    the pointer outside the defined region.
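+    # For example, seek(0, 2) moves to the end of the region and
+    # seek(0) back to its start; the position is always clamped to
+    # the range [0, length].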
+ + def seek(self, offset, mode=0): + if mode == 1: + self.pos = self.pos + offset + elif mode == 2: + self.pos = self.length + offset + else: + self.pos = offset + # clamp + self.pos = max(0, min(self.pos, self.length)) + self.fh.seek(self.offset + self.pos) + + ## + # Get current file pointer. + # + # @return Offset from start of region, in bytes. + + def tell(self): + return self.pos + + ## + # Read data. + # + # @def read(bytes=0) + # @param bytes Number of bytes to read. If omitted or zero, + # read until end of region. + # @return An 8-bit string. + + def read(self, n=0): + if n: + n = min(n, self.length - self.pos) + else: + n = self.length - self.pos + if not n: # EOF + return "" + self.pos = self.pos + n + return self.fh.read(n) + + ## + # Read a line of text. + # + # @return An 8-bit string. + + def readline(self): + s = "" + while True: + c = self.read(1) + if not c: + break + s = s + c + if c == "\n": + break + return s + + ## + # Read multiple lines of text. + # + # @return A list of 8-bit strings. + + def readlines(self): + l = [] + while True: + s = self.readline() + if not s: + break + l.append(s) + return l diff --git a/server/www/packages/packages-windows/x86/PIL/CurImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/CurImagePlugin.py new file mode 100644 index 0000000..4db4c40 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/CurImagePlugin.py @@ -0,0 +1,88 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Cursor support for PIL +# +# notes: +# uses BmpImagePlugin.py to read the bitmap data. +# +# history: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, BmpImagePlugin, _binary + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le + + +def _accept(prefix): + return prefix[:4] == b"\0\0\2\0" + + +## +# Image plugin for Windows Cursor files. 
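+#
+# The reader scans the cursor directory, keeps the entry with the
+# largest width and height, and hands its bitmap data to
+# BmpImagePlugin. A minimal usage sketch (file name is hypothetical):
+#
+#     from PIL import Image
+#     cur = Image.open("pointer.cur")  # largest cursor in the file
+#     print(cur.size)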
+ +class CurImageFile(BmpImagePlugin.BmpImageFile): + + format = "CUR" + format_description = "Windows Cursor" + + def _open(self): + + offset = self.fp.tell() + + # check magic + s = self.fp.read(6) + if not _accept(s): + raise SyntaxError("not a CUR file") + + # pick the largest cursor in the file + m = b"" + for i in range(i16(s[4:])): + s = self.fp.read(16) + if not m: + m = s + elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]): + m = s + # print "width", i8(s[0]) + # print "height", i8(s[1]) + # print "colors", i8(s[2]) + # print "reserved", i8(s[3]) + # print "hotspot x", i16(s[4:]) + # print "hotspot y", i16(s[6:]) + # print "bytes", i32(s[8:]) + # print "offset", i32(s[12:]) + if not m: + raise TypeError("No cursors were found") + + # load as bitmap + self._bitmap(i32(m[12:]) + offset) + + # patch up the bitmap height + self.size = self.size[0], self.size[1]//2 + d, e, o, a = self.tile[0] + self.tile[0] = d, (0, 0)+self.size, o, a + + return + + +# +# -------------------------------------------------------------------- + +Image.register_open(CurImageFile.format, CurImageFile, _accept) + +Image.register_extension(CurImageFile.format, ".cur") diff --git a/server/www/packages/packages-windows/x86/PIL/DcxImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/DcxImagePlugin.py new file mode 100644 index 0000000..f9034d1 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/DcxImagePlugin.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# DCX file handling +# +# DCX is a container file format defined by Intel, commonly used +# for fax applications. Each DCX file consists of a directory +# (a list of file offsets) followed by a set of (usually 1-bit) +# PCX files. +# +# History: +# 1995-09-09 fl Created +# 1996-03-20 fl Properly derived from PcxImageFile. +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2002-07-30 fl Fixed file handling +# +# Copyright (c) 1997-98 by Secret Labs AB. +# Copyright (c) 1995-96 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, _binary +from PIL.PcxImagePlugin import PcxImageFile + +__version__ = "0.2" + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? + +i32 = _binary.i32le + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == MAGIC + + +## +# Image plugin for the Intel DCX format. 
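+#
+# Frames are independent PCX images located through the offset
+# directory, so seek() is cheap in either direction. A hedged sketch
+# (file name is hypothetical):
+#
+#     from PIL import Image
+#     dcx = Image.open("fax.dcx")
+#     for frame in range(dcx.n_frames):
+#         dcx.seek(frame)
+#         dcx.load()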
+ +class DcxImageFile(PcxImageFile): + + format = "DCX" + format_description = "Intel DCX" + + def _open(self): + + # Header + s = self.fp.read(4) + if i32(s) != MAGIC: + raise SyntaxError("not a DCX file") + + # Component directory + self._offset = [] + for i in range(1024): + offset = i32(self.fp.read(4)) + if not offset: + break + self._offset.append(offset) + + self.__fp = self.fp + self.seek(0) + + @property + def n_frames(self): + return len(self._offset) + + @property + def is_animated(self): + return len(self._offset) > 1 + + def seek(self, frame): + if frame >= len(self._offset): + raise EOFError("attempt to seek outside DCX directory") + self.frame = frame + self.fp = self.__fp + self.fp.seek(self._offset[frame]) + PcxImageFile._open(self) + + def tell(self): + return self.frame + + +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) + +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/server/www/packages/packages-windows/x86/PIL/DdsImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/DdsImagePlugin.py new file mode 100644 index 0000000..2ebfdf0 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/DdsImagePlugin.py @@ -0,0 +1,268 @@ +""" +A Pillow loader for .dds files (S3TC-compressed aka DXTC) +Jerome Leclanche + +Documentation: + http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ +""" + +import struct +from io import BytesIO +from PIL import Image, ImageFile + + +# Magic ("DDS ") +DDS_MAGIC = 0x20534444 + +# DDS flags +DDSD_CAPS = 0x1 +DDSD_HEIGHT = 0x2 +DDSD_WIDTH = 0x4 +DDSD_PITCH = 0x8 +DDSD_PIXELFORMAT = 0x1000 +DDSD_MIPMAPCOUNT = 0x20000 +DDSD_LINEARSIZE = 0x80000 +DDSD_DEPTH = 0x800000 + +# DDS caps +DDSCAPS_COMPLEX = 0x8 +DDSCAPS_TEXTURE = 0x1000 +DDSCAPS_MIPMAP = 0x400000 + +DDSCAPS2_CUBEMAP = 0x200 +DDSCAPS2_CUBEMAP_POSITIVEX = 0x400 +DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800 +DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000 +DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000 +DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000 +DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000 +DDSCAPS2_VOLUME = 0x200000 + +# Pixel Format +DDPF_ALPHAPIXELS = 0x1 +DDPF_ALPHA = 0x2 +DDPF_FOURCC = 0x4 +DDPF_PALETTEINDEXED8 = 0x20 +DDPF_RGB = 0x40 +DDPF_LUMINANCE = 0x20000 + + +# dds.h + +DDS_FOURCC = DDPF_FOURCC +DDS_RGB = DDPF_RGB +DDS_RGBA = DDPF_RGB | DDPF_ALPHAPIXELS +DDS_LUMINANCE = DDPF_LUMINANCE +DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS +DDS_ALPHA = DDPF_ALPHA +DDS_PAL8 = DDPF_PALETTEINDEXED8 + +DDS_HEADER_FLAGS_TEXTURE = (DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | + DDSD_PIXELFORMAT) +DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT +DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH +DDS_HEADER_FLAGS_PITCH = DDSD_PITCH +DDS_HEADER_FLAGS_LINEARSIZE = DDSD_LINEARSIZE + +DDS_HEIGHT = DDSD_HEIGHT +DDS_WIDTH = DDSD_WIDTH + +DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS_TEXTURE +DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS_COMPLEX | DDSCAPS_MIPMAP +DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS_COMPLEX + +DDS_CUBEMAP_POSITIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX +DDS_CUBEMAP_NEGATIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEX +DDS_CUBEMAP_POSITIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEY +DDS_CUBEMAP_NEGATIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEY +DDS_CUBEMAP_POSITIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEZ +DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEZ + + +# DXT1 +DXT1_FOURCC = 
0x31545844 + +# DXT3 +DXT3_FOURCC = 0x33545844 + +# DXT5 +DXT5_FOURCC = 0x35545844 + + +def _decode565(bits): + a = ((bits >> 11) & 0x1f) << 3 + b = ((bits >> 5) & 0x3f) << 2 + c = (bits & 0x1f) << 3 + return a, b, c + + +def _c2a(a, b): + return (2 * a + b) // 3 + + +def _c2b(a, b): + return (a + b) // 2 + + +def _c3(a, b): + return (2 * b + a) // 3 + + +def _dxt1(data, width, height): + # TODO implement this function as pixel format in decode.c + ret = bytearray(4 * width * height) + + for y in range(0, height, 4): + for x in range(0, width, 4): + color0, color1, bits = struct.unpack("> 2 + if control == 0: + r, g, b = r0, g0, b0 + elif control == 1: + r, g, b = r1, g1, b1 + elif control == 2: + if color0 > color1: + r, g, b = _c2a(r0, r1), _c2a(g0, g1), _c2a(b0, b1) + else: + r, g, b = _c2b(r0, r1), _c2b(g0, g1), _c2b(b0, b1) + elif control == 3: + if color0 > color1: + r, g, b = _c3(r0, r1), _c3(g0, g1), _c3(b0, b1) + else: + r, g, b = 0, 0, 0 + + idx = 4 * ((y + j) * width + (x + i)) + ret[idx:idx+4] = struct.pack('4B', r, g, b, 255) + + return bytes(ret) + + +def _dxtc_alpha(a0, a1, ac0, ac1, ai): + if ai <= 12: + ac = (ac0 >> ai) & 7 + elif ai == 15: + ac = (ac0 >> 15) | ((ac1 << 1) & 6) + else: + ac = (ac1 >> (ai - 16)) & 7 + + if ac == 0: + alpha = a0 + elif ac == 1: + alpha = a1 + elif a0 > a1: + alpha = ((8 - ac) * a0 + (ac - 1) * a1) // 7 + elif ac == 6: + alpha = 0 + elif ac == 7: + alpha = 0xff + else: + alpha = ((6 - ac) * a0 + (ac - 1) * a1) // 5 + + return alpha + + +def _dxt5(data, width, height): + # TODO implement this function as pixel format in decode.c + ret = bytearray(4 * width * height) + + for y in range(0, height, 4): + for x in range(0, width, 4): + a0, a1, ac0, ac1, c0, c1, code = struct.unpack("<2BHI2HI", + data.read(16)) + + r0, g0, b0 = _decode565(c0) + r1, g1, b1 = _decode565(c1) + + for j in range(4): + for i in range(4): + ai = 3 * (4 * j + i) + alpha = _dxtc_alpha(a0, a1, ac0, ac1, ai) + + cc = (code >> 2 * (4 * j + i)) & 3 + if cc == 0: + r, g, b = r0, g0, b0 + elif cc == 1: + r, g, b = r1, g1, b1 + elif cc == 2: + r, g, b = _c2a(r0, r1), _c2a(g0, g1), _c2a(b0, b1) + elif cc == 3: + r, g, b = _c3(r0, r1), _c3(g0, g1), _c3(b0, b1) + + idx = 4 * ((y + j) * width + (x + i)) + ret[idx:idx+4] = struct.pack('4B', r, g, b, alpha) + + return bytes(ret) + + +class DdsImageFile(ImageFile.ImageFile): + format = "DDS" + format_description = "DirectDraw Surface" + + def _open(self): + magic, header_size = struct.unpack(" 0: + s = fp.read(min(lengthfile, 100*1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) + + # Build ghostscript command + command = ["gs", + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dNOPAUSE", # don't pause between pages, + "-dSAFER", # safe mode + "-sDEVICE=ppmraw", # ppm driver + "-sOutputFile=%s" % outfile, # output file + "-c", "%d %d translate" % (-bbox[0], -bbox[1]), + # adjust for image origin + "-f", infile, # input file + ] + + if gs_windows_binary is not None: + if not gs_windows_binary: + raise WindowsError('Unable to locate Ghostscript on paths') + command[0] = gs_windows_binary + + # push data through ghostscript + try: + gs = subprocess.Popen(command, stdin=subprocess.PIPE, + stdout=subprocess.PIPE) + gs.stdin.close() + status = gs.wait() + if status: + raise IOError("gs failed (status %d)" % status) + im = Image.core.open_ppm(outfile) + finally: + try: + os.unlink(outfile) + if infile_temp: + os.unlink(infile_temp) + except OSError: + pass 
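+    # In short: the EPS bytes are copied into a temporary file,
+    # Ghostscript rasterises that file to a raw PPM at the computed
+    # geometry, and the PPM is read back with Image.core.open_ppm
+    # before both temporary files are removed.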
+ + return im + + +class PSFile(object): + """ + Wrapper for bytesio object that treats either CR or LF as end of line. + """ + def __init__(self, fp): + self.fp = fp + self.char = None + + def seek(self, offset, whence=0): + self.char = None + self.fp.seek(offset, whence) + + def readline(self): + s = self.char or b"" + self.char = None + + c = self.fp.read(1) + while c not in b"\r\n": + s = s + c + c = self.fp.read(1) + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return s.decode('latin-1') + + +def _accept(prefix): + return prefix[:4] == b"%!PS" or \ + (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + +## +# Image plugin for Encapsulated Postscript. This plugin supports only +# a few variants of this format. + + +class EpsImageFile(ImageFile.ImageFile): + """EPS File Parser for the Python Imaging Library""" + + format = "EPS" + format_description = "Encapsulated Postscript" + + mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"} + + def _open(self): + (length, offset) = self._find_offset(self.fp) + + # Rewrap the open file pointer in something that will + # convert line endings and decode to latin-1. + try: + if bytes is str: + # Python2, no encoding conversion necessary + fp = open(self.fp.name, "Ur") + else: + # Python3, can use bare open command. + fp = open(self.fp.name, "Ur", encoding='latin-1') + except: + # Expect this for bytesio/stringio + fp = PSFile(self.fp) + + # go to offset - start of "%!PS" + fp.seek(offset) + + box = None + + self.mode = "RGB" + self.size = 1, 1 # FIXME: huh? + + # + # Load EPS header + + s = fp.readline().strip('\r\n') + + while s: + if len(s) > 255: + raise SyntaxError("not an EPS file") + + try: + m = split.match(s) + except re.error as v: + raise SyntaxError("not an EPS file") + + if m: + k, v = m.group(1, 2) + self.info[k] = v + if k == "BoundingBox": + try: + # Note: The DSC spec says that BoundingBox + # fields should be integers, but some drivers + # put floating point values there anyway. + box = [int(float(i)) for i in v.split()] + self.size = box[2] - box[0], box[3] - box[1] + self.tile = [("eps", (0, 0) + self.size, offset, + (length, box))] + except: + pass + + else: + m = field.match(s) + if m: + k = m.group(1) + + if k == "EndComments": + break + if k[:8] == "PS-Adobe": + self.info[k[:8]] = k[9:] + else: + self.info[k] = "" + elif s[0] == '%': + # handle non-DSC Postscript comments that some + # tools mistakenly put in the Comments section + pass + else: + raise IOError("bad EPS header") + + s = fp.readline().strip('\r\n') + + if s[:1] != "%": + break + + # + # Scan for an "ImageData" descriptor + + while s[:1] == "%": + + if len(s) > 255: + raise SyntaxError("not an EPS file") + + if s[:11] == "%ImageData:": + # Encoded bitmapped image. 
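+                # The fields after "%ImageData:" are width, height,
+                # bits per channel, and a mode id that is looked up in
+                # mode_map (1=L, 2=LAB, 3=RGB, 4=CMYK); only the first
+                # four of the up-to-eight fields are used here.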
+ x, y, bi, mo = s[11:].split(None, 7)[:4] + + if int(bi) != 8: + break + try: + self.mode = self.mode_map[int(mo)] + except ValueError: + break + + self.size = int(x), int(y) + return + + s = fp.readline().strip('\r\n') + if not s: + break + + if not box: + raise IOError("cannot determine EPS bounding box") + + def _find_offset(self, fp): + + s = fp.read(160) + + if s[:4] == b"%!PS": + # for HEAD without binary preview + fp.seek(0, 2) + length = fp.tell() + offset = 0 + elif i32(s[0:4]) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + offset = i32(s[4:8]) + length = i32(s[8:12]) + else: + raise SyntaxError("not an EPS file") + + return (length, offset) + + def load(self, scale=1): + # Load EPS via Ghostscript + if not self.tile: + return + self.im = Ghostscript(self.tile, self.size, self.fp, scale) + self.mode = self.im.mode + self.size = self.im.size + self.tile = [] + + def load_seek(self, *args, **kwargs): + # we can't incrementally load, so force ImageFile.parser to + # use our custom load method by defining this method. + pass + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename, eps=1): + """EPS Writer for the Python Imaging Library.""" + + # + # make sure image data is available + im.load() + + # + # determine postscript image mode + if im.mode == "L": + operator = (8, 1, "image") + elif im.mode == "RGB": + operator = (8, 3, "false 3 colorimage") + elif im.mode == "CMYK": + operator = (8, 4, "false 4 colorimage") + else: + raise ValueError("image mode is not supported") + + class NoCloseStream(object): + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, name): + return getattr(self.fp, name) + + def close(self): + pass + + base_fp = fp + if fp != sys.stdout: + fp = NoCloseStream(fp) + if sys.version_info[0] > 2: + fp = io.TextIOWrapper(fp, encoding='latin-1') + + if eps: + # + # write EPS header + fp.write("%!PS-Adobe-3.0 EPSF-3.0\n") + fp.write("%%Creator: PIL 0.1 EpsEncode\n") + # fp.write("%%CreationDate: %s"...) 
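+        # Escaping note: strings that go through %-formatting, like
+        # the BoundingBox line below, need "%%%%" to emit the literal
+        # "%%" prefix that DSC comments require; plain writes such as
+        # "%%Pages" above need only "%%".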
+ fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size) + fp.write("%%Pages: 1\n") + fp.write("%%EndComments\n") + fp.write("%%Page: 1 1\n") + fp.write("%%ImageData: %d %d " % im.size) + fp.write("%d %d 0 1 1 \"%s\"\n" % operator) + + # + # image header + fp.write("gsave\n") + fp.write("10 dict begin\n") + fp.write("/buf %d string def\n" % (im.size[0] * operator[1])) + fp.write("%d %d scale\n" % im.size) + fp.write("%d %d 8\n" % im.size) # <= bits + fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) + fp.write("{ currentfile buf readhexstring pop } bind\n") + fp.write(operator[2] + "\n") + if hasattr(fp, "flush"): + fp.flush() + + ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)]) + + fp.write("\n%%%%EndBinary\n") + fp.write("grestore end\n") + if hasattr(fp, "flush"): + fp.flush() + +# +# -------------------------------------------------------------------- + +Image.register_open(EpsImageFile.format, EpsImageFile, _accept) + +Image.register_save(EpsImageFile.format, _save) + +Image.register_extension(EpsImageFile.format, ".ps") +Image.register_extension(EpsImageFile.format, ".eps") + +Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/server/www/packages/packages-windows/x86/PIL/ExifTags.py b/server/www/packages/packages-windows/x86/PIL/ExifTags.py new file mode 100644 index 0000000..a8ad26b --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ExifTags.py @@ -0,0 +1,315 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EXIF tags +# +# Copyright (c) 2003 by Secret Labs AB +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known EXIF tags. +## + +## +# Maps EXIF tags to tag names. 
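+#
+# A hedged usage sketch (the JPEG name is hypothetical): EXIF data is
+# keyed by numeric tag, and TAGS maps those keys to readable names:
+#
+#     from PIL import Image
+#     from PIL.ExifTags import TAGS
+#     exif = Image.open("photo.jpg")._getexif() or {}
+#     named = dict((TAGS.get(k, k), v) for k, v in exif.items())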
+ +TAGS = { + + # possibly incomplete + 0x000b: "ProcessingSoftware", + 0x00fe: "NewSubfileType", + 0x00ff: "SubfileType", + 0x0100: "ImageWidth", + 0x0101: "ImageLength", + 0x0102: "BitsPerSample", + 0x0103: "Compression", + 0x0106: "PhotometricInterpretation", + 0x0107: "Thresholding", + 0x0108: "CellWidth", + 0x0109: "CellLength", + 0x010a: "FillOrder", + 0x010d: "DocumentName", + 0x010e: "ImageDescription", + 0x010f: "Make", + 0x0110: "Model", + 0x0111: "StripOffsets", + 0x0112: "Orientation", + 0x0115: "SamplesPerPixel", + 0x0116: "RowsPerStrip", + 0x0117: "StripByteCounts", + 0x0118: "MinSampleValue", + 0x0119: "MaxSampleValue", + 0x011a: "XResolution", + 0x011b: "YResolution", + 0x011c: "PlanarConfiguration", + 0x011d: "PageName", + 0x0120: "FreeOffsets", + 0x0121: "FreeByteCounts", + 0x0122: "GrayResponseUnit", + 0x0123: "GrayResponseCurve", + 0x0124: "T4Options", + 0x0125: "T6Options", + 0x0128: "ResolutionUnit", + 0x0129: "PageNumber", + 0x012d: "TransferFunction", + 0x0131: "Software", + 0x0132: "DateTime", + 0x013b: "Artist", + 0x013c: "HostComputer", + 0x013d: "Predictor", + 0x013e: "WhitePoint", + 0x013f: "PrimaryChromaticities", + 0x0140: "ColorMap", + 0x0141: "HalftoneHints", + 0x0142: "TileWidth", + 0x0143: "TileLength", + 0x0144: "TileOffsets", + 0x0145: "TileByteCounts", + 0x014a: "SubIFDs", + 0x014c: "InkSet", + 0x014d: "InkNames", + 0x014e: "NumberOfInks", + 0x0150: "DotRange", + 0x0151: "TargetPrinter", + 0x0152: "ExtraSamples", + 0x0153: "SampleFormat", + 0x0154: "SMinSampleValue", + 0x0155: "SMaxSampleValue", + 0x0156: "TransferRange", + 0x0157: "ClipPath", + 0x0158: "XClipPathUnits", + 0x0159: "YClipPathUnits", + 0x015a: "Indexed", + 0x015b: "JPEGTables", + 0x015f: "OPIProxy", + 0x0200: "JPEGProc", + 0x0201: "JpegIFOffset", + 0x0202: "JpegIFByteCount", + 0x0203: "JpegRestartInterval", + 0x0205: "JpegLosslessPredictors", + 0x0206: "JpegPointTransforms", + 0x0207: "JpegQTables", + 0x0208: "JpegDCTables", + 0x0209: "JpegACTables", + 0x0211: "YCbCrCoefficients", + 0x0212: "YCbCrSubSampling", + 0x0213: "YCbCrPositioning", + 0x0214: "ReferenceBlackWhite", + 0x02bc: "XMLPacket", + 0x1000: "RelatedImageFileFormat", + 0x1001: "RelatedImageWidth", + 0x1002: "RelatedImageLength", + 0x4746: "Rating", + 0x4749: "RatingPercent", + 0x800d: "ImageID", + 0x828d: "CFARepeatPatternDim", + 0x828e: "CFAPattern", + 0x828f: "BatteryLevel", + 0x8298: "Copyright", + 0x829a: "ExposureTime", + 0x829d: "FNumber", + 0x83bb: "IPTCNAA", + 0x8649: "ImageResources", + 0x8769: "ExifOffset", + 0x8773: "InterColorProfile", + 0x8822: "ExposureProgram", + 0x8824: "SpectralSensitivity", + 0x8825: "GPSInfo", + 0x8827: "ISOSpeedRatings", + 0x8828: "OECF", + 0x8829: "Interlace", + 0x882a: "TimeZoneOffset", + 0x882b: "SelfTimerMode", + 0x9000: "ExifVersion", + 0x9003: "DateTimeOriginal", + 0x9004: "DateTimeDigitized", + 0x9101: "ComponentsConfiguration", + 0x9102: "CompressedBitsPerPixel", + 0x9201: "ShutterSpeedValue", + 0x9202: "ApertureValue", + 0x9203: "BrightnessValue", + 0x9204: "ExposureBiasValue", + 0x9205: "MaxApertureValue", + 0x9206: "SubjectDistance", + 0x9207: "MeteringMode", + 0x9208: "LightSource", + 0x9209: "Flash", + 0x920a: "FocalLength", + 0x920b: "FlashEnergy", + 0x920c: "SpatialFrequencyResponse", + 0x920d: "Noise", + 0x9211: "ImageNumber", + 0x9212: "SecurityClassification", + 0x9213: "ImageHistory", + 0x9214: "SubjectLocation", + 0x9215: "ExposureIndex", + 0x9216: "TIFF/EPStandardID", + 0x927c: "MakerNote", + 0x9286: "UserComment", + 0x9290: "SubsecTime", + 0x9291: 
"SubsecTimeOriginal", + 0x9292: "SubsecTimeDigitized", + 0x9c9b: "XPTitle", + 0x9c9c: "XPComment", + 0x9c9d: "XPAuthor", + 0x9c9e: "XPKeywords", + 0x9c9f: "XPSubject", + 0xa000: "FlashPixVersion", + 0xa001: "ColorSpace", + 0xa002: "ExifImageWidth", + 0xa003: "ExifImageHeight", + 0xa004: "RelatedSoundFile", + 0xa005: "ExifInteroperabilityOffset", + 0xa20b: "FlashEnergy", + 0xa20c: "SpatialFrequencyResponse", + 0xa20e: "FocalPlaneXResolution", + 0xa20f: "FocalPlaneYResolution", + 0xa210: "FocalPlaneResolutionUnit", + 0xa214: "SubjectLocation", + 0xa215: "ExposureIndex", + 0xa217: "SensingMethod", + 0xa300: "FileSource", + 0xa301: "SceneType", + 0xa302: "CFAPattern", + 0xa401: "CustomRendered", + 0xa402: "ExposureMode", + 0xa403: "WhiteBalance", + 0xa404: "DigitalZoomRatio", + 0xa405: "FocalLengthIn35mmFilm", + 0xa406: "SceneCaptureType", + 0xa407: "GainControl", + 0xa408: "Contrast", + 0xa409: "Saturation", + 0xa40a: "Sharpness", + 0xa40b: "DeviceSettingDescription", + 0xa40c: "SubjectDistanceRange", + 0xa420: "ImageUniqueID", + 0xa430: "CameraOwnerName", + 0xa431: "BodySerialNumber", + 0xa432: "LensSpecification", + 0xa433: "LensMake", + 0xa434: "LensModel", + 0xa435: "LensSerialNumber", + 0xa500: "Gamma", + 0xc4a5: "PrintImageMatching", + 0xc612: "DNGVersion", + 0xc613: "DNGBackwardVersion", + 0xc614: "UniqueCameraModel", + 0xc615: "LocalizedCameraModel", + 0xc616: "CFAPlaneColor", + 0xc617: "CFALayout", + 0xc618: "LinearizationTable", + 0xc619: "BlackLevelRepeatDim", + 0xc61a: "BlackLevel", + 0xc61b: "BlackLevelDeltaH", + 0xc61c: "BlackLevelDeltaV", + 0xc61d: "WhiteLevel", + 0xc61e: "DefaultScale", + 0xc61f: "DefaultCropOrigin", + 0xc620: "DefaultCropSize", + 0xc621: "ColorMatrix1", + 0xc622: "ColorMatrix2", + 0xc623: "CameraCalibration1", + 0xc624: "CameraCalibration2", + 0xc625: "ReductionMatrix1", + 0xc626: "ReductionMatrix2", + 0xc627: "AnalogBalance", + 0xc628: "AsShotNeutral", + 0xc629: "AsShotWhiteXY", + 0xc62a: "BaselineExposure", + 0xc62b: "BaselineNoise", + 0xc62c: "BaselineSharpness", + 0xc62d: "BayerGreenSplit", + 0xc62e: "LinearResponseLimit", + 0xc62f: "CameraSerialNumber", + 0xc630: "LensInfo", + 0xc631: "ChromaBlurRadius", + 0xc632: "AntiAliasStrength", + 0xc633: "ShadowScale", + 0xc634: "DNGPrivateData", + 0xc635: "MakerNoteSafety", + 0xc65a: "CalibrationIlluminant1", + 0xc65b: "CalibrationIlluminant2", + 0xc65c: "BestQualityScale", + 0xc65d: "RawDataUniqueID", + 0xc68b: "OriginalRawFileName", + 0xc68c: "OriginalRawFileData", + 0xc68d: "ActiveArea", + 0xc68e: "MaskedAreas", + 0xc68f: "AsShotICCProfile", + 0xc690: "AsShotPreProfileMatrix", + 0xc691: "CurrentICCProfile", + 0xc692: "CurrentPreProfileMatrix", + 0xc6bf: "ColorimetricReference", + 0xc6f3: "CameraCalibrationSignature", + 0xc6f4: "ProfileCalibrationSignature", + 0xc6f6: "AsShotProfileName", + 0xc6f7: "NoiseReductionApplied", + 0xc6f8: "ProfileName", + 0xc6f9: "ProfileHueSatMapDims", + 0xc6fa: "ProfileHueSatMapData1", + 0xc6fb: "ProfileHueSatMapData2", + 0xc6fc: "ProfileToneCurve", + 0xc6fd: "ProfileEmbedPolicy", + 0xc6fe: "ProfileCopyright", + 0xc714: "ForwardMatrix1", + 0xc715: "ForwardMatrix2", + 0xc716: "PreviewApplicationName", + 0xc717: "PreviewApplicationVersion", + 0xc718: "PreviewSettingsName", + 0xc719: "PreviewSettingsDigest", + 0xc71a: "PreviewColorSpace", + 0xc71b: "PreviewDateTime", + 0xc71c: "RawImageDigest", + 0xc71d: "OriginalRawFileDigest", + 0xc71e: "SubTileBlockSize", + 0xc71f: "RowInterleaveFactor", + 0xc725: "ProfileLookTableDims", + 0xc726: "ProfileLookTableData", + 0xc740: "OpcodeList1", + 
0xc741: "OpcodeList2",
+    0xc74e: "OpcodeList3",
+    0xc761: "NoiseProfile"
+}
+
+##
+# Maps EXIF GPS tags to tag names.
+
+GPSTAGS = {
+    0: "GPSVersionID",
+    1: "GPSLatitudeRef",
+    2: "GPSLatitude",
+    3: "GPSLongitudeRef",
+    4: "GPSLongitude",
+    5: "GPSAltitudeRef",
+    6: "GPSAltitude",
+    7: "GPSTimeStamp",
+    8: "GPSSatellites",
+    9: "GPSStatus",
+    10: "GPSMeasureMode",
+    11: "GPSDOP",
+    12: "GPSSpeedRef",
+    13: "GPSSpeed",
+    14: "GPSTrackRef",
+    15: "GPSTrack",
+    16: "GPSImgDirectionRef",
+    17: "GPSImgDirection",
+    18: "GPSMapDatum",
+    19: "GPSDestLatitudeRef",
+    20: "GPSDestLatitude",
+    21: "GPSDestLongitudeRef",
+    22: "GPSDestLongitude",
+    23: "GPSDestBearingRef",
+    24: "GPSDestBearing",
+    25: "GPSDestDistanceRef",
+    26: "GPSDestDistance",
+    27: "GPSProcessingMethod",
+    28: "GPSAreaInformation",
+    29: "GPSDateStamp",
+    30: "GPSDifferential",
+    31: "GPSHPositioningError",
+}
diff --git a/server/www/packages/packages-windows/x86/PIL/FitsStubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FitsStubImagePlugin.py
new file mode 100644
index 0000000..7aefff2
--- /dev/null
+++ b/server/www/packages/packages-windows/x86/PIL/FitsStubImagePlugin.py
@@ -0,0 +1,76 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# FITS stub adapter
+#
+# Copyright (c) 1998-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from PIL import Image, ImageFile
+
+_handler = None
+
+##
+# Install application-specific FITS image handler.
+#
+# @param handler Handler object.
+
+
+def register_handler(handler):
+    global _handler
+    _handler = handler
+
+# --------------------------------------------------------------------
+# Image adapter
+
+
+def _accept(prefix):
+    return prefix[:6] == b"SIMPLE"
+
+
+class FITSStubImageFile(ImageFile.StubImageFile):
+
+    format = "FITS"
+    format_description = "FITS"
+
+    def _open(self):
+
+        offset = self.fp.tell()
+
+        if not _accept(self.fp.read(6)):
+            raise SyntaxError("Not a FITS file")
+
+        # FIXME: add more sanity checks here; mandatory header items
+        # include SIMPLE, BITPIX, NAXIS, etc.
+
+        self.fp.seek(offset)
+
+        # make something up
+        self.mode = "F"
+        self.size = 1, 1
+
+        loader = self._load()
+        if loader:
+            loader.open(self)
+
+    def _load(self):
+        return _handler
+
+
+def _save(im, fp, filename):
+    if _handler is None or not hasattr(_handler, "save"):
+        raise IOError("FITS save handler not installed")
+    _handler.save(im, fp, filename)
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(FITSStubImageFile.format, FITSStubImageFile, _accept)
+Image.register_save(FITSStubImageFile.format, _save)
+
+Image.register_extension(FITSStubImageFile.format, ".fit")
+Image.register_extension(FITSStubImageFile.format, ".fits")
diff --git a/server/www/packages/packages-windows/x86/PIL/FliImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FliImagePlugin.py
new file mode 100644
index 0000000..a07dc29
--- /dev/null
+++ b/server/www/packages/packages-windows/x86/PIL/FliImagePlugin.py
@@ -0,0 +1,188 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# FLI/FLC file handling.
+#
+# History:
+#       95-09-01 fl     Created
+#       97-01-03 fl     Fixed parser, setup decoder tile
+#       98-07-15 fl     Renamed offset attribute to avoid name clash
+#
+# Copyright (c) Secret Labs AB 1997-98.
+# Copyright (c) Fredrik Lundh 1995-97.
+#
+# See the README file for information on usage and redistribution.
+# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.2" + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le +o8 = _binary.o8 + + +# +# decoder + +def _accept(prefix): + return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12] + + +## +# Image plugin for the FLI/FLC animation format. Use the seek +# method to load individual frames. + +class FliImageFile(ImageFile.ImageFile): + + format = "FLI" + format_description = "Autodesk FLI/FLC Animation" + + def _open(self): + + # HEAD + s = self.fp.read(128) + magic = i16(s[4:6]) + if not (magic in [0xAF11, 0xAF12] and + i16(s[14:16]) in [0, 3] and # flags + s[20:22] == b"\x00\x00"): # reserved + raise SyntaxError("not an FLI/FLC file") + + # image characteristics + self.mode = "P" + self.size = i16(s[8:10]), i16(s[10:12]) + + # animation speed + duration = i32(s[16:20]) + if magic == 0xAF11: + duration = (duration * 1000) / 70 + self.info["duration"] = duration + + # look for palette + palette = [(a, a, a) for a in range(256)] + + s = self.fp.read(16) + + self.__offset = 128 + + if i16(s[4:6]) == 0xF100: + # prefix chunk; ignore it + self.__offset = self.__offset + i32(s) + s = self.fp.read(16) + + if i16(s[4:6]) == 0xF1FA: + # look for palette chunk + s = self.fp.read(6) + if i16(s[4:6]) == 11: + self._palette(palette, 2) + elif i16(s[4:6]) == 4: + self._palette(palette, 0) + + palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette] + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + # set things up to decode first frame + self.__frame = -1 + self.__fp = self.fp + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self.seek(0) + + def _palette(self, palette, shift): + # load palette + + i = 0 + for e in range(i16(self.fp.read(2))): + s = self.fp.read(2) + i = i + i8(s[0]) + n = i8(s[1]) + if n == 0: + n = 256 + s = self.fp.read(n * 3) + for n in range(0, len(s), 3): + r = i8(s[n]) << shift + g = i8(s[n+1]) << shift + b = i8(s[n+2]) << shift + palette[i] = (r, g, b) + i += 1 + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in FLI file") + + def _seek(self, frame): + if frame == 0: + self.__frame = -1 + self.__fp.seek(self.__rewind) + self.__offset = 128 + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + # move to next frame + self.fp = self.__fp + self.fp.seek(self.__offset) + + s = self.fp.read(4) + if not s: + raise EOFError + + framesize = i32(s) + + self.decodermaxblock = framesize + self.tile = [("fli", (0, 0)+self.size, self.__offset, None)] + + self.__offset += framesize + + def tell(self): + return self.__frame + +# +# registry + +Image.register_open(FliImageFile.format, FliImageFile, _accept) + +Image.register_extension(FliImageFile.format, ".fli") 
+Image.register_extension(FliImageFile.format, ".flc") diff --git a/server/www/packages/packages-windows/x86/PIL/FontFile.py b/server/www/packages/packages-windows/x86/PIL/FontFile.py new file mode 100644 index 0000000..db8e6be --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/FontFile.py @@ -0,0 +1,115 @@ +# +# The Python Imaging Library +# $Id$ +# +# base class for raster font file parsers +# +# history: +# 1997-06-05 fl created +# 1997-08-19 fl restrict image width +# +# Copyright (c) 1997-1998 by Secret Labs AB +# Copyright (c) 1997-1998 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import os +from PIL import Image, _binary + +WIDTH = 800 + + +def puti16(fp, values): + # write network order (big-endian) 16-bit sequence + for v in values: + if v < 0: + v += 65536 + fp.write(_binary.o16be(v)) + + +## +# Base class for raster font file handlers. + +class FontFile(object): + + bitmap = None + + def __init__(self): + + self.info = {} + self.glyph = [None] * 256 + + def __getitem__(self, ix): + return self.glyph[ix] + + def compile(self): + "Create metrics and bitmap" + + if self.bitmap: + return + + # create bitmap large enough to hold all data + h = w = maxwidth = 0 + lines = 1 + for glyph in self: + if glyph: + d, dst, src, im = glyph + h = max(h, src[3] - src[1]) + w = w + (src[2] - src[0]) + if w > WIDTH: + lines += 1 + w = (src[2] - src[0]) + maxwidth = max(maxwidth, w) + + xsize = maxwidth + ysize = lines * h + + if xsize == 0 and ysize == 0: + return "" + + self.ysize = h + + # paste glyphs into bitmap + self.bitmap = Image.new("1", (xsize, ysize)) + self.metrics = [None] * 256 + x = y = 0 + for i in range(256): + glyph = self[i] + if glyph: + d, dst, src, im = glyph + xx = src[2] - src[0] + # yy = src[3] - src[1] + x0, y0 = x, y + x = x + xx + if x > WIDTH: + x, y = 0, y + h + x0, y0 = x, y + x = xx + s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 + self.bitmap.paste(im.crop(src), s) + # print chr(i), dst, s + self.metrics[i] = d, dst, s + + def save(self, filename): + "Save font" + + self.compile() + + # font data + self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") + + # font metrics + fp = open(os.path.splitext(filename)[0] + ".pil", "wb") + fp.write(b"PILfont\n") + fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii')) # HACK!!! + fp.write(b"DATA\n") + for id in range(256): + m = self.metrics[id] + if not m: + puti16(fp, [0] * 10) + else: + puti16(fp, m[0] + m[1] + m[2]) + fp.close() + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/FpxImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FpxImagePlugin.py new file mode 100644 index 0000000..aefc574 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/FpxImagePlugin.py @@ -0,0 +1,226 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library. +# $Id$ +# +# FlashPix support for PIL +# +# History: +# 97-01-25 fl Created (reads uncompressed RGB images only) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. 
+# + + +from PIL import Image, ImageFile +from PIL.OleFileIO import i8, i32, MAGIC, OleFileIO + +__version__ = "0.1" + + +# we map from colour field tuples to (mode, rawmode) descriptors +MODES = { + # opacity + (0x00007ffe): ("A", "L"), + # monochrome + (0x00010000,): ("L", "L"), + (0x00018000, 0x00017ffe): ("RGBA", "LA"), + # photo YCC + (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"), + # standard RGB (NIFRGB) + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"), +} + + +# +# -------------------------------------------------------------------- + +def _accept(prefix): + return prefix[:8] == MAGIC + + +## +# Image plugin for the FlashPix images. + +class FpxImageFile(ImageFile.ImageFile): + + format = "FPX" + format_description = "FlashPix" + + def _open(self): + # + # read the OLE directory and see if this is a likely + # to be a FlashPix file + + try: + self.ole = OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an FPX file; invalid OLE file") + + if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": + raise SyntaxError("not an FPX file; bad root CLSID") + + self._open_index(1) + + def _open_index(self, index=1): + # + # get the Image Contents Property Set + + prop = self.ole.getproperties([ + "Data Object Store %06d" % index, + "\005Image Contents" + ]) + + # size (highest resolution) + + self.size = prop[0x1000002], prop[0x1000003] + + size = max(self.size) + i = 1 + while size > 64: + size = size / 2 + i += 1 + self.maxid = i - 1 + + # mode. instead of using a single field for this, flashpix + # requires you to specify the mode for each channel in each + # resolution subimage, and leaves it to the decoder to make + # sure that they all match. for now, we'll cheat and assume + # that this is always the case. 
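+        # Worked example: a 1600x1200 image halves through 800, 400,
+        # 200, 100 and 50 pixels, so the loop above exits with i == 6
+        # and maxid == 5, the index of the coarsest (<= 64 px) level.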
+ + id = self.maxid << 16 + + s = prop[0x2000002 | id] + + colors = [] + for i in range(i32(s, 4)): + # note: for now, we ignore the "uncalibrated" flag + colors.append(i32(s, 8+i*4) & 0x7fffffff) + + self.mode, self.rawmode = MODES[tuple(colors)] + + # load JPEG tables, if any + self.jpeg = {} + for i in range(256): + id = 0x3000001 | (i << 16) + if id in prop: + self.jpeg[i] = prop[id] + + # print len(self.jpeg), "tables loaded" + + self._open_subimage(1, self.maxid) + + def _open_subimage(self, index=1, subimage=0): + # + # setup tile descriptors for a given subimage + + stream = [ + "Data Object Store %06d" % index, + "Resolution %04d" % subimage, + "Subimage 0000 Header" + ] + + fp = self.ole.openstream(stream) + + # skip prefix + fp.read(28) + + # header stream + s = fp.read(36) + + size = i32(s, 4), i32(s, 8) + # tilecount = i32(s, 12) + tilesize = i32(s, 16), i32(s, 20) + # channels = i32(s, 24) + offset = i32(s, 28) + length = i32(s, 32) + + # print size, self.mode, self.rawmode + + if size != self.size: + raise IOError("subimage mismatch") + + # get tile descriptors + fp.seek(28 + offset) + s = fp.read(i32(s, 12) * length) + + x = y = 0 + xsize, ysize = size + xtile, ytile = tilesize + self.tile = [] + + for i in range(0, len(s), length): + + compression = i32(s, i+8) + + if compression == 0: + self.tile.append(("raw", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode))) + + elif compression == 1: + + # FIXME: the fill decoder is not implemented + self.tile.append(("fill", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode, s[12:16]))) + + elif compression == 2: + + internal_color_conversion = i8(s[14]) + jpeg_tables = i8(s[15]) + rawmode = self.rawmode + + if internal_color_conversion: + # The image is stored as usual (usually YCbCr). + if rawmode == "RGBA": + # For "RGBA", data is stored as YCbCrA based on + # negative RGB. The following trick works around + # this problem : + jpegmode, rawmode = "YCbCrK", "CMYK" + else: + jpegmode = None # let the decoder decide + + else: + # The image is stored as defined by rawmode + jpegmode = rawmode + + self.tile.append(("jpeg", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (rawmode, jpegmode))) + + # FIXME: jpeg tables are tile dependent; the prefix + # data must be placed in the tile descriptor itself! 
+ + if jpeg_tables: + self.tile_prefix = self.jpeg[jpeg_tables] + + else: + raise IOError("unknown/invalid compression") + + x = x + xtile + if x >= xsize: + x, y = 0, y + ytile + if y >= ysize: + break # isn't really required + + self.stream = stream + self.fp = None + + def load(self): + + if not self.fp: + self.fp = self.ole.openstream(self.stream[:2] + + ["Subimage 0000 Data"]) + + ImageFile.ImageFile.load(self) + +# +# -------------------------------------------------------------------- + +Image.register_open(FpxImageFile.format, FpxImageFile, _accept) + +Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/server/www/packages/packages-windows/x86/PIL/FtexImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/FtexImagePlugin.py new file mode 100644 index 0000000..f3a2d7f --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/FtexImagePlugin.py @@ -0,0 +1,96 @@ +""" +A Pillow loader for .ftc and .ftu files (FTEX) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001 + +The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a +packed custom format called FTEX. This file format uses file extensions FTC and FTU. +* FTC files are compressed textures (using standard texture compression). +* FTU files are not compressed. +Texture File Format +The FTC and FTU texture files both use the same format, called. This +has the following structure: +{header} +{format_directory} +{data} +Where: +{header} = { u32:magic, u32:version, u32:width, u32:height, u32:mipmap_count, u32:format_count } + +* The "magic" number is "FTEX". +* "width" and "height" are the dimensions of the texture. +* "mipmap_count" is the number of mipmaps in the texture. +* "format_count" is the number of texture formats (different versions of the same texture) in this file. + +{format_directory} = format_count * { u32:format, u32:where } + +The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB uncompressed textures. +The texture data for a format starts at the position "where" in the file. + +Each set of texture data in the file has the following structure: +{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } } +* "mipmap_size" is the number of bytes in that mip level. For compressed textures this is the +size of the texture data compressed with DXT1. For 24 bit uncompressed textures, this is 3 * width * height. +Following this are the image bytes for that mipmap level. + +Note: All data is stored in little-Endian (Intel) byte order. +""" + +import struct +from io import BytesIO +from PIL import Image, ImageFile +from PIL.DdsImagePlugin import _dxt1 + + +MAGIC = b"FTEX" +FORMAT_DXT1 = 0 +FORMAT_UNCOMPRESSED = 1 + + +class FtexImageFile(ImageFile.ImageFile): + format = "FTEX" + format_description = "Texture File Format (IW2:EOC)" + + def _open(self): + magic = struct.unpack("= 8 and i32(prefix[:4]) >= 20 and i32(prefix[4:8]) in (1, 2) + + +## +# Image plugin for the GIMP brush format. 
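+#
+# Version 1 headers carry no magic string; version 2 adds the b"GIMP"
+# marker and a spacing field. A minimal sketch (file name is
+# hypothetical):
+#
+#     from PIL import Image
+#     brush = Image.open("brush.gbr")  # mode "L" or "RGBA"
+#     brush.load()                     # pixel data follows the header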
+ +class GbrImageFile(ImageFile.ImageFile): + + format = "GBR" + format_description = "GIMP brush file" + + def _open(self): + header_size = i32(self.fp.read(4)) + version = i32(self.fp.read(4)) + if header_size < 20: + raise SyntaxError("not a GIMP brush") + if version not in (1, 2): + raise SyntaxError("Unsupported GIMP brush version: %s" % version) + + width = i32(self.fp.read(4)) + height = i32(self.fp.read(4)) + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0: + raise SyntaxError("not a GIMP brush") + if color_depth not in (1, 4): + raise SyntaxError("Unsupported GIMP brush color depth: %s" % color_depth) + + if version == 1: + comment_length = header_size-20 + else: + comment_length = header_size-28 + magic_number = self.fp.read(4) + if magic_number != b'GIMP': + raise SyntaxError("not a GIMP brush, bad magic number") + self.info['spacing'] = i32(self.fp.read(4)) + + comment = self.fp.read(comment_length)[:-1] + + if color_depth == 1: + self.mode = "L" + else: + self.mode = 'RGBA' + + self.size = width, height + + self.info["comment"] = comment + + # Image might not be small + Image._decompression_bomb_check(self.size) + + # Data is an uncompressed block of w * h * bytes/pixel + self._data_size = width * height * color_depth + + def load(self): + self.im = Image.core.new(self.mode, self.size) + self.frombytes(self.fp.read(self._data_size)) + +# +# registry + +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/server/www/packages/packages-windows/x86/PIL/GdImageFile.py b/server/www/packages/packages-windows/x86/PIL/GdImageFile.py new file mode 100644 index 0000000..ae3500f --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/GdImageFile.py @@ -0,0 +1,92 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GD file handling +# +# History: +# 1996-04-12 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +# NOTE: This format cannot be automatically recognized, so the +# class is not registered for use with Image.open(). To open a +# gd file, use the GdImageFile.open() function instead. + +# THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This +# implementation is provided for convenience and demonstrational +# purposes only. + + +from PIL import ImageFile, ImagePalette, _binary +from PIL._util import isPath + +__version__ = "0.1" + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +i16 = _binary.i16be + + +## +# Image plugin for the GD uncompressed format. Note that this format +# is not supported by the standard Image.open function. To use +# this plugin, you have to import the GdImageFile module and +# use the GdImageFile.open function. + +class GdImageFile(ImageFile.ImageFile): + + format = "GD" + format_description = "GD uncompressed images" + + def _open(self): + + # Header + s = self.fp.read(775) + + self.mode = "L" # FIXME: "P" + self.size = i16(s[0:2]), i16(s[2:4]) + + # transparency index + tindex = i16(s[5:7]) + if tindex < 256: + self.info["transparent"] = tindex + + self.palette = ImagePalette.raw("RGB", s[7:]) + + self.tile = [("raw", (0, 0)+self.size, 775, ("L", 0, -1))] + + +## +# Load texture from a GD image file. +# +# @param filename GD file name, or an opened file handle. +# @param mode Optional mode. In this version, if the mode argument +# is given, it must be "r". +# @return An image instance. 
+# @exception IOError If the image could not be read. + +def open(fp, mode="r"): + + if mode != "r": + raise ValueError("bad mode") + + if isPath(fp): + filename = fp + fp = builtins.open(fp, "rb") + else: + filename = "" + + try: + return GdImageFile(fp, filename) + except SyntaxError: + raise IOError("cannot identify this image file") diff --git a/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py new file mode 100644 index 0000000..6bca4dd --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py @@ -0,0 +1,698 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, ImagePalette, \ + ImageChops, ImageSequence, _binary + +__version__ = "0.9" + + +# -------------------------------------------------------------------- +# Helpers + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 +o16 = _binary.o16le + + +# -------------------------------------------------------------------- +# Identify/read GIF files + +def _accept(prefix): + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. 
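+#
+# Frames after the first are stored as deltas against their
+# predecessor, so they decode in order; stepping backwards makes
+# seek() rewind and replay. A hedged sketch (file name is
+# hypothetical):
+#
+#     from PIL import Image
+#     gif = Image.open("anim.gif")
+#     if gif.is_animated:
+#         for frame in range(gif.n_frames):
+#             gif.seek(frame)
+#             gif.load()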
+ +class GifImageFile(ImageFile.ImageFile): + + format = "GIF" + format_description = "Compuserve GIF" + global_palette = None + + def data(self): + s = self.fp.read(1) + if s and i8(s): + return self.fp.read(i8(s)) + return None + + def _open(self): + + # Screen + s = self.fp.read(13) + if s[:6] not in [b"GIF87a", b"GIF89a"]: + raise SyntaxError("not a GIF file") + + self.info["version"] = s[:6] + self.size = i16(s[6:]), i16(s[8:]) + self.tile = [] + flags = i8(s[10]) + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = i8(s[11]) + # check if palette contains colour indices + p = self.fp.read(3 << bits) + for i in range(0, len(p), 3): + if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + break + + self.__fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in GIF file") + + def _seek(self, frame): + + if frame == 0: + # rewind + self.__offset = 0 + self.dispose = None + self.dispose_extent = [0, 0, 0, 0] # x0, y0, x1, y1 + self.__frame = -1 + self.__fp.seek(self.__rewind) + self._prev_im = None + self.disposal_method = 0 + else: + # ensure that the previous frame was loaded + if not self.im: + self.load() + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + self.tile = [] + + self.fp = self.__fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + from copy import copy + self.palette = copy(self.global_palette) + + while True: + + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if i8(s) == 249: + # + # graphic control extension + # + flags = i8(block[0]) + if flags & 1: + self.info["transparency"] = i8(block[3]) + self.info["duration"] = i16(block[1:3]) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + if dispose_bits: + # only set the dispose if it is not + # unspecified. 
I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif i8(s) == 254: + # + # comment extension + # + self.info["comment"] = block + elif i8(s) == 255: + # + # application extension + # + self.info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if len(block) >= 3 and i8(block[0]) == 1: + self.info["loop"] = i16(block[1:3]) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s[0:]), i16(s[2:]) + x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) + self.dispose_extent = x0, y0, x1, y1 + flags = i8(s[8]) + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + self.palette =\ + ImagePalette.raw("RGB", self.fp.read(3 << bits)) + + # image data + bits = i8(self.fp.read(1)) + self.__offset = self.fp.tell() + self.tile = [("gif", + (x0, y0, x1, y1), + self.__offset, + (bits, interlace))] + break + + else: + pass + # raise IOError, "illegal GIF tag `%x`" % i8(s) + + try: + if self.disposal_method < 2: + # do not dispose or none specified + self.dispose = None + elif self.disposal_method == 2: + # replace with background colour + self.dispose = Image.core.fill("P", self.size, + self.info["background"]) + else: + # replace with previous contents + if self.im: + self.dispose = self.im.copy() + + # only dispose the extent in this frame + if self.dispose: + self.dispose = self.dispose.crop(self.dispose_extent) + except (AttributeError, KeyError): + pass + + if not self.tile: + # self.__fp = None + raise EOFError + + self.mode = "L" + if self.palette: + self.mode = "P" + + def tell(self): + return self.__frame + + def load_end(self): + ImageFile.ImageFile.load_end(self) + + # if the disposal method is 'do not dispose', transparent + # pixels should show the content of the previous frame + if self._prev_im and self.disposal_method == 1: + # we do this by pasting the updated area onto the previous + # frame which we then use as the current image content + updated = self.im.crop(self.dispose_extent) + self._prev_im.paste(updated, self.dispose_extent, + updated.convert('RGBA')) + self.im = self._prev_im + self._prev_im = self.im.copy() + +# -------------------------------------------------------------------- +# Write GIF files + +try: + import _imaging_gif +except ImportError: + _imaging_gif = None + +RAWMODE = { + "1": "L", + "L": "L", + "P": "P", +} + + +def _convert_mode(im, initial_call=False): + # convert on the fly (EXPERIMENTAL -- I'm not sure PIL + # should automatically convert images on save...) 
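+    # GIF can only store palette data, so RGB frames are quantised to
+    # "P" (the first frame keeps the size of an existing source
+    # palette) and any other mode base falls back to greyscale "L".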
+ if Image.getmodebase(im.mode) == "RGB": + if initial_call: + palette_size = 256 + if im.palette: + palette_size = len(im.palette.getdata()[1]) // 3 + return im.convert("P", palette=1, colors=palette_size) + else: + return im.convert("P") + return im.convert("L") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, save_all=False): + + im.encoderinfo.update(im.info) + if _imaging_gif: + # call external driver + try: + _imaging_gif.save(im, fp, filename) + return + except IOError: + pass # write uncompressed file + + if im.mode in RAWMODE: + im_out = im.copy() + else: + im_out = _convert_mode(im, True) + + # header + try: + palette = im.encoderinfo["palette"] + except KeyError: + palette = None + im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) + + if save_all: + previous = None + + first_frame = None + for im_frame in ImageSequence.Iterator(im): + im_frame = _convert_mode(im_frame) + + # To specify duration, add the time in milliseconds to getdata(), + # e.g. getdata(im_frame, duration=1000) + if not previous: + # global header + first_frame = getheader(im_frame, palette, im.encoderinfo)[0] + first_frame += getdata(im_frame, (0, 0), **im.encoderinfo) + else: + if first_frame: + for s in first_frame: + fp.write(s) + first_frame = None + + # delta frame + delta = ImageChops.subtract_modulo(im_frame, previous.copy()) + bbox = delta.getbbox() + + if bbox: + # compress difference + for s in getdata(im_frame.crop(bbox), + bbox[:2], **im.encoderinfo): + fp.write(s) + else: + # FIXME: what should we do in this case? + pass + previous = im_frame + if first_frame: + save_all = False + if not save_all: + header = getheader(im_out, palette, im.encoderinfo)[0] + for s in header: + fp.write(s) + + flags = 0 + + if get_interlace(im): + flags = flags | 64 + + # local image header + _get_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0, + RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + fp.write(b";") # end of file + + if hasattr(fp, "flush"): + fp.flush() + + +def get_interlace(im): + try: + interlace = im.encoderinfo["interlace"] + except KeyError: + interlace = 1 + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _get_local_header(fp, im, offset, flags): + transparent_color_exists = False + try: + transparency = im.encoderinfo["transparency"] + except KeyError: + pass + else: + transparency = int(transparency) + # optimize the block away if transparent color is not used + transparent_color_exists = True + + if _get_optimize(im, im.encoderinfo): + used_palette_colors = _get_used_palette_colors(im) + + # adjust the transparency index after optimize + if len(used_palette_colors) < 256: + for i in range(len(used_palette_colors)): + if used_palette_colors[i] == transparency: + transparency = i + transparent_color_exists = True + break + else: + transparent_color_exists = False + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + if transparent_color_exists or duration != 0: + transparency_flag = 1 if transparent_color_exists else 0 + if not transparent_color_exists: + transparency = 0 + + fp.write(b"!" 
+ + o8(249) + # extension intro + o8(4) + # length + o8(transparency_flag) + # transparency info present + o16(duration) + # duration + o8(transparency) + # transparency index + o8(0)) + + if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]) <= 255: + fp.write(b"!" + + o8(254) + # extension intro + o8(len(im.encoderinfo["comment"])) + + im.encoderinfo["comment"] + + o8(0)) + if "loop" in im.encoderinfo: + number_of_loops = im.encoderinfo["loop"] + fp.write(b"!" + + o8(255) + # extension intro + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(number_of_loops) + # number of loops + o8(0)) + fp.write(b"," + + o16(offset[0]) + # offset + o16(offset[1]) + + o16(im.size[0]) + # size + o16(im.size[1]) + + o8(flags) + # flags + o8(8)) # bits + + +def _save_netpbm(im, fp, filename): + + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. + + import os + from subprocess import Popen, check_call, PIPE, CalledProcessError + import tempfile + file = im._dump() + + if im.mode != "RGB": + with open(filename, 'wb') as f: + stderr = tempfile.TemporaryFile() + check_call(["ppmtogif", file], stdout=f, stderr=stderr) + else: + with open(filename, 'wb') as f: + + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (file, filename) + quant_cmd = ["ppmquant", "256", file] + togif_cmd = ["ppmtogif"] + stderr = tempfile.TemporaryFile() + quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=stderr) + stderr = tempfile.TemporaryFile() + togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, stdout=f, + stderr=stderr) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise CalledProcessError(retcode, togif_cmd) + + try: + os.unlink(file) + except OSError: + pass + + +# -------------------------------------------------------------------- +# GIF utilities + +def _get_optimize(im, info): + return im.mode in ("P", "L") and info and info.get("optimize", 0) + + +def _get_used_palette_colors(im): + used_palette_colors = [] + + # check which colors are used + i = 0 + for count in im.histogram(): + if count: + used_palette_colors.append(i) + i += 1 + + return used_palette_colors + + +def getheader(im, palette=None, info=None): + """Return a list of strings representing a GIF header""" + + # Header Block + # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + for extensionKey in ["transparency", "duration", "loop", "comment"]: + if info and extensionKey in info: + if ((extensionKey == "duration" and info[extensionKey] == 0) or + (extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255))): + continue + version = b"89a" + break + else: + if im.info.get("version") == "89a": + version = b"89a" + + header = [ + b"GIF"+version + # signature + version + o16(im.size[0]) + # canvas width + o16(im.size[1]) # canvas height + ] + + if im.mode == "P": + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = bytearray([i//3 for i in range(768)]) + + used_palette_colors = palette_bytes = None + + if _get_optimize(im, info): + used_palette_colors = 
_get_used_palette_colors(im)
+
+        # create the new palette if not every color is used
+        if len(used_palette_colors) < 256:
+            palette_bytes = b""
+            new_positions = {}
+
+            i = 0
+            # pick only the used colors from the palette
+            for oldPosition in used_palette_colors:
+                palette_bytes += source_palette[oldPosition*3:oldPosition*3+3]
+                new_positions[oldPosition] = i
+                i += 1
+
+            # replace the palette color id of all pixel with the new id
+            image_bytes = bytearray(im.tobytes())
+            for i in range(len(image_bytes)):
+                image_bytes[i] = new_positions[image_bytes[i]]
+            im.frombytes(bytes(image_bytes))
+            new_palette_bytes = (palette_bytes +
+                                 (768 - len(palette_bytes)) * b'\x00')
+            im.putpalette(new_palette_bytes)
+            im.palette = ImagePalette.ImagePalette("RGB",
+                                                   palette=palette_bytes,
+                                                   size=len(palette_bytes))
+
+    if not palette_bytes:
+        palette_bytes = source_palette
+
+    # Logical Screen Descriptor
+    # calculate the palette size for the header
+    import math
+    color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1
+    if color_table_size < 0:
+        color_table_size = 0
+    # size of global color table + global color table flag
+    header.append(o8(color_table_size + 128))
+    # background + reserved/aspect
+    if info and "background" in info:
+        background = info["background"]
+    elif "background" in im.info:
+        # This elif is redundant within GifImagePlugin
+        # since im.info parameters are bundled into the info dictionary
+        # However, external scripts may call getheader directly
+        # So this maintains earlier behaviour
+        background = im.info["background"]
+    else:
+        background = 0
+    header.append(o8(background) + o8(0))
+    # end of Logical Screen Descriptor
+
+    # add the missing amount of bytes
+    # the palette has to be 2<<n in size
+    actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3
+    if actual_target_size_diff > 0:
+        palette_bytes += o8(0) * 3 * actual_target_size_diff
+
+    # Header + Logical Screen Descriptor + Global Color Table
+    header.append(palette_bytes)
+    return header, used_palette_colors
+
+
+def getdata(im, offset=(0, 0), **params):
+    """Return a list of strings representing this image.
+ The first string is a local image header, the rest contains + encoded image data.""" + + class Collector(object): + data = [] + + def write(self, data): + self.data.append(data) + + im.load() # make sure raster data is available + + fp = Collector() + + try: + im.encoderinfo = params + + # local image header + _get_local_header(fp, im, offset, 0) + + ImageFile._save(im, fp, [("gif", (0, 0)+im.size, 0, RAWMODE[im.mode])]) + + fp.write(b"\0") # end of image data + + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GifImageFile.format, GifImageFile, _accept) +Image.register_save(GifImageFile.format, _save) +Image.register_save_all(GifImageFile.format, _save_all) +Image.register_extension(GifImageFile.format, ".gif") +Image.register_mime(GifImageFile.format, "image/gif") + +# +# Uncomment the following line if you wish to use NETPBM/PBMPLUS +# instead of the built-in "uncompressed" GIF encoder + +# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/server/www/packages/packages-windows/x86/PIL/GimpGradientFile.py b/server/www/packages/packages-windows/x86/PIL/GimpGradientFile.py new file mode 100644 index 0000000..45af573 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/GimpGradientFile.py @@ -0,0 +1,137 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read (and render) GIMP gradient files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from math import pi, log, sin, sqrt +from PIL._binary import o8 + +# -------------------------------------------------------------------- +# Stuff to translate curve segments to palette values (derived from +# the corresponding code in GIMP, written by Federico Mena Quintero. +# See the GIMP distribution for more information.) 
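+#
+# (Worked example, ours: each gradient segment maps a position pos in
+# [x0, x1] to a blend factor via one of the functions below; for the
+# "linear" shape with midpoint 0.5,
+#
+#     linear(0.5, 0.25) == 0.25      and      linear(0.5, 0.75) == 0.75,
+#
+# while "curved", "sine" and the two "sphere" shapes bend that ramp.)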
+# + +EPSILON = 1e-10 + + +def linear(middle, pos): + if pos <= middle: + if middle < EPSILON: + return 0.0 + else: + return 0.5 * pos / middle + else: + pos = pos - middle + middle = 1.0 - middle + if middle < EPSILON: + return 1.0 + else: + return 0.5 + 0.5 * pos / middle + + +def curved(middle, pos): + return pos ** (log(0.5) / log(max(middle, EPSILON))) + + +def sine(middle, pos): + return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + + +def sphere_increasing(middle, pos): + return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + + +def sphere_decreasing(middle, pos): + return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) + +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] + + +class GradientFile(object): + + gradient = None + + def getpalette(self, entries=256): + + palette = [] + + ix = 0 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + for i in range(entries): + + x = i / float(entries-1) + + while x1 < x: + ix += 1 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + w = x1 - x0 + + if w < EPSILON: + scale = segment(0.5, 0.5) + else: + scale = segment((xm - x0) / w, (x - x0) / w) + + # expand to RGBA + r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) + g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) + b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) + a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) + + # add to palette + palette.append(r + g + b + a) + + return b"".join(palette), "RGBA" + + +## +# File handler for GIMP's gradient format. + +class GimpGradientFile(GradientFile): + + def __init__(self, fp): + + if fp.readline()[:13] != b"GIMP Gradient": + raise SyntaxError("not a GIMP gradient file") + + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) + + gradient = [] + + for i in range(count): + + s = fp.readline().split() + w = [float(x) for x in s[:11]] + + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] + + segment = SEGMENTS[int(s[11])] + cspace = int(s[12]) + + if cspace != 0: + raise IOError("cannot handle HSV colour space") + + gradient.append((x0, x1, xm, rgb0, rgb1, segment)) + + self.gradient = gradient diff --git a/server/www/packages/packages-windows/x86/PIL/GimpPaletteFile.py b/server/www/packages/packages-windows/x86/PIL/GimpPaletteFile.py new file mode 100644 index 0000000..4bf3ca3 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/GimpPaletteFile.py @@ -0,0 +1,62 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read GIMP palette files +# +# History: +# 1997-08-23 fl Created +# 2004-09-07 fl Support GIMP 2.0 palette files. +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1997-2004. +# +# See the README file for information on usage and redistribution. +# + +import re +from PIL._binary import o8 + + +## +# File handler for GIMP's palette format. 
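+#
+# (Illustrative, not taken from a real file -- the text layout the parser
+# below expects is roughly:
+#
+#     GIMP Palette
+#     Name: Example
+#     # comment lines and "Key: value" fields are skipped
+#     255   0   0
+#       0 255   0
+#       0   0 255
+# )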
+ +class GimpPaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [o8(i)*3 for i in range(256)] + + if fp.readline()[:12] != b"GIMP Palette": + raise SyntaxError("not a GIMP palette file") + + i = 0 + + while i <= 255: + + s = fp.readline() + + if not s: + break + # skip fields and comment lines + if re.match(b"\w+:|#", s): + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = tuple(map(int, s.split()[:3])) + if len(v) != 3: + raise ValueError("bad palette entry") + + if 0 <= i <= 255: + self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) + + i += 1 + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/server/www/packages/packages-windows/x86/PIL/GribStubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/GribStubImagePlugin.py new file mode 100644 index 0000000..8ffad81 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/GribStubImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library +# $Id$ +# +# GRIB stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific GRIB image handler. +# +# @param handler Handler object. + +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[0:4] == b"GRIB" and prefix[7] == b'\x01' + + +class GribStubImageFile(ImageFile.StubImageFile): + + format = "GRIB" + format_description = "GRIB" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a GRIB file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("GRIB save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) +Image.register_save(GribStubImageFile.format, _save) + +Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/server/www/packages/packages-windows/x86/PIL/Hdf5StubImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/Hdf5StubImagePlugin.py new file mode 100644 index 0000000..f7945be --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/Hdf5StubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# HDF5 stub adapter +# +# Copyright (c) 2000-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific HDF5 image handler. +# +# @param handler Handler object. 
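+#
+# (A minimal sketch of the handler protocol, ours -- the stub only needs
+# open(im), load(im) and, for saving, save(im, fp, filename):
+#
+#     class DummyHDF5Handler(object):
+#         def open(self, im):
+#             pass                  # called from HDF5StubImageFile._open
+#         def load(self, im):
+#             raise IOError("HDF5 loading not implemented")
+#         def save(self, im, fp, filename):
+#             raise IOError("HDF5 saving not implemented")
+#
+#     register_handler(DummyHDF5Handler())
+# )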
+ +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[:8] == b"\x89HDF\r\n\x1a\n" + + +class HDF5StubImageFile(ImageFile.StubImageFile): + + format = "HDF5" + format_description = "HDF5" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not an HDF file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("HDF5 save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) +Image.register_save(HDF5StubImageFile.format, _save) + +Image.register_extension(HDF5StubImageFile.format, ".h5") +Image.register_extension(HDF5StubImageFile.format, ".hdf") diff --git a/server/www/packages/packages-windows/x86/PIL/IcnsImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/IcnsImagePlugin.py new file mode 100644 index 0000000..a4366e9 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/IcnsImagePlugin.py @@ -0,0 +1,366 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Mac OS X icns file decoder, based on icns.py by Bob Ippolito. +# +# history: +# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. +# +# Copyright (c) 2004 by Bob Ippolito. +# Copyright (c) 2004 by Secret Labs. +# Copyright (c) 2004 by Fredrik Lundh. +# Copyright (c) 2014 by Alastair Houghton. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, PngImagePlugin, _binary +import io +import os +import shutil +import struct +import sys +import tempfile + +enable_jpeg2k = hasattr(Image.core, 'jp2klib_version') +if enable_jpeg2k: + from PIL import Jpeg2KImagePlugin + +i8 = _binary.i8 + +HEADERSIZE = 8 + + +def nextheader(fobj): + return struct.unpack('>4sI', fobj.read(HEADERSIZE)) + + +def read_32t(fobj, start_length, size): + # The 128x128 icon seems to have an extra header for some reason. + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(4) + if sig != b'\x00\x00\x00\x00': + raise SyntaxError('Unknown signature, expecting 0x00000000') + return read_32(fobj, (start + 4, length - 4), size) + + +def read_32(fobj, start_length, size): + """ + Read a 32bit RGB icon resource. Seems to be either uncompressed or + an RLE packbits-like scheme. 
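+
+    (Descriptive note, inferred from the decoder below: a length byte with
+    the high bit set means "repeat the next byte (value - 125) times",
+    otherwise it introduces a literal run of (value + 1) bytes.)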
+ """ + (start, length) = start_length + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + if length == sizesq * 3: + # uncompressed ("RGBRGBGB") + indata = fobj.read(length) + im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) + else: + # decode image + im = Image.new("RGB", pixel_size, None) + for band_ix in range(3): + data = [] + bytesleft = sizesq + while bytesleft > 0: + byte = fobj.read(1) + if not byte: + break + byte = i8(byte) + if byte & 0x80: + blocksize = byte - 125 + byte = fobj.read(1) + for i in range(blocksize): + data.append(byte) + else: + blocksize = byte + 1 + data.append(fobj.read(blocksize)) + bytesleft -= blocksize + if bytesleft <= 0: + break + if bytesleft != 0: + raise SyntaxError( + "Error reading channel [%r left]" % bytesleft + ) + band = Image.frombuffer( + "L", pixel_size, b"".join(data), "raw", "L", 0, 1 + ) + im.im.putband(band.im, band_ix) + return {"RGB": im} + + +def read_mk(fobj, start_length, size): + # Alpha masks seem to be uncompressed + start = start_length[0] + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + band = Image.frombuffer( + "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1 + ) + return {"A": band} + + +def read_png_or_jpeg2000(fobj, start_length, size): + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(12) + if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a': + fobj.seek(start) + im = PngImagePlugin.PngImageFile(fobj) + return {"RGBA": im} + elif sig[:4] == b'\xff\x4f\xff\x51' \ + or sig[:4] == b'\x0d\x0a\x87\x0a' \ + or sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + if not enable_jpeg2k: + raise ValueError('Unsupported icon subimage format (rebuild PIL ' + 'with JPEG 2000 support to fix this)') + # j2k, jpc or j2c + fobj.seek(start) + jp2kstream = fobj.read(length) + f = io.BytesIO(jp2kstream) + im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) + if im.mode != 'RGBA': + im = im.convert('RGBA') + return {"RGBA": im} + else: + raise ValueError('Unsupported icon subimage format') + + +class IcnsFile(object): + + SIZES = { + (512, 512, 2): [ + (b'ic10', read_png_or_jpeg2000), + ], + (512, 512, 1): [ + (b'ic09', read_png_or_jpeg2000), + ], + (256, 256, 2): [ + (b'ic14', read_png_or_jpeg2000), + ], + (256, 256, 1): [ + (b'ic08', read_png_or_jpeg2000), + ], + (128, 128, 2): [ + (b'ic13', read_png_or_jpeg2000), + ], + (128, 128, 1): [ + (b'ic07', read_png_or_jpeg2000), + (b'it32', read_32t), + (b't8mk', read_mk), + ], + (64, 64, 1): [ + (b'icp6', read_png_or_jpeg2000), + ], + (32, 32, 2): [ + (b'ic12', read_png_or_jpeg2000), + ], + (48, 48, 1): [ + (b'ih32', read_32), + (b'h8mk', read_mk), + ], + (32, 32, 1): [ + (b'icp5', read_png_or_jpeg2000), + (b'il32', read_32), + (b'l8mk', read_mk), + ], + (16, 16, 2): [ + (b'ic11', read_png_or_jpeg2000), + ], + (16, 16, 1): [ + (b'icp4', read_png_or_jpeg2000), + (b'is32', read_32), + (b's8mk', read_mk), + ], + } + + def __init__(self, fobj): + """ + fobj is a file-like object as an icns resource + """ + # signature : (start, length) + self.dct = dct = {} + self.fobj = fobj + sig, filesize = nextheader(fobj) + if sig != b'icns': + raise SyntaxError('not an icns file') + i = HEADERSIZE + while i < filesize: + sig, blocksize = nextheader(fobj) + if blocksize <= 0: + raise SyntaxError('invalid block header') + i += HEADERSIZE + blocksize -= HEADERSIZE + dct[sig] = (i, blocksize) + fobj.seek(blocksize, 1) + i += blocksize + + def itersizes(self): + sizes 
= [] + for size, fmts in self.SIZES.items(): + for (fmt, reader) in fmts: + if fmt in self.dct: + sizes.append(size) + break + return sizes + + def bestsize(self): + sizes = self.itersizes() + if not sizes: + raise SyntaxError("No 32bit icon resources found") + return max(sizes) + + def dataforsize(self, size): + """ + Get an icon resource as {channel: array}. Note that + the arrays are bottom-up like windows bitmaps and will likely + need to be flipped or transposed in some way. + """ + dct = {} + for code, reader in self.SIZES[size]: + desc = self.dct.get(code) + if desc is not None: + dct.update(reader(self.fobj, desc, size)) + return dct + + def getimage(self, size=None): + if size is None: + size = self.bestsize() + if len(size) == 2: + size = (size[0], size[1], 1) + channels = self.dataforsize(size) + + im = channels.get('RGBA', None) + if im: + return im + + im = channels.get("RGB").copy() + try: + im.putalpha(channels["A"]) + except KeyError: + pass + return im + + +## +# Image plugin for Mac OS icons. + +class IcnsImageFile(ImageFile.ImageFile): + """ + PIL image support for Mac OS .icns files. + Chooses the best resolution, but will possibly load + a different size image if you mutate the size attribute + before calling 'load'. + + The info dictionary has a key 'sizes' that is a list + of sizes that the icns file has. + """ + + format = "ICNS" + format_description = "Mac OS icns resource" + + def _open(self): + self.icns = IcnsFile(self.fp) + self.mode = 'RGBA' + self.best_size = self.icns.bestsize() + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + self.info['sizes'] = self.icns.itersizes() + # Just use this to see if it's loaded or not yet. + self.tile = ('',) + + def load(self): + if len(self.size) == 3: + self.best_size = self.size + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + + Image.Image.load(self) + if not self.tile: + return + self.load_prepare() + # This is likely NOT the best way to do it, but whatever. + im = self.icns.getimage(self.best_size) + + # If this is a PNG or JPEG 2000, it won't be loaded yet + im.load() + + self.im = im.im + self.mode = im.mode + self.size = im.size + self.fp = None + self.icns = None + self.tile = () + self.load_end() + + +def _save(im, fp, filename): + """ + Saves the image as a series of PNG files, + that are then converted to a .icns file + using the OS X command line utility 'iconutil'. + + OS X only. 
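+
+    Typical use is simply (illustrative)::
+
+        im.save("icon.icns")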
+ """ + if hasattr(fp, "flush"): + fp.flush() + + # create the temporary set of pngs + iconset = tempfile.mkdtemp('.iconset') + last_w = None + last_im = None + for w in [16, 32, 128, 256, 512]: + prefix = 'icon_{}x{}'.format(w, w) + + if last_w == w: + im_scaled = last_im + else: + im_scaled = im.resize((w, w), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'.png')) + + im_scaled = im.resize((w*2, w*2), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'@2x.png')) + last_im = im_scaled + + # iconutil -c icns -o {} {} + from subprocess import Popen, PIPE, CalledProcessError + + convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset] + stderr = tempfile.TemporaryFile() + convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=stderr) + + convert_proc.stdout.close() + + retcode = convert_proc.wait() + + # remove the temporary files + shutil.rmtree(iconset) + + if retcode: + raise CalledProcessError(retcode, convert_cmd) + +Image.register_open(IcnsImageFile.format, IcnsImageFile, + lambda x: x[:4] == b'icns') +Image.register_extension(IcnsImageFile.format, '.icns') + +if sys.platform == 'darwin': + Image.register_save(IcnsImageFile.format, _save) + + Image.register_mime(IcnsImageFile.format, "image/icns") + + +if __name__ == '__main__': + imf = IcnsImageFile(open(sys.argv[1], 'rb')) + for size in imf.info['sizes']: + imf.size = size + imf.load() + im = imf.im + im.save('out-%s-%s-%s.png' % size) + im = Image.open(open(sys.argv[1], "rb")) + im.save("out.png") + if sys.platform == 'windows': + os.startfile("out.png") diff --git a/server/www/packages/packages-windows/x86/PIL/IcoImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/IcoImagePlugin.py new file mode 100644 index 0000000..a01aed3 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/IcoImagePlugin.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Icon support for PIL +# +# History: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . +# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki +# +# Icon format references: +# * https://en.wikipedia.org/wiki/ICO_(file_format) +# * https://msdn.microsoft.com/en-us/library/ms997538.aspx + + +import struct +from io import BytesIO + +from PIL import Image, ImageFile, BmpImagePlugin, PngImagePlugin, _binary +from math import log, ceil + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le + +_MAGIC = b"\0\0\1\0" + + +def _save(im, fp, filename): + fp.write(_MAGIC) # (2+2) + sizes = im.encoderinfo.get("sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), + (64, 64), (128, 128), (255, 255)]) + width, height = im.size + filter(lambda x: False if (x[0] > width or x[1] > height or + x[0] > 255 or x[1] > 255) else True, sizes) + fp.write(struct.pack("=8bpp) + 'reserved': i8(s[3]), + 'planes': i16(s[4:]), + 'bpp': i16(s[6:]), + 'size': i32(s[8:]), + 'offset': i32(s[12:]) + } + + # See Wikipedia + for j in ('width', 'height'): + if not icon_header[j]: + icon_header[j] = 256 + + # See Wikipedia notes about color depth. 
+ # We need this just to differ images with equal sizes + icon_header['color_depth'] = (icon_header['bpp'] or + (icon_header['nb_color'] != 0 and + ceil(log(icon_header['nb_color'], + 2))) or 256) + + icon_header['dim'] = (icon_header['width'], icon_header['height']) + icon_header['square'] = (icon_header['width'] * + icon_header['height']) + + self.entry.append(icon_header) + + self.entry = sorted(self.entry, key=lambda x: x['color_depth']) + # ICO images are usually squares + # self.entry = sorted(self.entry, key=lambda x: x['width']) + self.entry = sorted(self.entry, key=lambda x: x['square']) + self.entry.reverse() + + def sizes(self): + """ + Get a list of all available icon sizes and color depths. + """ + return set((h['width'], h['height']) for h in self.entry) + + def getimage(self, size, bpp=False): + """ + Get an image from the icon + """ + for (i, h) in enumerate(self.entry): + if size == h['dim'] and (bpp is False or bpp == h['color_depth']): + return self.frame(i) + return self.frame(0) + + def frame(self, idx): + """ + Get an image from frame idx + """ + + header = self.entry[idx] + + self.buf.seek(header['offset']) + data = self.buf.read(8) + self.buf.seek(header['offset']) + + if data[:8] == PngImagePlugin._MAGIC: + # png frame + im = PngImagePlugin.PngImageFile(self.buf) + else: + # XOR + AND mask bmp frame + im = BmpImagePlugin.DibImageFile(self.buf) + + # change tile dimension to only encompass XOR image + im.size = (im.size[0], int(im.size[1] / 2)) + d, e, o, a = im.tile[0] + im.tile[0] = d, (0, 0) + im.size, o, a + + # figure out where AND mask image starts + mode = a[0] + bpp = 8 + for k in BmpImagePlugin.BIT2MODE.keys(): + if mode == BmpImagePlugin.BIT2MODE[k][1]: + bpp = k + break + + if 32 == bpp: + # 32-bit color depth icon image allows semitransparent areas + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. + + # Back up to start of bmp data + self.buf.seek(o) + # extract every 4th byte (eg. 3,7,11,15,...) + alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] + + # convert to an 8bpp grayscale image + mask = Image.frombuffer( + 'L', # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + 'raw', # raw decoder + ('L', 0, -1) # 8bpp inverted, unpadded, reversed + ) + else: + # get AND image from end of bitmap + w = im.size[0] + if (w % 32) > 0: + # bitmap row data is aligned to word boundaries + w += 32 - (im.size[0] % 32) + + # the total mask data is + # padded row size * height / bits per char + + and_mask_offset = o + int(im.size[0] * im.size[1] * + (bpp / 8.0)) + total_bytes = int((w * im.size[1]) / 8) + + self.buf.seek(and_mask_offset) + maskData = self.buf.read(total_bytes) + + # convert raw data to image + mask = Image.frombuffer( + '1', # 1 bpp + im.size, # (w, h) + maskData, # source chars + 'raw', # raw decoder + ('1;I', int(w/8), -1) # 1bpp inverted, padded, reversed + ) + + # now we have two images, im is XOR image and mask is AND image + + # apply mask image as alpha channel + im = im.convert('RGBA') + im.putalpha(mask) + + return im + + +## +# Image plugin for Windows Icon files. + +class IcoImageFile(ImageFile.ImageFile): + """ + PIL read-only image support for Microsoft Windows .ico files. + + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. + + The info dictionary has a key 'sizes' that is a list of the sizes available + in the icon file. 
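+
+    For example (illustrative)::
+
+        im = Image.open("app.ico")
+        print(im.info['sizes'])     # e.g. set([(16, 16), (32, 32)])
+        im.size = (16, 16)          # choose a size before calling load()
+        im.load()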
+ + Handles classic, XP and Vista icon formats. + + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . + https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki + """ + format = "ICO" + format_description = "Windows Icon" + + def _open(self): + self.ico = IcoFile(self.fp) + self.info['sizes'] = self.ico.sizes() + self.size = self.ico.entry[0]['dim'] + self.load() + + def load(self): + im = self.ico.getimage(self.size) + # if tile is PNG, it won't really be loaded yet + im.load() + self.im = im.im + self.mode = im.mode + self.size = im.size + + def load_seek(self): + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. + pass +# +# -------------------------------------------------------------------- + +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") diff --git a/server/www/packages/packages-windows/x86/PIL/ImImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/ImImagePlugin.py new file mode 100644 index 0000000..dd4f829 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImImagePlugin.py @@ -0,0 +1,355 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IFUNC IM file handling for PIL +# +# history: +# 1995-09-01 fl Created. +# 1997-01-03 fl Save palette images +# 1997-01-08 fl Added sequence support +# 1997-01-23 fl Added P and RGB save support +# 1997-05-31 fl Read floating point images +# 1997-06-22 fl Save floating point images +# 1997-08-27 fl Read and save 1-bit images +# 1998-06-25 fl Added support for RGB+LUT images +# 1998-07-02 fl Added support for YCC images +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 1998-12-29 fl Added I;16 support +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# 2003-09-26 fl Added LA/PA support +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
+# + + +import re +from PIL import Image, ImageFile, ImagePalette +from PIL._binary import i8 + +__version__ = "0.7" + + +# -------------------------------------------------------------------- +# Standard tags + +COMMENT = "Comment" +DATE = "Date" +EQUIPMENT = "Digitalization equipment" +FRAMES = "File size (no of images)" +LUT = "Lut" +NAME = "Name" +SCALE = "Scale (x,y)" +SIZE = "Image size (x*y)" +MODE = "Image type" + +TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0, + SCALE: 0, SIZE: 0, MODE: 0} + +OPEN = { + # ifunc93/p3cfunc formats + "0 1 image": ("1", "1"), + "L 1 image": ("1", "1"), + "Greyscale image": ("L", "L"), + "Grayscale image": ("L", "L"), + "RGB image": ("RGB", "RGB;L"), + "RLB image": ("RGB", "RLB"), + "RYB image": ("RGB", "RLB"), + "B1 image": ("1", "1"), + "B2 image": ("P", "P;2"), + "B4 image": ("P", "P;4"), + "X 24 image": ("RGB", "RGB"), + "L 32 S image": ("I", "I;32"), + "L 32 F image": ("F", "F;32"), + # old p3cfunc formats + "RGB3 image": ("RGB", "RGB;T"), + "RYB3 image": ("RGB", "RYB;T"), + # extensions + "LA image": ("LA", "LA;L"), + "RGBA image": ("RGBA", "RGBA;L"), + "RGBX image": ("RGBX", "RGBX;L"), + "CMYK image": ("CMYK", "CMYK;L"), + "YCC image": ("YCbCr", "YCbCr;L"), +} + +# ifunc95 extensions +for i in ["8", "8S", "16", "16S", "32", "32F"]: + OPEN["L %s image" % i] = ("F", "F;%s" % i) + OPEN["L*%s image" % i] = ("F", "F;%s" % i) +for i in ["16", "16L", "16B"]: + OPEN["L %s image" % i] = ("I;%s" % i, "I;%s" % i) + OPEN["L*%s image" % i] = ("I;%s" % i, "I;%s" % i) +for i in ["32S"]: + OPEN["L %s image" % i] = ("I", "I;%s" % i) + OPEN["L*%s image" % i] = ("I", "I;%s" % i) +for i in range(2, 33): + OPEN["L*%s image" % i] = ("F", "F;%s" % i) + + +# -------------------------------------------------------------------- +# Read IM directory + +split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + + +def number(s): + try: + return int(s) + except ValueError: + return float(s) + + +## +# Image plugin for the IFUNC IM file format. + +class ImImageFile(ImageFile.ImageFile): + + format = "IM" + format_description = "IFUNC Image Memory" + + def _open(self): + + # Quick rejection: if there's not an LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + n = 0 + + # Default values + self.info[MODE] = "L" + self.info[SIZE] = (512, 512) + self.info[FRAMES] = 1 + + self.rawmode = "L" + + while True: + + s = self.fp.read(1) + + # Some versions of IFUNC uses \n\r instead of \r\n... + if s == b"\r": + continue + + if not s or s == b'\0' or s == b'\x1A': + break + + # FIXME: this may read whole file if not a text file + s = s + self.fp.readline() + + if len(s) > 100: + raise SyntaxError("not an IM file") + + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] == b'\n': + s = s[:-1] + + try: + m = split.match(s) + except re.error as v: + raise SyntaxError("not an IM file") + + if m: + + k, v = m.group(1, 2) + + # Don't know if this is the correct encoding, + # but a decent guess (I guess) + k = k.decode('latin-1', 'replace') + v = v.decode('latin-1', 'replace') + + # Convert value as appropriate + if k in [FRAMES, SCALE, SIZE]: + v = v.replace("*", ",") + v = tuple(map(number, v.split(","))) + if len(v) == 1: + v = v[0] + elif k == MODE and v in OPEN: + v, self.rawmode = OPEN[v] + + # Add to dictionary. Note that COMMENT tags are + # combined into a list of strings. 
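+                # (Illustrative: typical header lines parsed here look
+                # like "Image type: Greyscale image" or
+                # "Image size (x*y): 640*480", matched by the regexp
+                # `split` above.)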
+ if k == COMMENT: + if k in self.info: + self.info[k].append(v) + else: + self.info[k] = [v] + else: + self.info[k] = v + + if k in TAGS: + n += 1 + + else: + + raise SyntaxError("Syntax error in IM header: " + + s.decode('ascii', 'replace')) + + if not n: + raise SyntaxError("Not an IM file") + + # Basic attributes + self.size = self.info[SIZE] + self.mode = self.info[MODE] + + # Skip forward to start of image data + while s and s[0:1] != b'\x1A': + s = self.fp.read(1) + if not s: + raise SyntaxError("File truncated") + + if LUT in self.info: + # convert lookup table to palette or lut attribute + palette = self.fp.read(768) + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette + for i in range(256): + if palette[i] == palette[i+256] == palette[i+512]: + if i8(palette[i]) != i: + linear = 0 + else: + greyscale = 0 + if self.mode == "L" or self.mode == "LA": + if greyscale: + if not linear: + self.lut = [i8(c) for c in palette[:256]] + else: + if self.mode == "L": + self.mode = self.rawmode = "P" + elif self.mode == "LA": + self.mode = self.rawmode = "PA" + self.palette = ImagePalette.raw("RGB;L", palette) + elif self.mode == "RGB": + if not greyscale or not linear: + self.lut = [i8(c) for c in palette] + + self.frame = 0 + + self.__offset = offs = self.fp.tell() + + self.__fp = self.fp # FIXME: hack + + if self.rawmode[:2] == "F;": + + # ifunc95 formats + try: + # use bit decoder (if necessary) + bits = int(self.rawmode[2:]) + if bits not in [8, 16, 32]: + self.tile = [("bit", (0, 0)+self.size, offs, + (bits, 8, 3, 0, -1))] + return + except ValueError: + pass + + if self.rawmode in ["RGB;T", "RYB;T"]: + # Old LabEye/3PC files. Would be very surprised if anyone + # ever stumbled upon such a file ;-) + size = self.size[0] * self.size[1] + self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)), + ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)), + ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))] + else: + # LabEye/IFUNC files + self.tile = [("raw", (0, 0)+self.size, offs, + (self.rawmode, 0, -1))] + + @property + def n_frames(self): + return self.info[FRAMES] + + @property + def is_animated(self): + return self.info[FRAMES] > 1 + + def seek(self, frame): + + if frame < 0 or frame >= self.info[FRAMES]: + raise EOFError("seek outside sequence") + + if self.frame == frame: + return + + self.frame = frame + + if self.mode == "1": + bits = 1 + else: + bits = 8 * len(self.mode) + + size = ((self.size[0] * bits + 7) // 8) * self.size[1] + offs = self.__offset + frame * size + + self.fp = self.__fp + + self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))] + + def tell(self): + + return self.frame + +# +# -------------------------------------------------------------------- +# Save IM files + +SAVE = { + # mode: (im type, raw mode) + "1": ("0 1", "1"), + "L": ("Greyscale", "L"), + "LA": ("LA", "LA;L"), + "P": ("Greyscale", "P"), + "PA": ("LA", "PA;L"), + "I": ("L 32S", "I;32S"), + "I;16": ("L 16", "I;16"), + "I;16L": ("L 16L", "I;16L"), + "I;16B": ("L 16B", "I;16B"), + "F": ("L 32F", "F;32F"), + "RGB": ("RGB", "RGB;L"), + "RGBA": ("RGBA", "RGBA;L"), + "RGBX": ("RGBX", "RGBX;L"), + "CMYK": ("CMYK", "CMYK;L"), + "YCbCr": ("YCC", "YCbCr;L") +} + + +def _save(im, fp, filename, check=0): + + try: + image_type, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as IM" % im.mode) + + try: + frames = im.encoderinfo["frames"] + except KeyError: + frames = 1 + + if check: + return check + + fp.write(("Image type: %s image\r\n" % 
image_type).encode('ascii')) + if filename: + fp.write(("Name: %s\r\n" % filename).encode('ascii')) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii')) + fp.write(("File size (no of images): %d\r\n" % frames).encode('ascii')) + if im.mode == "P": + fp.write(b"Lut: 1\r\n") + fp.write(b"\000" * (511-fp.tell()) + b"\032") + if im.mode == "P": + fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) + +Image.register_extension(ImImageFile.format, ".im") diff --git a/server/www/packages/packages-windows/x86/PIL/Image.py b/server/www/packages/packages-windows/x86/PIL/Image.py new file mode 100644 index 0000000..64f4613 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/Image.py @@ -0,0 +1,2522 @@ +# +# The Python Imaging Library. +# $Id$ +# +# the Image class wrapper +# +# partial release history: +# 1995-09-09 fl Created +# 1996-03-11 fl PIL release 0.0 (proof of concept) +# 1996-04-30 fl PIL release 0.1b1 +# 1999-07-28 fl PIL release 1.0 final +# 2000-06-07 fl PIL release 1.1 +# 2000-10-20 fl PIL release 1.1.1 +# 2001-05-07 fl PIL release 1.1.2 +# 2002-03-15 fl PIL release 1.1.3 +# 2003-05-10 fl PIL release 1.1.4 +# 2005-03-28 fl PIL release 1.1.5 +# 2006-12-02 fl PIL release 1.1.6 +# 2009-11-15 fl PIL release 1.1.7 +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import VERSION, PILLOW_VERSION, _plugins + +import logging +import warnings +import math + +logger = logging.getLogger(__name__) + + +class DecompressionBombWarning(RuntimeWarning): + pass + + +class _imaging_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imaging C module is not installed") + + +# Limit to around a quarter gigabyte for a 24 bit (3 bpp) image +MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 / 4 / 3) + +try: + # give Tk a chance to set up the environment, in case we're + # using an _imaging module linked against libtcl/libtk (use + # __import__ to hide this from naive packagers; we don't really + # depend on Tk unless ImageTk is used, and that module already + # imports Tkinter) + __import__("FixTk") +except ImportError: + pass + +try: + # If the _imaging C module is not present, Pillow will not load. + # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. + from PIL import _imaging as core + if PILLOW_VERSION != getattr(core, 'PILLOW_VERSION', None): + raise ImportError("The _imaging extension was built for another " + " version of Pillow or PIL") + +except ImportError as v: + core = _imaging_not_installed() + # Explanations for ways that we know we might have an import error + if str(v).startswith("Module use of python"): + # The _imaging C module is present, but not compiled for + # the right version (windows only). Print a warning, if + # possible. 
+ warnings.warn( + "The _imaging extension was built for another version " + "of Python.", + RuntimeWarning + ) + elif str(v).startswith("The _imaging extension"): + warnings.warn(str(v), RuntimeWarning) + elif "Symbol not found: _PyUnicodeUCS2_" in str(v): + # should match _PyUnicodeUCS2_FromString and + # _PyUnicodeUCS2_AsLatin1String + warnings.warn( + "The _imaging extension was built for Python with UCS2 support; " + "recompile Pillow or build Python --without-wide-unicode. ", + RuntimeWarning + ) + elif "Symbol not found: _PyUnicodeUCS4_" in str(v): + # should match _PyUnicodeUCS4_FromString and + # _PyUnicodeUCS4_AsLatin1String + warnings.warn( + "The _imaging extension was built for Python with UCS4 support; " + "recompile Pillow or build Python --with-wide-unicode. ", + RuntimeWarning + ) + # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting.rst + raise + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +from PIL import ImageMode +from PIL._binary import i8 +from PIL._util import isPath +from PIL._util import isStringType +from PIL._util import deferred_error + +import os +import sys +import io +import struct + +# type stuff +import collections +import numbers + +# works everywhere, win for pypy, not cpython +USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info') +try: + import cffi + HAS_CFFI = True +except ImportError: + HAS_CFFI = False + + +def isImageType(t): + """ + Checks if an object is an image object. + + .. warning:: + + This function is for internal use only. + + :param t: object to check if it's an image + :returns: True if the object is an image + """ + return hasattr(t, "im") + +# +# Constants (also defined in _imagingmodule.c!) + +NONE = 0 + +# transpose +FLIP_LEFT_RIGHT = 0 +FLIP_TOP_BOTTOM = 1 +ROTATE_90 = 2 +ROTATE_180 = 3 +ROTATE_270 = 4 +TRANSPOSE = 5 + +# transforms +AFFINE = 0 +EXTENT = 1 +PERSPECTIVE = 2 +QUAD = 3 +MESH = 4 + +# resampling filters +NEAREST = NONE = 0 +LANCZOS = ANTIALIAS = 1 +BILINEAR = LINEAR = 2 +BICUBIC = CUBIC = 3 + +# dithers +NONE = 0 +NEAREST = 0 +ORDERED = 1 # Not yet implemented +RASTERIZE = 2 # Not yet implemented +FLOYDSTEINBERG = 3 # default + +# palettes/quantizers +WEB = 0 +ADAPTIVE = 1 + +MEDIANCUT = 0 +MAXCOVERAGE = 1 +FASTOCTREE = 2 +LIBIMAGEQUANT = 3 + +# categories +NORMAL = 0 +SEQUENCE = 1 +CONTAINER = 2 + +if hasattr(core, 'DEFAULT_STRATEGY'): + DEFAULT_STRATEGY = core.DEFAULT_STRATEGY + FILTERED = core.FILTERED + HUFFMAN_ONLY = core.HUFFMAN_ONLY + RLE = core.RLE + FIXED = core.FIXED + + +# -------------------------------------------------------------------- +# Registries + +ID = [] +OPEN = {} +MIME = {} +SAVE = {} +SAVE_ALL = {} +EXTENSION = {} + +# -------------------------------------------------------------------- +# Modes supported by this version + +_MODEINFO = { + # NOTE: this table will be removed in future versions. use + # getmode* functions or ImageMode descriptors instead. + + # official modes + "1": ("L", "L", ("1",)), + "L": ("L", "L", ("L",)), + "I": ("L", "I", ("I",)), + "F": ("L", "F", ("F",)), + "P": ("RGB", "L", ("P",)), + "RGB": ("RGB", "L", ("R", "G", "B")), + "RGBX": ("RGB", "L", ("R", "G", "B", "X")), + "RGBA": ("RGB", "L", ("R", "G", "B", "A")), + "CMYK": ("RGB", "L", ("C", "M", "Y", "K")), + "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")), + "LAB": ("RGB", "L", ("L", "A", "B")), + "HSV": ("RGB", "L", ("H", "S", "V")), + + # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and + # BGR;24. 
Use these modes only if you know exactly what you're
+    # doing...
+
+}
+
+if sys.byteorder == 'little':
+    _ENDIAN = '<'
+else:
+    _ENDIAN = '>'
+
+_MODE_CONV = {
+    # official modes
+    "1": ('|b1', None),  # broken
+    "L": ('|u1', None),
+    "LA": ('|u1', 2),
+    "I": (_ENDIAN + 'i4', None),
+    "F": (_ENDIAN + 'f4', None),
+    "P": ('|u1', None),
+    "RGB": ('|u1', 3),
+    "RGBX": ('|u1', 4),
+    "RGBA": ('|u1', 4),
+    "CMYK": ('|u1', 4),
+    "YCbCr": ('|u1', 3),
+    "LAB": ('|u1', 3),  # UNDONE - unsigned |u1i1i1
+    "HSV": ('|u1', 3),
+    # I;16 == I;16L, and I;32 == I;32L
+    "I;16": ('<u2', None),
+    "I;16B": ('>u2', None),
+    "I;16L": ('<u2', None),
+    "I;16S": ('<i2', None),
+    "I;16BS": ('>i2', None),
+    "I;16LS": ('<i2', None),
+    "I;32": ('<u4', None),
+    "I;32B": ('>u4', None),
+    "I;32L": ('<u4', None),
+    "I;32S": ('<i4', None),
+    "I;32BS": ('>i4', None),
+    "I;32LS": ('<i4', None),
+}
+
+
+def _conv_type_shape(im):
+    typ, extra = _MODE_CONV[im.mode]
+    if extra is None:
+        return (im.size[1], im.size[0]), typ
+    else:
+        return (im.size[1], im.size[0], extra), typ
+
+
+MODES = sorted(_MODEINFO)
+
+# raw modes that may be memory mapped.  NOTE: if you change this, you
+# may have to modify the stride calculation in map.c too!
+_MAPMODES = ("L", "P", "RGBX", "RGBA", "CMYK", "I;16", "I;16L", "I;16B")
+
+
+def getmodebase(mode):
+    """
+    Gets the "base" mode for given mode.  This function returns "L" for
+    images that contain grayscale data, and "RGB" for images that
+    contain color data.
+
+    :param mode: Input mode.
+    :returns: "L" or "RGB".
+    :exception KeyError: If the input mode was not a standard mode.
+    """
+    return ImageMode.getmode(mode).basemode
+
+
+def getmodetype(mode):
+    """
+    Gets the storage type mode.  Given a mode, this function returns a
+    single-layer mode suitable for storing individual bands.
+
+    :param mode: Input mode.
+    :returns: "L", "I", or "F".
+    :exception KeyError: If the input mode was not a standard mode.
+    """
+    return ImageMode.getmode(mode).basetype
+
+
+def getmodebandnames(mode):
+    """
+    Gets a list of individual band names.  Given a mode, this function
+    returns a tuple containing the names of individual bands (use
+    :py:method:`~PIL.Image.getmodetype` to get the mode used to store each
+    individual band.
+
+    :param mode: Input mode.
+    :returns: A tuple containing band names.  The length of the tuple
+        gives the number of bands in an image of the given mode.
+    :exception KeyError: If the input mode was not a standard mode.
+    """
+    return ImageMode.getmode(mode).bands
+
+
+def getmodebands(mode):
+    """
+    Gets the number of individual bands for this mode.
+
+    :param mode: Input mode.
+    :returns: The number of bands in this mode.
+    :exception KeyError: If the input mode was not a standard mode.
+    """
+    return len(ImageMode.getmode(mode).bands)
+
+# --------------------------------------------------------------------
+# Helpers
+
+_initialized = 0
+
+
+def preinit():
+    """Explicitly load standard file format drivers."""
+
+    global _initialized
+    if _initialized >= 1:
+        return
+
+    try:
+        from PIL import BmpImagePlugin
+    except ImportError:
+        pass
+    try:
+        from PIL import GifImagePlugin
+    except ImportError:
+        pass
+    try:
+        from PIL import JpegImagePlugin
+    except ImportError:
+        pass
+    try:
+        from PIL import PpmImagePlugin
+    except ImportError:
+        pass
+    try:
+        from PIL import PngImagePlugin
+    except ImportError:
+        pass
+#    try:
+#        import TiffImagePlugin
+#    except ImportError:
+#        pass
+
+    _initialized = 1
+
+
+def init():
+    """
+    Explicitly initializes the Python Imaging Library. This function
+    loads all available file format drivers.
+    """
+
+    global _initialized
+    if _initialized >= 2:
+        return 0
+
+    for plugin in _plugins:
+        try:
+            logger.debug("Importing %s", plugin)
+            __import__("PIL.%s" % plugin, globals(), locals(), [])
+        except ImportError as e:
+            logger.debug("Image: failed to import %s: %s", plugin, e)
+
+    if OPEN or SAVE:
+        _initialized = 2
+        return 1
+
+
+# --------------------------------------------------------------------
+# Codec factories (used by tobytes/frombytes and ImageFile.load)
+
+def _getdecoder(mode, decoder_name, args, extra=()):
+
+    # tweak arguments
+    if args is None:
+        args = ()
+    elif not isinstance(args, tuple):
+        args = (args,)
+
+    try:
+        # get decoder
+        decoder = getattr(core, decoder_name + "_decoder")
+        # print(decoder, mode, args + extra)
+        return decoder(mode, *args + extra)
+    except AttributeError:
+        raise IOError("decoder %s not available" % decoder_name)
+
+
+def _getencoder(mode, encoder_name, args, extra=()):
+
+    # tweak arguments
+    if args is None:
+        args = ()
+    elif not isinstance(args, tuple):
+        args = (args,)
+
+    try:
+        # get encoder
+        encoder = getattr(core, encoder_name + "_encoder")
+        # print(encoder, mode, args + extra)
+        return encoder(mode, *args + extra)
+    except AttributeError:
+        raise IOError("encoder %s not available" % encoder_name)
+
+
+# --------------------------------------------------------------------
+# Simple expression analyzer
+
+def coerce_e(value):
+    return value if isinstance(value, _E) else _E(value)
+
+
+class _E(object):
+    def __init__(self, data):
+        self.data = data
+
+    def __add__(self, other):
+        return _E((self.data, "__add__", coerce_e(other).data))
+
+    def __mul__(self, other):
+        return _E((self.data, "__mul__", coerce_e(other).data))
+
+
+def _getscaleoffset(expr):
+    stub = ["stub"]
+    data = expr(_E(stub)).data
+    try:
+        (a, b, c) = data  # simplified syntax
+        if (a is stub and b == "__mul__" and isinstance(c, numbers.Number)):
+            return c, 0.0
+        if a is stub and b == "__add__" and isinstance(c, numbers.Number):
+            return 1.0, c
+    except TypeError:
+        pass
+    try:
+        ((a, b, c), d, e) = data  # full syntax
+        if (a is stub and b == "__mul__" and isinstance(c, numbers.Number) and
+           d == "__add__" and isinstance(e, numbers.Number)):
+            return c, e
+    except TypeError:
+        pass
+    raise ValueError("illegal expression")
+
+
+# 
-------------------------------------------------------------------- +# Implementation wrapper + +class Image(object): + """ + This class represents an image object. To create + :py:class:`~PIL.Image.Image` objects, use the appropriate factory + functions. There's hardly ever any reason to call the Image constructor + directly. + + * :py:func:`~PIL.Image.open` + * :py:func:`~PIL.Image.new` + * :py:func:`~PIL.Image.frombytes` + """ + format = None + format_description = None + + def __init__(self): + # FIXME: take "new" parameters / other image? + # FIXME: turn mode and size into delegating properties? + self.im = None + self.mode = "" + self.size = (0, 0) + self.palette = None + self.info = {} + self.category = NORMAL + self.readonly = 0 + self.pyaccess = None + + @property + def width(self): + return self.size[0] + + @property + def height(self): + return self.size[1] + + def _new(self, im): + new = Image() + new.im = im + new.mode = im.mode + new.size = im.size + if self.palette: + new.palette = self.palette.copy() + if im.mode == "P" and not new.palette: + from PIL import ImagePalette + new.palette = ImagePalette.ImagePalette() + new.info = self.info.copy() + return new + + _makeself = _new # compatibility + + # Context Manager Support + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + """ + Closes the file pointer, if possible. + + This operation will destroy the image core and release its memory. + The image data will be unusable afterward. + + This function is only required to close images that have not + had their file read and closed by the + :py:meth:`~PIL.Image.Image.load` method. + """ + try: + self.fp.close() + except Exception as msg: + logger.debug("Error closing: %s", msg) + + # Instead of simply setting to None, we're setting up a + # deferred error that will better explain that the core image + # object is gone. + self.im = deferred_error(ValueError("Operation on closed image")) + + def _copy(self): + self.load() + self.im = self.im.copy() + self.pyaccess = None + self.readonly = 0 + + def _dump(self, file=None, format=None): + import tempfile + suffix = '' + if format: + suffix = '.'+format + if not file: + f, file = tempfile.mkstemp(suffix) + os.close(f) + + self.load() + if not format or format == "PPM": + self.im.save_ppm(file) + else: + if not file.endswith(format): + file = file + "." 
+ format + self.save(file, format) + return file + + def __eq__(self, other): + return (self.__class__.__name__ == other.__class__.__name__ and + self.mode == other.mode and + self.size == other.size and + self.info == other.info and + self.category == other.category and + self.readonly == other.readonly and + self.getpalette() == other.getpalette() and + self.tobytes() == other.tobytes()) + + def __ne__(self, other): + eq = (self == other) + return not eq + + def __repr__(self): + return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( + self.__class__.__module__, self.__class__.__name__, + self.mode, self.size[0], self.size[1], + id(self) + ) + + def _repr_png_(self): + """ iPython display hook support + + :returns: png version of the image as bytes + """ + from io import BytesIO + b = BytesIO() + self.save(b, 'PNG') + return b.getvalue() + + def __getattr__(self, name): + if name == "__array_interface__": + # numpy array interface support + new = {} + shape, typestr = _conv_type_shape(self) + new['shape'] = shape + new['typestr'] = typestr + new['data'] = self.tobytes() + new['version'] = 3 + return new + raise AttributeError(name) + + def __getstate__(self): + return [ + self.info, + self.mode, + self.size, + self.getpalette(), + self.tobytes()] + + def __setstate__(self, state): + Image.__init__(self) + self.tile = [] + info, mode, size, palette, data = state + self.info = info + self.mode = mode + self.size = size + self.im = core.new(mode, size) + if mode in ("L", "P") and palette: + self.putpalette(palette) + self.frombytes(data) + + def tobytes(self, encoder_name="raw", *args): + """ + Return image as a bytes object. + + .. warning:: + + This method returns the raw image data from the internal + storage. For compressed image data (e.g. PNG, JPEG) use + :meth:`~.save`, with a BytesIO parameter for in-memory + data. + + :param encoder_name: What encoder to use. The default is to + use the standard "raw" encoder. + :param args: Extra arguments to the encoder. + :rtype: A bytes object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if encoder_name == "raw" and args == (): + args = self.mode + + self.load() + + # unpack data + e = _getencoder(self.mode, encoder_name, args) + e.setimage(self.im) + + bufsize = max(65536, self.size[0] * 4) # see RawEncode.c + + data = [] + while True: + l, s, d = e.encode(bufsize) + data.append(d) + if s: + break + if s < 0: + raise RuntimeError("encoder error %d in tobytes" % s) + + return b"".join(data) + + def tostring(self, *args, **kw): + raise NotImplementedError("tostring() has been removed. " + + "Please call tobytes() instead.") + + def tobitmap(self, name="image"): + """ + Returns the image converted to an X11 bitmap. + + .. note:: This method only works for mode "1" images. + + :param name: The name prefix to use for the bitmap variables. + :returns: A string containing an X11 bitmap. + :raises ValueError: If the mode is not "1" + """ + + self.load() + if self.mode != "1": + raise ValueError("not a bitmap") + data = self.tobytes("xbm") + return b"".join([ + ("#define %s_width %d\n" % (name, self.size[0])).encode('ascii'), + ("#define %s_height %d\n" % (name, self.size[1])).encode('ascii'), + ("static char %s_bits[] = {\n" % name).encode('ascii'), data, b"};" + ]) + + def frombytes(self, data, decoder_name="raw", *args): + """ + Loads this image with pixel data from a bytes object. 
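+
+        For example (illustrative)::
+
+            im = Image.new("L", (2, 2))
+            im.frombytes(bytes(bytearray([0, 128, 128, 0])))  # 4 bytes, 2x2 "L"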
+ + This method is similar to the :py:func:`~PIL.Image.frombytes` function, + but loads data into this image instead of creating a new image object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + # default format + if decoder_name == "raw" and args == (): + args = self.mode + + # unpack data + d = _getdecoder(self.mode, decoder_name, args) + d.setimage(self.im) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") + + def fromstring(self, *args, **kw): + raise NotImplementedError("fromstring() has been removed. " + + "Please call frombytes() instead.") + + def load(self): + """ + Allocates storage for the image and loads the pixel data. In + normal cases, you don't need to call this method, since the + Image class automatically loads an opened image when it is + accessed for the first time. This method will close the file + associated with the image. + + :returns: An image access object. + :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` + """ + if self.im and self.palette and self.palette.dirty: + # realize palette + self.im.putpalette(*self.palette.getdata()) + self.palette.dirty = 0 + self.palette.mode = "RGB" + self.palette.rawmode = None + if "transparency" in self.info: + if isinstance(self.info["transparency"], int): + self.im.putpalettealpha(self.info["transparency"], 0) + else: + self.im.putpalettealphas(self.info["transparency"]) + self.palette.mode = "RGBA" + + if self.im: + if HAS_CFFI and USE_CFFI_ACCESS: + if self.pyaccess: + return self.pyaccess + from PIL import PyAccess + self.pyaccess = PyAccess.new(self, self.readonly) + if self.pyaccess: + return self.pyaccess + return self.im.pixel_access(self.readonly) + + def verify(self): + """ + Verifies the contents of a file. For data read from a file, this + method attempts to determine if the file is broken, without + actually decoding the image data. If this method finds any + problems, it raises suitable exceptions. If you need to load + the image after using this method, you must reopen the image + file. + """ + pass + + def convert(self, mode=None, matrix=None, dither=None, + palette=WEB, colors=256): + """ + Returns a converted copy of this image. For the "P" mode, this + method translates pixels through the palette. If mode is + omitted, a mode is chosen so that all information in the image + and the palette can be represented without a palette. + + The current version supports all possible conversions between + "L", "RGB" and "CMYK." The **matrix** argument only supports "L" + and "RGB". + + When translating a color image to black and white (mode "L"), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + + The default method of converting a greyscale ("L") or "RGB" + image into a bilevel (mode "1") image uses Floyd-Steinberg + dither to approximate the original image luminosity levels. If + dither is NONE, all non-zero values are set to 255 (white). To + use other thresholds, use the :py:meth:`~PIL.Image.Image.point` + method. + + :param mode: The requested mode. See: :ref:`concept-modes`. + :param matrix: An optional conversion matrix. If given, this + should be 4- or 12-tuple containing floating point values. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are NONE or FLOYDSTEINBERG (default). 
+ :param palette: Palette to use when converting from mode "RGB" + to "P". Available palettes are WEB or ADAPTIVE. + :param colors: Number of colors to use for the ADAPTIVE palette. + Defaults to 256. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if not mode: + # determine default mode + if self.mode == "P": + self.load() + if self.palette: + mode = self.palette.mode + else: + mode = "RGB" + else: + return self.copy() + + self.load() + + if matrix: + # matrix conversion + if mode not in ("L", "RGB"): + raise ValueError("illegal conversion") + im = self.im.convert_matrix(mode, matrix) + return self._new(im) + + if mode == "P" and self.mode == "RGBA": + return self.quantize(colors) + + trns = None + delete_trns = False + # transparency handling + if "transparency" in self.info and \ + self.info['transparency'] is not None: + if self.mode in ('L', 'RGB') and mode == 'RGBA': + # Use transparent conversion to promote from transparent + # color to an alpha channel. + return self._new(self.im.convert_transparent( + mode, self.info['transparency'])) + elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'): + t = self.info['transparency'] + if isinstance(t, bytes): + # Dragons. This can't be represented by a single color + warnings.warn('Palette images with Transparency ' + + ' expressed in bytes should be converted ' + + 'to RGBA images') + delete_trns = True + else: + # get the new transparency color. + # use existing conversions + trns_im = Image()._new(core.new(self.mode, (1, 1))) + if self.mode == 'P': + trns_im.putpalette(self.palette) + if type(t) == tuple: + try: + t = trns_im.palette.getcolor(t) + except: + raise ValueError("Couldn't allocate a palette "+ + "color for transparency") + trns_im.putpixel((0, 0), t) + + if mode in ('L', 'RGB'): + trns_im = trns_im.convert(mode) + else: + # can't just retrieve the palette number, got to do it + # after quantization. + trns_im = trns_im.convert('RGB') + trns = trns_im.getpixel((0, 0)) + + elif self.mode == 'P' and mode == 'RGBA': + t = self.info['transparency'] + delete_trns = True + + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + raise ValueError("Transparency for P mode should" + + " be bytes or int") + + if mode == "P" and palette == ADAPTIVE: + im = self.im.quantize(colors) + new = self._new(im) + from PIL import ImagePalette + new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB")) + if delete_trns: + # This could possibly happen if we requantize to fewer colors. + # The transparency would be totally off in that case. + del(new.info['transparency']) + if trns is not None: + try: + new.info['transparency'] = new.palette.getcolor(trns) + except: + # if we can't make a transparent color, don't leave the old + # transparency hanging around to mess us up. + del(new.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + return new + + # colorspace conversion + if dither is None: + dither = FLOYDSTEINBERG + + try: + im = self.im.convert(mode, dither) + except ValueError: + try: + # normalize source image and try again + im = self.im.convert(getmodebase(self.mode)) + im = im.convert(mode, dither) + except KeyError: + raise ValueError("illegal conversion") + + new_im = self._new(im) + if delete_trns: + # crash fail if we leave a bytes transparency in an rgb/l mode. 
+ del(new_im.info['transparency']) + if trns is not None: + if new_im.mode == 'P': + try: + new_im.info['transparency'] = new_im.palette.getcolor(trns) + except: + del(new_im.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + else: + new_im.info['transparency'] = trns + return new_im + + def quantize(self, colors=256, method=None, kmeans=0, palette=None): + """ + Convert the image to 'P' mode with the specified number + of colors. + + :param colors: The desired number of colors, <= 256 + :param method: 0 = median cut + 1 = maximum coverage + 2 = fast octree + 3 = libimagequant + :param kmeans: Integer + :param palette: Quantize to the :py:class:`PIL.ImagingPalette` palette. + :returns: A new image + + """ + + self.load() + + if method is None: + # defaults: + method = 0 + if self.mode == 'RGBA': + method = 2 + + if self.mode == 'RGBA' and method not in (2, 3): + # Caller specified an invalid mode. + raise ValueError( + 'Fast Octree (method == 2) and libimagequant (method == 3) ' + + 'are the only valid methods for quantizing RGBA images') + + if palette: + # use palette from reference image + palette.load() + if palette.mode != "P": + raise ValueError("bad mode for palette image") + if self.mode != "RGB" and self.mode != "L": + raise ValueError( + "only RGB or L mode images can be quantized to a palette" + ) + im = self.im.convert("P", 1, palette.im) + return self._makeself(im) + + return self._new(self.im.quantize(colors, method, kmeans)) + + def copy(self): + """ + Copies this image. Use this method if you wish to paste things + into an image, but still retain the original. + + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + self.load() + return self._new(self.im.copy()) + + __copy__ = copy + + def crop(self, box=None): + """ + Returns a rectangular region from this image. The box is a + 4-tuple defining the left, upper, right, and lower pixel + coordinate. + + This is a lazy operation. Changes to the source image may or + may not be reflected in the cropped image. To break the + connection, call the :py:meth:`~PIL.Image.Image.load` method on + the cropped copy. + + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + if box is None: + return self.copy() + + # lazy operation + return _ImageCrop(self, box) + + def draft(self, mode, size): + """ + Configures the image file loader so it returns a version of the + image that as closely as possible matches the given mode and + size. For example, you can use this method to convert a color + JPEG to greyscale while loading it, or to extract a 128x192 + version from a PCD file. + + Note that this method modifies the :py:class:`~PIL.Image.Image` object + in place. If the image has already been loaded, this method has no + effect. + + :param mode: The requested mode. + :param size: The requested size. + """ + pass + + def _expand(self, xmargin, ymargin=None): + if ymargin is None: + ymargin = xmargin + self.load() + return self._new(self.im.expand(xmargin, ymargin, 0)) + + def filter(self, filter): + """ + Filters this image using the given filter. For a list of + available filters, see the :py:mod:`~PIL.ImageFilter` module. + + :param filter: Filter kernel. + :returns: An :py:class:`~PIL.Image.Image` object. 
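+
+        For example, applying a built-in blur kernel (a minimal sketch;
+        ``im`` is assumed to be an existing image)::
+
+            from PIL import ImageFilter
+
+            blurred = im.filter(ImageFilter.BLUR)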
""" + + self.load() + + if isinstance(filter, collections.Callable): + filter = filter() + if not hasattr(filter, "filter"): + raise TypeError("filter argument should be ImageFilter.Filter " + + "instance or class") + + if self.im.bands == 1: + return self._new(filter.filter(self.im)) + # fix to handle multiband images since _imaging doesn't + ims = [] + for c in range(self.im.bands): + ims.append(self._new(filter.filter(self.im.getband(c)))) + return merge(self.mode, ims) + + def getbands(self): + """ + Returns a tuple containing the name of each band in this image. + For example, **getbands** on an RGB image returns ("R", "G", "B"). + + :returns: A tuple containing band names. + :rtype: tuple + """ + return ImageMode.getmode(self.mode).bands + + def getbbox(self): + """ + Calculates the bounding box of the non-zero regions in the + image. + + :returns: The bounding box is returned as a 4-tuple defining the + left, upper, right, and lower pixel coordinate. If the image + is completely empty, this method returns None. + + """ + + self.load() + return self.im.getbbox() + + def getcolors(self, maxcolors=256): + """ + Returns a list of colors used in this image. + + :param maxcolors: Maximum number of colors. If this number is + exceeded, this method returns None. The default limit is + 256 colors. + :returns: An unsorted list of (count, pixel) values. + """ + + self.load() + if self.mode in ("1", "L", "P"): + h = self.im.histogram() + out = [] + for i in range(256): + if h[i]: + out.append((h[i], i)) + if len(out) > maxcolors: + return None + return out + return self.im.getcolors(maxcolors) + + def getdata(self, band=None): + """ + Returns the contents of this image as a sequence object + containing pixel values. The sequence object is flattened, so + that values for line one follow directly after the values of + line zero, and so on. + + Note that the sequence object returned by this method is an + internal PIL data type, which only supports certain sequence + operations. To convert it to an ordinary sequence (e.g. for + printing), use **list(im.getdata())**. + + :param band: What band to return. The default is to return + all bands. To return a single band, pass in the index + value (e.g. 0 to get the "R" band from an "RGB" image). + :returns: A sequence-like object. + """ + + self.load() + if band is not None: + return self.im.getband(band) + return self.im # could be abused + + def getextrema(self): + """ + Gets the the minimum and maximum pixel values for each band in + the image. + + :returns: For a single-band image, a 2-tuple containing the + minimum and maximum pixel value. For a multi-band image, + a tuple containing one 2-tuple for each band. + """ + + self.load() + if self.im.bands > 1: + extrema = [] + for i in range(self.im.bands): + extrema.append(self.im.getband(i).getextrema()) + return tuple(extrema) + return self.im.getextrema() + + def getim(self): + """ + Returns a capsule that points to the internal image memory. + + :returns: A capsule object. + """ + + self.load() + return self.im.ptr + + def getpalette(self): + """ + Returns the image palette as a list. + + :returns: A list of color values [r, g, b, ...], or None if the + image has no palette. + """ + + self.load() + try: + if bytes is str: + return [i8(c) for c in self.im.getpalette()] + else: + return list(self.im.getpalette()) + except ValueError: + return None # no palette + + def getpixel(self, xy): + """ + Returns the pixel value at a given position. + + :param xy: The coordinate, given as (x, y). 
+ :returns: The pixel value. If the image is a multi-layer image, + this method returns a tuple. + """ + + self.load() + if self.pyaccess: + return self.pyaccess.getpixel(xy) + return self.im.getpixel(xy) + + def getprojection(self): + """ + Get projection to x and y axes + + :returns: Two sequences, indicating where there are non-zero + pixels along the X-axis and the Y-axis, respectively. + """ + + self.load() + x, y = self.im.getprojection() + return [i8(c) for c in x], [i8(c) for c in y] + + def histogram(self, mask=None, extrema=None): + """ + Returns a histogram for the image. The histogram is returned as + a list of pixel counts, one for each pixel value in the source + image. If the image has more than one band, the histograms for + all bands are concatenated (for example, the histogram for an + "RGB" image contains 768 values). + + A bilevel image (mode "1") is treated as a greyscale ("L") image + by this method. + + If a mask is provided, the method returns a histogram for those + parts of the image where the mask image is non-zero. The mask + image must have the same size as the image, and be either a + bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :returns: A list containing pixel counts. + """ + self.load() + if mask: + mask.load() + return self.im.histogram((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.histogram(extrema) + return self.im.histogram() + + def offset(self, xoffset, yoffset=None): + raise NotImplementedError("offset() has been removed. " + + "Please call ImageChops.offset() instead.") + + def paste(self, im, box=None, mask=None): + """ + Pastes another image into this image. The box argument is either + a 2-tuple giving the upper left corner, a 4-tuple defining the + left, upper, right, and lower pixel coordinate, or None (same as + (0, 0)). If a 4-tuple is given, the size of the pasted image + must match the size of the region. + + If the modes don't match, the pasted image is converted to the mode of + this image (see the :py:meth:`~PIL.Image.Image.convert` method for + details). + + Instead of an image, the source can be a integer or tuple + containing pixel values. The method then fills the region + with the given color. When creating RGB images, you can + also use color strings as supported by the ImageColor module. + + If a mask is given, this method updates only the regions + indicated by the mask. You can use either "1", "L" or "RGBA" + images (in the latter case, the alpha band is used as mask). + Where the mask is 255, the given image is copied as is. Where + the mask is 0, the current value is preserved. Intermediate + values will mix the two images together, including their alpha + channels if they have them. + + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. + + :param im: Source image or pixel value (integer or tuple). + :param box: An optional 4-tuple giving the region to paste into. + If a 2-tuple is used instead, it's treated as the upper left + corner. If omitted or None, the source is pasted into the + upper left corner. + + If an image is given as the second argument and there is no + third, the box defaults to (0, 0), and the second argument + is interpreted as a mask image. + :param mask: An optional mask image. 
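+
+        For example, pasting an "RGBA" overlay onto a base image, using the
+        overlay's own alpha band as the mask (a minimal sketch; ``base`` and
+        ``overlay`` are assumed to be existing images of suitable modes)::
+
+            base.paste(overlay, (0, 0), overlay)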
+ """ + + if isImageType(box) and mask is None: + # abbreviated paste(im, mask) syntax + mask = box + box = None + + if box is None: + # cover all of self + box = (0, 0) + self.size + + if len(box) == 2: + # upper left corner given; get size from image or mask + if isImageType(im): + size = im.size + elif isImageType(mask): + size = mask.size + else: + # FIXME: use self.size here? + raise ValueError( + "cannot determine region size; use 4-item box" + ) + box = box + (box[0]+size[0], box[1]+size[1]) + + if isStringType(im): + from PIL import ImageColor + im = ImageColor.getcolor(im, self.mode) + + elif isImageType(im): + im.load() + if self.mode != im.mode: + if self.mode != "RGB" or im.mode not in ("RGBA", "RGBa"): + # should use an adapter for this! + im = im.convert(self.mode) + im = im.im + + self.load() + if self.readonly: + self._copy() + + if mask: + mask.load() + self.im.paste(im, box, mask.im) + else: + self.im.paste(im, box) + + def point(self, lut, mode=None): + """ + Maps this image through a lookup table or function. + + :param lut: A lookup table, containing 256 (or 65336 if + self.mode=="I" and mode == "L") values per band in the + image. A function can be used instead, it should take a + single argument. The function is called once for each + possible pixel value, and the resulting table is applied to + all bands of the image. + :param mode: Output mode (default is same as input). In the + current version, this can only be used if the source image + has mode "L" or "P", and the output has mode "1" or the + source image mode is "I" and the output mode is "L". + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if isinstance(lut, ImagePointHandler): + return lut.point(self) + + if callable(lut): + # if it isn't a list, it should be a function + if self.mode in ("I", "I;16", "F"): + # check if the function can be used with point_transform + # UNDONE wiredfool -- I think this prevents us from ever doing + # a gamma function point transform on > 8bit images. + scale, offset = _getscaleoffset(lut) + return self._new(self.im.point_transform(scale, offset)) + # for other modes, convert the function to a table + lut = [lut(i) for i in range(256)] * self.im.bands + + if self.mode == "F": + # FIXME: _imaging returns a confusing error message for this case + raise ValueError("point operation not supported for this mode") + + return self._new(self.im.point(lut, mode)) + + def putalpha(self, alpha): + """ + Adds or replaces the alpha layer in this image. If the image + does not have an alpha layer, it's converted to "LA" or "RGBA". + The new layer must be either "L" or "1". + + :param alpha: The new alpha layer. This can either be an "L" or "1" + image having the same size as this image, or an integer or + other color value. 
+ """ + + self.load() + if self.readonly: + self._copy() + + if self.mode not in ("LA", "RGBA"): + # attempt to promote self to a matching alpha mode + try: + mode = getmodebase(self.mode) + "A" + try: + self.im.setmode(mode) + self.pyaccess = None + except (AttributeError, ValueError): + # do things the hard way + im = self.im.convert(mode) + if im.mode not in ("LA", "RGBA"): + raise ValueError # sanity check + self.im = im + self.pyaccess = None + self.mode = self.im.mode + except (KeyError, ValueError): + raise ValueError("illegal image mode") + + if self.mode == "LA": + band = 1 + else: + band = 3 + + if isImageType(alpha): + # alpha layer + if alpha.mode not in ("1", "L"): + raise ValueError("illegal image mode") + alpha.load() + if alpha.mode == "1": + alpha = alpha.convert("L") + else: + # constant alpha + try: + self.im.fillband(band, alpha) + except (AttributeError, ValueError): + # do things the hard way + alpha = new("L", self.size, alpha) + else: + return + + self.im.putband(alpha.im, band) + + def putdata(self, data, scale=1.0, offset=0.0): + """ + Copies pixel data to this image. This method copies data from a + sequence object into the image, starting at the upper left + corner (0, 0), and continuing until either the image or the + sequence ends. The scale and offset values are used to adjust + the sequence values: **pixel = value*scale + offset**. + + :param data: A sequence object. + :param scale: An optional scale value. The default is 1.0. + :param offset: An optional offset value. The default is 0.0. + """ + + self.load() + if self.readonly: + self._copy() + + self.im.putdata(data, scale, offset) + + def putpalette(self, data, rawmode="RGB"): + """ + Attaches a palette to this image. The image must be a "P" or + "L" image, and the palette sequence must contain 768 integer + values, where each group of three values represent the red, + green, and blue values for the corresponding pixel + index. Instead of an integer sequence, you can use an 8-bit + string. + + :param data: A palette sequence (either a list or a string). + """ + from PIL import ImagePalette + + if self.mode not in ("L", "P"): + raise ValueError("illegal image mode") + self.load() + if isinstance(data, ImagePalette.ImagePalette): + palette = ImagePalette.raw(data.rawmode, data.palette) + else: + if not isinstance(data, bytes): + if bytes is str: + data = "".join(chr(x) for x in data) + else: + data = bytes(data) + palette = ImagePalette.raw(rawmode, data) + self.mode = "P" + self.palette = palette + self.palette.mode = "RGB" + self.load() # install new palette + + def putpixel(self, xy, value): + """ + Modifies the pixel at the given position. The color is given as + a single numerical value for single-band images, and a tuple for + multi-band images. + + Note that this method is relatively slow. For more extensive changes, + use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` + module instead. + + See: + + * :py:meth:`~PIL.Image.Image.paste` + * :py:meth:`~PIL.Image.Image.putdata` + * :py:mod:`~PIL.ImageDraw` + + :param xy: The pixel coordinate, given as (x, y). + :param value: The pixel value. + """ + + self.load() + if self.readonly: + self._copy() + self.pyaccess = None + self.load() + + if self.pyaccess: + return self.pyaccess.putpixel(xy, value) + return self.im.putpixel(xy, value) + + def resize(self, size, resample=NEAREST): + """ + Returns a resized copy of this image. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). 
+ :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation), + :py:attr:`PIL.Image.BICUBIC` (cubic spline interpolation), or + :py:attr:`PIL.Image.LANCZOS` (a high-quality downsampling filter). + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS): + raise ValueError("unknown resampling filter") + + self.load() + + size = tuple(size) + if self.size == size: + return self._new(self.im) + + if self.mode in ("1", "P"): + resample = NEAREST + + if self.mode == 'LA': + return self.convert('La').resize(size, resample).convert('LA') + + if self.mode == 'RGBA': + return self.convert('RGBa').resize(size, resample).convert('RGBA') + + return self._new(self.im.resize(size, resample)) + + def rotate(self, angle, resample=NEAREST, expand=0): + """ + Returns a rotated copy of this image. This method returns a + copy of this image, rotated the given number of degrees counter + clockwise around its centre. + + :param angle: In degrees counter clockwise. + :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` + (cubic spline interpolation in a 4x4 environment). + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. + :param expand: Optional expansion flag. If true, expands the output + image to make it large enough to hold the entire rotated image. + If false or omitted, make the output image the same size as the + input image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + angle = angle % 360.0 + + # Fast paths regardless of filter + if angle == 0: + return self._new(self.im) + if angle == 180: + return self.transpose(ROTATE_180) + if angle == 90 and expand: + return self.transpose(ROTATE_90) + if angle == 270 and expand: + return self.transpose(ROTATE_270) + + angle = - math.radians(angle) + matrix = [ + round(math.cos(angle), 15), round(math.sin(angle), 15), 0.0, + round(-math.sin(angle), 15), round(math.cos(angle), 15), 0.0 + ] + + def transform(x, y, matrix=matrix): + (a, b, c, d, e, f) = matrix + return a*x + b*y + c, d*x + e*y + f + + w, h = self.size + if expand: + # calculate output size + xx = [] + yy = [] + for x, y in ((0, 0), (w, 0), (w, h), (0, h)): + x, y = transform(x, y) + xx.append(x) + yy.append(y) + w = int(math.ceil(max(xx)) - math.floor(min(xx))) + h = int(math.ceil(max(yy)) - math.floor(min(yy))) + + # adjust center + x, y = transform(w / 2.0, h / 2.0) + matrix[2] = self.size[0] / 2.0 - x + matrix[5] = self.size[1] / 2.0 - y + + return self.transform((w, h), AFFINE, matrix, resample) + + def save(self, fp, format=None, **params): + """ + Saves this image under the given filename. If no format is + specified, the format to use is determined from the filename + extension, if possible. + + Keyword options can be used to provide additional instructions + to the writer. If a writer doesn't recognise an option, it is + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. + + You can use a file object instead of a filename. In this case, + you must always specify the format. 
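+
+        For example, writing PNG data to an in-memory buffer (a minimal
+        sketch)::
+
+            import io
+
+            buf = io.BytesIO()
+            im.save(buf, format="PNG")
+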
The file object must + implement the ``seek``, ``tell``, and ``write`` + methods, and be opened in binary mode. + + :param fp: A filename (string), pathlib.Path object or file object. + :param format: Optional format override. If omitted, the + format to use is determined from the filename extension. + If a file object was used instead of a filename, this + parameter should always be used. + :param options: Extra parameters to the image writer. + :returns: None + :exception KeyError: If the output format could not be determined + from the file name. Use the format option to solve this. + :exception IOError: If the file could not be written. The file + may have been created, and may contain partial data. + """ + + filename = "" + open_fp = False + if isPath(fp): + filename = fp + open_fp = True + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp) + open_fp = True + elif hasattr(fp, "name") and isPath(fp.name): + # only set the name for metadata purposes + filename = fp.name + + # may mutate self! + self.load() + + save_all = False + if 'save_all' in params: + save_all = params.pop('save_all') + self.encoderinfo = params + self.encoderconfig = () + + preinit() + + ext = os.path.splitext(filename)[1].lower() + + if not format: + if ext not in EXTENSION: + init() + format = EXTENSION[ext] + + if format.upper() not in SAVE: + init() + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] + + if open_fp: + fp = builtins.open(filename, "wb") + + try: + save_handler(self, fp, filename) + finally: + # do what we can to clean up + if open_fp: + fp.close() + + def seek(self, frame): + """ + Seeks to the given frame in this sequence file. If you seek + beyond the end of the sequence, the method raises an + **EOFError** exception. When a sequence file is opened, the + library automatically seeks to frame 0. + + Note that in the current version of the library, most sequence + formats only allows you to seek to the next frame. + + See :py:meth:`~PIL.Image.Image.tell`. + + :param frame: Frame number, starting at 0. + :exception EOFError: If the call attempts to seek beyond the end + of the sequence. + """ + + # overridden by file handlers + if frame != 0: + raise EOFError + + def show(self, title=None, command=None): + """ + Displays this image. This method is mainly intended for + debugging purposes. + + On Unix platforms, this method saves the image to a temporary + PPM file, and calls either the **xv** utility or the **display** + utility, depending on which one can be found. + + On OS X, this method saves the image to a temporary BMP file, and opens + it with the native Preview application. + + On Windows, it saves the image to a temporary BMP file, and uses + the standard BMP display utility to show it (usually Paint). + + :param title: Optional title to use for the image window, + where possible. + :param command: command used to show the image + """ + + _show(self, title=title, command=command) + + def split(self): + """ + Split this image into individual bands. This method returns a + tuple of individual image bands from an image. For example, + splitting an "RGB" image creates three new images each + containing a copy of one of the original bands (red, green, + blue). + + :returns: A tuple containing bands. 
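+
+        For example, swapping the red and blue bands of an "RGB" image
+        (a minimal sketch; uses the module-level :py:func:`~PIL.Image.merge`
+        factory)::
+
+            r, g, b = im.split()
+            swapped = Image.merge("RGB", (b, g, r))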
+ """ + + self.load() + if self.im.bands == 1: + ims = [self.copy()] + else: + ims = [] + for i in range(self.im.bands): + ims.append(self._new(self.im.getband(i))) + return tuple(ims) + + def tell(self): + """ + Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`. + + :returns: Frame number, starting with 0. + """ + return 0 + + def thumbnail(self, size, resample=BICUBIC): + """ + Make this image into a thumbnail. This method modifies the + image to contain a thumbnail version of itself, no larger than + the given size. This method calculates an appropriate thumbnail + size to preserve the aspect of the image, calls the + :py:meth:`~PIL.Image.Image.draft` method to configure the file reader + (where applicable), and finally resizes the image. + + Note that this function modifies the :py:class:`~PIL.Image.Image` + object in place. If you need to use the full resolution image as well, + apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original + image. + + :param size: Requested size. + :param resample: Optional resampling filter. This can be one + of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`, + :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`. + If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`. + (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0) + :returns: None + """ + + # preserve aspect ratio + x, y = self.size + if x > size[0]: + y = int(max(y * size[0] / x, 1)) + x = int(size[0]) + if y > size[1]: + x = int(max(x * size[1] / y, 1)) + y = int(size[1]) + size = x, y + + if size == self.size: + return + + self.draft(None, size) + + im = self.resize(size, resample) + + self.im = im.im + self.mode = im.mode + self.size = size + + self.readonly = 0 + self.pyaccess = None + + # FIXME: the different transform methods need further explanation + # instead of bloating the method docs, add a separate chapter. + def transform(self, size, method, data=None, resample=NEAREST, fill=1): + """ + Transforms this image. This method creates a new image with the + given size, and the same mode as the original, and copies data + to the new image using the given transform. + + :param size: The output size. + :param method: The transformation method. This is one of + :py:attr:`PIL.Image.EXTENT` (cut out a rectangular subregion), + :py:attr:`PIL.Image.AFFINE` (affine transform), + :py:attr:`PIL.Image.PERSPECTIVE` (perspective transform), + :py:attr:`PIL.Image.QUAD` (map a quadrilateral to a rectangle), or + :py:attr:`PIL.Image.MESH` (map a number of source quadrilaterals + in one operation). + :param data: Extra data to the transformation method. + :param resample: Optional resampling filter. It can be one of + :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` (cubic spline + interpolation in a 4x4 environment). If omitted, or if the image + has mode "1" or "P", it is set to :py:attr:`PIL.Image.NEAREST`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if self.mode == 'LA': + return self.convert('La').transform( + size, method, data, resample, fill).convert('LA') + + if self.mode == 'RGBA': + return self.convert('RGBa').transform( + size, method, data, resample, fill).convert('RGBA') + + if isinstance(method, ImageTransformHandler): + return method.transform(size, self, resample=resample, fill=fill) + + if hasattr(method, "getdata"): + # compatibility w. 
old-style transform objects + method, data = method.getdata() + + if data is None: + raise ValueError("missing method data") + + im = new(self.mode, size, None) + if method == MESH: + # list of quads + for box, quad in data: + im.__transformer(box, self, QUAD, quad, resample, fill) + else: + im.__transformer((0, 0)+size, self, method, data, resample, fill) + + return im + + def __transformer(self, box, image, method, data, + resample=NEAREST, fill=1): + w = box[2] - box[0] + h = box[3] - box[1] + + if method == AFFINE: + data = data[0:6] + + elif method == EXTENT: + # convert extent to an affine transform + x0, y0, x1, y1 = data + xs = float(x1 - x0) / w + ys = float(y1 - y0) / h + method = AFFINE + data = (xs, 0, x0 + xs/2, 0, ys, y0 + ys/2) + + elif method == PERSPECTIVE: + data = data[0:8] + + elif method == QUAD: + # quadrilateral warp. data specifies the four corners + # given as NW, SW, SE, and NE. + nw = data[0:2] + sw = data[2:4] + se = data[4:6] + ne = data[6:8] + x0, y0 = nw + As = 1.0 / w + At = 1.0 / h + data = (x0, (ne[0]-x0)*As, (sw[0]-x0)*At, + (se[0]-sw[0]-ne[0]+x0)*As*At, + y0, (ne[1]-y0)*As, (sw[1]-y0)*At, + (se[1]-sw[1]-ne[1]+y0)*As*At) + + else: + raise ValueError("unknown transformation method") + + if resample not in (NEAREST, BILINEAR, BICUBIC): + raise ValueError("unknown resampling filter") + + image.load() + + self.load() + + if image.mode in ("1", "P"): + resample = NEAREST + + self.im.transform2(box, image.im, method, data, resample, fill) + + def transpose(self, method): + """ + Transpose image (flip or rotate in 90 degree steps) + + :param method: One of :py:attr:`PIL.Image.FLIP_LEFT_RIGHT`, + :py:attr:`PIL.Image.FLIP_TOP_BOTTOM`, :py:attr:`PIL.Image.ROTATE_90`, + :py:attr:`PIL.Image.ROTATE_180`, :py:attr:`PIL.Image.ROTATE_270` or + :py:attr:`PIL.Image.TRANSPOSE`. + :returns: Returns a flipped or rotated copy of this image. + """ + + self.load() + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance): + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + return self._new(self.im.effect_spread(distance)) + + def toqimage(self): + """Returns a QImage copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqpixmap(self) + + +# -------------------------------------------------------------------- +# Lazy operations + +class _ImageCrop(Image): + + def __init__(self, im, box): + + Image.__init__(self) + + # Round to nearest integer, runs int(round(x)) when unpacking + x0, y0, x1, y1 = map(int, map(round, box)) + + if x1 < x0: + x1 = x0 + if y1 < y0: + y1 = y0 + + self.mode = im.mode + self.size = x1-x0, y1-y0 + + self.__crop = x0, y0, x1, y1 + + self.im = im.im + + def load(self): + + # lazy evaluation! + if self.__crop: + self.im = self.im.crop(self.__crop) + self.__crop = None + + if self.im: + return self.im.pixel_access(self.readonly) + + # FIXME: future versions should optimize crop/paste + # sequences! + + +# -------------------------------------------------------------------- +# Abstract handlers. 
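+# Note: these mixins are the hooks used elsewhere in this module --
+# Image.point() special-cases ImagePointHandler instances (for example,
+# ImageCms.ImageCmsTransform subclasses it), and Image.transform()
+# special-cases ImageTransformHandler instances.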
+ +class ImagePointHandler(object): + # used as a mixin by point transforms (for use with im.point) + pass + + +class ImageTransformHandler(object): + # used as a mixin by geometry transforms (for use with im.transform) + pass + + +# -------------------------------------------------------------------- +# Factories + +# +# Debugging + +def _wedge(): + "Create greyscale wedge (for debugging only)" + + return Image()._new(core.wedge("L")) + + +def new(mode, size, color=0): + """ + Creates a new image with the given mode and size. + + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. + :param size: A 2-tuple, containing (width, height) in pixels. + :param color: What color to use for the image. Default is black. + If given, this should be a single integer or floating point value + for single-band modes, and a tuple for multi-band modes (one value + per band). When creating RGB images, you can also use color + strings as supported by the ImageColor module. If the color is + None, the image is not initialised. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if color is None: + # don't initialize + return Image()._new(core.new(mode, size)) + + if isStringType(color): + # css3-style specifier + + from PIL import ImageColor + color = ImageColor.getcolor(color, mode) + + return Image()._new(core.fill(mode, size, color)) + + +def frombytes(mode, size, data, decoder_name="raw", *args): + """ + Creates a copy of an image memory from pixel data in a buffer. + + In its simplest form, this function takes three arguments + (mode, size, and unpacked pixel data). + + You can also use any pixel decoder supported by PIL. For more + information on available decoders, see the section + :ref:`Writing Your Own File Decoder `. + + Note that this function decodes pixel data only, not entire images. + If you have an entire image in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load + it. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A byte buffer containing raw data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw" and args == (): + args = mode + + im = new(mode, size) + im.frombytes(data, decoder_name, args) + return im + + +def fromstring(*args, **kw): + raise NotImplementedError("fromstring() has been removed. " + + "Please call frombytes() instead.") + + +def frombuffer(mode, size, data, decoder_name="raw", *args): + """ + Creates an image memory referencing pixel data in a byte buffer. + + This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data + in the byte buffer, where possible. This means that changes to the + original buffer object are reflected in this image). Not all modes can + share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK". + + Note that this function decodes pixel data only, not entire images. + If you have an entire image file in a string, wrap it in a + **BytesIO** object, and use :py:func:`~PIL.Image.open` to load it. + + In the current version, the default parameters used for the "raw" decoder + differs from that used for :py:func:`~PIL.Image.frombytes`. This is a + bug, and will probably be fixed in a future release. 
The current release + issues a warning if you do this; to disable the warning, you should provide + the full set of parameters. See below for details. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A bytes or other buffer object containing raw + data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. For the + default encoder ("raw"), it's recommended that you provide the + full set of parameters:: + + frombuffer(mode, size, data, "raw", mode, 0, 1) + + :returns: An :py:class:`~PIL.Image.Image` object. + + .. versionadded:: 1.1.4 + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw": + if args == (): + warnings.warn( + "the frombuffer defaults may change in a future release; " + "for portability, change the call to read:\n" + " frombuffer(mode, size, data, 'raw', mode, 0, 1)", + RuntimeWarning, stacklevel=2 + ) + args = mode, 0, -1 # may change to (mode, 0, 1) post-1.1.6 + if args[0] in _MAPMODES: + im = new(mode, (1, 1)) + im = im._new( + core.map_buffer(data, size, decoder_name, None, 0, args) + ) + im.readonly = 1 + return im + + return frombytes(mode, size, data, decoder_name, args) + + +def fromarray(obj, mode=None): + """ + Creates an image memory from an object exporting the array interface + (using the buffer protocol). + + If obj is not contiguous, then the tobytes method is called + and :py:func:`~PIL.Image.frombuffer` is used. + + :param obj: Object with array interface + :param mode: Mode to use (will be determined from type if None) + See: :ref:`concept-modes`. + :returns: An image object. + + .. versionadded:: 1.1.6 + """ + arr = obj.__array_interface__ + shape = arr['shape'] + ndim = len(shape) + try: + strides = arr['strides'] + except KeyError: + strides = None + if mode is None: + try: + typekey = (1, 1) + shape[2:], arr['typestr'] + mode, rawmode = _fromarray_typemap[typekey] + except KeyError: + # print typekey + raise TypeError("Cannot handle this data type") + else: + rawmode = mode + if mode in ["1", "L", "I", "P", "F"]: + ndmax = 2 + elif mode == "RGB": + ndmax = 3 + else: + ndmax = 4 + if ndim > ndmax: + raise ValueError("Too many dimensions: %d > %d." 
% (ndim, ndmax)) + + size = shape[1], shape[0] + if strides is not None: + if hasattr(obj, 'tobytes'): + obj = obj.tobytes() + else: + obj = obj.tostring() + + return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqpixmap(im) + +_fromarray_typemap = { + # (shape, typestr) => mode, rawmode + # first two members of shape are set to one + # ((1, 1), "|b1"): ("1", "1"), # broken + ((1, 1), "|u1"): ("L", "L"), + ((1, 1), "|i1"): ("I", "I;8"), + ((1, 1), "u2"): ("I", "I;16B"), + ((1, 1), "i2"): ("I", "I;16BS"), + ((1, 1), "u4"): ("I", "I;32B"), + ((1, 1), "i4"): ("I", "I;32BS"), + ((1, 1), "f4"): ("F", "F;32BF"), + ((1, 1), "f8"): ("F", "F;64BF"), + ((1, 1, 2), "|u1"): ("LA", "LA"), + ((1, 1, 3), "|u1"): ("RGB", "RGB"), + ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), + } + +# shortcuts +_fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I") +_fromarray_typemap[((1, 1), _ENDIAN + "f4")] = ("F", "F") + + +def _decompression_bomb_check(size): + if MAX_IMAGE_PIXELS is None: + return + + pixels = size[0] * size[1] + + if pixels > MAX_IMAGE_PIXELS: + warnings.warn( + "Image size (%d pixels) exceeds limit of %d pixels, " + "could be decompression bomb DOS attack." % + (pixels, MAX_IMAGE_PIXELS), + DecompressionBombWarning) + + +def open(fp, mode="r"): + """ + Opens and identifies the given image file. + + This is a lazy operation; this function identifies the file, but + the file remains open and the actual image data is not read from + the file until you try to process the data (or call the + :py:meth:`~PIL.Image.Image.load` method). See + :py:func:`~PIL.Image.new`. + + :param fp: A filename (string), pathlib.Path object or a file object. + The file object must implement :py:meth:`~file.read`, + :py:meth:`~file.seek`, and :py:meth:`~file.tell` methods, + and be opened in binary mode. + :param mode: The mode. If given, this argument must be "r". + :returns: An :py:class:`~PIL.Image.Image` object. + :exception IOError: If the file cannot be found, or the image cannot be + opened and identified. + """ + + if mode != "r": + raise ValueError("bad mode %r" % mode) + + filename = "" + if isPath(fp): + filename = fp + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp.resolve()) + if filename: + fp = builtins.open(filename, "rb") + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) + + prefix = fp.read(16) + + preinit() + + def _open_core(fp, filename, prefix): + for i in ID: + try: + factory, accept = OPEN[i] + if not accept or accept(prefix): + fp.seek(0) + im = factory(fp, filename) + _decompression_bomb_check(im.size) + return im + except (SyntaxError, IndexError, TypeError, struct.error): + # Leave disabled by default, spams the logs with image + # opening failures that are entirely expected. 
+ # logger.debug("", exc_info=True) + continue + return None + + im = _open_core(fp, filename, prefix) + + if im is None: + if init(): + im = _open_core(fp, filename, prefix) + + if im: + return im + + raise IOError("cannot identify image file %r" + % (filename if filename else fp)) + +# +# Image processing. + + +def alpha_composite(im1, im2): + """ + Alpha composite im2 over im1. + + :param im1: The first image. Must have mode RGBA. + :param im2: The second image. Must have mode RGBA, and the same size as + the first image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.alpha_composite(im1.im, im2.im)) + + +def blend(im1, im2, alpha): + """ + Creates a new image by interpolating between two input images, using + a constant alpha.:: + + out = image1 * (1.0 - alpha) + image2 * alpha + + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :param alpha: The interpolation alpha factor. If alpha is 0.0, a + copy of the first image is returned. If alpha is 1.0, a copy of + the second image is returned. There are no restrictions on the + alpha value. If necessary, the result is clipped to fit into + the allowed output range. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.blend(im1.im, im2.im, alpha)) + + +def composite(image1, image2, mask): + """ + Create composite image by blending images using a transparency mask. + + :param image1: The first image. + :param image2: The second image. Must have the same mode and + size as the first image. + :param mask: A mask image. This image can have mode + "1", "L", or "RGBA", and must have the same size as the + other two images. + """ + + image = image2.copy() + image.paste(image1, None, mask) + return image + + +def eval(image, *args): + """ + Applies the function (which should take one argument) to each pixel + in the given image. If the image has more than one band, the same + function is applied to each band. Note that the function is + evaluated once for each possible pixel value, so you cannot use + random components or other generators. + + :param image: The input image. + :param function: A function object, taking one integer argument. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + return image.point(args[0]) + + +def merge(mode, bands): + """ + Merge a set of single band images into a new multiband image. + + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. + :param bands: A sequence containing one single-band image for + each band in the output image. All bands must have the + same size. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if getmodebands(mode) != len(bands) or "*" in mode: + raise ValueError("wrong number of bands") + for im in bands[1:]: + if im.mode != getmodetype(mode): + raise ValueError("mode mismatch") + if im.size != bands[0].size: + raise ValueError("size mismatch") + im = core.new(mode, bands[0].size) + for i in range(getmodebands(mode)): + bands[i].load() + im.putband(bands[i].im, i) + return bands[0]._new(im) + + +# -------------------------------------------------------------------- +# Plugin registry + +def register_open(id, factory, accept=None): + """ + Register an image file plugin. This function should not be used + in application code. + + :param id: An image format identifier. + :param factory: An image file factory method. 
+ :param accept: An optional function that can be used to quickly + reject images having another format. + """ + id = id.upper() + ID.append(id) + OPEN[id] = factory, accept + + +def register_mime(id, mimetype): + """ + Registers an image MIME type. This function should not be used + in application code. + + :param id: An image format identifier. + :param mimetype: The image MIME type for this format. + """ + MIME[id.upper()] = mimetype + + +def register_save(id, driver): + """ + Registers an image save function. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE[id.upper()] = driver + + +def register_save_all(id, driver): + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE_ALL[id.upper()] = driver + + +def register_extension(id, extension): + """ + Registers an image extension. This function should not be + used in application code. + + :param id: An image format identifier. + :param extension: An extension used for this format. + """ + EXTENSION[extension.lower()] = id.upper() + + +# -------------------------------------------------------------------- +# Simple display support. User code may override this. + +def _show(image, **options): + # override me, as necessary + _showxv(image, **options) + + +def _showxv(image, title=None, **options): + from PIL import ImageShow + ImageShow.show(image, title, **options) + + +# -------------------------------------------------------------------- +# Effects + +def effect_mandelbrot(size, extent, quality): + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y2). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size, sigma): + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageChops.py b/server/www/packages/packages-windows/x86/PIL/ImageChops.py new file mode 100644 index 0000000..ba5350e --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageChops.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard channel operations +# +# History: +# 1996-03-24 fl Created +# 1996-08-13 fl Added logical operations (for "1" images) +# 2000-10-12 fl Added offset method (from Image.py) +# +# Copyright (c) 1997-2000 by Secret Labs AB +# Copyright (c) 1996-2000 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +def constant(image, value): + """Fill a channel with a given grey level. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.new("L", image.size, value) + + +def duplicate(image): + """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return image.copy() + + +def invert(image): + """ + Invert an image (channel). + + .. 
code-block:: python + + out = MAX - image + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image.load() + return image._new(image.im.chop_invert()) + + +def lighter(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the lighter values. + + .. code-block:: python + + out = max(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_lighter(image2.im)) + + +def darker(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image + containing the darker values. + + .. code-block:: python + + out = min(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_darker(image2.im)) + + +def difference(image1, image2): + """ + Returns the absolute value of the pixel-by-pixel difference between the two + images. + + .. code-block:: python + + out = abs(image1 - image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_difference(image2.im)) + + +def multiply(image1, image2): + """ + Superimposes two images on top of each other. + + If you multiply an image with a solid black image, the result is black. If + you multiply with a solid white image, the image is unaffected. + + .. code-block:: python + + out = image1 * image2 / MAX + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_multiply(image2.im)) + + +def screen(image1, image2): + """ + Superimposes two inverted images on top of each other. + + .. code-block:: python + + out = MAX - ((MAX - image1) * (MAX - image2) / MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_screen(image2.im)) + + +def add(image1, image2, scale=1.0, offset=0): + """ + Adds two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 + image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add(image2.im, scale, offset)) + + +def subtract(image1, image2, scale=1.0, offset=0): + """ + Subtracts two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 - image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) + + +def add_modulo(image1, image2): + """Add two images, without clipping the result. + + .. code-block:: python + + out = ((image1 + image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add_modulo(image2.im)) + + +def subtract_modulo(image1, image2): + """Subtract two images, without clipping the result. + + .. code-block:: python + + out = ((image1 - image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract_modulo(image2.im)) + + +def logical_and(image1, image2): + """Logical AND between two images. + + .. 
code-block:: python + + out = ((image1 and image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_and(image2.im)) + + +def logical_or(image1, image2): + """Logical OR between two images. + + .. code-block:: python + + out = ((image1 or image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_or(image2.im)) + + +def logical_xor(image1, image2): + """Logical XOR between two images. + + .. code-block:: python + + out = ((bool(image1) != bool(image2)) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_xor(image2.im)) + + +def blend(image1, image2, alpha): + """Blend images using constant transparency weight. Alias for + :py:meth:`PIL.Image.Image.blend`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.blend(image1, image2, alpha) + + +def composite(image1, image2, mask): + """Create composite using transparency mask. Alias for + :py:meth:`PIL.Image.Image.composite`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.composite(image1, image2, mask) + + +def offset(image, xoffset, yoffset=None): + """Returns a copy of the image where data has been offset by the given + distances. Data wraps around the edges. If **yoffset** is omitted, it + is assumed to be equal to **xoffset**. + + :param xoffset: The horizontal distance. + :param yoffset: The vertical distance. If omitted, both + distances are set to the same value. + :rtype: :py:class:`~PIL.Image.Image` + """ + + if yoffset is None: + yoffset = xoffset + image.load() + return image._new(image.im.offset(xoffset, yoffset)) diff --git a/server/www/packages/packages-windows/x86/PIL/ImageCms.py b/server/www/packages/packages-windows/x86/PIL/ImageCms.py new file mode 100644 index 0000000..6d5801a --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageCms.py @@ -0,0 +1,973 @@ +# The Python Imaging Library. +# $Id$ + +# Optional color management support, based on Kevin Cazabon's PyCMS +# library. + +# History: + +# 2009-03-08 fl Added to PIL. + +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. + +from __future__ import print_function +import sys + +from PIL import Image +try: + from PIL import _imagingcms +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from _util import deferred_error + _imagingcms = deferred_error(ex) +from PIL._util import isStringType + +DESCRIPTION = """ +pyCMS + + a Python / PIL interface to the littleCMS ICC Color Management System + Copyright (C) 2002-2003 Kevin Cazabon + kevin@cazabon.com + http://www.cazabon.com + + pyCMS home page: http://www.cazabon.com/pyCMS + littleCMS home page: http://www.littlecms.com + (littleCMS is Copyright (C) 1998-2001 Marti Maria) + + Originally released under LGPL. Graciously donated to PIL in + March 2009, for distribution under the standard PIL license + + The pyCMS.py module provides a "clean" interface between Python/PIL and + pyCMSdll, taking care of some of the more complex handling of the direct + pyCMSdll functions, as well as error-checking and making sure that all + relevant data is kept together. 
+ + While it is possible to call pyCMSdll functions directly, it's not highly + recommended. + + Version History: + + 1.0.0 pil Oct 2013 Port to LCMS 2. + + 0.1.0 pil mod March 10, 2009 + + Renamed display profile to proof profile. The proof + profile is the profile of the device that is being + simulated, not the profile of the device which is + actually used to display/print the final simulation + (that'd be the output profile) - also see LCMSAPI.txt + input colorspace -> using 'renderingIntent' -> proof + colorspace -> using 'proofRenderingIntent' -> output + colorspace + + Added LCMS FLAGS support. + Added FLAGS["SOFTPROOFING"] as default flag for + buildProofTransform (otherwise the proof profile/intent + would be ignored). + + 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms + + 0.0.2 alpha Jan 6, 2002 + + Added try/except statements around type() checks of + potential CObjects... Python won't let you use type() + on them, and raises a TypeError (stupid, if you ask + me!) + + Added buildProofTransformFromOpenProfiles() function. + Additional fixes in DLL, see DLL code for details. + + 0.0.1 alpha first public release, Dec. 26, 2002 + + Known to-do list with current version (of Python interface, not pyCMSdll): + + none + +""" + +VERSION = "1.0.0 pil" + +# --------------------------------------------------------------------. + +core = _imagingcms + +# +# intent/direction values + +INTENT_PERCEPTUAL = 0 +INTENT_RELATIVE_COLORIMETRIC = 1 +INTENT_SATURATION = 2 +INTENT_ABSOLUTE_COLORIMETRIC = 3 + +DIRECTION_INPUT = 0 +DIRECTION_OUTPUT = 1 +DIRECTION_PROOF = 2 + +# +# flags + +FLAGS = { + "MATRIXINPUT": 1, + "MATRIXOUTPUT": 2, + "MATRIXONLY": (1 | 2), + "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot + # Don't create prelinearization tables on precalculated transforms + # (internal use): + "NOPRELINEARIZATION": 16, + "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink) + "NOTCACHE": 64, # Inhibit 1-pixel cache + "NOTPRECALC": 256, + "NULLTRANSFORM": 512, # Don't transform anyway + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources + "WHITEBLACKCOMPENSATION": 8192, + "BLACKPOINTCOMPENSATION": 8192, + "GAMUTCHECK": 4096, # Out of Gamut alarm + "SOFTPROOFING": 16384, # Do softproofing + "PRESERVEBLACK": 32768, # Black preservation + "NODEFAULTRESOURCEDEF": 16777216, # CRD special + "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16 # Gridpoints +} + +_MAX_FLAG = 0 +for flag in FLAGS.values(): + if isinstance(flag, int): + _MAX_FLAG = _MAX_FLAG | flag + + +# --------------------------------------------------------------------. +# Experimental PIL-level API +# --------------------------------------------------------------------. + +## +# Profile. 
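+# The class below pairs a low-level lcms profile handle with the filename
+# it was opened from (if any); see _set(). A minimal usage sketch, with an
+# illustrative profile path:
+#
+#     prof = ImageCmsProfile("sRGB.icc")
+#     icc_blob = prof.tobytes()  # raw ICC data, e.g. for save(..., icc_profile=icc_blob)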
+ +class ImageCmsProfile(object): + + def __init__(self, profile): + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + + if isStringType(profile): + self._set(core.profile_open(profile), profile) + elif hasattr(profile, "read"): + self._set(core.profile_frombytes(profile.read())) + else: + self._set(profile) # assume it's already a profile + + def _set(self, profile, filename=None): + self.profile = profile + self.filename = filename + if profile: + self.product_name = None # profile.product_name + self.product_info = None # profile.product_info + else: + self.product_name = None + self.product_info = None + + def tobytes(self): + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + + +class ImageCmsTransform(Image.ImagePointHandler): + + """ + Transform. This can be used with the procedural API, or with the standard + Image.point() method. + + Will return the output profile in the output.info['icc_profile']. + """ + + def __init__(self, input, output, input_mode, output_mode, + intent=INTENT_PERCEPTUAL, proof=None, + proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0): + if proof is None: + self.transform = core.buildTransform( + input.profile, output.profile, + input_mode, output_mode, + intent, + flags + ) + else: + self.transform = core.buildProofTransform( + input.profile, output.profile, proof.profile, + input_mode, output_mode, + intent, proof_intent, + flags + ) + # Note: inputMode and outputMode are for pyCMS compatibility only + self.input_mode = self.inputMode = input_mode + self.output_mode = self.outputMode = output_mode + + self.output_profile = output + + def point(self, im): + return self.apply(im) + + def apply(self, im, imOut=None): + im.load() + if imOut is None: + imOut = Image.new(self.output_mode, im.size, None) + self.transform.apply(im.im.id, imOut.im.id) + imOut.info['icc_profile'] = self.output_profile.tobytes() + return imOut + + def apply_in_place(self, im): + im.load() + if im.mode != self.output_mode: + raise ValueError("mode mismatch") # wrong output mode + self.transform.apply(im.im.id, im.im.id) + im.info['icc_profile'] = self.output_profile.tobytes() + return im + + +def get_display_profile(handle=None): + """ (experimental) Fetches the profile for the current display device. + :returns: None if the profile is not known. + """ + + if sys.platform == "win32": + from PIL import ImageWin + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(handle, 1) + else: + profile = core.get_display_profile_win32(handle or 0) + else: + try: + get = _imagingcms.get_display_profile + except AttributeError: + return None + else: + profile = get() + return ImageCmsProfile(profile) + + +# --------------------------------------------------------------------. +# pyCMS compatible layer +# --------------------------------------------------------------------. + +class PyCMSError(Exception): + + """ (pyCMS) Exception class. + This is used for all errors in the pyCMS API. """ + pass + + +def profileToProfile( + im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL, + outputMode=None, inPlace=0, flags=0): + """ + (pyCMS) Applies an ICC transformation to a given image, mapping from + inputProfile to outputProfile. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. 
If inPlace == TRUE and outputMode != im.mode, + a PyCMSError will be raised. If an error occurs during application of + the profiles, a PyCMSError will be raised. If outputMode is not a mode + supported by the outputProfile (or by pyCMS), a PyCMSError will be + raised. + + This function applies an ICC transformation to im from inputProfile's + color space to outputProfile's color space using the specified rendering + intent to decide how to handle out-of-gamut colors. + + OutputMode can be used to specify that a color mode conversion is to + be done using these profiles, but the specified profiles must be able + to handle that mode. I.e., if converting im from RGB to CMYK using + profiles, the input profile must handle RGB data, and the output + profile must handle CMYK data. + + :param im: An open PIL image object (i.e. Image.new(...) or + Image.open(...), etc.) + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this image, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this image, or a profile object + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param outputMode: A valid PIL mode for the output image (i.e. "RGB", + "CMYK", etc.). Note: if rendering the image "inPlace", outputMode + MUST be the same mode as the input, or omitted completely. If + omitted, the outputMode will be the same as the mode of the input + image (im.mode) + :param inPlace: Boolean (1 = True, None or 0 = False). If True, the + original image is modified in-place, and None is returned. If False + (default), a new Image object is returned with the transform applied. + :param flags: Integer (0-...) specifying additional flags + :returns: Either None or a new PIL image object, depending on value of + inPlace + :exception PyCMSError: + """ + + if outputMode is None: + outputMode = im.mode + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" % _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + transform = ImageCmsTransform( + inputProfile, outputProfile, im.mode, outputMode, + renderingIntent, flags=flags + ) + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def getOpenProfile(profileFilename): + """ + (pyCMS) Opens an ICC profile file. + + The PyCMSProfile object can be passed back into pyCMS for use in creating + transforms and such (as in ImageCms.buildTransformFromOpenProfiles()). + + If profileFilename is not a valid filename for an ICC profile, a PyCMSError + will be raised.
+ + :param profileFilename: String, as a valid filename path to the ICC profile + you wish to open, or a file-like object. + :returns: A CmsProfile class object. + :exception PyCMSError: + """ + + try: + return ImageCmsProfile(profileFilename) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def buildTransform( + inputProfile, outputProfile, inMode, outMode, + renderingIntent=INTENT_PERCEPTUAL, flags=0): + """ + (pyCMS) Builds an ICC transform mapping from the inputProfile to the + outputProfile. Use applyTransform to apply the transform to a given + image. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. If an error occurs during creation of the + transform, a PyCMSError will be raised. + + If inMode or outMode are not a mode supported by the outputProfile (or + by pyCMS), a PyCMSError will be raised. + + This function builds and returns an ICC transform from the inputProfile + to the outputProfile using the renderingIntent to determine what to do + with out-of-gamut colors. It will ONLY work for converting images that + are in inMode to images that are in outMode color format (PIL mode, + i.e. "RGB", "RGBA", "CMYK", etc.). + + Building the transform is a fair part of the overhead in + ImageCms.profileToProfile(), so if you're planning on converting multiple + images using the same input/output settings, this can save you time. + Once you have a transform object, it can be used with + ImageCms.applyTransform() to convert images without the need to re-compute + the lookup table for the transform. + + The reason pyCMS returns a class object rather than a handle directly + to the transform is that it needs to keep track of the PIL input/output + modes that the transform is meant for. These attributes are stored in + the "inMode" and "outMode" attributes of the object (which can be + manually overridden if you really want to, but I don't know of any + time that would be of use, or would even work). + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object.
+ :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" % _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, + renderingIntent, flags=flags) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def buildProofTransform( + inputProfile, outputProfile, proofProfile, inMode, outMode, + renderingIntent=INTENT_PERCEPTUAL, + proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC, + flags=FLAGS["SOFTPROOFING"]): + """ + (pyCMS) Builds an ICC transform mapping from the inputProfile to the + outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device. + + If the input, output, or proof profiles specified are not valid + filenames, a PyCMSError will be raised. + + If an error occurs during creation of the transform, a PyCMSError will + be raised. + + If inMode or outMode are not a mode supported by the outputProfile + (or by pyCMS), a PyCMSError will be raised. + + This function builds and returns an ICC transform from the inputProfile + to the outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device using renderingIntent and + proofRenderingIntent to determine what to do with out-of-gamut + colors. This is known as "soft-proofing". It will ONLY work for + converting images that are in inMode to images that are in outMode + color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.). + + Usage of the resulting transform object is exactly the same as with + ImageCms.buildTransform(). + + Proof profiling is generally used when using an output device to get a + good idea of what the final printed/displayed image would look like on + the proofProfile device when it's quicker and easier to use the + output device for judging color. Generally, this means that the + output device is a monitor, or a dye-sub printer (etc.), and the simulated + device is something more expensive, complicated, or time consuming + (making it difficult to make a real print for color judgement purposes). + + Soft-proofing basically functions by adjusting the colors on the + output device to match the colors of the device being simulated. However, + when the simulated device has a much wider gamut than the output + device, you may obtain marginal results. + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + (monitor, usually) profile you wish to use for this transform, or a + profile object + :param proofProfile: String, as a valid filename path to the ICC proof + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the input->proof (simulated) transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param proofRenderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for proof->output transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. + :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" % _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + if not isinstance(proofProfile, ImageCmsProfile): + proofProfile = ImageCmsProfile(proofProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, renderingIntent, + proofProfile, proofRenderingIntent, flags) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + +buildTransformFromOpenProfiles = buildTransform +buildProofTransformFromOpenProfiles = buildProofTransform + + +def applyTransform(im, transform, inPlace=0): + """ + (pyCMS) Applies a transform to a given image. + + If im.mode != transform.inMode, a PyCMSError is raised. + + If inPlace == TRUE and transform.inMode != transform.outMode, a + PyCMSError is raised. + + If im.mode, transform.inMode, or transform.outMode is not supported by + pyCMSdll or the profiles you used for the transform, a PyCMSError is + raised. + + If an error occurs while the transform is being applied, a PyCMSError + is raised. + + This function applies a pre-calculated transform (from + ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles()) + to an image. The transform can be used for multiple images, saving + considerable calculation time if doing the same conversion multiple times. + + If you want to modify im in-place instead of receiving a new image as + the return value, set inPlace to TRUE. This can only be done if + transform.inMode and transform.outMode are the same, because we can't + change the mode in-place (the buffer sizes for some modes are + different). The default behavior is to return a new Image object of + the same dimensions in mode transform.outMode. + + :param im: A PIL Image object, and im.mode must be the same as the inMode + supported by the transform. + :param transform: A valid CmsTransform class object + :param inPlace: Bool (1 == True, 0 or None == False).
If True, im is + modified in place and None is returned, if False, a new Image object + with the transform applied is returned (and im is not changed). The + default is False. + :returns: Either None, or a new PIL Image object, depending on the value of + inPlace. The profile will be returned in the image's + info['icc_profile']. + :exception PyCMSError: + """ + + try: + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def createProfile(colorSpace, colorTemp=-1): + """ + (pyCMS) Creates a profile. + + If colorSpace not in ["LAB", "XYZ", "sRGB"], a PyCMSError is raised + + If using LAB and colorTemp != a positive integer, a PyCMSError is raised. + + If an error occurs while creating the profile, a PyCMSError is raised. + + Use this function to create common profiles on-the-fly instead of + having to supply a profile on disk and knowing the path to it. It + returns a normal CmsProfile object that can be passed to + ImageCms.buildTransformFromOpenProfiles() to create a transform to apply + to images. + + :param colorSpace: String, the color space of the profile you wish to + create. + Currently only "LAB", "XYZ", and "sRGB" are supported. + :param colorTemp: Positive integer for the white point for the profile, in + degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50 + illuminant if omitted (5000k). colorTemp is ONLY applied to LAB + profiles, and is ignored for XYZ and sRGB. + :returns: A CmsProfile class object + :exception PyCMSError: + """ + + if colorSpace not in ["LAB", "XYZ", "sRGB"]: + raise PyCMSError( + "Color space not supported for on-the-fly profile creation (%s)" + % colorSpace) + + if colorSpace == "LAB": + try: + colorTemp = float(colorTemp) + except (TypeError, ValueError): + raise PyCMSError( + "Color temperature must be numeric, \"%s\" not valid" + % colorTemp) + + try: + return core.createProfile(colorSpace, colorTemp) + except (TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileName(profile): + """ + + (pyCMS) Gets the internal product name for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. If an error occurs while trying to obtain the + name tag, a PyCMSError is raised. + + Use this function to obtain the INTERNAL name of the profile (stored + in an ICC tag in the profile itself), usually the one used when the + profile was originally created. Sometimes this tag also contains + additional information supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal name of the profile as stored + in an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # do it in python, not c.
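+        # fallback order, mirroring the 1.x behaviour described below:
+        # description when model and manufacturer are both empty; bare
+        # model when the manufacturer is missing or the model is long;
+        # otherwise "model - manufacturer".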
+ # // name was "%s - %s" (model, manufacturer) || Description , + # // but if the Model and Manufacturer were the same or the model + # // was long, Just the model, in 1.x + model = profile.profile.product_model + manufacturer = profile.profile.product_manufacturer + + if not (model or manufacturer): + return profile.profile.product_description + "\n" + if not manufacturer or len(model) > 30: + return model + "\n" + return "%s - %s\n" % (model, manufacturer) + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileInfo(profile): + """ + (pyCMS) Gets the internal product information for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the info tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + info tag. This often contains details about the profile, and how it + was created, as supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # add an extra newline to preserve pyCMS compatibility + # Python, not C. the white point bits weren't working well, + # so skipping. + # // info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.product_description + cpright = profile.profile.product_copyright + arr = [] + for elt in (description, cpright): + if elt: + arr.append(elt) + return "\r\n\r\n".join(arr) + "\r\n\r\n" + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileCopyright(profile): + """ + (pyCMS) Gets the copyright for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the copyright tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + copyright tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_copyright + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileManufacturer(profile): + """ + (pyCMS) Gets the manufacturer for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the manufacturer tag, a + PyCMSError is raised + + Use this function to obtain the information stored in the profile's + manufacturer tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. 
+ :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_manufacturer + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileModel(profile): + """ + (pyCMS) Gets the model for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the model tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + model tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_model + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileDescription(profile): + """ + (pyCMS) Gets the description for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the description tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + description tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in an + ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_description + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getDefaultIntent(profile): + """ + (pyCMS) Gets the default intent name for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the default intent, a + PyCMSError is raised. + + Use this function to determine the default (and usually best optimized) + rendering intent for this profile. Most profiles support multiple + rendering intents, but are intended mostly for one type of conversion. + If you wish to use a different intent than returned, use + ImageCms.isIntentSupported() to verify it will work first. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: Integer 0-3 specifying the default rendering intent for this + profile. + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. 
+ :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.rendering_intent + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def isIntentSupported(profile, intent, direction): + """ + (pyCMS) Checks if a given intent is supported. + + Use this function to verify that you can use your desired + renderingIntent with profile, and that profile can be used for the + input/output/proof profile as you desire. + + Some profiles are created specifically for one "direction", and cannot + be used for others. Some profiles can only be used for certain + rendering intents... so it's best to either verify this before trying + to create a transform with them (using this function), or catch the + potential PyCMSError that will occur if they don't support the modes + you select. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :param intent: Integer (0-3) specifying the rendering intent you wish to + use with this profile + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param direction: Integer specifying if the profile is to be used for input, + output, or proof + + INPUT = 0 (or use ImageCms.DIRECTION_INPUT) + OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT) + PROOF = 2 (or use ImageCms.DIRECTION_PROOF) + + :returns: 1 if the intent/direction are supported, -1 if they are not. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # FIXME: I get different results for the same data w. different + # compilers. Bug in LittleCMS or in the binding? + if profile.profile.is_intent_supported(intent, direction): + return 1 + else: + return -1 + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def versions(): + """ + (pyCMS) Fetches versions.
+ """ + + return ( + VERSION, core.littlecms_version, + sys.version.split()[0], Image.VERSION + ) + +# -------------------------------------------------------------------- + +if __name__ == "__main__": + # create a cheap manual from the __doc__ strings for the functions above + + print(__doc__) + + for f in dir(sys.modules[__name__]): + doc = None + try: + exec("doc = %s.__doc__" % (f)) + if "pyCMS" in doc: + # so we don't get the __doc__ string for imported modules + print("=" * 80) + print("%s" % f) + print(doc) + except (AttributeError, TypeError): + pass + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageColor.py b/server/www/packages/packages-windows/x86/PIL/ImageColor.py new file mode 100644 index 0000000..56c38e4 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageColor.py @@ -0,0 +1,280 @@ +# +# The Python Imaging Library +# $Id$ +# +# map CSS3-style colour description strings to RGB +# +# History: +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-15 fl Added RGBA support +# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2 +# 2004-07-19 fl Fixed gray/grey spelling issues +# 2009-03-05 fl Fixed rounding error in grayscale calculation +# +# Copyright (c) 2002-2004 by Secret Labs AB +# Copyright (c) 2002-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +import re + + +def getrgb(color): + """ + Convert a color string to an RGB tuple. If the string cannot be parsed, + this function raises a :py:exc:`ValueError` exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :return: ``(red, green, blue[, alpha])`` + """ + try: + rgb = colormap[color] + except KeyError: + try: + # fall back on case-insensitive lookup + rgb = colormap[color.lower()] + except KeyError: + rgb = None + # found color in cache + if rgb: + if isinstance(rgb, tuple): + return rgb + colormap[color] = rgb = getrgb(rgb) + return rgb + # check for known string formats + m = re.match("#\w\w\w$", color) + if m: + return ( + int(color[1]*2, 16), + int(color[2]*2, 16), + int(color[3]*2, 16) + ) + m = re.match("#\w\w\w\w\w\w$", color) + if m: + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16) + ) + m = re.match("rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)) + ) + m = re.match("rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + return ( + int((int(m.group(1)) * 255) / 100.0 + 0.5), + int((int(m.group(2)) * 255) / 100.0 + 0.5), + int((int(m.group(3)) * 255) / 100.0 + 0.5) + ) + m = re.match("hsl\(\s*(\d+)\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + from colorsys import hls_to_rgb + rgb = hls_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(3)) / 100.0, + float(m.group(2)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5) + ) + m = re.match("rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", + color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)), + int(m.group(4)) + ) + raise ValueError("unknown color specifier: %r" % color) + + +def getcolor(color, mode): + """ + Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a + greyscale value if the mode is not color or a palette image. If the string + cannot be parsed, this function raises a :py:exc:`ValueError` exception. + + .. 
versionadded:: 1.1.4 + + :param color: A color string + :return: ``(graylevel [, alpha]) or (red, green, blue[, alpha])`` + """ + # same as getrgb, but converts the result to the given mode + color, alpha = getrgb(color), 255 + if len(color) == 4: + color, alpha = color[0:3], color[3] + + if Image.getmodebase(mode) == "L": + r, g, b = color + color = (r*299 + g*587 + b*114)//1000 + if mode[-1] == 'A': + return (color, alpha) + else: + if mode[-1] == 'A': + return color + (alpha,) + return color + +colormap = { + # X11 colour table from https://drafts.csswg.org/css-color-4/, with + # gray/grey spelling issues fixed. This is a superset of HTML 4.0 + # colour names used in CSS 1. + "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgrey": "#a9a9a9", + "darkgreen": "#006400", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + "darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "grey": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgreen": "#90ee90", + "lightgray": "#d3d3d3", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", + "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + "mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": 
"#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + "rebeccapurple": "#663399", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + "steelblue": "#4682b4", + "tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", +} diff --git a/server/www/packages/packages-windows/x86/PIL/ImageDraw.py b/server/www/packages/packages-windows/x86/PIL/ImageDraw.py new file mode 100644 index 0000000..a3e5270 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageDraw.py @@ -0,0 +1,384 @@ +# +# The Python Imaging Library +# $Id$ +# +# drawing interface operations +# +# History: +# 1996-04-13 fl Created (experimental) +# 1996-08-07 fl Filled polygons, ellipses. +# 1996-08-13 fl Added text support +# 1998-06-28 fl Handle I and F images +# 1998-12-29 fl Added arc; use arc primitive to draw ellipses +# 1999-01-10 fl Added shape stuff (experimental) +# 1999-02-06 fl Added bitmap support +# 1999-02-11 fl Changed all primitives to take options +# 1999-02-20 fl Fixed backwards compatibility +# 2000-10-12 fl Copy on write, when necessary +# 2001-02-18 fl Use default ink for bitmap/text also in fill mode +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing +# 2002-12-11 fl Refactored low-level drawing API (work in progress) +# 2004-08-26 fl Made Draw() a factory function, added getdraw() support +# 2004-09-04 fl Added width support to line primitive +# 2004-09-10 fl Added font mode handling +# 2006-06-19 fl Added font bearing support (getmask2) +# +# Copyright (c) 1997-2006 by Secret Labs AB +# Copyright (c) 1996-2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import numbers +import warnings + +from PIL import Image, ImageColor +from PIL._util import isStringType + +""" +A simple 2D drawing interface for PIL images. +
    +Application code should use the Draw factory, instead of +directly. +""" + + +class ImageDraw(object): + + def __init__(self, im, mode=None): + """ + Create a drawing instance. + + @param im The image to draw in. + @param mode Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + im.load() + if im.readonly: + im._copy() # make it writeable + blend = 0 + if mode is None: + mode = im.mode + if mode != im.mode: + if mode == "RGBA" and im.mode == "RGB": + blend = 1 + else: + raise ValueError("mode mismatch") + if mode == "P": + self.palette = im.palette + else: + self.palette = None + self.im = im.im + self.draw = Image.core.draw(self.im, blend) + self.mode = mode + if mode in ("I", "F"): + self.ink = self.draw.draw_ink(1, mode) + else: + self.ink = self.draw.draw_ink(-1, mode) + if mode in ("1", "P", "I", "F"): + # FIXME: fix Fill2 to properly support matte for I+F images + self.fontmode = "1" + else: + self.fontmode = "L" # aliasing is okay for other modes + self.fill = 0 + self.font = None + + def setink(self, ink): + raise NotImplementedError("setink() has been removed. " + + "Please use keyword arguments instead.") + + def setfill(self, onoff): + raise NotImplementedError("setfill() has been removed. " + + "Please use keyword arguments instead.") + + def setfont(self, font): + warnings.warn("setfont() is deprecated. " + + "Please set the attribute directly instead.") + # compatibility + self.font = font + + def getfont(self): + """Get the current default font.""" + if not self.font: + # FIXME: should add a font repository + from PIL import ImageFont + self.font = ImageFont.load_default() + return self.font + + def _getink(self, ink, fill=None): + if ink is None and fill is None: + if self.fill: + fill = self.ink + else: + ink = self.ink + else: + if ink is not None: + if isStringType(ink): + ink = ImageColor.getcolor(ink, self.mode) + if self.palette and not isinstance(ink, numbers.Number): + ink = self.palette.getcolor(ink) + ink = self.draw.draw_ink(ink, self.mode) + if fill is not None: + if isStringType(fill): + fill = ImageColor.getcolor(fill, self.mode) + if self.palette and not isinstance(fill, numbers.Number): + fill = self.palette.getcolor(fill) + fill = self.draw.draw_ink(fill, self.mode) + return ink, fill + + def arc(self, xy, start, end, fill=None): + """Draw an arc.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_arc(xy, start, end, ink) + + def bitmap(self, xy, bitmap, fill=None): + """Draw a bitmap.""" + bitmap.load() + ink, fill = self._getink(fill) + if ink is None: + ink = fill + if ink is not None: + self.draw.draw_bitmap(xy, bitmap.im, ink) + + def chord(self, xy, start, end, fill=None, outline=None): + """Draw a chord.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_chord(xy, start, end, fill, 1) + if ink is not None: + self.draw.draw_chord(xy, start, end, ink, 0) + + def ellipse(self, xy, fill=None, outline=None): + """Draw an ellipse.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_ellipse(xy, fill, 1) + if ink is not None: + self.draw.draw_ellipse(xy, ink, 0) + + def line(self, xy, fill=None, width=0): + """Draw a line, or a connected sequence of line segments.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_lines(xy, 
ink, width) + + def shape(self, shape, fill=None, outline=None): + """(Experimental) Draw a shape.""" + shape.close() + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_outline(shape, fill, 1) + if ink is not None: + self.draw.draw_outline(shape, ink, 0) + + def pieslice(self, xy, start, end, fill=None, outline=None): + """Draw a pieslice.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_pieslice(xy, start, end, fill, 1) + if ink is not None: + self.draw.draw_pieslice(xy, start, end, ink, 0) + + def point(self, xy, fill=None): + """Draw one or more individual pixels.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_points(xy, ink) + + def polygon(self, xy, fill=None, outline=None): + """Draw a polygon.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_polygon(xy, fill, 1) + if ink is not None: + self.draw.draw_polygon(xy, ink, 0) + + def rectangle(self, xy, fill=None, outline=None): + """Draw a rectangle.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_rectangle(xy, fill, 1) + if ink is not None: + self.draw.draw_rectangle(xy, ink, 0) + + def _multiline_check(self, text): + """Draw text.""" + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return split_character in text + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return text.split(split_character) + + def text(self, xy, text, fill=None, font=None, anchor=None, + *args, **kwargs): + if self._multiline_check(text): + return self.multiline_text(xy, text, fill, font, anchor, + *args, **kwargs) + + ink, fill = self._getink(fill) + if font is None: + font = self.getfont() + if ink is None: + ink = fill + if ink is not None: + try: + mask, offset = font.getmask2(text, self.fontmode) + xy = xy[0] + offset[0], xy[1] + offset[1] + except AttributeError: + try: + mask = font.getmask(text, self.fontmode) + except TypeError: + mask = font.getmask(text) + self.draw.draw_bitmap(xy, mask, ink) + + def multiline_text(self, xy, text, fill=None, font=None, anchor=None, + spacing=4, align="left"): + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font) + widths.append(line_width) + max_width = max(max_width, line_width) + left, top = xy + for idx, line in enumerate(lines): + if align == "left": + pass # left = x + elif align == "center": + left += (max_width - widths[idx]) / 2.0 + elif align == "right": + left += (max_width - widths[idx]) + else: + assert False, 'align must be "left", "center" or "right"' + self.text((left, top), line, fill, font, anchor) + top += line_spacing + left = xy[0] + + def textsize(self, text, font=None, *args, **kwargs): + """Get the size of a given string, in pixels.""" + if self._multiline_check(text): + return self.multiline_textsize(text, font, *args, **kwargs) + + if font is None: + font = self.getfont() + return font.getsize(text) + + def multiline_textsize(self, text, font=None, spacing=4): + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font) + max_width = max(max_width, line_width) + return max_width, len(lines)*line_spacing + + +def Draw(im, mode=None): + """ + A simple 2D drawing interface for PIL 
images. + + @param im The image to draw in. + @param mode Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + try: + return im.getdraw(mode) + except AttributeError: + return ImageDraw(im, mode) + +# experimental access to the outline API +try: + Outline = Image.core.outline +except AttributeError: + Outline = None + + +def getdraw(im=None, hints=None): + """ + (Experimental) A more advanced 2D drawing interface for PIL images, + based on the WCK interface. + + @param im The image to draw in. + @param hints An optional list of hints. + @return A (drawing context, drawing resource factory) tuple. + """ + # FIXME: this needs more work! + # FIXME: come up with a better 'hints' scheme. + handler = None + if not hints or "nicest" in hints: + try: + from PIL import _imagingagg as handler + except ImportError: + pass + if handler is None: + from PIL import ImageDraw2 as handler + if im: + im = handler.Draw(im) + return im, handler + + +def floodfill(image, xy, value, border=None): + """ + (experimental) Fills a bounded region with a given color. + + @param image Target image. + @param xy Seed position (a 2-item coordinate tuple). + @param value Fill color. + @param border Optional border value. If given, the region consists of + pixels with a color different from the border color. If not given, + the region consists of pixels having the same color as the seed + pixel. + """ + # based on an implementation by Eric S. Raymond + pixel = image.load() + x, y = xy + try: + background = pixel[x, y] + if background == value: + return # seed point already has fill color + pixel[x, y] = value + except IndexError: + return # seed point outside image + edge = [(x, y)] + if border is None: + while edge: + newedge = [] + for (x, y) in edge: + for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): + try: + p = pixel[s, t] + except IndexError: + pass + else: + if p == background: + pixel[s, t] = value + newedge.append((s, t)) + edge = newedge + else: + while edge: + newedge = [] + for (x, y) in edge: + for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): + try: + p = pixel[s, t] + except IndexError: + pass + else: + if p != value and p != border: + pixel[s, t] = value + newedge.append((s, t)) + edge = newedge + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageDraw2.py b/server/www/packages/packages-windows/x86/PIL/ImageDraw2.py new file mode 100644 index 0000000..62ee116 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageDraw2.py @@ -0,0 +1,111 @@ +# +# The Python Imaging Library +# $Id$ +# +# WCK-style drawing interface operations +# +# History: +# 2003-12-07 fl created +# 2005-05-15 fl updated; added to PIL as ImageDraw2 +# 2005-05-15 fl added text support +# 2005-05-20 fl added arc/chord/pieslice support +# +# Copyright (c) 2003-2005 by Secret Labs AB +# Copyright (c) 2003-2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
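+#
+# ImageDraw2 layers a WCK-style object API (Pen, Brush and Font, plus an
+# optional affine transform) over the lower-level ImageDraw module. A
+# minimal usage sketch (the size and colours are illustrative):
+#
+#     d = Draw(Image.new("RGB", (100, 100), "white"))
+#     d.line([(0, 0), (99, 99)], Pen("blue", width=2))
+#     im = d.flush()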
+# + +from PIL import Image, ImageColor, ImageDraw, ImageFont, ImagePath + + +class Pen(object): + def __init__(self, color, width=1, opacity=255): + self.color = ImageColor.getrgb(color) + self.width = width + + +class Brush(object): + def __init__(self, color, opacity=255): + self.color = ImageColor.getrgb(color) + + +class Font(object): + def __init__(self, color, file, size=12): + # FIXME: add support for bitmap fonts + self.color = ImageColor.getrgb(color) + self.font = ImageFont.truetype(file, size) + + +class Draw(object): + + def __init__(self, image, size=None, color=None): + if not hasattr(image, "im"): + image = Image.new(image, size, color) + self.draw = ImageDraw.Draw(image) + self.image = image + self.transform = None + + def flush(self): + return self.image + + def render(self, op, xy, pen, brush=None): + # handle color arguments + outline = fill = None + width = 1 + if isinstance(pen, Pen): + outline = pen.color + width = pen.width + elif isinstance(brush, Pen): + outline = brush.color + width = brush.width + if isinstance(brush, Brush): + fill = brush.color + elif isinstance(pen, Brush): + fill = pen.color + # handle transformation + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + # render the item + if op == "line": + self.draw.line(xy, fill=outline, width=width) + else: + getattr(self.draw, op)(xy, fill=fill, outline=outline) + + def settransform(self, offset): + (xoffset, yoffset) = offset + self.transform = (1, 0, xoffset, 0, 1, yoffset) + + def arc(self, xy, start, end, *options): + self.render("arc", xy, start, end, *options) + + def chord(self, xy, start, end, *options): + self.render("chord", xy, start, end, *options) + + def ellipse(self, xy, *options): + self.render("ellipse", xy, *options) + + def line(self, xy, *options): + self.render("line", xy, *options) + + def pieslice(self, xy, start, end, *options): + self.render("pieslice", xy, start, end, *options) + + def polygon(self, xy, *options): + self.render("polygon", xy, *options) + + def rectangle(self, xy, *options): + self.render("rectangle", xy, *options) + + def symbol(self, xy, symbol, *options): + raise NotImplementedError("not in this version") + + def text(self, xy, text, font): + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + self.draw.text(xy, text, font=font.font, fill=font.color) + + def textsize(self, text, font): + return self.draw.textsize(text, font=font.font) diff --git a/server/www/packages/packages-windows/x86/PIL/ImageEnhance.py b/server/www/packages/packages-windows/x86/PIL/ImageEnhance.py new file mode 100644 index 0000000..56b5c01 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageEnhance.py @@ -0,0 +1,100 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image enhancement classes +# +# For a background, see "Image Processing By Interpolation and +# Extrapolation", Paul Haeberli and Douglas Voorhies. Available +# at http://www.graficaobscura.com/interp/index.html +# +# History: +# 1996-03-23 fl Created +# 2009-06-16 fl Fixed mean calculation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFilter, ImageStat + + +class _Enhance(object): + + def enhance(self, factor): + """ + Returns an enhanced image. + + :param factor: A floating point value controlling the enhancement. 
+ Factor 1.0 always returns a copy of the original image, + lower factors mean less color (brightness, contrast, + etc), and higher values more. There are no restrictions + on this value. + :rtype: :py:class:`~PIL.Image.Image` + """ + return Image.blend(self.degenerate, self.image, factor) + + +class Color(_Enhance): + """Adjust image color balance. + + This class can be used to adjust the colour balance of an image, in + a manner similar to the controls on a colour TV set. An enhancement + factor of 0.0 gives a black and white image. A factor of 1.0 gives + the original image. + """ + def __init__(self, image): + self.image = image + self.intermediate_mode = 'L' + if 'A' in image.getbands(): + self.intermediate_mode = 'LA' + + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) + + +class Contrast(_Enhance): + """Adjust image contrast. + + This class can be used to control the contrast of an image, similar + to the contrast control on a TV set. An enhancement factor of 0.0 + gives a solid grey image. A factor of 1.0 gives the original image. + """ + def __init__(self, image): + self.image = image + mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) + self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + + +class Brightness(_Enhance): + """Adjust image brightness. + + This class can be used to control the brightness of an image. An + enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the + original image. + """ + def __init__(self, image): + self.image = image + self.degenerate = Image.new(image.mode, image.size, 0) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + + +class Sharpness(_Enhance): + """Adjust image sharpness. + + This class can be used to adjust the sharpness of an image. An + enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the + original image, and a factor of 2.0 gives a sharpened image. + """ + def __init__(self, image): + self.image = image + self.degenerate = image.filter(ImageFilter.SMOOTH) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) diff --git a/server/www/packages/packages-windows/x86/PIL/ImageFile.py b/server/www/packages/packages-windows/x86/PIL/ImageFile.py new file mode 100644 index 0000000..b21e9e3 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageFile.py @@ -0,0 +1,526 @@ +# +# The Python Imaging Library. +# $Id$ +# +# base class for image file handlers +# +# history: +# 1995-09-09 fl Created +# 1996-03-11 fl Fixed load mechanism. +# 1996-04-15 fl Added pcx/xbm decoders. +# 1996-04-30 fl Added encoders. +# 1996-12-14 fl Added load helpers +# 1997-01-11 fl Use encode_to_file where possible +# 1997-08-27 fl Flush output in _save +# 1998-03-05 fl Use memory mapping for some modes +# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" +# 1999-05-31 fl Added image parser +# 2000-10-12 fl Set readonly flag on memory-mapped images +# 2002-03-20 fl Use better messages for common decoder errors +# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available +# 2003-10-30 fl Added StubImageFile class +# 2004-02-25 fl Made incremental parser more robust +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
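+#
+# ImageFile extends Image.Image with tile-based (and, where possible,
+# memory-mapped) loading; the Parser class below implements an incremental
+# feed/close consumer. A sketch of feeding a stream in chunks (the file
+# name is illustrative, and the bytes must form a complete image):
+#
+#     parser = Parser()
+#     with open("example.png", "rb") as fp:
+#         while True:
+#             chunk = fp.read(1024)
+#             if not chunk:
+#                 break
+#             parser.feed(chunk)
+#     im = parser.close()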
+# + +from PIL import Image +from PIL._util import isPath +import io +import os +import sys +import struct + +MAXBLOCK = 65536 + +SAFEBLOCK = 1024*1024 + +LOAD_TRUNCATED_IMAGES = False + +ERRORS = { + -1: "image buffer overrun error", + -2: "decoding error", + -3: "unknown error", + -8: "bad configuration", + -9: "out of memory error" +} + + +def raise_ioerror(error): + try: + message = Image.core.getcodecstatus(error) + except AttributeError: + message = ERRORS.get(error) + if not message: + message = "decoder error %d" % error + raise IOError(message + " when reading image file") + + +# +# -------------------------------------------------------------------- +# Helpers + +def _tilesort(t): + # sort on offset + return t[2] + + +# +# -------------------------------------------------------------------- +# ImageFile base class + +class ImageFile(Image.Image): + "Base class for image file format handlers." + + def __init__(self, fp=None, filename=None): + Image.Image.__init__(self) + + self.tile = None + self.readonly = 1 # until we know better + + self.decoderconfig = () + self.decodermaxblock = MAXBLOCK + + if isPath(fp): + # filename + self.fp = open(fp, "rb") + self.filename = fp + else: + # stream + self.fp = fp + self.filename = filename + + try: + self._open() + except (IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error) as v: + raise SyntaxError(v) + + if not self.mode or self.size[0] <= 0: + raise SyntaxError("not identified by this driver") + + def draft(self, mode, size): + "Set draft mode" + + pass + + def verify(self): + "Check file integrity" + + # raise exception if something's wrong. must be called + # directly after open, and closes file when finished. + self.fp = None + + def load(self): + "Load image data based on tile list" + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. + use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info') + + readonly = 0 + + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: + # try memory mapping + d, e, o, a = self.tile[0] + if d == "raw" and a[0] == self.mode and a[0] in Image._MAPMODES: + try: + if hasattr(Image.core, "map"): + # use built-in mapper + self.map = Image.core.map(self.filename) + self.map.seek(o) + self.im = self.map.readimage( + self.mode, self.size, a[1], a[2] + ) + else: + # use mmap, if possible + import mmap + fp = open(self.filename, "r") + size = os.path.getsize(self.filename) + self.map = mmap.mmap(fp.fileno(), size, access=mmap.ACCESS_READ) + self.im = Image.core.map_buffer( + self.map, self.size, d, e, o, a + ) + readonly = 1 + except (AttributeError, EnvironmentError, ImportError): + self.map = None + + self.load_prepare() + + if not self.map: + # sort tiles in file order + self.tile.sort(key=_tilesort) + + try: + # FIXME: This is a hack to handle TIFF's JpegTables tag. 
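+            # Some plugins (the FIXME above refers to TIFF's JpegTables
+            # tag) set tile_prefix so that shared header bytes are
+            # prepended to every tile's data before it reaches the decoder.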
+ prefix = self.tile_prefix + except AttributeError: + prefix = b"" + + for decoder_name, extents, offset, args in self.tile: + decoder = Image._getdecoder(self.mode, decoder_name, + args, self.decoderconfig) + seek(offset) + try: + decoder.setimage(self.im, extents) + except ValueError: + continue + if decoder.pulls_fd: + decoder.setfd(self.fp) + status, err_code = decoder.decode(b"") + else: + b = prefix + while True: + try: + s = read(self.decodermaxblock) + except (IndexError, struct.error): # truncated png/gif + if LOAD_TRUNCATED_IMAGES: + break + else: + raise IOError("image file is truncated") + + if not s and not decoder.handles_eof: # truncated jpeg + self.tile = [] + + # JpegDecode needs to clean things up here either way + # If we don't destroy the decompressor, + # we have a memory leak. + decoder.cleanup() + + if LOAD_TRUNCATED_IMAGES: + break + else: + raise IOError("image file is truncated " + "(%d bytes not processed)" % len(b)) + + b = b + s + n, err_code = decoder.decode(b) + if n < 0: + break + b = b[n:] + + # Need to cleanup here to prevent leaks in PyPy + decoder.cleanup() + + self.tile = [] + self.readonly = readonly + + self.fp = None # might be shared + + if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0: + # still raised if decoder fails to return anything + raise_ioerror(err_code) + + # post processing + if hasattr(self, "tile_post_rotate"): + # FIXME: This is a hack to handle rotated PCD's + self.im = self.im.rotate(self.tile_post_rotate) + self.size = self.im.size + + self.load_end() + + return Image.Image.load(self) + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.new(self.mode, self.size) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def load_end(self): + # may be overridden + pass + + # may be defined for contained formats + # def load_seek(self, pos): + # pass + + # may be defined for blocked formats (e.g. PNG) + # def load_read(self, bytes): + # pass + + +class StubImageFile(ImageFile): + """ + Base class for stub image loaders. + + A stub loader is an image loader that can identify files of a + certain format, but relies on external code to load the file. + """ + + def _open(self): + raise NotImplementedError( + "StubImageFile subclass must implement _open" + ) + + def load(self): + loader = self._load() + if loader is None: + raise IOError("cannot find loader for this %s file" % self.format) + image = loader.load(self) + assert image is not None + # become the other object (!) + self.__class__ = image.__class__ + self.__dict__ = image.__dict__ + + def _load(self): + "(Hook) Find actual image loader." + raise NotImplementedError( + "StubImageFile subclass must implement _load" + ) + + +class Parser(object): + """ + Incremental image parser. This class implements the standard + feed/close consumer interface. + + In Python 2.x, this is an old-style class. + """ + incremental = None + image = None + data = None + decoder = None + offset = 0 + finished = 0 + + def reset(self): + """ + (Consumer) Reset the parser. Note that you can only call this + method immediately after you've created a parser; parser + instances cannot be reused. + """ + assert self.data is None, "cannot reuse parsers" + + def feed(self, data): + """ + (Consumer) Feed data to the parser. + + :param data: A string buffer. + :exception IOError: If the parser failed to parse the image file. 
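+
+        A minimal incremental-loading sketch (assumes ``from PIL import
+        ImageFile`` and an open binary file ``fp``; the chunk size is
+        arbitrary)::
+
+            parser = ImageFile.Parser()
+            while True:
+                chunk = fp.read(1024)
+                if not chunk:
+                    break
+                parser.feed(chunk)
+            image = parser.close()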
+ """ + # collect data + + if self.finished: + return + + if self.data is None: + self.data = data + else: + self.data = self.data + data + + # parse what we have + if self.decoder: + + if self.offset > 0: + # skip header + skip = min(len(self.data), self.offset) + self.data = self.data[skip:] + self.offset = self.offset - skip + if self.offset > 0 or not self.data: + return + + n, e = self.decoder.decode(self.data) + + if n < 0: + # end of stream + self.data = None + self.finished = 1 + if e < 0: + # decoding error + self.image = None + raise_ioerror(e) + else: + # end of image + return + self.data = self.data[n:] + + elif self.image: + + # if we end up here with no decoder, this file cannot + # be incrementally parsed. wait until we've gotten all + # available data + pass + + else: + + # attempt to open this file + try: + try: + fp = io.BytesIO(self.data) + im = Image.open(fp) + finally: + fp.close() # explicitly close the virtual file + except IOError: + # traceback.print_exc() + pass # not enough data + else: + flag = hasattr(im, "load_seek") or hasattr(im, "load_read") + if flag or len(im.tile) != 1: + # custom load code, or multiple tiles + self.decode = None + else: + # initialize decoder + im.load_prepare() + d, e, o, a = im.tile[0] + im.tile = [] + self.decoder = Image._getdecoder( + im.mode, d, a, im.decoderconfig + ) + self.decoder.setimage(im.im, e) + + # calculate decoder offset + self.offset = o + if self.offset <= len(self.data): + self.data = self.data[self.offset:] + self.offset = 0 + + self.image = im + + def close(self): + """ + (Consumer) Close the stream. + + :returns: An image object. + :exception IOError: If the parser failed to parse the image file either + because it cannot be identified or cannot be + decoded. + """ + # finish decoding + if self.decoder: + # get rid of what's left in the buffers + self.feed(b"") + self.data = self.decoder = None + if not self.finished: + raise IOError("image was incomplete") + if not self.image: + raise IOError("cannot parse this image") + if self.data: + # incremental parsing not possible; reopen the file + # not that we have all data + try: + fp = io.BytesIO(self.data) + self.image = Image.open(fp) + finally: + self.image.load() + fp.close() # explicitly close the virtual file + return self.image + + +# -------------------------------------------------------------------- + +def _save(im, fp, tile, bufsize=0): + """Helper to save image based on tile list + + :param im: Image object. + :param fp: File object. + :param tile: Tile list. + :param bufsize: Optional buffer size + """ + + im.load() + if not hasattr(im, "encoderconfig"): + im.encoderconfig = () + tile.sort(key=_tilesort) + # FIXME: make MAXBLOCK a configuration parameter + # It would be great if we could have the encoder specify what it needs + # But, it would need at least the image size in most cases. RawEncode is + # a tricky case. 
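+    # The im.size[0] * 4 term below keeps the buffer large enough for at
+    # least one full scan line of 32-bit pixel data per encode() call.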
+ bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + if fp == sys.stdout: + fp.flush() + return + try: + fh = fp.fileno() + fp.flush() + except (AttributeError, io.UnsupportedOperation): + # compress to Python file-compatible object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + if e.pushes_fd: + e.setfd(fp) + l,s = e.encode_to_pyfd() + else: + while True: + l, s, d = e.encode(bufsize) + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + else: + # slight speedup: compress to real file object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + if e.pushes_fd: + e.setfd(fp) + l,s = e.encode_to_pyfd() + else: + s = e.encode_to_file(fh, bufsize) + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + if hasattr(fp, "flush"): + fp.flush() + + +def _safe_read(fp, size): + """ + Reads large blocks in a safe way. Unlike fp.read(n), this function + doesn't trust the user. If the requested size is larger than + SAFEBLOCK, the file is read block by block. + + :param fp: File handle. Must implement a read method. + :param size: Number of bytes to read. + :returns: A string containing up to size bytes of data. + """ + if size <= 0: + return b"" + if size <= SAFEBLOCK: + return fp.read(size) + data = [] + while size > 0: + block = fp.read(min(size, SAFEBLOCK)) + if not block: + break + data.append(block) + size -= len(block) + return b"".join(data) diff --git a/server/www/packages/packages-windows/x86/PIL/ImageFilter.py b/server/www/packages/packages-windows/x86/PIL/ImageFilter.py new file mode 100644 index 0000000..baa168a --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageFilter.py @@ -0,0 +1,275 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard filters +# +# History: +# 1995-11-27 fl Created +# 2002-06-08 fl Added rank and mode filters +# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2002 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import functools + + +class Filter(object): + pass + + +class Kernel(Filter): + """ + Create a convolution kernel. The current version only + supports 3x3 and 5x5 integer and floating point kernels. + + In the current version, kernels can only be applied to + "L" and "RGB" images. + + :param size: Kernel size, given as (width, height). In the current + version, this must be (3,3) or (5,5). + :param kernel: A sequence containing kernel weights. + :param scale: Scale factor. If given, the result for each pixel is + divided by this value. the default is the sum of the + kernel weights. + :param offset: Offset. If given, this value is added to the result, + after it has been divided by the scale factor. 
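+
+    A short illustrative sketch (``im`` is assumed to be an open "L" or
+    "RGB" mode image)::
+
+        from PIL import ImageFilter
+
+        # 3x3 box blur: nine equal weights; scale defaults to their sum (9)
+        box = ImageFilter.Kernel((3, 3), [1] * 9)
+        blurred = im.filter(box)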
+ """ + + def __init__(self, size, kernel, scale=None, offset=0): + if scale is None: + # default scale is sum of kernel + scale = functools.reduce(lambda a, b: a+b, kernel) + if size[0] * size[1] != len(kernel): + raise ValueError("not enough coefficients in kernel") + self.filterargs = size, scale, offset, kernel + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + return image.filter(*self.filterargs) + + +class BuiltinFilter(Kernel): + def __init__(self): + pass + + +class RankFilter(Filter): + """ + Create a rank filter. The rank filter sorts all pixels in + a window of the given size, and returns the **rank**'th value. + + :param size: The kernel size, in pixels. + :param rank: What pixel value to pick. Use 0 for a min filter, + ``size * size / 2`` for a median filter, ``size * size - 1`` + for a max filter, etc. + """ + name = "Rank" + + def __init__(self, size, rank): + self.size = size + self.rank = rank + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + image = image.expand(self.size//2, self.size//2) + return image.rankfilter(self.size, self.rank) + + +class MedianFilter(RankFilter): + """ + Create a median filter. Picks the median pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Median" + + def __init__(self, size=3): + self.size = size + self.rank = size*size//2 + + +class MinFilter(RankFilter): + """ + Create a min filter. Picks the lowest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Min" + + def __init__(self, size=3): + self.size = size + self.rank = 0 + + +class MaxFilter(RankFilter): + """ + Create a max filter. Picks the largest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Max" + + def __init__(self, size=3): + self.size = size + self.rank = size*size-1 + + +class ModeFilter(Filter): + """ + + Create a mode filter. Picks the most frequent pixel value in a box with the + given size. Pixel values that occur only once or twice are ignored; if no + pixel value occurs more than twice, the original pixel value is preserved. + + :param size: The kernel size, in pixels. + """ + name = "Mode" + + def __init__(self, size=3): + self.size = size + + def filter(self, image): + return image.modefilter(self.size) + + +class GaussianBlur(Filter): + """Gaussian blur filter. + + :param radius: Blur radius. + """ + name = "GaussianBlur" + + def __init__(self, radius=2): + self.radius = radius + + def filter(self, image): + return image.gaussian_blur(self.radius) + + +class UnsharpMask(Filter): + """Unsharp mask filter. + + See Wikipedia's entry on `digital unsharp masking`_ for an explanation of + the parameters. + + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + + .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + + """ + name = "UnsharpMask" + + def __init__(self, radius=2, percent=150, threshold=3): + self.radius = radius + self.percent = percent + self.threshold = threshold + + def filter(self, image): + return image.unsharp_mask(self.radius, self.percent, self.threshold) + + +class BLUR(BuiltinFilter): + name = "Blur" + filterargs = (5, 5), 16, 0, ( + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1 + ) + + +class CONTOUR(BuiltinFilter): + name = "Contour" + filterargs = (3, 3), 1, 255, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class DETAIL(BuiltinFilter): + name = "Detail" + filterargs = (3, 3), 6, 0, ( + 0, -1, 0, + -1, 10, -1, + 0, -1, 0 + ) + + +class EDGE_ENHANCE(BuiltinFilter): + name = "Edge-enhance" + filterargs = (3, 3), 2, 0, ( + -1, -1, -1, + -1, 10, -1, + -1, -1, -1 + ) + + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name = "Edge-enhance More" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 9, -1, + -1, -1, -1 + ) + + +class EMBOSS(BuiltinFilter): + name = "Emboss" + filterargs = (3, 3), 1, 128, ( + -1, 0, 0, + 0, 1, 0, + 0, 0, 0 + ) + + +class FIND_EDGES(BuiltinFilter): + name = "Find Edges" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class SMOOTH(BuiltinFilter): + name = "Smooth" + filterargs = (3, 3), 13, 0, ( + 1, 1, 1, + 1, 5, 1, + 1, 1, 1 + ) + + +class SMOOTH_MORE(BuiltinFilter): + name = "Smooth More" + filterargs = (5, 5), 100, 0, ( + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1 + ) + + +class SHARPEN(BuiltinFilter): + name = "Sharpen" + filterargs = (3, 3), 16, 0, ( + -2, -2, -2, + -2, 32, -2, + -2, -2, -2 + ) diff --git a/server/www/packages/packages-windows/x86/PIL/ImageFont.py b/server/www/packages/packages-windows/x86/PIL/ImageFont.py new file mode 100644 index 0000000..af1166d --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageFont.py @@ -0,0 +1,437 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIL raster font management +# +# History: +# 1996-08-07 fl created (experimental) +# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 +# 1999-02-06 fl rewrote most font management stuff in C +# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) +# 2001-02-17 fl added freetype support +# 2001-05-09 fl added TransposedFont wrapper class +# 2002-03-04 fl make sure we have a "L" or "1" font +# 2002-12-04 fl skip non-directory entries in the system path +# 2003-04-29 fl add embedded default font +# 2003-09-27 fl added support for truetype charmap encodings +# +# Todo: +# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isDirectory, isPath +import os +import sys + + +class _imagingft_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imagingft C module is not installed") + +try: + from PIL import _imagingft as core +except ImportError: + core = _imagingft_not_installed() + +# FIXME: add support for pilfont2 format (see FontFile.py) + +# -------------------------------------------------------------------- +# Font metrics format: +# "PILfont" LF +# fontdescriptor LF +# (optional) key=value... 
LF +# "DATA" LF +# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) +# +# To place a character, cut out srcbox and paste at dstbox, +# relative to the character position. Then move the character +# position according to dx, dy. +# -------------------------------------------------------------------- + + +class ImageFont(object): + "PIL font wrapper" + + def _load_pilfont(self, filename): + + fp = open(filename, "rb") + + for ext in (".png", ".gif", ".pbm"): + try: + fullname = os.path.splitext(filename)[0] + ext + image = Image.open(fullname) + except: + pass + else: + if image and image.mode in ("1", "L"): + break + else: + raise IOError("cannot find glyph data file") + + self.file = fullname + + return self._load_pilfont_data(fp, image) + + def _load_pilfont_data(self, file, image): + + # read PILfont header + if file.readline() != b"PILfont\n": + raise SyntaxError("Not a PILfont file") + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary + while True: + s = file.readline() + if not s or s == b"DATA\n": + break + self.info.append(s) + + # read PILfont metrics + data = file.read(256*20) + + # check image + if image.mode not in ("1", "L"): + raise TypeError("invalid font image mode") + + image.load() + + self.font = Image.core.font(image.im, data) + + # delegate critical operations to internal type + self.getsize = self.font.getsize + self.getmask = self.font.getmask + + +## +# Wrapper for FreeType fonts. Application code should use the +# truetype factory function to create font objects. + +class FreeTypeFont(object): + "FreeType font wrapper (requires _imagingft service)" + + def __init__(self, font=None, size=10, index=0, encoding=""): + # FIXME: use service provider instead + + self.path = font + self.size = size + self.index = index + self.encoding = encoding + + if isPath(font): + self.font = core.getfont(font, size, index, encoding) + else: + self.font_bytes = font.read() + self.font = core.getfont( + "", size, index, encoding, self.font_bytes) + + def getname(self): + return self.font.family, self.font.style + + def getmetrics(self): + return self.font.ascent, self.font.descent + + def getsize(self, text): + size, offset = self.font.getsize(text) + return (size[0] + offset[0], size[1] + offset[1]) + + def getoffset(self, text): + return self.font.getsize(text)[1] + + def getmask(self, text, mode=""): + return self.getmask2(text, mode)[0] + + def getmask2(self, text, mode="", fill=Image.core.fill): + size, offset = self.font.getsize(text) + im = fill("L", size, 0) + self.font.render(text, im.id, mode == "1") + return im, offset + + def font_variant(self, font=None, size=None, index=None, encoding=None): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. + """ + return FreeTypeFont(font=self.path if font is None else font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else + encoding) + +## +# Wrapper that creates a transposed font from any existing font +# object. +# +# @param font A font object. +# @param orientation An optional orientation. If given, this should +# be one of Image.FLIP_LEFT_RIGHT, Image.FLIP_TOP_BOTTOM, +# Image.ROTATE_90, Image.ROTATE_180, or Image.ROTATE_270. 
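+#
+# A hypothetical usage sketch ("arial.ttf" is just an example path, and
+# draw is assumed to be an ImageDraw.Draw instance):
+#
+#     font = ImageFont.truetype("arial.ttf", 20)
+#     vertical = ImageFont.TransposedFont(font, orientation=Image.ROTATE_90)
+#     draw.text((10, 10), "hello", font=vertical)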
+ + +class TransposedFont(object): + "Wrapper for writing rotated or mirrored text" + + def __init__(self, font, orientation=None): + self.font = font + self.orientation = orientation # any 'transpose' argument, or None + + def getsize(self, text): + w, h = self.font.getsize(text) + if self.orientation in (Image.ROTATE_90, Image.ROTATE_270): + return h, w + return w, h + + def getmask(self, text, mode=""): + im = self.font.getmask(text, mode) + if self.orientation is not None: + return im.transpose(self.orientation) + return im + + +def load(filename): + """ + Load a font file. This function loads a font object from the given + bitmap font file, and returns the corresponding font object. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. + """ + f = ImageFont() + f._load_pilfont(filename) + return f + + +def truetype(font=None, size=10, index=0, encoding=""): + """ + Load a TrueType or OpenType font file, and create a font object. + This function loads a font object from the given file, and creates + a font object for a font of the given size. + + This function requires the _imagingft service. + + :param font: A truetype font file. Under Windows, if the file + is not found in this filename, the loader also looks in + Windows :file:`fonts/` directory. + :param size: The requested size, in points. + :param index: Which font face to load (default is first available face). + :param encoding: Which font encoding to use (default is Unicode). Common + encodings are "unic" (Unicode), "symb" (Microsoft + Symbol), "ADOB" (Adobe Standard), "ADBE" (Adobe Expert), + and "armn" (Apple Roman). See the FreeType documentation + for more information. + :return: A font object. + :exception IOError: If the file could not be read. + """ + + try: + return FreeTypeFont(font, size, index, encoding) + except IOError: + ttf_filename = os.path.basename(font) + + dirs = [] + if sys.platform == "win32": + # check the windows font repository + # NOTE: must use uppercase WINDIR, to work around bugs in + # 1.5.2's os.environ.get() + windir = os.environ.get("WINDIR") + if windir: + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ('linux', 'linux2'): + lindirs = os.environ.get("XDG_DATA_DIRS", "") + if not lindirs: + # According to the freedesktop spec, XDG_DATA_DIRS should + # default to /usr/share + lindirs = '/usr/share' + dirs += [os.path.join(lindir, "fonts") + for lindir in lindirs.split(":")] + elif sys.platform == 'darwin': + dirs += ['/Library/Fonts', '/System/Library/Fonts', + os.path.expanduser('~/Library/Fonts')] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + return FreeTypeFont(fontpath, size, index, encoding) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == '.ttf': + return FreeTypeFont(fontpath, size, index, encoding) + if not ext and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return FreeTypeFont(first_font_with_a_different_extension, size, + index, encoding) + raise + + +def load_path(filename): + """ + Load font file. 
Same as :py:func:`~PIL.ImageFont.load`, but searches for a + bitmap font along the Python path. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. + """ + for directory in sys.path: + if isDirectory(directory): + if not isinstance(filename, str): + if bytes is str: + filename = filename.encode("utf-8") + else: + filename = filename.decode("utf-8") + try: + return load(os.path.join(directory, filename)) + except IOError: + pass + raise IOError("cannot find font file") + + +def load_default(): + """Load a "better than nothing" default font. + + .. versionadded:: 1.1.4 + + :return: A font object. + """ + from io import BytesIO + import base64 + f = ImageFont() + f._load_pilfont_data( + # courB08 + BytesIO(base64.decodestring(b''' +UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA +BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL +AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA +AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB +ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A +BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB +//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA +AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH +AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA +ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv +AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ +/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 +AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA +AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG +AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA +BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA +AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA +2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF +AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// ++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA +////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA +BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv +AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA +AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA 
+AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB +mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 +AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA +Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC +eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA 
+LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 +AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA +AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 +AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA +AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG +AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA +EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK +AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA +pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG +AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// ++QAGAAIAzgAKANUAEw== +''')), Image.open(BytesIO(base64.decodestring(b''' +iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u +Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 +M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g +LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F +IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA +Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 +NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx +in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 +SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY +AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt +y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG +ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY +lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H +/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 +AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 +c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ +/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw +pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv +oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR +evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA +AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// +Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR +w7IkEbzhVQAAAABJRU5ErkJggg== +''')))) + return f + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageGrab.py b/server/www/packages/packages-windows/x86/PIL/ImageGrab.py new file mode 100644 index 0000000..85bc474 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageGrab.py @@ -0,0 +1,81 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber (OS X and Windows only) +# +# History: +# 2001-04-26 fl created +# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image + +import sys +if sys.platform not in ["win32", "darwin"]: + raise ImportError("ImageGrab is OS X and Windows only") + +if sys.platform == "win32": + grabber = Image.core.grabscreen +elif sys.platform == "darwin": + import os + import tempfile + import subprocess + + +def grab(bbox=None): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp('.png') + os.close(fh) + subprocess.call(['screencapture', '-x', filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + else: + size, data = grabber() + im = Image.frombytes( + "RGB", size, data, + # RGB, 32-bit line padding, origo in lower left corner + "raw", "BGR", (size[0]*3 + 3) & -4, -1 + ) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp('.jpg') + os.close(fh) + commands = [ + "set theFile to (open for access POSIX file \""+filepath+"\" with write permission)", + "try", + "write (the clipboard as JPEG picture) to theFile", + "end try", + "close access theFile" + ] + script = ["osascript"] + for command in commands: + script += ["-e", command] + subprocess.call(script) + + im = None + if os.stat(filepath).st_size != 0: + im = Image.open(filepath) + im.load() + os.unlink(filepath) + return im + else: + debug = 0 # temporary interface + data = Image.core.grabclipboard(debug) + if isinstance(data, bytes): + from PIL import BmpImagePlugin + import io + return BmpImagePlugin.DibImageFile(io.BytesIO(data)) + return data diff --git a/server/www/packages/packages-windows/x86/PIL/ImageMath.py b/server/www/packages/packages-windows/x86/PIL/ImageMath.py new file mode 100644 index 0000000..c0f3820 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageMath.py @@ -0,0 +1,272 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import _imagingmath + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +VERBOSE = 0 + + +def _isconstant(v): + return isinstance(v, int) or isinstance(v, float) + + +class _Operand(object): + """Wraps an image operand, providing standard operators""" + + def __init__(self, im): + self.im = im + + def __fixup(self, im1): + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. 
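+            # coerce it to something the C math module supports: "1" and
+            # "L" are widened to 32-bit integer ("I"); "I" and "F" images
+            # pass through unchanged.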
+ if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + raise ValueError("unsupported mode: %s" % im1.im.mode) + else: + # argument was a constant + if _isconstant(im1) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply(self, op, im1, im2=None, mode=None): + im1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.unop(op, out.im.id, im1.im.id) + else: + # binary operation + im2 = self.__fixup(im2) + if im1.mode != im2.mode: + # convert both arguments to floating point + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") + if im1.mode != im2.mode: + raise ValueError("mode mismatch") + if im1.size != im2.size: + # crop both arguments to a common size + size = (min(im1.size[0], im2.size[0]), + min(im1.size[1], im2.size[1])) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) + out = Image.new(mode or im1.mode, size, None) + else: + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + im2.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self): + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + if bytes is str: + # Provide __nonzero__ for pre-Py3k + __nonzero__ = __bool__ + del __bool__ + + def __abs__(self): + return self.apply("abs", self) + + def __pos__(self): + return self + + def __neg__(self): + return self.apply("neg", self) + + # binary operators + def __add__(self, other): + return self.apply("add", self, other) + + def __radd__(self, other): + return self.apply("add", other, self) + + def __sub__(self, other): + return self.apply("sub", self, other) + + def __rsub__(self, other): + return self.apply("sub", other, self) + + def __mul__(self, other): + return self.apply("mul", self, other) + + def __rmul__(self, other): + return self.apply("mul", other, self) + + def __truediv__(self, other): + return self.apply("div", self, other) + + def __rtruediv__(self, other): + return self.apply("div", other, self) + + def __mod__(self, other): + return self.apply("mod", self, other) + + def __rmod__(self, other): + return self.apply("mod", other, self) + + def __pow__(self, other): + return self.apply("pow", self, other) + + def __rpow__(self, other): + return self.apply("pow", other, self) + + if bytes is str: + # Provide __div__ and __rdiv__ for pre-Py3k + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + del __truediv__ + del __rtruediv__ + + # bitwise + def __invert__(self): + return self.apply("invert", self) + + def __and__(self, other): + return self.apply("and", self, other) + + def __rand__(self, other): + return self.apply("and", other, self) + + def __or__(self, other): + return self.apply("or", self, other) + + def __ror__(self, other): + return self.apply("or", other, self) + + def __xor__(self, other): + return self.apply("xor", self, other) + + def __rxor__(self, other): + return self.apply("xor", other, self) + + def __lshift__(self, other): + 
return self.apply("lshift", self, other) + + def __rshift__(self, other): + return self.apply("rshift", self, other) + + # logical + def __eq__(self, other): + return self.apply("eq", self, other) + + def __ne__(self, other): + return self.apply("ne", self, other) + + def __lt__(self, other): + return self.apply("lt", self, other) + + def __le__(self, other): + return self.apply("le", self, other) + + def __gt__(self, other): + return self.apply("gt", self, other) + + def __ge__(self, other): + return self.apply("ge", self, other) + + +# conversions +def imagemath_int(self): + return _Operand(self.im.convert("I")) + + +def imagemath_float(self): + return _Operand(self.im.convert("F")) + + +# logical +def imagemath_equal(self, other): + return self.apply("eq", self, other, mode="I") + + +def imagemath_notequal(self, other): + return self.apply("ne", self, other, mode="I") + + +def imagemath_min(self, other): + return self.apply("min", self, other) + + +def imagemath_max(self, other): + return self.apply("max", self, other) + + +def imagemath_convert(self, mode): + return _Operand(self.im.convert(mode)) + +ops = {} +for k, v in list(globals().items()): + if k[:10] == "imagemath_": + ops[k[10:]] = v + + +def eval(expression, _dict={}, **kw): + """ + Evaluates an image expression. + + :param expression: A string containing a Python-style expression. + :param options: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + # build execution namespace + args = ops.copy() + args.update(_dict) + args.update(kw) + for k, v in list(args.items()): + if hasattr(v, "im"): + args[k] = _Operand(v) + + out = builtins.eval(expression, args) + try: + return out.im + except AttributeError: + return out + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageMode.py b/server/www/packages/packages-windows/x86/PIL/ImageMode.py new file mode 100644 index 0000000..583fd7e --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageMode.py @@ -0,0 +1,52 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard mode descriptors +# +# History: +# 2006-03-20 fl Added +# +# Copyright (c) 2006 by Secret Labs AB. +# Copyright (c) 2006 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
+# + +# mode descriptor cache +_modes = {} + + +class ModeDescriptor(object): + """Wrapper for mode strings.""" + + def __init__(self, mode, bands, basemode, basetype): + self.mode = mode + self.bands = bands + self.basemode = basemode + self.basetype = basetype + + def __str__(self): + return self.mode + + +def getmode(mode): + """Gets a mode descriptor for the given mode.""" + if not _modes: + # initialize mode cache + from PIL import Image + # core modes + for m, (basemode, basetype, bands) in Image._MODEINFO.items(): + _modes[m] = ModeDescriptor(m, bands, basemode, basetype) + # extra experimental modes + _modes["RGBa"] = ModeDescriptor("RGBa", ("R", "G", "B", "a"), "RGB", "L") + _modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L") + _modes["La"] = ModeDescriptor("La", ("L", "a"), "L", "L") + _modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L") + # mapping modes + _modes["I;16"] = ModeDescriptor("I;16", "I", "L", "L") + _modes["I;16L"] = ModeDescriptor("I;16L", "I", "L", "L") + _modes["I;16B"] = ModeDescriptor("I;16B", "I", "L", "L") + return _modes[mode] + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageMorph.py b/server/www/packages/packages-windows/x86/PIL/ImageMorph.py new file mode 100644 index 0000000..902ed8d --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageMorph.py @@ -0,0 +1,251 @@ +# A binary morphology add-on for the Python Imaging Library +# +# History: +# 2014-06-04 Initial version. +# +# Copyright (c) 2014 Dov Grobgeld + +from PIL import Image +from PIL import _imagingmorph +import re + +LUT_SIZE = 1 << 9 + + +class LutBuilder(object): + """A class for building a MorphLut from a descriptive language + + The input patterns is a list of a strings sequences like these:: + + 4:(... + .1. + 111)->1 + + (whitespaces including linebreaks are ignored). The option 4 + describes a series of symmetry operations (in this case a + 4-rotation), the pattern is described by: + + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off + + The result of the operation is described after "->" string. + + The default is to return the current pixel value, which is + returned if no other match is found. + + Operations: + + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring + + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() + + """ + def __init__(self, patterns=None, op_name=None): + if patterns is not None: + self.patterns = patterns + else: + self.patterns = [] + self.lut = None + if op_name is not None: + known_patterns = { + 'corner': ['1:(... ... ...)->0', + '4:(00. 01. ...)->1'], + 'dilation4': ['4:(... .0. .1.)->1'], + 'dilation8': ['4:(... .0. .1.)->1', + '4:(... .0. ..1)->1'], + 'erosion4': ['4:(... .1. .0.)->0'], + 'erosion8': ['4:(... .1. .0.)->0', + '4:(... .1. ..0)->0'], + 'edge': ['1:(... ... ...)->0', + '4:(.0. .1. ...)->1', + '4:(01. .1. 
...)->1'] + } + if op_name not in known_patterns: + raise Exception('Unknown pattern '+op_name+'!') + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns): + self.patterns += patterns + + def build_default_lut(self): + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray([symbols[(i & m) > 0] for i in range(LUT_SIZE)]) + + def get_lut(self): + return self.lut + + def _string_permute(self, pattern, permutation): + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. + """ + assert(len(permutation) == 9) + return ''.join([pattern[p] for p in permutation]) + + def _pattern_permute(self, basic_pattern, options, basic_result): + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if '4' in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], [6, 3, 0, + 7, 4, 1, + 8, 5, 2]), res)) + # mirror + if 'M' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + patterns.append( + (self._string_permute(pattern, [2, 1, 0, + 5, 4, 3, + 8, 7, 6]), res)) + + # negate + if 'N' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + # Swap 0 and 1 + pattern = (pattern + .replace('0', 'Z') + .replace('1', '0') + .replace('Z', '1')) + res = '%d' % (1-int(res)) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self): + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search( + r'(\w*):?\s*\((.+?)\)\s*->\s*(\d)', p.replace('\n', '')) + if not m: + raise Exception('Syntax error in pattern "'+p+'"') + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(' ', '').replace('\n', '') + + patterns += self._pattern_permute(pattern, options, result) + +# # Debugging +# for p,r in patterns: +# print p,r +# print '--' + + # compile the patterns into regular expressions for speed + for i in range(len(patterns)): + p = patterns[i][0].replace('.', 'X').replace('X', '[01]') + p = re.compile(p) + patterns[i] = (p, patterns[i][1]) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. 
The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ('0'*(9-len(bitpattern)) + bitpattern)[::-1] + + for p, r in patterns: + if p.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp(object): + """A class for binary morphological operators""" + + def __init__(self, + lut=None, + op_name=None, + patterns=None): + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image): + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply( + bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image): + """Get a list of coordinates matching the morphological operation on + an image. + + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image): + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename): + """Load an operator from an mrl file""" + with open(filename, 'rb') as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != 8192: + self.lut = None + raise Exception('Wrong size operator file!') + + def save_lut(self, filename): + """Save an operator to an mrl file""" + if self.lut is None: + raise Exception('No operator loaded') + with open(filename, 'wb') as f: + f.write(self.lut) + + def set_lut(self, lut): + """Set the lut from an external source""" + self.lut = lut + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageOps.py b/server/www/packages/packages-windows/x86/PIL/ImageOps.py new file mode 100644 index 0000000..f317645 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageOps.py @@ -0,0 +1,461 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image +from PIL._util import isStringType +import operator +import functools + + +# +# helpers + +def _border(border): + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color, mode): + if isStringType(color): + from PIL import ImageColor + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image, lut): + if image.mode == "P": + # FIXME: apply to lookup table, not image data + raise NotImplementedError("mode P support coming soon") + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + raise IOError("not supported for this image mode") + +# +# actions + + +def autocontrast(image, cutoff=0, ignore=None): + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image, removes **cutoff** percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: How many percent to cut off from the histogram. + :param ignore: The background pixel value (use None for no background). + :return: An image. + """ + histogram = image.histogram() + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer:layer+256] + if ignore is not None: + # get rid of outliers + try: + h[ignore] = 0 + except TypeError: + # assume sequence + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = n * cutoff // 100 + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the hi end + cut = n * cutoff // 100 + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize(image, black, white): + """ + Colorize grayscale image. The **black** and **white** + arguments should be RGB tuples; this function calculates a color + wedge mapping all black pixels in the source image to the first + color, and all white pixels to the second color. + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :return: An image. 
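+
+    A small sketch (``im`` is assumed to be a mode "L" image; the hex
+    colors are arbitrary)::
+
+        from PIL import ImageOps
+
+        sepia = ImageOps.colorize(im, black="#402010", white="#ffe0c0")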
+ """ + assert image.mode == "L" + black = _color(black, "RGB") + white = _color(white, "RGB") + red = [] + green = [] + blue = [] + for i in range(256): + red.append(black[0]+i*(white[0]-black[0])//255) + green.append(black[1]+i*(white[1]-black[1])//255) + blue.append(black[2]+i*(white[2]-black[2])//255) + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def crop(image, border=0): + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. + """ + left, top, right, bottom = _border(border) + return image.crop( + (left, top, image.size[0]-right, image.size[1]-bottom) + ) + + +def deform(image, deformer, resample=Image.BILINEAR): + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + **getmesh** method can be used. + :param resample: What resampling filter to use. + :return: An image. + """ + return image.transform( + image.size, Image.MESH, deformer.getmesh(image), resample + ) + + +def equalize(image, mask=None): + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. + """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b:b+256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i+b] + return _lut(image, lut) + + +def expand(image, border=0, fill=0): + """ + Add border to the image + + :param image: The image to expand. + :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + out = Image.new(image.mode, (width, height), _color(fill, image.mode)) + out.paste(image, (left, top)) + return out + + +def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): + """ + Returns a sized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.NEAREST`. + :param bleed: Remove a border around the outside of the image (from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). + (0.0, 0.0) will crop from the top left corner (i.e. 
if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. + """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # http://www.cazabon.com + + # ensure inputs are valid + if not isinstance(centering, list): + centering = [centering[0], centering[1]] + + if centering[0] > 1.0 or centering[0] < 0.0: + centering[0] = 0.50 + if centering[1] > 1.0 or centering[1] < 0.0: + centering[1] = 0.50 + + if bleed > 0.49999 or bleed < 0.0: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleedPixels = ( + int((float(bleed) * float(image.size[0])) + 0.5), + int((float(bleed) * float(image.size[1])) + 0.5) + ) + + liveArea = (0, 0, image.size[0], image.size[1]) + if bleed > 0.0: + liveArea = ( + bleedPixels[0], bleedPixels[1], image.size[0] - bleedPixels[0] - 1, + image.size[1] - bleedPixels[1] - 1 + ) + + liveSize = (liveArea[2] - liveArea[0], liveArea[3] - liveArea[1]) + + # calculate the aspect ratio of the liveArea + liveAreaAspectRatio = float(liveSize[0])/float(liveSize[1]) + + # calculate the aspect ratio of the output image + aspectRatio = float(size[0]) / float(size[1]) + + # figure out if the sides or top/bottom will be cropped off + if liveAreaAspectRatio >= aspectRatio: + # liveArea is wider than what's needed, crop the sides + cropWidth = int((aspectRatio * float(liveSize[1])) + 0.5) + cropHeight = liveSize[1] + else: + # liveArea is taller than what's needed, crop the top and bottom + cropWidth = liveSize[0] + cropHeight = int((float(liveSize[0])/aspectRatio) + 0.5) + + # make the crop + leftSide = int(liveArea[0] + (float(liveSize[0]-cropWidth) * centering[0])) + if leftSide < 0: + leftSide = 0 + topSide = int(liveArea[1] + (float(liveSize[1]-cropHeight) * centering[1])) + if topSide < 0: + topSide = 0 + + out = image.crop( + (leftSide, topSide, leftSide + cropWidth, topSide + cropHeight) + ) + + # resize the image and return it + return out.resize(size, method) + + +def flip(image): + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.FLIP_TOP_BOTTOM) + + +def grayscale(image): + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image): + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = [] + for i in range(256): + lut.append(255-i) + return _lut(image, lut) + + +def mirror(image): + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.FLIP_LEFT_RIGHT) + + +def posterize(image, bits): + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + lut = [] + mask = ~(2**(8-bits)-1) + for i in range(256): + lut.append(i & mask) + return _lut(image, lut) + + +def solarize(image, threshold=128): + """ + Invert all pixel values above a threshold. 
+ + :param image: The image to solarize. + :param threshold: All pixels above this greyscale level are inverted. + :return: An image. + """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255-i) + return _lut(image, lut) + + +# -------------------------------------------------------------------- +# PIL USM components, from Kevin Cazabon. + +def gaussian_blur(im, radius=None): + """ PIL_usm.gblur(im, [radius])""" + + if radius is None: + radius = 5.0 + + im.load() + + return im.im.gaussian_blur(radius) + +gblur = gaussian_blur + + +def unsharp_mask(im, radius=None, percent=None, threshold=None): + """ PIL_usm.usm(im, [radius, percent, threshold])""" + + if radius is None: + radius = 5.0 + if percent is None: + percent = 150 + if threshold is None: + threshold = 3 + + im.load() + + return im.im.unsharp_mask(radius, percent, threshold) + +usm = unsharp_mask + + +def box_blur(image, radius): + """ + Blur the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param image: The image to blur. + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + :return: An image. + """ + image.load() + + return image._new(image.im.box_blur(radius)) diff --git a/server/www/packages/packages-windows/x86/PIL/ImagePalette.py b/server/www/packages/packages-windows/x86/PIL/ImagePalette.py new file mode 100644 index 0000000..3b60068 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImagePalette.py @@ -0,0 +1,219 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import array +from PIL import ImageColor +from PIL import GimpPaletteFile +from PIL import GimpGradientFile +from PIL import PaletteFile + + +class ImagePalette(object): + """ + Color palette for palette mapped images + + :param mode: The mode to use for the Palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255 and of length ``size`` + times the number of colors in ``mode``. The list must be aligned + by channel (All R values must be contiguous in the list before G + and B values.) Defaults to 0 through 255 per channel. + :param size: An optional palette size. If given, it cannot be equal to + or greater than 256. Defaults to 0. 
+ """ + + def __init__(self, mode="RGB", palette=None, size=0): + self.mode = mode + self.rawmode = None # if set, palette contains raw data + self.palette = palette or bytearray(range(256))*len(self.mode) + self.colors = {} + self.dirty = None + if ((size == 0 and len(self.mode)*256 != len(self.palette)) or + (size != 0 and size != len(self.palette))): + raise ValueError("wrong palette size") + + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.colors = self.colors.copy() + new.dirty = self.dirty + + return new + + def getdata(self): + """ + Get palette contents in format suitable # for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. + """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode + ";L", self.tobytes() + + def tobytes(self): + """Convert palette to bytes. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + if hasattr(arr, 'tobytes'): + return arr.tobytes() + return arr.tostring() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def getcolor(self, color): + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(color, tuple): + try: + return self.colors[color] + except KeyError: + # allocate new color slot + if isinstance(self.palette, bytes): + self.palette = bytearray(self.palette) + index = len(self.colors) + if index >= 256: + raise ValueError("cannot allocate more than 256 colors") + self.colors[color] = index + self.palette[index] = color[0] + self.palette[index+256] = color[1] + self.palette[index+512] = color[2] + self.dirty = 1 + return index + else: + raise ValueError("unknown color specifier: %r" % color) + + def save(self, fp): + """Save palette to text file. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write("# Mode: %s\n" % self.mode) + for i in range(256): + fp.write("%d" % i) + for j in range(i*len(self.mode), (i+1)*len(self.mode)): + try: + fp.write(" %d" % self.palette[j]) + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + +def raw(rawmode, data): + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + +def make_linear_lut(black, white): + lut = [] + if black == 0: + for i in range(256): + lut.append(white*i//255) + else: + raise NotImplementedError # FIXME + return lut + + +def make_gamma_lut(exp): + lut = [] + for i in range(256): + lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) + return lut + + +def negative(mode="RGB"): + palette = list(range(256)) + palette.reverse() + return ImagePalette(mode, palette * len(mode)) + + +def random(mode="RGB"): + from random import randint + palette = [] + for i in range(256*len(mode)): + palette.append(randint(0, 255)) + return ImagePalette(mode, palette) + + +def sepia(white="#fff0c0"): + r, g, b = ImageColor.getrgb(white) + r = make_linear_lut(0, r) + g = make_linear_lut(0, g) + b = make_linear_lut(0, b) + return ImagePalette("RGB", r + g + b) + + +def wedge(mode="RGB"): + return ImagePalette(mode, list(range(256)) * len(mode)) + + +def load(filename): + + # FIXME: supports GIMP gradients only + + fp = open(filename, "rb") + + for paletteHandler in [ + GimpPaletteFile.GimpPaletteFile, + GimpGradientFile.GimpGradientFile, + PaletteFile.PaletteFile + ]: + try: + fp.seek(0) + lut = paletteHandler(fp).getpalette() + if lut: + break + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + else: + raise IOError("cannot load palette") + + return lut # data, rawmode diff --git a/server/www/packages/packages-windows/x86/PIL/ImagePath.py b/server/www/packages/packages-windows/x86/PIL/ImagePath.py new file mode 100644 index 0000000..b308749 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImagePath.py @@ -0,0 +1,62 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +# the Python class below is overridden by the C implementation. + + +class Path(object): + + def __init__(self, xy): + pass + + def compact(self, distance=2): + """ + Compacts the path, by removing points that are close to each other. + This method modifies the path in place. + """ + pass + + def getbbox(self): + """Gets the bounding box.""" + pass + + def map(self, function): + """Maps the path through a function.""" + pass + + def tolist(self, flat=0): + """ + Converts the path to Python list. + # + @param flat By default, this function returns a list of 2-tuples + [(x, y), ...]. If this argument is true, it returns a flat list + [x, y, ...] instead. + @return A list of coordinates. 
+ """ + pass + + def transform(self, matrix): + """Transforms the path.""" + pass + + +# override with C implementation +Path = Image.core.path + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageQt.py b/server/www/packages/packages-windows/x86/PIL/ImageQt.py new file mode 100644 index 0000000..4eb3654 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageQt.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isPath +from io import BytesIO + +qt_is_installed = True +qt_version = None +try: + from PyQt5.QtGui import QImage, qRgba, QPixmap + from PyQt5.QtCore import QBuffer, QIODevice + qt_version = '5' +except (ImportError, RuntimeError): + try: + from PyQt4.QtGui import QImage, qRgba, QPixmap + from PyQt4.QtCore import QBuffer, QIODevice + qt_version = '4' + except (ImportError, RuntimeError): + try: + from PySide.QtGui import QImage, qRgba, QPixmap + from PySide.QtCore import QBuffer, QIODevice + qt_version = 'side' + except ImportError: + qt_is_installed = False + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. + return (qRgba(r, g, b, a) & 0xffffffff) + + +# :param im A PIL Image object, or a file name +# (given either as Python string or a PyQt string object) + +def fromqimage(im): + buffer = QBuffer() + buffer.open(QIODevice.ReadWrite) + # preserve alha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, 'png') + else: + im.save(buffer, 'ppm') + + b = BytesIO() + try: + b.write(buffer.data()) + except TypeError: + # workaround for Python 2 + b.write(str(buffer.data())) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? 
+ # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return Image.open(bytes_io) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = { + '1': 1, + 'L': 8, + 'P': 8, + }[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding) + + return b''.join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? + if str is bytes: + im = unicode(im.toUtf8(), "utf-8") + else: + im = str(im.toUtf8(), "utf-8") + if isPath(im): + im = Image.open(im) + + if im.mode == "1": + format = QImage.Format_Mono + elif im.mode == "L": + format = QImage.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = QImage.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i:i+3])) + elif im.mode == "RGB": + data = im.tobytes("raw", "BGRX") + format = QImage.Format_RGB32 + elif im.mode == "RGBA": + try: + data = im.tobytes("raw", "BGRA") + except SystemError: + # workaround for earlier versions + r, g, b, a = im.split() + im = Image.merge("RGBA", (b, g, r, a)) + format = QImage.Format_ARGB32 + else: + raise ValueError("unsupported image mode %r" % im.mode) + + # must keep a reference, or Qt will crash! + __data = data or align8to32(im.tobytes(), im.size[0], im.mode) + return { + 'data': __data, 'im': im, 'format': format, 'colortable': colortable + } + +## +# An PIL image wrapper for Qt. This is a subclass of PyQt's QImage +# class. +# +# @param im A PIL Image object, or a file name (given either as Python +# string or a PyQt string object). + +if qt_is_installed: + class ImageQt(QImage): + + def __init__(self, im): + im_data = _toqclass_helper(im) + QImage.__init__(self, + im_data['data'], im_data['im'].size[0], + im_data['im'].size[1], im_data['format']) + if im_data['colortable']: + self.setColorTable(im_data['colortable']) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. For now using a dumb approach. + # im_data = _toqclass_helper(im) + # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) + # result.loadFromData(im_data['data']) + # Fix some strange bug that causes + if im.mode == 'RGB': + im = im.convert('RGBA') + + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/server/www/packages/packages-windows/x86/PIL/ImageSequence.py b/server/www/packages/packages-windows/x86/PIL/ImageSequence.py new file mode 100644 index 0000000..1fc6e5d --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageSequence.py @@ -0,0 +1,56 @@ +# +# The Python Imaging Library. +# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. 
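# --------------------------------------------------------------------
# Editor's note: an illustrative round trip through the ImageQt helpers
# vendored above; not part of the patched file. It assumes one of the
# supported Qt bindings (e.g. PyQt5) is installed, and "photo.jpg" is a
# hypothetical input file.

from PIL import Image
from PIL.ImageQt import ImageQt, fromqimage

pil_im = Image.open("photo.jpg")
qt_im = ImageQt(pil_im)     # a QImage subclass, usable wherever Qt wants one
back = fromqimage(qt_im)    # back to PIL through an in-memory PNG/PPM buffer
# --------------------------------------------------------------------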
+# Copyright (c) 1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## + + +class Iterator(object): + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. + """ + + def __init__(self, im): + if not hasattr(im, "seek"): + raise AttributeError("im must have seek method") + self.im = im + self.position = 0 + + def __getitem__(self, ix): + try: + self.im.seek(ix) + return self.im + except EOFError: + raise IndexError # end of sequence + + def __iter__(self): + return self + + def __next__(self): + try: + self.im.seek(self.position) + self.position += 1 + return self.im + except EOFError: + raise StopIteration + + def next(self): + return self.__next__() diff --git a/server/www/packages/packages-windows/x86/PIL/ImageShow.py b/server/www/packages/packages-windows/x86/PIL/ImageShow.py new file mode 100644 index 0000000..c18ff22 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageShow.py @@ -0,0 +1,178 @@ +# +# The Python Imaging Library. +# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image +import os +import sys + +if sys.version_info >= (3, 3): + from shlex import quote +else: + from pipes import quote + +_viewers = [] + + +def register(viewer, order=1): + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + elif order < 0: + _viewers.insert(0, viewer) + + +def show(image, title=None, **options): + """ + Display a given image. + + @param image An image object. + @param title Optional title. Not all viewers can display the title. + @param **options Additional viewer options. + @return True if a suitable viewer was found, false otherwise. + """ + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return 1 + return 0 + + +class Viewer(object): + """Base class for viewers.""" + + # main api + + def show(self, image, **options): + + # save temporary image to disk + if image.mode[:4] == "I;16": + # @PIL88 @PIL101 + # "I;16" isn't an 'official' mode, but we still want to + # provide a simple way to show 16-bit images. + base = "L" + # FIXME: auto-contrast if max() > 255? 
+ else: + base = Image.getmodebase(image.mode) + if base != image.mode and image.mode != "1": + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format = None + + def get_format(self, image): + """Return format name, or None to save as PGM/PPM""" + return self.format + + def get_command(self, file, **options): + raise NotImplementedError + + def save_image(self, image): + """Save to temporary file, and return filename""" + return image._dump(format=self.get_format(image)) + + def show_image(self, image, **options): + """Display given image""" + return self.show_file(self.save_image(image), **options) + + def show_file(self, file, **options): + """Display given file""" + os.system(self.get_command(file, **options)) + return 1 + +# -------------------------------------------------------------------- + +if sys.platform == "win32": + + class WindowsViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + return ('start "Pillow" /WAIT "%s" ' + '&& ping -n 2 127.0.0.1 >NUL ' + '&& del /f "%s"' % (file, file)) + + register(WindowsViewer) + +elif sys.platform == "darwin": + + class MacViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a /Applications/Preview.app" + command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), + quote(file)) + return command + + register(MacViewer) + +else: + + # unixoids + + def which(executable): + path = os.environ.get("PATH") + if not path: + return None + for dirname in path.split(os.pathsep): + filename = os.path.join(dirname, executable) + if os.path.isfile(filename) and os.access(filename, os.X_OK): + return filename + return None + + class UnixViewer(Viewer): + def show_file(self, file, **options): + command, executable = self.get_command_ex(file, **options) + command = "(%s %s; rm -f %s)&" % (command, quote(file), + quote(file)) + os.system(command) + return 1 + + # implementations + + class DisplayViewer(UnixViewer): + def get_command_ex(self, file, **options): + command = executable = "display" + return command, executable + + if which("display"): + register(DisplayViewer) + + class XVViewer(UnixViewer): + def get_command_ex(self, file, title=None, **options): + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += " -name %s" % quote(title) + return command, executable + + if which("xv"): + register(XVViewer) + +if __name__ == "__main__": + # usage: python ImageShow.py imagefile [title] + print(show(Image.open(sys.argv[1]), *sys.argv[2:])) + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageStat.py b/server/www/packages/packages-windows/x86/PIL/ImageStat.py new file mode 100644 index 0000000..f3c138b --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageStat.py @@ -0,0 +1,147 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. 
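# --------------------------------------------------------------------
# Editor's note: an illustrative sketch of the viewer registry vendored
# above; not part of the patched file. Viewers registered with order < 0
# are tried first by show(), those with order > 0 last; "photo.jpg" is a
# hypothetical input file.

from PIL import Image, ImageShow

im = Image.open("photo.jpg")
ImageShow.show(im, title="preview")   # returns 1 if some viewer handled it
# --------------------------------------------------------------------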
+# Copyright (c) Fredrik Lundh 1996-97. +# +# See the README file for information on usage and redistribution. +# + +import math +import operator +import functools + + +class Stat(object): + + def __init__(self, image_or_list, mask=None): + try: + if mask: + self.h = image_or_list.histogram(mask) + else: + self.h = image_or_list.histogram() + except AttributeError: + self.h = image_or_list # assume it to be a histogram list + if not isinstance(self.h, list): + raise TypeError("first argument must be image or list") + self.bands = list(range(len(self.h) // 256)) + + def __getattr__(self, id): + "Calculate missing attribute" + if id[:4] == "_get": + raise AttributeError(id) + # calculate missing attribute + v = getattr(self, "_get" + id)() + setattr(self, id, v) + return v + + def _getextrema(self): + "Get min/max values for each band in the image" + + def minmax(histogram): + n = 255 + x = 0 + for i in range(256): + if histogram[i]: + n = min(n, i) + x = max(x, i) + return n, x # returns (255, 0) if there's no data in the histogram + + v = [] + for i in range(0, len(self.h), 256): + v.append(minmax(self.h[i:])) + return v + + def _getcount(self): + "Get total number of pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + v.append(functools.reduce(operator.add, self.h[i:i+256])) + return v + + def _getsum(self): + "Get sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + layerSum = 0.0 + for j in range(256): + layerSum += j * self.h[i + j] + v.append(layerSum) + return v + + def _getsum2(self): + "Get squared sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j ** 2) * float(self.h[i + j]) + v.append(sum2) + return v + + def _getmean(self): + "Get average pixel level for each layer" + + v = [] + for i in self.bands: + v.append(self.sum[i] / self.count[i]) + return v + + def _getmedian(self): + "Get median pixel level for each layer" + + v = [] + for i in self.bands: + s = 0 + l = self.count[i]//2 + b = i * 256 + for j in range(256): + s = s + self.h[b+j] + if s > l: + break + v.append(j) + return v + + def _getrms(self): + "Get RMS for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.sum2[i] / self.count[i])) + return v + + def _getvar(self): + "Get variance for each layer" + + v = [] + for i in self.bands: + n = self.count[i] + v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n) + return v + + def _getstddev(self): + "Get standard deviation for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.var[i])) + return v + +Global = Stat # compatibility diff --git a/server/www/packages/packages-windows/x86/PIL/ImageTk.py b/server/www/packages/packages-windows/x86/PIL/ImageTk.py new file mode 100644 index 0000000..6d47130 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageTk.py @@ -0,0 +1,290 @@ +# +# The Python Imaging Library. 
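# --------------------------------------------------------------------
# Editor's note: an illustrative use of the lazily evaluated Stat class
# vendored above; not part of the patched file. Attributes such as mean
# and stddev are computed on first access via __getattr__ and cached
# with setattr; "photo.jpg" is a hypothetical input file.

from PIL import Image
from PIL.ImageStat import Stat

stat = Stat(Image.open("photo.jpg"))
print(stat.mean)      # per-band average pixel level
print(stat.stddev)    # per-band standard deviation
# --------------------------------------------------------------------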
+# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +try: + import tkinter +except ImportError: + import Tkinter + tkinter = Tkinter + del Tkinter + +from PIL import Image +from io import BytesIO + + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check(): + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data="PIL:%d" % im.im.id) + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +def _get_image_from_kw(kw): + source = None + if "file" in kw: + source = kw.pop("file") + elif "data" in kw: + source = BytesIO(kw.pop("data")) + if source: + return Image.open(source) + + +# -------------------------------------------------------------------- +# PhotoImage + +class PhotoImage(object): + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the **file** or **data** options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). + """ + + def __init__(self, image=None, size=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def __str__(self): + """ + Get the Tkinter photo image identifier. 
This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. + + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im, box=None): + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and lower pixel + coordinate. If None is given instead of a tuple, all of + the image is assumed. + """ + + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + tk = self.__photo.tk + + try: + tk.call("PyImagingPhoto", self.__photo, block.id) + except tkinter.TclError: + # activate Tkinter hook + try: + from PIL import _imagingtk + try: + _imagingtk.tkinit(tk.interpaddr(), 1) + except AttributeError: + _imagingtk.tkinit(id(tk), 0) + tk.call("PyImagingPhoto", self.__photo, block.id) + except (ImportError, AttributeError, tkinter.TclError): + raise # configuration problem; cannot attach to Tkinter + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage(object): + """ + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is **foreground**, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. + """ + + def __init__(self, image=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = "PIL:%d" % image.im.id + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self): + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). 
+ """ + return str(self.__photo) + + +def getimage(photo): + """Copies the contents of a PhotoImage to a PIL image memory.""" + photo.tk.call("PyImagingPhotoGet", photo) + + +def _show(image, title): + """Helper for the Image.show method.""" + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + tkinter.Label.__init__(self, master, image=self.image, + bg="black", bd=0) + + if not tkinter._default_root: + raise IOError("tkinter not initialized") + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageTransform.py b/server/www/packages/packages-windows/x86/PIL/ImageTransform.py new file mode 100644 index 0000000..9f48833 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageTransform.py @@ -0,0 +1,100 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class Transform(Image.ImageTransformHandler): + def __init__(self, data): + self.data = data + + def getdata(self): + return self.method, self.data + + def transform(self, size, image, **options): + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +class AffineTransform(Transform): + """ + Define an affine image transform. + + This function takes a 6-tuple (a, b, c, d, e, f) which contain the first + two rows from an affine transform matrix. For each pixel (x, y) in the + output image, the new value is taken from a position (a x + b y + c, + d x + e y + f) in the input image, rounded to nearest pixel. + + This function can be used to scale, translate, rotate, and shear the + original image. + + @def AffineTransform(matrix) + @param matrix A 6-tuple (a, b, c, d, e, f) containing the first two rows + from an affine transform matrix. + @see Image#Image.transform + """ + method = Image.AFFINE + + +class ExtentTransform(Transform): + """ + Define a transform to extract a subregion from an image. + + Maps a rectangle (defined by two corners) from the image to a rectangle of + the given size. The resulting image will contain data sampled from between + the corners, such that (x0, y0) in the input image will end up at (0,0) in + the output image, and (x1, y1) at size. + + This method can be used to crop, stretch, shrink, or mirror an arbitrary + rectangle in the current image. It is slightly slower than crop, but about + as fast as a corresponding resize operation. + + @def ExtentTransform(bbox) + @param bbox A 4-tuple (x0, y0, x1, y1) which specifies two points in the + input image's coordinate system. + @see Image#Image.transform + """ + method = Image.EXTENT + + +class QuadTransform(Transform): + """ + Define a quad image transform. + + Maps a quadrilateral (a region defined by four corners) from the image to a + rectangle of the given size. + + @def QuadTransform(xy) + @param xy An 8-tuple (x0, y0, x1, y1, x2, y2, y3, y3) which contain the + upper left, lower left, lower right, and upper right corner of the + source quadrilateral. + @see Image#Image.transform + """ + method = Image.QUAD + + +class MeshTransform(Transform): + """ + Define a mesh image transform. 
A mesh transform consists of one or more + individual quad transforms. + + @def MeshTransform(data) + @param data A list of (bbox, quad) tuples. + @see Image#Image.transform + """ + method = Image.MESH + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImageWin.py b/server/www/packages/packages-windows/x86/PIL/ImageWin.py new file mode 100644 index 0000000..1e408da --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImageWin.py @@ -0,0 +1,237 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class HDC(object): + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + def __init__(self, dc): + self.dc = dc + + def __int__(self): + return self.dc + + +class HWND(object): + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + def __init__(self, wnd): + self.wnd = wnd + + def __int__(self): + return self.wnd + + +class Dib(object): + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 greylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 greylevels. + + To make sure that palettes work properly under Windows, you must call the + **palette** method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. + """ + + def __init__(self, image, size=None): + if hasattr(image, "mode") and hasattr(image, "size"): + mode = image.mode + size = image.size + else: + mode = image + image = None + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use the + :py:meth:`CDC.GetHandleAttrib` to get a suitable handle. + """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. 
If the source and + the destination have different sizes, the image is resized as + necessary. + """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste(self, im, box=None): + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. If None is given instead of a + tuple, all of the image is assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer): + """ + Load display memory contents from byte data. + + :param buffer: A buffer containing display data (usually + data returned from tobytes) + """ + return self.image.frombytes(buffer) + + def tobytes(self): + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. + """ + return self.image.tobytes() + + def fromstring(self, *args, **kw): + raise NotImplementedError("fromstring() has been removed. " + + "Please use frombytes() instead.") + + def tostring(self, *args, **kw): + raise NotImplementedError("tostring() has been removed. 
" + + "Please use tobytes() instead.") + + +class Window(object): + """Create a Window with the given title size.""" + + def __init__(self, title="PIL", width=None, height=None): + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, "ui_handle_" + action)(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self): + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self): + Image.core.eventloop() + + +class ImageWindow(Window): + """Create an image window which displays the given image.""" + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + Window.__init__(self, title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/ImtImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/ImtImagePlugin.py new file mode 100644 index 0000000..63e8924 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/ImtImagePlugin.py @@ -0,0 +1,95 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from PIL import Image, ImageFile + +__version__ = "0.2" + + +# +# -------------------------------------------------------------------- + +field = re.compile(br"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. + +class ImtImageFile(ImageFile.ImageFile): + + format = "IMT" + format_description = "IM Tools" + + def _open(self): + + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + xsize = ysize = 0 + + while True: + + s = self.fp.read(1) + if not s: + break + + if s == b'\x0C': + + # image data begins + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), + (self.mode, 0, 1))] + + break + + else: + + # read key/value pair + # FIXME: dangerous, may read whole file + s = s + self.fp.readline() + if len(s) == 1 or len(s) > 100: + break + if s[0] == b"*": + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == "width": + xsize = int(v) + self.size = xsize, ysize + elif k == "height": + ysize = int(v) + self.size = xsize, ysize + elif k == "pixel" and v == "n8": + self.mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/server/www/packages/packages-windows/x86/PIL/IptcImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/IptcImagePlugin.py new file mode 100644 index 0000000..56d1de4 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/IptcImagePlugin.py @@ -0,0 +1,267 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image, ImageFile, _binary +import os +import tempfile + +__version__ = "0.3" + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be +o8 = _binary.o8 + +COMPRESSION = { + 1: "raw", + 5: "jpeg" +} + +PAD = o8(0) * 4 + + +# +# Helpers + +def i(c): + return i32((PAD + c)[-4:]) + + +def dump(c): + for i in c: + print("%02x" % i8(i), end=' ') + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. + +class IptcImageFile(ImageFile.ImageFile): + + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key): + return i(self.info[key]) + + def field(self): + # + # get a IPTC field header + s = self.fp.read(5) + if not len(s): + return None, 0 + + tag = i8(s[1]), i8(s[2]) + + # syntax + if i8(s[0]) != 0x1C or tag[0] < 1 or tag[0] > 9: + raise SyntaxError("invalid IPTC/NAA file") + + # field size + size = i8(s[3]) + if size > 132: + raise IOError("illegal field length in IPTC/NAA file") + elif size == 128: + size = 0 + elif size > 128: + size = i(self.fp.read(size-128)) + else: + size = i16(s[3:]) + + return tag, size + + def _open(self): + + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in list(self.info.keys()): + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # print tag, self.info[tag] + + # mode + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0])-1 + else: + id = 0 + if layers == 1 and not component: + self.mode = "L" + elif layers == 3 and component: + self.mode = "RGB"[id] + elif layers == 4 and component: + self.mode = "CMYK"[id] + + # size + self.size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] + except KeyError: + raise IOError("Unknown IPTC image compression") + + # tile + if tag == (8, 10): + self.tile = [("iptc", (compression, offset), + (0, 0, self.size[0], self.size[1]))] + + def load(self): + + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + type, tile, box = self.tile[0] + + encoding, offset = tile + + self.fp.seek(offset) + + # Copy image data to temporary file + o_fd, outfile = tempfile.mkstemp(text=False) + o = os.fdopen(o_fd) + if encoding == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write("P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + o.close() + + try: + try: + # fast + self.im = Image.core.open_ppm(outfile) + except: + # slightly slower + im = Image.open(outfile) + im.load() + self.im = im.im + finally: + try: + os.unlink(outfile) + except OSError: + pass + + +Image.register_open(IptcImageFile.format, IptcImageFile) + 
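# Editor's note (illustrative aside, not part of the patched file): the
# two registration calls split responsibilities: register_open() hooks
# a plugin into Image.open()'s format probing, optionally gated by an
# _accept(prefix) check, while register_extension() only maps a file
# name suffix for saving. A hypothetical plugin would register as:
#
#     Image.register_open(MyImageFile.format, MyImageFile, _accept)
#     Image.register_extension(MyImageFile.format, ".myi")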
+Image.register_extension(IptcImageFile.format, ".iim") + + +## +# Get IPTC information from TIFF, JPEG, or IPTC file. +# +# @param im An image containing IPTC data. +# @return A dictionary containing IPTC information, or None if +# no IPTC information block was found. + +def getiptcinfo(im): + + from PIL import TiffImagePlugin, JpegImagePlugin + import io + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + try: + app = im.app["APP13"] + if app[:14] == b"Photoshop 3.0\x00": + app = app[14:] + # parse the image resource block + offset = 0 + while app[offset:offset+4] == b"8BIM": + offset += 4 + # resource code + code = i16(app, offset) + offset += 2 + # resource name (usually empty) + name_len = i8(app[offset]) + # name = app[offset+1:offset+1+name_len] + offset = 1 + offset + name_len + if offset & 1: + offset += 1 + # resource data block + size = i32(app, offset) + offset += 4 + if code == 0x0404: + # 0x0404 contains IPTC/NAA data + data = app[offset:offset+size] + break + offset = offset + size + if offset & 1: + offset += 1 + except (AttributeError, KeyError): + pass + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) + try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage(object): + pass + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = io.BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/server/www/packages/packages-windows/x86/PIL/Jpeg2KImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000..02b1e53 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,280 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. 
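# --------------------------------------------------------------------
# Editor's note: an illustrative use of the getiptcinfo() helper
# vendored above; not part of the patched file. Keys are
# (record, dataset) tuples; (2, 5) is the IPTC "object name" field.
# "press_photo.jpg" is a hypothetical input file.

from PIL import Image
from PIL.IptcImagePlugin import getiptcinfo

iptc = getiptcinfo(Image.open("press_photo.jpg"))
if iptc is not None:
    print(iptc.get((2, 5)))
# --------------------------------------------------------------------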
+# +from PIL import Image, ImageFile +import struct +import os +import io + +__version__ = "0.1" + + +def _parse_codestream(fp): + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = struct.unpack('>H', hdr)[0] + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, xtsiz, ytsiz, \ + xtosiz, ytosiz, csiz \ + = struct.unpack('>HHIIIIIIIIH', siz[:38]) + ssiz = [None]*csiz + xrsiz = [None]*csiz + yrsiz = [None]*csiz + for i in range(csiz): + ssiz[i], xrsiz[i], yrsiz[i] \ + = struct.unpack('>BBB', siz[36 + 3 * i:39 + 3 * i]) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + if (yrsiz[0] & 0x7f) > 8: + mode = 'I;16' + else: + mode = 'L' + elif csiz == 2: + mode = 'LA' + elif csiz == 3: + mode = 'RGB' + elif csiz == 4: + mode = 'RGBA' + else: + mode = None + + return (size, mode) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count and + color space information, returning a PIL (size, mode) tuple.""" + + # Find the JP2 header box + header = None + while True: + lbox, tbox = struct.unpack('>I4s', fp.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', fp.read(8))[0] + hlen = 16 + else: + hlen = 8 + + if lbox < hlen: + raise SyntaxError('Invalid JP2 header length') + + if tbox == b'jp2h': + header = fp.read(lbox - hlen) + break + else: + fp.seek(lbox - hlen, os.SEEK_CUR) + + if header is None: + raise SyntaxError('could not find JP2 header') + + size = None + mode = None + bpc = None + nc = None + + hio = io.BytesIO(header) + while True: + lbox, tbox = struct.unpack('>I4s', hio.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', hio.read(8))[0] + hlen = 16 + else: + hlen = 8 + + content = hio.read(lbox - hlen) + + if tbox == b'ihdr': + height, width, nc, bpc, c, unkc, ipr \ + = struct.unpack('>IIHBBBB', content) + size = (width, height) + if unkc: + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif tbox == b'colr': + meth, prec, approx = struct.unpack('>BBB', content[:3]) + if meth == 1: + cs = struct.unpack('>I', content[3:7])[0] + if cs == 16: # sRGB + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif cs == 17: # grayscale + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + break + elif cs == 18: # sYCC + if nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + + if size is None or mode is None: + raise SyntaxError("Malformed jp2 header") + + return (size, mode) + +## +# Image plugin for JPEG2000 images. 
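# --------------------------------------------------------------------
# Editor's note: a standalone sketch, not part of the patched file, of
# the JP2 box framing that _parse_jp2_header() above walks: each box
# starts with a big-endian 32-bit length and a 4-byte type tag, and a
# length of 1 signals that a 64-bit extended length follows.

import struct

def read_box_header(fp):
    lbox, tbox = struct.unpack('>I4s', fp.read(8))
    hlen = 8
    if lbox == 1:                        # extended-length (XLBox) form
        lbox = struct.unpack('>Q', fp.read(8))[0]
        hlen = 16
    return tbox, lbox - hlen             # (box type, payload byte count)
# --------------------------------------------------------------------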
+ + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self): + sig = self.fp.read(4) + if sig == b'\xff\x4f\xff\x51': + self.codec = "j2k" + self.size, self.mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + self.codec = "jp2" + self.size, self.mode = _parse_jp2_header(self.fp) + else: + raise SyntaxError('not a JPEG 2000 file') + + if self.size is None or self.mode is None: + raise SyntaxError('unable to determine size/mode') + + self.reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, 2) + length = self.fp.tell() + self.fp.seek(pos, 0) + except: + length = -1 + + self.tile = [('jpeg2k', (0, 0) + self.size, 0, + (self.codec, self.reduce, self.layers, fd, length, self.fp))] + + def load(self): + if self.reduce: + power = 1 << self.reduce + adjust = power >> 1 + self.size = (int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power)) + + if self.tile: + # Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self.reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + ImageFile.ImageFile.load(self) + + +def _accept(prefix): + return (prefix[:4] == b'\xff\x4f\xff\x51' or + prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') + + +# ------------------------------------------------------------ +# Save support + +def _save(im, fp, filename): + if filename.endswith('.j2k'): + kind = 'j2k' + else: + kind = 'jp2' + + # Get the keyword arguments + info = im.encoderinfo + + offset = info.get('offset', None) + tile_offset = info.get('tile_offset', None) + tile_size = info.get('tile_size', None) + quality_mode = info.get('quality_mode', 'rates') + quality_layers = info.get('quality_layers', None) + num_resolutions = info.get('num_resolutions', 0) + cblk_size = info.get('codeblock_size', None) + precinct_size = info.get('precinct_size', None) + irreversible = info.get('irreversible', False) + progression = info.get('progression', 'LRCP') + cinema_mode = info.get('cinema_mode', 'no') + fd = -1 + + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + fd + ) + + ImageFile._save(im, fp, [('jpeg2k', (0, 0)+im.size, 0, kind)]) + +# ------------------------------------------------------------ +# Registry stuff + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) + +Image.register_extension(Jpeg2KImageFile.format, '.jp2') +Image.register_extension(Jpeg2KImageFile.format, '.j2k') +Image.register_extension(Jpeg2KImageFile.format, '.jpc') +Image.register_extension(Jpeg2KImageFile.format, '.jpf') +Image.register_extension(Jpeg2KImageFile.format, '.jpx') +Image.register_extension(Jpeg2KImageFile.format, '.j2c') + +Image.register_mime(Jpeg2KImageFile.format, 'image/jp2') +Image.register_mime(Jpeg2KImageFile.format, 'image/jpx') diff --git a/server/www/packages/packages-windows/x86/PIL/JpegImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/JpegImagePlugin.py new file mode 100644 index 0000000..9d4eaab --- /dev/null +++ 
b/server/www/packages/packages-windows/x86/PIL/JpegImagePlugin.py @@ -0,0 +1,755 @@ +# +# The Python Imaging Library. +# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import array +import struct +import io +import warnings +from struct import unpack_from +from PIL import Image, ImageFile, TiffImagePlugin, _binary +from PIL.JpegPresets import presets +from PIL._util import isStringType + +i8 = _binary.i8 +o8 = _binary.o8 +i16 = _binary.i16be +i32 = _binary.i32be + +__version__ = "0.6" + + +# +# Parser + +def Skip(self, marker): + n = i16(self.fp.read(2))-2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = i8(s[7]) + jfif_density = i16(s, 8), i16(s, 10) + except: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:5] == b"Exif\0": + # extract Exif information (incomplete) + self.info["exif"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. 
The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. + self.icclist.append(s) + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = i8(s[1]) + except: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + +def COM(self, marker): + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self, marker): + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + self.size = i16(s[3:]), i16(s[1:]) + + self.bits = i8(s[0]) + if self.bits != 8: + raise SyntaxError("cannot handle %d-bit layers" % self.bits) + + self.layers = i8(s[5]) + if self.layers == 1: + self.mode = "L" + elif self.layers == 3: + self.mode = "RGB" + elif self.layers == 4: + self.mode = "CMYK" + else: + raise SyntaxError("cannot handle %d-layer images" % self.layers) + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if i8(self.icclist[0][13]) == len(self.icclist): + profile = [] + for p in self.icclist: + profile.append(p[14:]) + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = None + + for i in range(6, len(s), 3): + t = s[i:i+3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], i8(t[1])//16, i8(t[1]) & 15, i8(t[2]))) + + +def DQT(self, marker): + # + # Define quantization table. Support baseline 8-bit tables + # only. Note that there might be more than one table in + # each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + if len(s) < 65: + raise SyntaxError("bad quantization table marker") + v = i8(s[0]) + if v//16 == 0: + self.quantization[v & 15] = array.array("B", s[1:65]) + s = s[65:] + else: + return # FIXME: add code to read 16-bit tables! 
+ # raise SyntaxError, "bad quantization table element size" + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), + 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM) +} + + +def _accept(prefix): + return prefix[0:1] == b"\377" + + +## +# Image plugin for JPEG and JFIF images. 
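+
+# A commented-out sketch (not part of the original module) showing how the
+# MARKER table above can drive a bare segment walk; it assumes a well-formed
+# hypothetical "test.jpg" and stops at SOS, where entropy-coded data begins
+# (padding bytes and standalone markers are not handled):
+#
+#     import struct
+#     with open('test.jpg', 'rb') as fp:
+#         assert fp.read(2) == b'\xff\xd8'          # SOI
+#         while True:
+#             marker, length = struct.unpack('>HH', fp.read(4))
+#             name, description, _ = MARKER.get(marker, ('?', 'unknown', None))
+#             print('0x%04X %s: %s' % (marker, name, description))
+#             if marker == 0xFFDA:                  # SOS
+#                 break
+#             fp.seek(length - 2, 1)                # skip segment payload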
+ +class JpegImageFile(ImageFile.ImageFile): + + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + + s = self.fp.read(1) + + if i8(s) != 255: + raise SyntaxError("not a JPEG file") + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + + i = i8(s) + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + # print hex(i), name, description + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, + (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i == 0 or i == 0xFFFF: + # padded marker or junk; move on + s = b"\xff" + elif i == 0xFF00: # Skip extraneous data (escaped 0xFF) + s = self.fp.read(1) + else: + raise SyntaxError("no marker found") + + def draft(self, mode, size): + + if len(self.tile) != 1: + return + + d, e, o, a = self.tile[0] + scale = 0 + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self.mode = mode + a = mode, "" + + if size: + scale = max(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1] + self.size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + return self + + def load_djpeg(self): + + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + import subprocess + import tempfile + import os + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + raise ValueError("Invalid Filename") + + try: + self.im = Image.core.open_ppm(path) + finally: + try: + os.unlink(path) + except OSError: + pass + + self.mode = self.im.mode + self.size = self.im.size + + self.tile = [] + + def _getexif(self): + return _getexif(self) + + def _getmp(self): + return _getmp(self) + + +def _fixup_dict(src_dict): + # Helper function for _getexif() + # returns a dict with any single item tuples/lists as individual values + def _fixup(value): + try: + if len(value) == 1 and not isinstance(value, dict): + return value[0] + except: pass + return value + + return dict([(k, _fixup(v)) for k, v in src_dict.items()]) + + +def _getexif(self): + # Extract EXIF information. This method is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). + try: + data = self.info["exif"] + except KeyError: + return None + file = io.BytesIO(data[6:]) + head = file.read(8) + # process dictionary + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif = dict(_fixup_dict(info)) + # get exif extension + try: + # exif field 0x8769 is an offset pointer to the location + # of the nested embedded exif ifd. + # It should be a long, but may be corrupted. 
+ file.seek(exif[0x8769]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif.update(_fixup_dict(info)) + # get gpsinfo extension + try: + # exif field 0x8825 is an offset pointer to the location + # of the nested embedded gps exif ifd. + # It should be a long, but may be corrupted. + file.seek(exif[0x8825]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif[0x8825] = _fixup_dict(info) + + return exif + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plug-in. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. + try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<' + # process dictionary + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + info.load(file_contents) + mp = dict(info) + except: + raise SyntaxError("malformed MP Index (unreadable directory)") + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError: + raise SyntaxError("malformed MP Index (no number of images)") + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = unpack_from( + '{0}LLLHH'.format(endianness), rawmpentries, entrynum * 16) + labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1', + 'EntryNo2') + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + 'DependentParentImageFlag': bool(mpentry['Attribute'] & + (1 << 31)), + 'DependentChildImageFlag': bool(mpentry['Attribute'] & + (1 << 30)), + 'RepresentativeImageFlag': bool(mpentry['Attribute'] & + (1 << 29)), + 'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27, + 'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24, + 'MPType': mpentry['Attribute'] & 0x00FFFFFF + } + if mpentryattr['ImageDataFormat'] == 0: + mpentryattr['ImageDataFormat'] = 'JPEG' + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: 'Undefined', + 0x010001: 'Large Thumbnail (VGA Equivalent)', + 0x010002: 'Large Thumbnail (Full HD Equivalent)', + 0x020001: 'Multi-Frame Image (Panorama)', + 0x020002: 'Multi-Frame Image: (Disparity)', + 0x020003: 'Multi-Frame Image: (Multi-Angle)', + 0x030000: 'Baseline MP Primary Image' + } + mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'], + 'Unknown') + mpentry['Attribute'] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError: + raise SyntaxError("malformed MP Index (bad MP Entry)") + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. 
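+    # At this point mp maps the MP Index tags to decoded values: mp[0xB001]
+    # is the image count checked above and mp[0xB002] the list of per-image
+    # entry dicts built above, each with 'Attribute', 'Size', 'DataOffset',
+    # 'EntryNo1' and 'EntryNo2' keys ('Attribute' itself replaced by the
+    # parsed flag dict).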
+ return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBA": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +zigzag_index = (0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63) + +samplings = {(1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, + } + + +def convert_dict_qtables(qtables): + qtables = [qtables[key] for key in range(len(qtables)) if key in qtables] + for idx, table in enumerate(qtables): + qtables[idx] = [table[i] for i in zigzag_index] + return qtables + + +def get_sampling(im): + # There's no subsampling when image have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. + if not hasattr(im, 'layers') or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im, fp, filename): + + try: + rawmode = RAWMODE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as JPEG" % im.mode) + + info = im.encoderinfo + + dpi = info.get("dpi", (0, 0)) + + quality = info.get("quality", 0) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = 0 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = 0 + subsampling = preset.get('subsampling', -1) + qtables = preset.get('quantization') + elif not isinstance(quality, int): + raise ValueError("Invalid quality setting") + else: + if subsampling in presets: + subsampling = presets[subsampling].get('subsampling', -1) + if isStringType(qtables) and qtables in presets: + qtables = presets[qtables].get('quantization') + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:1:1": + subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isStringType(qtables): + try: + lines = [int(num) for line in qtables.splitlines() + for num in line.split('#', 1)[0].split()] + except ValueError: + raise ValueError("Invalid quantization table") + else: + qtables = [lines[s:s+64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = convert_dict_qtables(qtables) + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + raise ValueError("None or too many quantization tables") + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + raise + table = array.array('B', table) + except TypeError: + raise ValueError("Invalid quantization table") + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a 
JPEG") + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = b"" + + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_BYTES_IN_MARKER = 65533 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) + extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + + o8(len(markers)) + marker) + i += 1 + + # get keyword arguments + im.encoderconfig = ( + quality, + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + "progressive" in info or "progression" in info, + info.get("smooth", 0), + "optimize" in info, + info.get("streamtype", 0), + dpi[0], dpi[1], + subsampling, + qtables, + extra, + info.get("exif", b"") + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pizel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if "optimize" in info or "progressive" in info or "progression" in info: + # keep sets quality to 0, but the actual value may be high. + if quality >= 95 or quality == 0: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + + # The exif info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(info.get("exif", b"")) + 5) + + ImageFile._save(im, fp, [("jpeg", (0, 0)+im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im, fp, filename): + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. + import os + import subprocess + tempfile = im._dump() + subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) + try: + os.unlink(tempfile) + except OSError: + pass + + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + im = MpoImageFile(fp, filename) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn("Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file") + return im + + +# -------------------------------------------------------------------q- +# Registry stuff + +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) + +Image.register_extension(JpegImageFile.format, ".jfif") +Image.register_extension(JpegImageFile.format, ".jpe") +Image.register_extension(JpegImageFile.format, ".jpg") +Image.register_extension(JpegImageFile.format, ".jpeg") + +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/server/www/packages/packages-windows/x86/PIL/JpegPresets.py b/server/www/packages/packages-windows/x86/PIL/JpegPresets.py new file mode 100644 index 0000000..ece33bb --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/JpegPresets.py @@ -0,0 +1,241 @@ +""" +JPEG quality settings equivalent to the Photoshop settings. 
+
+More presets can be added to the presets dict if needed.
+
+Presets can be used when saving a JPEG file.
+
+To apply the preset, specify::
+
+    quality="preset_name"
+
+To apply only the quantization table::
+
+    qtables="preset_name"
+
+To apply only the subsampling setting::
+
+    subsampling="preset_name"
+
+Example::
+
+    im.save("image_name.jpg", quality="web_high")
+
+
+Subsampling
+-----------
+
+Subsampling is the practice of encoding images by implementing less resolution
+for chroma information than for luma information.
+(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling)
+
+Possible subsampling values are 0, 1 and 2, corresponding to 4:4:4, 4:2:2 and
+4:1:1 (or 4:2:0?).
+
+You can get the subsampling of a JPEG with the
+`JpegImagePlugin.get_sampling(im)` function.
+
+
+Quantization tables
+-------------------
+
+These are the values used by the DCT (Discrete Cosine Transform) to remove
+*unnecessary* information from the image (the lossy part of the compression).
+(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices,
+https://en.wikipedia.org/wiki/JPEG#Quantization)
+
+You can get the quantization tables of a JPEG with::
+
+    im.quantization
+
+This will return a dict with a number of arrays. You can pass this dict
+directly as the qtables argument when saving a JPEG.
+
+The table format used by im.quantization and the format used in presets differ
+in three ways:
+
+1. The base container of the preset is a list with sublists instead of a dict.
+   dict[0] -> list[0], dict[1] -> list[1], ...
+2. Each table in a preset is a list instead of an array.
+3. The zigzag order is removed in the preset (needed by libjpeg >= 6a).
+
+You can convert the dict format to the preset format with the
+`JpegImagePlugin.convert_dict_qtables(dict_qtables)` function.
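+
+For example, this sketch (assuming an existing JPEG named "in.jpg") re-saves
+an image reusing its own tables, converted to the preset format::
+
+    from PIL import Image, JpegImagePlugin
+
+    im = Image.open("in.jpg")
+    qtables = JpegImagePlugin.convert_dict_qtables(im.quantization)
+    im.save("out.jpg", qtables=qtables, subsampling=0)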
+ +Libjpeg ref.: http://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +presets = { + 'web_low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 
12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} diff --git a/server/www/packages/packages-windows/x86/PIL/McIdasImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000..b753603 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/McIdasImagePlugin.py @@ -0,0 +1,74 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. +# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +import struct +from PIL import Image, ImageFile + +__version__ = "0.2" + + +def _accept(s): + return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. 
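+
+# A commented-out sketch (not part of the original module) of inspecting the
+# 256-byte area directory that _open() below parses; words are 1-based, hence
+# the leading [0] pad ("test.area" is a hypothetical local file):
+#
+#     import struct
+#     with open('test.area', 'rb') as fp:
+#         w = [0] + list(struct.unpack("!64i", fp.read(256)))
+#     print(w[10], w[9], w[11])   # elements per line, lines, bytes per element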
+ +class McIdasImageFile(ImageFile.ImageFile): + + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self): + + # parse area file directory + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + raise SyntaxError("not an McIdas area file") + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + raise SyntaxError("unsupported McIdas format") + + self.mode = mode + self.size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10]*w[11]*w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/server/www/packages/packages-windows/x86/PIL/MicImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MicImagePlugin.py new file mode 100644 index 0000000..3c91244 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/MicImagePlugin.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, TiffImagePlugin +from PIL.OleFileIO import MAGIC, OleFileIO + +__version__ = "0.1" + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. + +class MicImageFile(TiffImagePlugin.TiffImageFile): + + format = "MIC" + format_description = "Microsoft Image Composer" + + def _open(self): + + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an MIC file; invalid OLE file") + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [] + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) + + # if we didn't find any images, this is probably not + # an MIC file. 
+ if not self.images: + raise SyntaxError("not an MIC file; no image entries") + + self.__fp = self.fp + self.frame = 0 + + if len(self.images) > 1: + self.category = Image.CONTAINER + + self.seek(0) + + @property + def n_frames(self): + return len(self.images) + + @property + def is_animated(self): + return len(self.images) > 1 + + def seek(self, frame): + + try: + filename = self.images[frame] + except IndexError: + raise EOFError("no such frame") + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self): + + return self.frame + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/server/www/packages/packages-windows/x86/PIL/MpegImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MpegImagePlugin.py new file mode 100644 index 0000000..6671b86 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/MpegImagePlugin.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile +from PIL._binary import i8 + +__version__ = "0.1" + + +# +# Bitstream parser + +class BitStream(object): + + def __init__(self, fp): + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self): + return i8(self.fp.read(1)) + + def peek(self, bits): + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits): + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits): + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. + +class MpegImageFile(ImageFile.ImageFile): + + format = "MPEG" + format_description = "MPEG" + + def _open(self): + + s = BitStream(self.fp) + + if s.read(32) != 0x1B3: + raise SyntaxError("not an MPEG file") + + self.mode = "RGB" + self.size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile) + +Image.register_extension(MpegImageFile.format, ".mpg") +Image.register_extension(MpegImageFile.format, ".mpeg") + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/server/www/packages/packages-windows/x86/PIL/MpoImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..1d26021 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/MpoImagePlugin.py @@ -0,0 +1,99 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. 
While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. +# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, JpegImagePlugin + +__version__ = "0.1" + + +def _accept(prefix): + return JpegImagePlugin._accept(prefix) + + +def _save(im, fp, filename): + # Note that we can only save the current frame at present + return JpegImagePlugin._save(im, fp, filename) + + +## +# Image plugin for MPO images. + +class MpoImageFile(JpegImagePlugin.JpegImageFile): + + format = "MPO" + format_description = "MPO (CIPA DC-007)" + + def _open(self): + self.fp.seek(0) # prep the fp in order to pass the JPEG test + JpegImagePlugin.JpegImageFile._open(self) + self.mpinfo = self._getmp() + self.__framecount = self.mpinfo[0xB001] + self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset'] + for mpent in self.mpinfo[0xB002]] + self.__mpoffsets[0] = 0 + # Note that the following assertion will only be invalid if something + # gets broken within JpegImagePlugin. + assert self.__framecount == len(self.__mpoffsets) + del self.info['mpoffset'] # no longer needed + self.__fp = self.fp # FIXME: hack + self.__fp.seek(self.__mpoffsets[0]) # get ready to read first frame + self.__frame = 0 + self.offset = 0 + # for now we can only handle reading and individual frame extraction + self.readonly = 1 + + def load_seek(self, pos): + self.__fp.seek(pos) + + @property + def n_frames(self): + return self.__framecount + + @property + def is_animated(self): + return self.__framecount > 1 + + def seek(self, frame): + if frame < 0 or frame >= self.__framecount: + raise EOFError("no more images in MPO file") + else: + self.fp = self.__fp + self.offset = self.__mpoffsets[frame] + self.tile = [ + ("jpeg", (0, 0) + self.size, self.offset, (self.mode, "")) + ] + self.__frame = frame + + def tell(self): + return self.__frame + + +# -------------------------------------------------------------------q- +# Registry stuff + +# Note that since MPO shares a factory with JPEG, we do not need to do a +# separate registration for it here. +# Image.register_open(MpoImageFile.format, +# JpegImagePlugin.jpeg_factory, _accept) +Image.register_save(MpoImageFile.format, _save) + +Image.register_extension(MpoImageFile.format, ".mpo") + +Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/server/www/packages/packages-windows/x86/PIL/MspImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/MspImagePlugin.py new file mode 100644 index 0000000..85f8e76 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/MspImagePlugin.py @@ -0,0 +1,104 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MSP file handling +# +# This is the format used by the Paint program in Windows 1 and 2. +# +# History: +# 95-09-05 fl Created +# 97-01-03 fl Read/write MSP images +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + + +# +# read MSP files + +i16 = _binary.i16le + + +def _accept(prefix): + return prefix[:4] in [b"DanM", b"LinS"] + + +## +# Image plugin for Windows MSP images. This plugin supports both +# uncompressed (Windows 1.0). 
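+
+# A commented-out sketch (not part of the original module) of the 16-bit XOR
+# header checksum that _open() below verifies and _save() computes; a valid
+# 32-byte header XORs to zero ("test.msp" is a hypothetical local file):
+#
+#     with open('test.msp', 'rb') as fp:
+#         s = fp.read(32)
+#     checksum = 0
+#     for i in range(0, 32, 2):
+#         checksum ^= i16(s[i:i+2])
+#     print('ok' if checksum == 0 else 'bad MSP checksum')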
+ +class MspImageFile(ImageFile.ImageFile): + + format = "MSP" + format_description = "Windows Paint" + + def _open(self): + + # Header + s = self.fp.read(32) + if s[:4] not in [b"DanM", b"LinS"]: + raise SyntaxError("not an MSP file") + + # Header checksum + checksum = 0 + for i in range(0, 32, 2): + checksum = checksum ^ i16(s[i:i+2]) + if checksum != 0: + raise SyntaxError("bad MSP checksum") + + self.mode = "1" + self.size = i16(s[4:]), i16(s[6:]) + + if s[:4] == b"DanM": + self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))] + else: + self.tile = [("msp", (0, 0)+self.size, 32+2*self.size[1], None)] + +# +# write MSP files (uncompressed only) + +o16 = _binary.o16le + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as MSP" % im.mode) + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in header: + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))]) + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/server/www/packages/packages-windows/x86/PIL/OleFileIO-README.md b/server/www/packages/packages-windows/x86/PIL/OleFileIO-README.md new file mode 100644 index 0000000..eb6c9bc --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/OleFileIO-README.md @@ -0,0 +1,180 @@ +olefile (formerly OleFileIO_PL) +=============================== + +[olefile](http://www.decalage.info/olefile) is a Python package to parse, read and write +[Microsoft OLE2 files](http://en.wikipedia.org/wiki/Compound_File_Binary_Format) +(also called Structured Storage, Compound File Binary Format or Compound Document File Format), +such as Microsoft Office 97-2003 documents, vbaProject.bin in MS Office 2007+ files, Image Composer +and FlashPix files, Outlook messages, StickyNotes, several Microscopy file formats, McAfee antivirus quarantine files, +etc. + + +**Quick links:** [Home page](http://www.decalage.info/olefile) - +[Download/Install](https://bitbucket.org/decalage/olefileio_pl/wiki/Install) - +[Documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) - +[Report Issues/Suggestions/Questions](https://bitbucket.org/decalage/olefileio_pl/issues?status=new&status=open) - +[Contact the author](http://decalage.info/contact) - +[Repository](https://bitbucket.org/decalage/olefileio_pl) - +[Updates on Twitter](https://twitter.com/decalage2) + + +News +---- + +Follow all updates and news on Twitter: + +- **2015-01-25 v0.42**: improved handling of special characters in stream/storage names on Python 2.x (using UTF-8 + instead of Latin-1), fixed bug in listdir with empty storages. +- 2014-11-25 v0.41: OleFileIO.open and isOleFile now support OLE files stored in byte strings, fixed installer for + python 3, added support for Jython (Niko Ehrenfeuchter) +- 2014-10-01 v0.40: renamed OleFileIO_PL to olefile, added initial write support for streams >4K, updated doc and + license, improved the setup script. +- 2014-07-27 v0.31: fixed support for large files with 4K sectors, thanks to Niko Ehrenfeuchter, Martijn Berger and + Dave Jones. 
Added test scripts from Pillow (by hugovk). Fixed setup for Python 3 (Martin Panter) +- 2014-02-04 v0.30: now compatible with Python 3.x, thanks to Martin Panter who did most of the hard work. +- 2013-07-24 v0.26: added methods to parse stream/storage timestamps, improved listdir to include storages, fixed + parsing of direntry timestamps +- 2013-05-27 v0.25: improved metadata extraction, properties parsing and exception handling, fixed + [issue #12](https://bitbucket.org/decalage/olefileio_pl/issue/12/error-when-converting-timestamps-in-ole) +- 2013-05-07 v0.24: new features to extract metadata (get\_metadata method and OleMetadata class), improved + getproperties to convert timestamps to Python datetime +- 2012-10-09: published [python-oletools](http://www.decalage.info/python/oletools), a package of analysis tools based + on OleFileIO_PL +- 2012-09-11 v0.23: added support for file-like objects, fixed [issue #8](https://bitbucket.org/decalage/olefileio_pl/issue/8/bug-with-file-object) +- 2012-02-17 v0.22: fixed issues #7 (bug in getproperties) and #2 (added close method) +- 2011-10-20: code hosted on bitbucket to ease contributions and bug tracking +- 2010-01-24 v0.21: fixed support for big-endian CPUs, such as PowerPC Macs. +- 2009-12-11 v0.20: small bugfix in OleFileIO.open when filename is not plain str. +- 2009-12-10 v0.19: fixed support for 64 bits platforms (thanks to Ben G. and Martijn for reporting the bug) +- see changelog in source code for more info. + +Download/Install +---------------- + +If you have pip or setuptools installed (pip is included in Python 2.7.9+), you may simply run **pip install olefile** +or **easy_install olefile** for the first installation. + +To update olefile, run **pip install -U olefile**. + +Otherwise, see https://bitbucket.org/decalage/olefileio_pl/wiki/Install + +Features +-------- + +- Parse, read and write any OLE file such as Microsoft Office 97-2003 legacy document formats (Word .doc, Excel .xls, + PowerPoint .ppt, Visio .vsd, Project .mpp), Image Composer and FlashPix files, Outlook messages, StickyNotes, + Zeiss AxioVision ZVI files, Olympus FluoView OIB files, etc +- List all the streams and storages contained in an OLE file +- Open streams as files +- Parse and read property streams, containing metadata of the file +- Portable, pure Python module, no dependency + +olefile can be used as an independent package or with PIL/Pillow. + +olefile is mostly meant for developers. If you are looking for tools to analyze OLE files or to extract data (especially +for security purposes such as malware analysis and forensics), then please also check my +[python-oletools](http://www.decalage.info/python/oletools), which are built upon olefile and provide a higher-level interface. + + +History +------- + +olefile is based on the OleFileIO module from [PIL](http://www.pythonware.com/products/pil/index.htm), the excellent +Python Imaging Library, created and maintained by Fredrik Lundh. The olefile API is still compatible with PIL, but +since 2005 I have improved the internal implementation significantly, with new features, bugfixes and a more robust +design. From 2005 to 2014 the project was called OleFileIO_PL, and in 2014 I changed its name to olefile to celebrate +its 9 years and its new write features. + +As far as I know, olefile is the most complete and robust Python implementation to read MS OLE2 files, portable on +several operating systems. 
(please tell me if you know other similar Python modules) + +Since 2014 olefile/OleFileIO_PL has been integrated into [Pillow](http://python-pillow.org), the friendly fork +of PIL. olefile will continue to be improved as a separate project, and new versions will be merged into Pillow +regularly. + + +Main improvements over the original version of OleFileIO in PIL: +---------------------------------------------------------------- + +- Compatible with Python 3.x and 2.6+ +- Many bug fixes +- Support for files larger than 6.8MB +- Support for 64 bits platforms and big-endian CPUs +- Robust: many checks to detect malformed files +- Runtime option to choose if malformed files should be parsed or raise exceptions +- Improved API +- Metadata extraction, stream/storage timestamps (e.g. for document forensics) +- Can open file-like objects +- Added setup.py and install.bat to ease installation +- More convenient slash-based syntax for stream paths +- Write features + +Documentation +------------- + +Please see the [online documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) for more information, +especially the [OLE overview](https://bitbucket.org/decalage/olefileio_pl/wiki/OLE_Overview) and the +[API page](https://bitbucket.org/decalage/olefileio_pl/wiki/API) which describe how to use olefile in Python applications. +A copy of the same documentation is also provided in the doc subfolder of the olefile package. + + +## Real-life examples ## + +A real-life example: [using OleFileIO_PL for malware analysis and forensics](http://blog.gregback.net/2011/03/using-remnux-for-forensic-puzzle-6/). + +See also [this paper](https://computer-forensics.sans.org/community/papers/gcfa/grow-forensic-tools-taxonomy-python-libraries-helpful-forensic-analysis_6879) about python tools for forensics, which features olefile. + + +License +------- + +olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec +([http://www.decalage.info](http://www.decalage.info)) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
+
+----------
+
+olefile is based on source code from the OleFileIO module of the Python Imaging Library (PIL) published by Fredrik
+Lundh under the following license:
+
+The Python Imaging Library (PIL) is
+
+    Copyright © 1997-2011 by Secret Labs AB
+    Copyright © 1995-2011 by Fredrik Lundh
+
+By obtaining, using, and/or copying this software and/or its associated documentation, you agree that you have read,
+understood, and will comply with the following terms and conditions:
+
+Permission to use, copy, modify, and distribute this software and its associated documentation for any purpose and
+without fee is hereby granted, provided that the above copyright notice appears in all copies, and that both that
+copyright notice and this permission notice appear in supporting documentation, and that the name of Secret Labs AB or
+the author not be used in advertising or publicity pertaining to distribution of the software without specific, written
+prior permission.
+
+SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+SOFTWARE.
diff --git a/server/www/packages/packages-windows/x86/PIL/OleFileIO.py b/server/www/packages/packages-windows/x86/PIL/OleFileIO.py
new file mode 100644
index 0000000..1998e3c
--- /dev/null
+++ b/server/www/packages/packages-windows/x86/PIL/OleFileIO.py
@@ -0,0 +1,2305 @@
+#!/usr/bin/env python
+
+# olefile (formerly OleFileIO_PL) version 0.42 2015-01-25
+#
+# Module to read/write Microsoft OLE2 files (also called Structured Storage or
+# Microsoft Compound Document File Format), such as Microsoft Office 97-2003
+# documents, Image Composer and FlashPix files, Outlook messages, ...
+# This version is compatible with Python 2.6+ and 3.x
+#
+# Project website: http://www.decalage.info/olefile
+#
+# olefile is copyright (c) 2005-2015 Philippe Lagadec (http://www.decalage.info)
+#
+# olefile is based on the OleFileIO module from the PIL library v1.1.6
+# See: http://www.pythonware.com/products/pil/index.htm
+#
+# The Python Imaging Library (PIL) is
+# Copyright (c) 1997-2005 by Secret Labs AB
+# Copyright (c) 1995-2005 by Fredrik Lundh
+#
+# See source code and LICENSE.txt for information on usage and redistribution.
+
+
+# Since OleFileIO_PL v0.30, only Python 2.6+ and 3.x is supported
+# This import enables print() as a function rather than a keyword
+# (main requirement to be compatible with Python 3.x)
+# The comment on the line below should be printed on Python 2.5 or older:
+from __future__ import print_function  # This version of olefile requires Python 2.6+ or 3.x.
+
+
+__author__ = "Philippe Lagadec"
+__date__ = "2015-01-25"
+__version__ = '0.42b'
+
+#--- LICENSE ------------------------------------------------------------------
+
+# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec
+# (http://www.decalage.info)
+#
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# ---------- +# PIL License: +# +# olefile is based on source code from the OleFileIO module of the Python +# Imaging Library (PIL) published by Fredrik Lundh under the following license: + +# The Python Imaging Library (PIL) is +# Copyright (c) 1997-2005 by Secret Labs AB +# Copyright (c) 1995-2005 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its associated +# documentation, you agree that you have read, understood, and will comply with +# the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and its +# associated documentation for any purpose and without fee is hereby granted, +# provided that the above copyright notice appears in all copies, and that both +# that copyright notice and this permission notice appear in supporting +# documentation, and that the name of Secret Labs AB or the author(s) not be used +# in advertising or publicity pertaining to distribution of the software +# without specific, written prior permission. +# +# SECRET LABS AB AND THE AUTHORS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +# IN NO EVENT SHALL SECRET LABS AB OR THE AUTHORS BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
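+
+# Quick usage sketch (file and stream names below are hypothetical examples):
+#
+#     if isOleFile('document.doc'):
+#         ole = OleFileIO('document.doc')
+#         print(ole.listdir())                 # list all streams/storages
+#         data = ole.openstream('WordDocument').read()
+#         ole.close()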
+ +#----------------------------------------------------------------------------- +# CHANGELOG: (only olefile/OleFileIO_PL changes compared to PIL 1.1.6) +# 2005-05-11 v0.10 PL: - a few fixes for Python 2.4 compatibility +# (all changes flagged with [PL]) +# 2006-02-22 v0.11 PL: - a few fixes for some Office 2003 documents which raise +# exceptions in _OleStream.__init__() +# 2006-06-09 v0.12 PL: - fixes for files above 6.8MB (DIFAT in loadfat) +# - added some constants +# - added header values checks +# - added some docstrings +# - getsect: bugfix in case sectors >512 bytes +# - getsect: added conformity checks +# - DEBUG_MODE constant to activate debug display +# 2007-09-04 v0.13 PL: - improved/translated (lots of) comments +# - updated license +# - converted tabs to 4 spaces +# 2007-11-19 v0.14 PL: - added OleFileIO._raise_defect() to adapt sensitivity +# - improved _unicode() to use Python 2.x unicode support +# - fixed bug in _OleDirectoryEntry +# 2007-11-25 v0.15 PL: - added safety checks to detect FAT loops +# - fixed _OleStream which didn't check stream size +# - added/improved many docstrings and comments +# - moved helper functions _unicode and _clsid out of +# OleFileIO class +# - improved OleFileIO._find() to add Unix path syntax +# - OleFileIO._find() is now case-insensitive +# - added get_type() and get_rootentry_name() +# - rewritten loaddirectory and _OleDirectoryEntry +# 2007-11-27 v0.16 PL: - added _OleDirectoryEntry.kids_dict +# - added detection of duplicate filenames in storages +# - added detection of duplicate references to streams +# - added get_size() and exists() to _OleDirectoryEntry +# - added isOleFile to check header before parsing +# - added __all__ list to control public keywords in pydoc +# 2007-12-04 v0.17 PL: - added _load_direntry to fix a bug in loaddirectory +# - improved _unicode(), added workarounds for Python <2.3 +# - added set_debug_mode and -d option to set debug mode +# - fixed bugs in OleFileIO.open and _OleDirectoryEntry +# - added safety check in main for large or binary +# properties +# - allow size>0 for storages for some implementations +# 2007-12-05 v0.18 PL: - fixed several bugs in handling of FAT, MiniFAT and +# streams +# - added option '-c' in main to check all streams +# 2009-12-10 v0.19 PL: - bugfix for 32 bit arrays on 64 bits platforms +# (thanks to Ben G. 
and Martijn for reporting the bug) +# 2009-12-11 v0.20 PL: - bugfix in OleFileIO.open when filename is not plain str +# 2010-01-22 v0.21 PL: - added support for big-endian CPUs such as PowerPC Macs +# 2012-02-16 v0.22 PL: - fixed bug in getproperties, patch by chuckleberryfinn +# (https://bitbucket.org/decalage/olefileio_pl/issue/7) +# - added close method to OleFileIO (fixed issue #2) +# 2012-07-25 v0.23 PL: - added support for file-like objects (patch by mete0r_kr) +# 2013-05-05 v0.24 PL: - getproperties: added conversion from filetime to python +# datetime +# - main: displays properties with date format +# - new class OleMetadata to parse standard properties +# - added get_metadata method +# 2013-05-07 v0.24 PL: - a few improvements in OleMetadata +# 2013-05-24 v0.25 PL: - getproperties: option to not convert some timestamps +# - OleMetaData: total_edit_time is now a number of seconds, +# not a timestamp +# - getproperties: added support for VT_BOOL, VT_INT, V_UINT +# - getproperties: filter out null chars from strings +# - getproperties: raise non-fatal defects instead of +# exceptions when properties cannot be parsed properly +# 2013-05-27 PL: - getproperties: improved exception handling +# - _raise_defect: added option to set exception type +# - all non-fatal issues are now recorded, and displayed +# when run as a script +# 2013-07-11 v0.26 PL: - added methods to get modification and creation times +# of a directory entry or a storage/stream +# - fixed parsing of direntry timestamps +# 2013-07-24 PL: - new options in listdir to list storages and/or streams +# 2014-02-04 v0.30 PL: - upgraded code to support Python 3.x by Martin Panter +# - several fixes for Python 2.6 (xrange, MAGIC) +# - reused i32 from Pillow's _binary +# 2014-07-18 v0.31 - preliminary support for 4K sectors +# 2014-07-27 v0.31 PL: - a few improvements in OleFileIO.open (header parsing) +# - Fixed loadfat for large files with 4K sectors (issue #3) +# 2014-07-30 v0.32 PL: - added write_sect to write sectors to disk +# - added write_mode option to OleFileIO.__init__ and open +# 2014-07-31 PL: - fixed padding in write_sect for Python 3, added checks +# - added write_stream to write a stream to disk +# 2014-09-26 v0.40 PL: - renamed OleFileIO_PL to olefile +# 2014-11-09 NE: - added support for Jython (Niko Ehrenfeuchter) +# 2014-11-13 v0.41 PL: - improved isOleFile and OleFileIO.open to support OLE +# data in a string buffer and file-like objects. +# 2014-11-21 PL: - updated comments according to Pillow's commits +# 2015-01-24 v0.42 PL: - changed the default path name encoding from Latin-1 +# to UTF-8 on Python 2.x (Unicode on Python 3.x) +# - added path_encoding option to override the default +# - fixed a bug in _list when a storage is empty + +#----------------------------------------------------------------------------- +# TODO (for version 1.0): +# + get rid of print statements, to simplify Python 2.x and 3.x support +# + add is_stream and is_storage +# + remove leading and trailing slashes where a path is used +# + add functions path_list2str and path_str2list +# + fix how all the methods handle unicode str and/or bytes as arguments +# + add path attrib to _OleDirEntry, set it once and for all in init or +# append_kids (then listdir/_list can be simplified) +# - TESTS with Linux, MacOSX, Python 1.5.2, various files, PIL, ... 
+# - add underscore to each private method, to avoid their display in +# pydoc/epydoc documentation - Remove it for classes to be documented +# - replace all raised exceptions with _raise_defect (at least in OleFileIO) +# - merge code from _OleStream and OleFileIO.getsect to read sectors +# (maybe add a class for FAT and MiniFAT ?) +# - add method to check all streams (follow sectors chains without storing all +# stream in memory, and report anomalies) +# - use _OleDirectoryEntry.kids_dict to improve _find and _list ? +# - fix Unicode names handling (find some way to stay compatible with Py1.5.2) +# => if possible avoid converting names to Latin-1 +# - review DIFAT code: fix handling of DIFSECT blocks in FAT (not stop) +# - rewrite OleFileIO.getproperties +# - improve docstrings to show more sample uses +# - see also original notes and FIXME below +# - remove all obsolete FIXMEs +# - OleMetadata: fix version attrib according to +# http://msdn.microsoft.com/en-us/library/dd945671%28v=office.12%29.aspx + +# IDEAS: +# - in OleFileIO._open and _OleStream, use size=None instead of 0x7FFFFFFF for +# streams with unknown size +# - use arrays of int instead of long integers for FAT/MiniFAT, to improve +# performance and reduce memory usage ? (possible issue with values >2^31) +# - provide tests with unittest (may need write support to create samples) +# - move all debug code (and maybe dump methods) to a separate module, with +# a class which inherits OleFileIO ? +# - fix docstrings to follow epydoc format +# - add support for big endian byte order ? +# - create a simple OLE explorer with wxPython + +# FUTURE EVOLUTIONS to add write support: +# see issue #6 on Bitbucket: +# https://bitbucket.org/decalage/olefileio_pl/issue/6/improve-olefileio_pl-to-write-ole-files + +#----------------------------------------------------------------------------- +# NOTES from PIL 1.1.6: + +# History: +# 1997-01-20 fl Created +# 1997-01-22 fl Fixed 64-bit portability quirk +# 2003-09-09 fl Fixed typo in OleFileIO.loadfat (noted by Daniel Haertle) +# 2004-02-29 fl Changed long hex constants to signed integers +# +# Notes: +# FIXME: sort out sign problem (eliminate long hex constants) +# FIXME: change filename to use "a/b/c" instead of ["a", "b", "c"] +# FIXME: provide a glob mechanism function (using fnmatchcase) +# +# Literature: +# +# "FlashPix Format Specification, Appendix A", Kodak and Microsoft, +# September 1996. 
+# +# Quotes: +# +# "If this document and functionality of the Software conflict, +# the actual functionality of the Software represents the correct +# functionality" -- Microsoft, in the OLE format specification + +#------------------------------------------------------------------------------ + + +import io +import sys +import struct +import array +import os.path +import datetime + +#=== COMPATIBILITY WORKAROUNDS ================================================ + +# [PL] Define explicitly the public API to avoid private objects in pydoc: +#TODO: add more +# __all__ = ['OleFileIO', 'isOleFile', 'MAGIC'] + +# For Python 3.x, need to redefine long as int: +if str is not bytes: + long = int + +# Need to make sure we use xrange both on Python 2 and 3.x: +try: + # on Python 2 we need xrange: + iterrange = xrange +except: + # no xrange, for Python 3 it was renamed as range: + iterrange = range + +# [PL] workaround to fix an issue with array item size on 64 bits systems: +if array.array('L').itemsize == 4: + # on 32 bits platforms, long integers in an array are 32 bits: + UINT32 = 'L' +elif array.array('I').itemsize == 4: + # on 64 bits platforms, integers in an array are 32 bits: + UINT32 = 'I' +elif array.array('i').itemsize == 4: + # On 64 bit Jython, signed integers ('i') are the only way to store our 32 + # bit values in an array in a *somewhat* reasonable way, as the otherwise + # perfectly suited 'H' (unsigned int, 32 bits) results in a completely + # unusable behaviour. This is most likely caused by the fact that Java + # doesn't have unsigned values, and thus Jython's "array" implementation, + # which is based on "jarray", doesn't have them either. + # NOTE: to trick Jython into converting the values it would normally + # interpret as "signed" into "unsigned", a binary-and operation with + # 0xFFFFFFFF can be used. This way it is possible to use the same comparing + # operations on all platforms / implementations. The corresponding code + # lines are flagged with a 'JYTHON-WORKAROUND' tag below. + UINT32 = 'i' +else: + raise ValueError('Need to fix a bug with 32 bit arrays, please contact author...') + + +# [PL] These workarounds were inspired from the Path module +# (see http://www.jorendorff.com/articles/python/path/) +try: + basestring +except NameError: + basestring = str + +# [PL] Experimental setting: if True, OLE filenames will be kept in Unicode +# if False (default PIL behaviour), all filenames are converted to Latin-1. +KEEP_UNICODE_NAMES = True + +if sys.version_info[0] < 3: + # On Python 2.x, the default encoding for path names is UTF-8: + DEFAULT_PATH_ENCODING = 'utf-8' +else: + # On Python 3.x, the default encoding for path names is Unicode (None): + DEFAULT_PATH_ENCODING = None + + +#=== DEBUGGING =============================================================== + +#TODO: replace this by proper logging + +# [PL] DEBUG display mode: False by default, use set_debug_mode() or "-d" on +# command line to change it. +DEBUG_MODE = False + + +def debug_print(msg): + print(msg) + + +def debug_pass(msg): + pass + + +debug = debug_pass + + +def set_debug_mode(debug_mode): + """ + Set debug mode on or off, to control display of debugging messages. 
+ :param mode: True or False + """ + global DEBUG_MODE, debug + DEBUG_MODE = debug_mode + if debug_mode: + debug = debug_print + else: + debug = debug_pass + + +#=== CONSTANTS =============================================================== + +# magic bytes that should be at the beginning of every OLE file: +MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' + +# [PL]: added constants for Sector IDs (from AAF specifications) +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector + +# [PL]: added constants for Directory Entry IDs (from AAF specifications) +MAXREGSID = 0xFFFFFFFA # (-6) maximum directory entry ID +NOSTREAM = 0xFFFFFFFF # (-1) unallocated directory entry + +# [PL] object types in storage (from AAF specifications) +STGTY_EMPTY = 0 # empty directory entry (according to OpenOffice.org doc) +STGTY_STORAGE = 1 # element is a storage object +STGTY_STREAM = 2 # element is a stream object +STGTY_LOCKBYTES = 3 # element is an ILockBytes object +STGTY_PROPERTY = 4 # element is an IPropertyStorage object +STGTY_ROOT = 5 # element is a root storage + + +# +# -------------------------------------------------------------------- +# property types + +VT_EMPTY = 0; VT_NULL = 1; VT_I2 = 2; VT_I4 = 3; VT_R4 = 4; VT_R8 = 5; VT_CY = 6; +VT_DATE = 7; VT_BSTR = 8; VT_DISPATCH = 9; VT_ERROR = 10; VT_BOOL = 11; +VT_VARIANT = 12; VT_UNKNOWN = 13; VT_DECIMAL = 14; VT_I1 = 16; VT_UI1 = 17; +VT_UI2 = 18; VT_UI4 = 19; VT_I8 = 20; VT_UI8 = 21; VT_INT = 22; VT_UINT = 23; +VT_VOID = 24; VT_HRESULT = 25; VT_PTR = 26; VT_SAFEARRAY = 27; VT_CARRAY = 28; +VT_USERDEFINED = 29; VT_LPSTR = 30; VT_LPWSTR = 31; VT_FILETIME = 64; +VT_BLOB = 65; VT_STREAM = 66; VT_STORAGE = 67; VT_STREAMED_OBJECT = 68; +VT_STORED_OBJECT = 69; VT_BLOB_OBJECT = 70; VT_CF = 71; VT_CLSID = 72; +VT_VECTOR = 0x1000; + +# map property id to name (for debugging purposes) + +VT = {} +for keyword, var in list(vars().items()): + if keyword[:3] == "VT_": + VT[var] = keyword + +# +# -------------------------------------------------------------------- +# Some common document types (root.clsid fields) + +WORD_CLSID = "00020900-0000-0000-C000-000000000046" +#TODO: check Excel, PPT, ... + +# [PL]: Defect levels to classify parsing errors - see OleFileIO._raise_defect() +DEFECT_UNSURE = 10 # a case which looks weird, but not sure it's a defect +DEFECT_POTENTIAL = 20 # a potential defect +DEFECT_INCORRECT = 30 # an error according to specifications, but parsing + # can go on +DEFECT_FATAL = 40 # an error which cannot be ignored, parsing is + # impossible + +# Minimal size of an empty OLE file, with 512-bytes sectors = 1536 bytes +# (this is used in isOleFile and OleFile.open) +MINIMAL_OLEFILE_SIZE = 1536 + +# [PL] add useful constants to __all__: +# for key in list(vars().keys()): +# if key.startswith('STGTY_') or key.startswith('DEFECT_'): +# __all__.append(key) + + +#=== FUNCTIONS =============================================================== + +def isOleFile(filename): + """ + Test if a file is an OLE container (according to the magic bytes in its header). + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. 
(bytes or unicode string)
+        - if filename is a string longer than 1535 bytes, it is parsed
+          as the content of an OLE file in memory. (bytes type only)
+        - if filename is a file-like object (with read and seek methods),
+          it is parsed as-is.
+
+    :returns: True if OLE, False otherwise.
+    """
+    # check if filename is a string-like or file-like object:
+    if hasattr(filename, 'read'):
+        # file-like object: use it directly
+        header = filename.read(len(MAGIC))
+        # just in case, seek back to start of file:
+        filename.seek(0)
+    elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE:
+        # filename is a bytes string containing the OLE file to be parsed:
+        header = filename[:len(MAGIC)]
+    else:
+        # string-like object: filename of file on disk
+        header = open(filename, 'rb').read(len(MAGIC))
+    if header == MAGIC:
+        return True
+    else:
+        return False
+
+
+if bytes is str:
+    # version for Python 2.x
+    def i8(c):
+        return ord(c)
+else:
+    # version for Python 3.x
+    def i8(c):
+        return c if c.__class__ is int else c[0]
+
+
+#TODO: replace i16 and i32 with more readable struct.unpack equivalent?
+
+def i16(c, o = 0):
+    """
+    Converts a 2-bytes (16 bits) string to an integer.
+
+    c: string containing bytes to convert
+    o: offset of bytes to convert in string
+    """
+    return struct.unpack("<H", c[o:o+2])[0]
+
+
+def i32(c, o = 0):
+    """
+    Converts a 4-bytes (32 bits) string to an integer.
+
+    c: string containing bytes to convert
+    o: offset of bytes to convert in string
+    """
+    return struct.unpack("<I", c[o:o+4])[0]
+
+
+def _clsid(clsid):
+    """
+    Converts a CLSID to a human-readable string.
+
+    :param clsid: string of length 16.
+    """
+    assert len(clsid) == 16
+    # if clsid is only made of null bytes, return an empty string:
+    if not clsid.strip(b"\0"):
+        return ""
+    return (("%08X-%04X-%04X-%02X%02X-" + "%02X" * 6) %
+            ((i32(clsid, 0), i16(clsid, 4), i16(clsid, 6)) +
+             tuple(map(i8, clsid[8:16]))))
+
+
+def filetime2datetime(filetime):
+    """
+    convert FILETIME (64 bits int) to Python datetime.datetime
+    """
+    # TODO: manage exception when microseconds is too large
+    # inspired from http://code.activestate.com/recipes/511425-filetime-to-datetime/
+    _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0)
+    return _FILETIME_null_date + datetime.timedelta(microseconds=filetime//10)
+
+
+#=== CLASSES ==================================================================
+
+#--- _OleStream ---------------------------------------------------------------
+
+class _OleStream(io.BytesIO):
+    """
+    OLE2 Stream
+
+    Returns a read-only file object which can be used to read
+    the contents of a OLE stream (instance of the BytesIO class).
+    To open a stream, use the openstream method in the OleFileIO class.
+
+    This function can be used with either ordinary streams,
+    or ministreams, depending on the offset, sectorsize, and
+    fat table arguments.
+
+    Attributes:
+
+        - size: actual size of data stream, after it was opened.
+    """
+    # FIXME: should store the list of sects obtained by following
+    # the fat chain, and load new sectors on demand instead of
+    # loading it all in one go.
+
+    def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize):
+        """
+        Constructor for _OleStream class.
+
+        :param fp: file object, the OLE container or the MiniFAT stream
+        :param sect: sector index of first sector in the stream
+        :param size: total size of the stream (or nothing when size is unknown)
+        :param offset: offset in bytes for the first FAT or MiniFAT sector
+        :param sectorsize: size of one sector
+        :param fat: array/list of sector indexes (FAT or MiniFAT)
+        :param filesize: size of OLE file (for debugging)
+        :returns: a BytesIO instance containing the OLE stream
+        """
+        debug('_OleStream.__init__:')
+        debug('  sect=%d (%X), size=%d, offset=%d, sectorsize=%d, len(fat)=%d, fp=%s'
+              % (sect, sect, size, offset, sectorsize, len(fat), repr(fp)))
+        # [PL] To detect malformed documents with FAT loops, we compute the
+        # expected number of sectors in the stream:
+        unknown_size = False
+        if size == 0x7FFFFFFF:
+            # this is the case when called from OleFileIO._open(), and stream
+            # size is not known in advance (for example when reading the
+            # Directory stream). Then we can only guess maximum size:
+            size = len(fat) * sectorsize
+            # and we keep a record that size was unknown:
+            unknown_size = True
+            debug('  stream with UNKNOWN SIZE')
+        nb_sectors = (size + (sectorsize-1)) // sectorsize
+        debug('nb_sectors = %d' % nb_sectors)
+        # This number should (at least) be less than the total number of
+        # sectors in the given FAT:
+        if nb_sectors > len(fat):
+            raise IOError('malformed OLE document, stream too large')
+        # optimization(?): data is first a list of strings, and join() is called
+        # at the end to concatenate all in one string.
+        # (this may not be really useful with recent Python versions)
+        data = []
+        # if size is zero, then first sector index should be ENDOFCHAIN:
+        if size == 0 and sect != ENDOFCHAIN:
+            debug('size == 0 and sect != ENDOFCHAIN:')
+            raise IOError('incorrect OLE sector index for empty stream')
+        # [PL] A fixed-length for loop is used instead of an undefined while
+        # loop to avoid DoS attacks:
+        for i in range(nb_sectors):
+            # Sector index may be ENDOFCHAIN, but only if size was unknown
+            if sect == ENDOFCHAIN:
+                if unknown_size:
+                    break
+                else:
+                    # else this means that the stream is smaller than declared:
+                    debug('sect=ENDOFCHAIN before expected size')
+                    raise IOError('incomplete OLE stream')
+            # sector index should be within FAT:
+            if sect < 0 or sect >= len(fat):
+                debug('sect=%d (%X) / len(fat)=%d' % (sect, sect, len(fat)))
+                debug('i=%d / nb_sectors=%d' % (i, nb_sectors))
+##                tmp_data = b"".join(data)
+##                f = open('test_debug.bin', 'wb')
+##                f.write(tmp_data)
+##                f.close()
+##                debug('data read so far: %d bytes' % len(tmp_data))
+                raise IOError('incorrect OLE FAT, sector index out of range')
+            #TODO: merge this code with OleFileIO.getsect() ?
+            #TODO: check if this works with 4K sectors:
+            try:
+                fp.seek(offset + sectorsize * sect)
+            except:
+                debug('sect=%d, seek=%d, filesize=%d' %
+                      (sect, offset+sectorsize*sect, filesize))
+                raise IOError('OLE sector index out of range')
+            sector_data = fp.read(sectorsize)
+            # [PL] check if there was enough data:
+            # Note: if sector is the last of the file, sometimes it is not a
+            # complete sector (of 512 or 4K), so we may read less than
+            # sectorsize.
+ if len(sector_data) != sectorsize and sect != (len(fat)-1): + debug('sect=%d / len(fat)=%d, seek=%d / filesize=%d, len read=%d' % + (sect, len(fat), offset+sectorsize*sect, filesize, len(sector_data))) + debug('seek+len(read)=%d' % (offset+sectorsize*sect+len(sector_data))) + raise IOError('incomplete OLE sector') + data.append(sector_data) + # jump to next sector in the FAT: + try: + sect = fat[sect] & 0xFFFFFFFF # JYTHON-WORKAROUND + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') + data = b"".join(data) + # Data is truncated to the actual stream size: + if len(data) >= size: + data = data[:size] + # actual stream size is stored for future use: + self.size = size + elif unknown_size: + # actual stream size was not known, now we know the size of read + # data: + self.size = len(data) + else: + # read data is less than expected: + debug('len(data)=%d, size=%d' % (len(data), size)) + raise IOError('OLE stream size is less than declared') + # when all data is read in memory, BytesIO constructor is called + io.BytesIO.__init__(self, data) + # Then the _OleStream object can be used as a read-only file object. + + +#--- _OleDirectoryEntry ------------------------------------------------------- + +class _OleDirectoryEntry(object): + + """ + OLE2 Directory Entry + """ + # [PL] parsing code moved from OleFileIO.loaddirectory + + # struct to parse directory entries: + # <: little-endian byte order, standard sizes + # (note: this should guarantee that Q returns a 64 bits int) + # 64s: string containing entry name in unicode (max 31 chars) + null char + # H: uint16, number of bytes used in name buffer, including null = (len+1)*2 + # B: uint8, dir entry type (between 0 and 5) + # B: uint8, color: 0=black, 1=red + # I: uint32, index of left child node in the red-black tree, NOSTREAM if none + # I: uint32, index of right child node in the red-black tree, NOSTREAM if none + # I: uint32, index of child root node if it is a storage, else NOSTREAM + # 16s: CLSID, unique identifier (only used if it is a storage) + # I: uint32, user flags + # Q (was 8s): uint64, creation timestamp or zero + # Q (was 8s): uint64, modification timestamp or zero + # I: uint32, SID of first sector if stream or ministream, SID of 1st sector + # of stream containing ministreams if root entry, 0 otherwise + # I: uint32, total stream size in bytes if stream (low 32 bits), 0 otherwise + # I: uint32, total stream size in bytes if stream (high 32 bits), 0 otherwise + STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' + # size of a directory entry: 128 bytes + DIRENTRY_SIZE = 128 + assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE + + def __init__(self, entry, sid, olefile): + """ + Constructor for an _OleDirectoryEntry object. + Parses a 128-bytes entry from the OLE Directory stream. 
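+
+        For example (a sketch with hypothetical values): the root entry of a
+        typical file unpacks to a UTF-16LE name buffer holding 'Root Entry',
+        entry_type=STGTY_ROOT (5), and a sid_child pointing to the top of the
+        red-black tree of its direct children.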
+ + :param entry : string (must be 128 bytes long) + :param sid : index of this directory entry in the OLE file directory + :param olefile: OleFileIO containing this directory entry + """ + self.sid = sid + # ref to olefile is stored for future use + self.olefile = olefile + # kids is a list of children entries, if this entry is a storage: + # (list of _OleDirectoryEntry objects) + self.kids = [] + # kids_dict is a dictionary of children entries, indexed by their + # name in lowercase: used to quickly find an entry, and to detect + # duplicates + self.kids_dict = {} + # flag used to detect if the entry is referenced more than once in + # directory: + self.used = False + # decode DirEntry + ( + name, + namelength, + self.entry_type, + self.color, + self.sid_left, + self.sid_right, + self.sid_child, + clsid, + self.dwUserFlags, + self.createTime, + self.modifyTime, + self.isectStart, + sizeLow, + sizeHigh + ) = struct.unpack(_OleDirectoryEntry.STRUCT_DIRENTRY, entry) + if self.entry_type not in [STGTY_ROOT, STGTY_STORAGE, STGTY_STREAM, STGTY_EMPTY]: + olefile.raise_defect(DEFECT_INCORRECT, 'unhandled OLE storage type') + # only first directory entry can (and should) be root: + if self.entry_type == STGTY_ROOT and sid != 0: + olefile.raise_defect(DEFECT_INCORRECT, 'duplicate OLE root entry') + if sid == 0 and self.entry_type != STGTY_ROOT: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect OLE root entry') + #debug (struct.unpack(fmt_entry, entry[:len_entry])) + # name should be at most 31 unicode characters + null character, + # so 64 bytes in total (31*2 + 2): + if namelength > 64: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect DirEntry name length') + # if exception not raised, namelength is set to the maximum value: + namelength = 64 + # only characters without ending null char are kept: + name = name[:(namelength-2)] + #TODO: check if the name is actually followed by a null unicode character ([MS-CFB] 2.6.1) + #TODO: check if the name does not contain forbidden characters: + # [MS-CFB] 2.6.1: "The following characters are illegal and MUST NOT be part of the name: '/', '\', ':', '!'." 
+ # name is converted from UTF-16LE to the path encoding specified in the OleFileIO: + self.name = olefile._decode_utf16_str(name) + + debug('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) + debug(' - type: %d' % self.entry_type) + debug(' - sect: %d' % self.isectStart) + debug(' - SID left: %d, right: %d, child: %d' % (self.sid_left, + self.sid_right, self.sid_child)) + + # sizeHigh is only used for 4K sectors, it should be zero for 512 bytes + # sectors, BUT apparently some implementations set it as 0xFFFFFFFF, 1 + # or some other value so it cannot be raised as a defect in general: + if olefile.sectorsize == 512: + if sizeHigh != 0 and sizeHigh != 0xFFFFFFFF: + debug('sectorsize=%d, sizeLow=%d, sizeHigh=%d (%X)' % + (olefile.sectorsize, sizeLow, sizeHigh, sizeHigh)) + olefile.raise_defect(DEFECT_UNSURE, 'incorrect OLE stream size') + self.size = sizeLow + else: + self.size = sizeLow + (long(sizeHigh) << 32) + debug(' - size: %d (sizeLow=%d, sizeHigh=%d)' % (self.size, sizeLow, sizeHigh)) + + self.clsid = _clsid(clsid) + # a storage should have a null size, BUT some implementations such as + # Word 8 for Mac seem to allow non-null values => Potential defect: + if self.entry_type == STGTY_STORAGE and self.size != 0: + olefile.raise_defect(DEFECT_POTENTIAL, 'OLE storage with size>0') + # check if stream is not already referenced elsewhere: + if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: + if self.size < olefile.minisectorcutoff \ + and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT + # ministream object + minifat = True + else: + minifat = False + olefile._check_duplicate_stream(self.isectStart, minifat) + + def build_storage_tree(self): + """ + Read and build the red-black tree attached to this _OleDirectoryEntry + object, if it is a storage. + Note that this method builds a tree of all subentries, so it should + only be called for the root object once. + """ + debug('build_storage_tree: SID=%d - %s - sid_child=%d' + % (self.sid, repr(self.name), self.sid_child)) + if self.sid_child != NOSTREAM: + # if child SID is not NOSTREAM, then this entry is a storage. + # Let's walk through the tree of children to fill the kids list: + self.append_kids(self.sid_child) + + # Note from OpenOffice documentation: the safest way is to + # recreate the tree because some implementations may store broken + # red-black trees... + + # in the OLE file, entries are sorted on (length, name). + # for convenience, we sort them on name instead: + # (see rich comparison methods in this class) + self.kids.sort() + + def append_kids(self, child_sid): + """ + Walk through red-black tree of children of this directory entry to add + all of them to the kids list. (recursive method) + + :param child_sid : index of child directory entry to use, or None when called + first time for the root. (only used during recursion) + """ + # [PL] this method was added to use simple recursion instead of a complex + # algorithm. 
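+        # (Illustration, assuming a small tree: for a child with sid_left=A
+        # and sid_right=B, the code below appends A, then the child itself,
+        # then B, i.e. an in-order traversal of the red-black tree.)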
+ # if this is not a storage or a leaf of the tree, nothing to do: + if child_sid == NOSTREAM: + return + # check if child SID is in the proper range: + if child_sid < 0 or child_sid >= len(self.olefile.direntries): + self.olefile.raise_defect(DEFECT_FATAL, 'OLE DirEntry index out of range') + # get child direntry: + child = self.olefile._load_direntry(child_sid) #direntries[child_sid] + debug('append_kids: child_sid=%d - %s - sid_left=%d, sid_right=%d, sid_child=%d' + % (child.sid, repr(child.name), child.sid_left, child.sid_right, child.sid_child)) + # the directory entries are organized as a red-black tree. + # (cf. Wikipedia for details) + # First walk through left side of the tree: + self.append_kids(child.sid_left) + # Check if its name is not already used (case-insensitive): + name_lower = child.name.lower() + if name_lower in self.kids_dict: + self.olefile.raise_defect(DEFECT_INCORRECT, + "Duplicate filename in OLE storage") + # Then the child_sid _OleDirectoryEntry object is appended to the + # kids list and dictionary: + self.kids.append(child) + self.kids_dict[name_lower] = child + # Check if kid was not already referenced in a storage: + if child.used: + self.olefile.raise_defect(DEFECT_INCORRECT, + 'OLE Entry referenced more than once') + child.used = True + # Finally walk through right side of the tree: + self.append_kids(child.sid_right) + # Afterwards build kid's own tree if it's also a storage: + child.build_storage_tree() + + def __eq__(self, other): + "Compare entries by name" + return self.name == other.name + + def __lt__(self, other): + "Compare entries by name" + return self.name < other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __le__(self, other): + return self.__eq__(other) or self.__lt__(other) + + # Reflected __lt__() and __le__() will be used for __gt__() and __ge__() + + #TODO: replace by the same function as MS implementation ? + # (order by name length first, then case-insensitive order) + + def dump(self, tab = 0): + "Dump this entry, and all its subentries (for debug purposes only)" + TYPES = ["(invalid)", "(storage)", "(stream)", "(lockbytes)", + "(property)", "(root)"] + print(" "*tab + repr(self.name), TYPES[self.entry_type], end=' ') + if self.entry_type in (STGTY_STREAM, STGTY_ROOT): + print(self.size, "bytes", end=' ') + print() + if self.entry_type in (STGTY_STORAGE, STGTY_ROOT) and self.clsid: + print(" "*tab + "{%s}" % self.clsid) + + for kid in self.kids: + kid.dump(tab + 2) + + def getmtime(self): + """ + Return modification time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.modifyTime == 0: + return None + return filetime2datetime(self.modifyTime) + + def getctime(self): + """ + Return creation time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.createTime == 0: + return None + return filetime2datetime(self.createTime) + + +#--- OleFileIO ---------------------------------------------------------------- + +class OleFileIO(object): + """ + OLE container object + + This class encapsulates the interface to an OLE 2 structured + storage file. Use the :py:meth:`~PIL.OleFileIO.OleFileIO.listdir` and + :py:meth:`~PIL.OleFileIO.OleFileIO.openstream` methods to + access the contents of this file. + + Object names are given as a list of strings, one for each subentry + level. 
The root entry should be omitted. For example, the following + code extracts all image streams from a Microsoft Image Composer file:: + + ole = OleFileIO("fan.mic") + + for entry in ole.listdir(): + if entry[1:2] == "Image": + fin = ole.openstream(entry) + fout = open(entry[0:1], "wb") + while True: + s = fin.read(8192) + if not s: + break + fout.write(s) + + You can use the viewer application provided with the Python Imaging + Library to view the resulting files (which happens to be standard + TIFF files). + """ + + def __init__(self, filename=None, raise_defects=DEFECT_FATAL, + write_mode=False, debug=False, path_encoding=DEFAULT_PATH_ENCODING): + """ + Constructor for the OleFileIO class. + + :param filename: file to open. + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param raise_defects: minimal level for defects to be raised as exceptions. + (use DEFECT_FATAL for a typical application, DEFECT_INCORRECT for a + security-oriented application, see source code for details) + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. + + :param debug: bool, set debug mode + + :param path_encoding: None or str, name of the codec to use for path + names (streams and storages), or None for Unicode. + Unicode by default on Python 3+, UTF-8 on Python 2.x. + (new in olefile 0.42, was hardcoded to Latin-1 until olefile v0.41) + """ + set_debug_mode(debug) + # minimal level for defects to be raised as exceptions: + self._raise_defects_level = raise_defects + # list of defects/issues not raised as exceptions: + # tuples of (exception type, message) + self.parsing_issues = [] + self.write_mode = write_mode + self.path_encoding = path_encoding + self._filesize = None + self.fp = None + if filename: + self.open(filename, write_mode=write_mode) + + def raise_defect(self, defect_level, message, exception_type=IOError): + """ + This method should be called for any defect found during file parsing. + It may raise an IOError exception according to the minimal level chosen + for the OleFileIO object. + + :param defect_level: defect level, possible values are: + + - DEFECT_UNSURE : a case which looks weird, but not sure it's a defect + - DEFECT_POTENTIAL : a potential defect + - DEFECT_INCORRECT : an error according to specifications, but parsing can go on + - DEFECT_FATAL : an error which cannot be ignored, parsing is impossible + + :param message: string describing the defect, used with raised exception. + :param exception_type: exception class to be raised, IOError by default + """ + # added by [PL] + if defect_level >= self._raise_defects_level: + raise exception_type(message) + else: + # just record the issue, no exception raised: + self.parsing_issues.append((exception_type, message)) + + def _decode_utf16_str(self, utf16_str, errors='replace'): + """ + Decode a string encoded in UTF-16 LE format, as found in the OLE + directory or in property streams. Return a string encoded + according to the path_encoding specified for the OleFileIO object. 
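+
+        For example (sketch): b'T\x00e\x00s\x00t\x00' decodes to u'Test',
+        which is then re-encoded to bytes if a path_encoding such as 'utf-8'
+        is set, or returned as a Unicode string if path_encoding is None.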
+ + :param utf16_str: bytes string encoded in UTF-16 LE format + :param errors: str, see python documentation for str.decode() + :return: str, encoded according to path_encoding + """ + unicode_str = utf16_str.decode('UTF-16LE', errors) + if self.path_encoding: + # an encoding has been specified for path names: + return unicode_str.encode(self.path_encoding, errors) + else: + # path_encoding=None, return the Unicode string as-is: + return unicode_str + + def open(self, filename, write_mode=False): + """ + Open an OLE2 file in read-only or read/write mode. + Read and parse the header, FAT and directory. + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. (ignored if filename is not a path) + """ + self.write_mode = write_mode + # [PL] check if filename is a string-like or file-like object: + # (it is better to check for a read() method) + if hasattr(filename, 'read'): + #TODO: also check seek and tell methods? + # file-like object: use it directly + self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + # convert it to BytesIO + self.fp = io.BytesIO(filename) + else: + # string-like object: filename of file on disk + if self.write_mode: + # open file in mode 'read with update, binary' + # According to https://docs.python.org/2/library/functions.html#open + # 'w' would truncate the file, 'a' may only append on some Unixes + mode = 'r+b' + else: + # read-only mode by default + mode = 'rb' + self.fp = open(filename, mode) + # obtain the filesize by using seek and tell, which should work on most + # file-like objects: + #TODO: do it above, using getsize with filename when possible? 
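+        # (Sketch of the three accepted inputs, with a hypothetical name:
+        #   OleFileIO('doc.doc')                    -> path on disk
+        #   OleFileIO(open('doc.doc', 'rb').read()) -> bytes buffer
+        #   OleFileIO(open('doc.doc', 'rb'))        -> file-like object)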
+ #TODO: fix code to fail with clear exception when filesize cannot be obtained + filesize = 0 + self.fp.seek(0, os.SEEK_END) + try: + filesize = self.fp.tell() + finally: + self.fp.seek(0) + self._filesize = filesize + + # lists of streams in FAT and MiniFAT, to detect duplicate references + # (list of indexes of first sectors of each stream) + self._used_streams_fat = [] + self._used_streams_minifat = [] + + header = self.fp.read(512) + + if len(header) != 512 or header[:8] != MAGIC: + self.raise_defect(DEFECT_FATAL, "not an OLE2 structured storage file") + + # [PL] header structure according to AAF specifications: + ##Header + ##struct StructuredStorageHeader { // [offset from start (bytes), length (bytes)] + ##BYTE _abSig[8]; // [00H,08] {0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, + ## // 0x1a, 0xe1} for current version + ##CLSID _clsid; // [08H,16] reserved must be zero (WriteClassStg/ + ## // GetClassFile uses root directory class id) + ##USHORT _uMinorVersion; // [18H,02] minor version of the format: 33 is + ## // written by reference implementation + ##USHORT _uDllVersion; // [1AH,02] major version of the dll/format: 3 for + ## // 512-byte sectors, 4 for 4 KB sectors + ##USHORT _uByteOrder; // [1CH,02] 0xFFFE: indicates Intel byte-ordering + ##USHORT _uSectorShift; // [1EH,02] size of sectors in power-of-two; + ## // typically 9 indicating 512-byte sectors + ##USHORT _uMiniSectorShift; // [20H,02] size of mini-sectors in power-of-two; + ## // typically 6 indicating 64-byte mini-sectors + ##USHORT _usReserved; // [22H,02] reserved, must be zero + ##ULONG _ulReserved1; // [24H,04] reserved, must be zero + ##FSINDEX _csectDir; // [28H,04] must be zero for 512-byte sectors, + ## // number of SECTs in directory chain for 4 KB + ## // sectors + ##FSINDEX _csectFat; // [2CH,04] number of SECTs in the FAT chain + ##SECT _sectDirStart; // [30H,04] first SECT in the directory chain + ##DFSIGNATURE _signature; // [34H,04] signature used for transactions; must + ## // be zero. The reference implementation + ## // does not support transactions + ##ULONG _ulMiniSectorCutoff; // [38H,04] maximum size for a mini stream; + ## // typically 4096 bytes + ##SECT _sectMiniFatStart; // [3CH,04] first SECT in the MiniFAT chain + ##FSINDEX _csectMiniFat; // [40H,04] number of SECTs in the MiniFAT chain + ##SECT _sectDifStart; // [44H,04] first SECT in the DIFAT chain + ##FSINDEX _csectDif; // [48H,04] number of SECTs in the DIFAT chain + ##SECT _sectFat[109]; // [4CH,436] the SECTs of first 109 FAT sectors + ##}; + + # [PL] header decoding: + # '<' indicates little-endian byte ordering for Intel (cf. 
struct module help) + fmt_header = '<8s16sHHHHHHLLLLLLLLLL' + header_size = struct.calcsize(fmt_header) + debug("fmt_header size = %d, +FAT = %d" % (header_size, header_size + 109*4)) + header1 = header[:header_size] + ( + self.Sig, + self.clsid, + self.MinorVersion, + self.DllVersion, + self.ByteOrder, + self.SectorShift, + self.MiniSectorShift, + self.Reserved, self.Reserved1, + self.csectDir, + self.csectFat, + self.sectDirStart, + self.signature, + self.MiniSectorCutoff, + self.MiniFatStart, + self.csectMiniFat, + self.sectDifStart, + self.csectDif + ) = struct.unpack(fmt_header, header1) + debug(struct.unpack(fmt_header, header1)) + + if self.Sig != MAGIC: + # OLE signature should always be present + self.raise_defect(DEFECT_FATAL, "incorrect OLE signature") + if self.clsid != bytearray(16): + # according to AAF specs, CLSID should always be zero + self.raise_defect(DEFECT_INCORRECT, "incorrect CLSID in OLE header") + debug("MinorVersion = %d" % self.MinorVersion) + debug("DllVersion = %d" % self.DllVersion) + if self.DllVersion not in [3, 4]: + # version 3: usual format, 512 bytes per sector + # version 4: large format, 4K per sector + self.raise_defect(DEFECT_INCORRECT, "incorrect DllVersion in OLE header") + debug("ByteOrder = %X" % self.ByteOrder) + if self.ByteOrder != 0xFFFE: + # For now only common little-endian documents are handled correctly + self.raise_defect(DEFECT_FATAL, "incorrect ByteOrder in OLE header") + # TODO: add big-endian support for documents created on Mac ? + # But according to [MS-CFB] ? v20140502, ByteOrder MUST be 0xFFFE. + self.SectorSize = 2**self.SectorShift + debug("SectorSize = %d" % self.SectorSize) + if self.SectorSize not in [512, 4096]: + self.raise_defect(DEFECT_INCORRECT, "incorrect SectorSize in OLE header") + if (self.DllVersion == 3 and self.SectorSize != 512) \ + or (self.DllVersion == 4 and self.SectorSize != 4096): + self.raise_defect(DEFECT_INCORRECT, "SectorSize does not match DllVersion in OLE header") + self.MiniSectorSize = 2**self.MiniSectorShift + debug("MiniSectorSize = %d" % self.MiniSectorSize) + if self.MiniSectorSize not in [64]: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorSize in OLE header") + if self.Reserved != 0 or self.Reserved1 != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect OLE header (non-null reserved bytes)") + debug("csectDir = %d" % self.csectDir) + # Number of directory sectors (only allowed if DllVersion != 3) + if self.SectorSize == 512 and self.csectDir != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect csectDir in OLE header") + debug("csectFat = %d" % self.csectFat) + # csectFat = number of FAT sectors in the file + debug("sectDirStart = %X" % self.sectDirStart) + # sectDirStart = 1st sector containing the directory + debug("signature = %d" % self.signature) + # Signature should be zero, BUT some implementations do not follow this + # rule => only a potential defect: + # (according to MS-CFB, may be != 0 for applications supporting file + # transactions) + if self.signature != 0: + self.raise_defect(DEFECT_POTENTIAL, "incorrect OLE header (signature>0)") + debug("MiniSectorCutoff = %d" % self.MiniSectorCutoff) + # MS-CFB: This integer field MUST be set to 0x00001000. This field + # specifies the maximum size of a user-defined data stream allocated + # from the mini FAT and mini stream, and that cutoff is 4096 bytes. + # Any user-defined data stream larger than or equal to this cutoff size + # must be allocated as normal sectors from the FAT. 
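+        # (For example: with the standard 0x1000 cutoff, a 1000-byte stream
+        # lives in the ministream and is mapped by the MiniFAT, while a
+        # 5000-byte stream is allocated normal FAT sectors.)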
+ if self.MiniSectorCutoff != 0x1000: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorCutoff in OLE header") + debug("MiniFatStart = %X" % self.MiniFatStart) + debug("csectMiniFat = %d" % self.csectMiniFat) + debug("sectDifStart = %X" % self.sectDifStart) + debug("csectDif = %d" % self.csectDif) + + # calculate the number of sectors in the file + # (-1 because header doesn't count) + self.nb_sect = ((filesize + self.SectorSize-1) // self.SectorSize) - 1 + debug("Number of sectors in the file: %d" % self.nb_sect) + #TODO: change this test, because an OLE file MAY contain other data + # after the last sector. + + # file clsid + self.clsid = _clsid(header[8:24]) + + #TODO: remove redundant attributes, and fix the code which uses them? + self.sectorsize = self.SectorSize #1 << i16(header, 30) + self.minisectorsize = self.MiniSectorSize #1 << i16(header, 32) + self.minisectorcutoff = self.MiniSectorCutoff # i32(header, 56) + + # check known streams for duplicate references (these are always in FAT, + # never in MiniFAT): + self._check_duplicate_stream(self.sectDirStart) + # check MiniFAT only if it is not empty: + if self.csectMiniFat: + self._check_duplicate_stream(self.MiniFatStart) + # check DIFAT only if it is not empty: + if self.csectDif: + self._check_duplicate_stream(self.sectDifStart) + + # Load file allocation tables + self.loadfat(header) + # Load directory. This sets both the direntries list (ordered by sid) + # and the root (ordered by hierarchy) members. + self.loaddirectory(self.sectDirStart)#i32(header, 48)) + self.ministream = None + self.minifatsect = self.MiniFatStart #i32(header, 60) + + def close(self): + """ + close the OLE file, to release the file object + """ + self.fp.close() + + def _check_duplicate_stream(self, first_sect, minifat=False): + """ + Checks if a stream has not been already referenced elsewhere. + This method should only be called once for each known stream, and only + if stream size is not null. + + :param first_sect: int, index of first sector of the stream in FAT + :param minifat: bool, if True, stream is located in the MiniFAT, else in the FAT + """ + if minifat: + debug('_check_duplicate_stream: sect=%d in MiniFAT' % first_sect) + used_streams = self._used_streams_minifat + else: + debug('_check_duplicate_stream: sect=%d in FAT' % first_sect) + # some values can be safely ignored (not a real stream): + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): + return + used_streams = self._used_streams_fat + #TODO: would it be more efficient using a dict or hash values, instead + # of a list of long ? + if first_sect in used_streams: + self.raise_defect(DEFECT_INCORRECT, 'Stream referenced twice') + else: + used_streams.append(first_sect) + + def dumpfat(self, fat, firstindex=0): + "Displays a part of FAT in human-readable form for debugging purpose" + # [PL] added only for debug + if not DEBUG_MODE: + return + # dictionary to convert special FAT values in human-readable strings + VPL = 8 # values per line (8+1 * 8+1 = 81) + fatnames = { + FREESECT: "..free..", + ENDOFCHAIN: "[ END. 
]", + FATSECT: "FATSECT ", + DIFSECT: "DIFSECT " + } + nbsect = len(fat) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = fat[i] + aux = sect & 0xFFFFFFFF # JYTHON-WORKAROUND + if aux in fatnames: + name = fatnames[aux] + else: + if sect == i+1: + name = " --->" + else: + name = "%8X" % sect + print(name, end=" ") + print() + + def dumpsect(self, sector, firstindex=0): + "Displays a sector in a human-readable form, for debugging purpose." + if not DEBUG_MODE: + return + VPL = 8 # number of values per line (8+1 * 8+1 = 81) + tab = array.array(UINT32, sector) + if sys.byteorder == 'big': + tab.byteswap() + nbsect = len(tab) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = tab[i] + name = "%8X" % sect + print(name, end=" ") + print() + + def sect2array(self, sect): + """ + convert a sector to an array of 32 bits unsigned integers, + swapping bytes on big endian CPUs such as PowerPC (old Macs) + """ + a = array.array(UINT32, sect) + # if CPU is big endian, swap bytes: + if sys.byteorder == 'big': + a.byteswap() + return a + + def loadfat_sect(self, sect): + """ + Adds the indexes of the given sector to the FAT + + :param sect: string containing the first FAT sector, or array of long integers + :returns: index of last FAT sector. + """ + # a FAT sector is an array of ulong integers. + if isinstance(sect, array.array): + # if sect is already an array it is directly used + fat1 = sect + else: + # if it's a raw sector, it is parsed in an array + fat1 = self.sect2array(sect) + self.dumpsect(sect) + # The FAT is a sector chain starting at the first index of itself. + for isect in fat1: + isect = isect & 0xFFFFFFFF # JYTHON-WORKAROUND + debug("isect = %X" % isect) + if isect == ENDOFCHAIN or isect == FREESECT: + # the end of the sector chain has been reached + debug("found end of sector chain") + break + # read the FAT sector + s = self.getsect(isect) + # parse it as an array of 32 bits integers, and add it to the + # global FAT array + nextfat = self.sect2array(s) + self.fat = self.fat + nextfat + return isect + + def loadfat(self, header): + """ + Load the FAT table. + """ + # The 1st sector of the file contains sector numbers for the first 109 + # FAT sectors, right after the header which is 76 bytes long. + # (always 109, whatever the sector size: 512 bytes = 76+4*109) + # Additional sectors are described by DIF blocks + + sect = header[76:512] + debug("len(sect)=%d, so %d integers" % (len(sect), len(sect)//4)) + #fat = [] + # [PL] FAT is an array of 32 bits unsigned ints, it's more effective + # to use an array than a list in Python. 
+ # It's initialized as empty first: + self.fat = array.array(UINT32) + self.loadfat_sect(sect) + #self.dumpfat(self.fat) +## for i in range(0, len(sect), 4): +## ix = i32(sect, i) +## # [PL] if ix == -2 or ix == -1: # ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## if ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## break +## s = self.getsect(ix) +## #fat = fat + [i32(s, i) for i in range(0, len(s), 4)] +## fat = fat + array.array(UINT32, s) + if self.csectDif != 0: + # [PL] There's a DIFAT because file is larger than 6.8MB + # some checks just in case: + if self.csectFat <= 109: + # there must be at least 109 blocks in header and the rest in + # DIFAT, so number of sectors must be >109. + self.raise_defect(DEFECT_INCORRECT, 'incorrect DIFAT, not enough sectors') + if self.sectDifStart >= self.nb_sect: + # initial DIFAT block index must be valid + self.raise_defect(DEFECT_FATAL, 'incorrect DIFAT, first index out of range') + debug("DIFAT analysis...") + # We compute the necessary number of DIFAT sectors : + # Number of pointers per DIFAT sector = (sectorsize/4)-1 + # (-1 because the last pointer is the next DIFAT sector number) + nb_difat_sectors = (self.sectorsize//4)-1 + # (if 512 bytes: each DIFAT sector = 127 pointers + 1 towards next DIFAT sector) + nb_difat = (self.csectFat-109 + nb_difat_sectors-1)//nb_difat_sectors + debug("nb_difat = %d" % nb_difat) + if self.csectDif != nb_difat: + raise IOError('incorrect DIFAT') + isect_difat = self.sectDifStart + for i in iterrange(nb_difat): + debug("DIFAT block %d, sector %X" % (i, isect_difat)) + #TODO: check if corresponding FAT SID = DIFSECT + sector_difat = self.getsect(isect_difat) + difat = self.sect2array(sector_difat) + self.dumpsect(sector_difat) + self.loadfat_sect(difat[:nb_difat_sectors]) + # last DIFAT pointer is next DIFAT sector: + isect_difat = difat[nb_difat_sectors] + debug("next DIFAT sector: %X" % isect_difat) + # checks: + if isect_difat not in [ENDOFCHAIN, FREESECT]: + # last DIFAT pointer value must be ENDOFCHAIN or FREESECT + raise IOError('incorrect end of DIFAT') +## if len(self.fat) != self.csectFat: +## # FAT should contain csectFat blocks +## print("FAT length: %d instead of %d" % (len(self.fat), self.csectFat)) +## raise IOError('incorrect DIFAT') + # since FAT is read from fixed-size sectors, it may contain more values + # than the actual number of sectors in the file. + # Keep only the relevant sector indexes: + if len(self.fat) > self.nb_sect: + debug('len(fat)=%d, shrunk to nb_sect=%d' % (len(self.fat), self.nb_sect)) + self.fat = self.fat[:self.nb_sect] + debug('\nFAT:') + self.dumpfat(self.fat) + + def loadminifat(self): + """ + Load the MiniFAT table. + """ + # MiniFAT is stored in a standard sub-stream, pointed to by a header + # field. + # NOTE: there are two sizes to take into account for this stream: + # 1) Stream size is calculated according to the number of sectors + # declared in the OLE header. This allocated stream may be more than + # needed to store the actual sector indexes. 
(self.csectMiniFat is the number of sectors of size self.SectorSize)
+        stream_size = self.csectMiniFat * self.SectorSize
+        # 2) Actually used size is calculated by dividing the MiniStream size
+        # (given by root entry size) by the size of mini sectors, *4 for
+        # 32 bits indexes:
+        nb_minisectors = (self.root.size + self.MiniSectorSize-1) // self.MiniSectorSize
+        used_size = nb_minisectors * 4
+        debug('loadminifat(): minifatsect=%d, nb FAT sectors=%d, used_size=%d, stream_size=%d, nb MiniSectors=%d' %
+              (self.minifatsect, self.csectMiniFat, used_size, stream_size, nb_minisectors))
+        if used_size > stream_size:
+            # This is not really a problem, but may indicate a wrong implementation:
+            self.raise_defect(DEFECT_INCORRECT, 'OLE MiniStream is larger than MiniFAT')
+        # In any case, first read stream_size:
+        s = self._open(self.minifatsect, stream_size, force_FAT=True).read()
+        # [PL] Old code replaced by an array:
+        # self.minifat = [i32(s, i) for i in range(0, len(s), 4)]
+        self.minifat = self.sect2array(s)
+        # Then shrink the array to used size, to avoid indexes out of MiniStream:
+        debug('MiniFAT shrunk from %d to %d sectors' % (len(self.minifat), nb_minisectors))
+        self.minifat = self.minifat[:nb_minisectors]
+        debug('loadminifat(): len=%d' % len(self.minifat))
+        debug('\nMiniFAT:')
+        self.dumpfat(self.minifat)
+
+    def getsect(self, sect):
+        """
+        Read given sector from file on disk.
+
+        :param sect: int, sector index
+        :returns: a string containing the sector data.
+        """
+        # From [MS-CFB]: A sector number can be converted into a byte offset
+        # into the file by using the following formula:
+        # (sector number + 1) x Sector Size.
+        # This implies that sector #0 of the file begins at byte offset Sector
+        # Size, not at 0.
+
+        # [PL] the original code in PIL was wrong when sectors are 4KB instead of
+        # 512 bytes:
+        # self.fp.seek(512 + self.sectorsize * sect)
+        # [PL]: added safety checks:
+        # print("getsect(%X)" % sect)
+        try:
+            self.fp.seek(self.sectorsize * (sect+1))
+        except:
+            debug('getsect(): sect=%X, seek=%d, filesize=%d' %
+                  (sect, self.sectorsize*(sect+1), self._filesize))
+            self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range')
+        sector = self.fp.read(self.sectorsize)
+        if len(sector) != self.sectorsize:
+            debug('getsect(): sect=%X, read=%d, sectorsize=%d' %
+                  (sect, len(sector), self.sectorsize))
+            self.raise_defect(DEFECT_FATAL, 'incomplete OLE sector')
+        return sector
+
+    def write_sect(self, sect, data, padding=b'\x00'):
+        """
+        Write given sector to file on disk.
+
+        :param sect: int, sector index
+        :param data: bytes, sector data
+        :param padding: single byte, padding character if data < sector size
+        """
+        if not isinstance(data, bytes):
+            raise TypeError("write_sect: data must be a bytes string")
+        if not isinstance(padding, bytes) or len(padding) != 1:
+            raise TypeError("write_sect: padding must be a bytes string of 1 char")
+        #TODO: we could allow padding=None for no padding at all
+        try:
+            self.fp.seek(self.sectorsize * (sect+1))
+        except:
+            debug('write_sect(): sect=%X, seek=%d, filesize=%d' %
+                  (sect, self.sectorsize*(sect+1), self._filesize))
+            self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range')
+        if len(data) < self.sectorsize:
+            # add padding
+            data += padding * (self.sectorsize - len(data))
+        elif len(data) > self.sectorsize:
+            raise ValueError("Data is larger than sector size")
+        self.fp.write(data)
+
+    def loaddirectory(self, sect):
+        """
+        Load the directory.
+
+        :param sect: sector index of directory stream.
+ """ + # The directory is stored in a standard + # substream, independent of its size. + + # open directory stream as a read-only file: + # (stream size is not known in advance) + self.directory_fp = self._open(sect) + + # [PL] to detect malformed documents and avoid DoS attacks, the maximum + # number of directory entries can be calculated: + max_entries = self.directory_fp.size // 128 + debug('loaddirectory: size=%d, max_entries=%d' % + (self.directory_fp.size, max_entries)) + + # Create list of directory entries + # self.direntries = [] + # We start with a list of "None" object + self.direntries = [None] * max_entries +## for sid in iterrange(max_entries): +## entry = fp.read(128) +## if not entry: +## break +## self.direntries.append(_OleDirectoryEntry(entry, sid, self)) + # load root entry: + root_entry = self._load_direntry(0) + # Root entry is the first entry: + self.root = self.direntries[0] + # read and build all storage trees, starting from the root: + self.root.build_storage_tree() + + def _load_direntry(self, sid): + """ + Load a directory entry from the directory. + This method should only be called once for each storage/stream when + loading the directory. + + :param sid: index of storage/stream in the directory. + :returns: a _OleDirectoryEntry object + + :exception IOError: if the entry has always been referenced. + """ + # check if SID is OK: + if sid < 0 or sid >= len(self.direntries): + self.raise_defect(DEFECT_FATAL, "OLE directory index out of range") + # check if entry was already referenced: + if self.direntries[sid] is not None: + self.raise_defect(DEFECT_INCORRECT, + "double reference for OLE stream/storage") + # if exception not raised, return the object + return self.direntries[sid] + self.directory_fp.seek(sid * 128) + entry = self.directory_fp.read(128) + self.direntries[sid] = _OleDirectoryEntry(entry, sid, self) + return self.direntries[sid] + + def dumpdirectory(self): + """ + Dump directory (for debugging only) + """ + self.root.dump() + + def _open(self, start, size = 0x7FFFFFFF, force_FAT=False): + """ + Open a stream, either in FAT or MiniFAT according to its size. + (openstream helper) + + :param start: index of first sector + :param size: size of stream (or nothing if size is unknown) + :param force_FAT: if False (default), stream will be opened in FAT or MiniFAT + according to size. If True, it will always be opened in FAT. 
+ """ + debug('OleFileIO.open(): sect=%d, size=%d, force_FAT=%s' % + (start, size, str(force_FAT))) + # stream size is compared to the MiniSectorCutoff threshold: + if size < self.minisectorcutoff and not force_FAT: + # ministream object + if not self.ministream: + # load MiniFAT if it wasn't already done: + self.loadminifat() + # The first sector index of the miniFAT stream is stored in the + # root directory entry: + size_ministream = self.root.size + debug('Opening MiniStream: sect=%d, size=%d' % + (self.root.isectStart, size_ministream)) + self.ministream = self._open(self.root.isectStart, + size_ministream, force_FAT=True) + return _OleStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) + else: + # standard stream + return _OleStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, + sectorsize=self.sectorsize, fat=self.fat, + filesize=self._filesize) + + def _list(self, files, prefix, node, streams=True, storages=False): + """ + listdir helper + + :param files: list of files to fill in + :param prefix: current location in storage tree (list of names) + :param node: current node (_OleDirectoryEntry object) + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + """ + prefix = prefix + [node.name] + for entry in node.kids: + if entry.entry_type == STGTY_STORAGE: + # this is a storage + if storages: + # add it to the list + files.append(prefix[1:] + [entry.name]) + # check its kids + self._list(files, prefix, entry, streams, storages) + elif entry.entry_type == STGTY_STREAM: + # this is a stream + if streams: + # add it to the list + files.append(prefix[1:] + [entry.name]) + else: + self.raise_defect(DEFECT_INCORRECT, 'The directory tree contains an entry which is not a stream nor a storage.') + + def listdir(self, streams=True, storages=False): + """ + Return a list of streams and/or storages stored in this file + + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + :returns: list of stream and/or storage paths + """ + files = [] + self._list(files, [], self.root, streams, storages) + return files + + def _find(self, filename): + """ + Returns directory entry of given filename. (openstream helper) + Note: this method is case-insensitive. + + :param filename: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :returns: sid of requested filename + :exception IOError: if file not found + """ + + # if filename is a string instead of a list, split it on slashes to + # convert to a list: + if isinstance(filename, basestring): + filename = filename.split('/') + # walk across storage tree, following given path: + node = self.root + for name in filename: + for kid in node.kids: + if kid.name.lower() == name.lower(): + break + else: + raise IOError("file not found") + node = kid + return node.sid + + def openstream(self, filename): + """ + Open a stream as a read-only file object (BytesIO). + Note: filename is case-insensitive. 
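+
+        A minimal usage sketch (assuming 'example.doc' is an OLE file
+        containing a 'WordDocument' stream)::
+
+            ole = OleFileIO('example.doc')
+            data = ole.openstream('WordDocument').read()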
+ + :param filename: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :returns: file object (read-only) + :exception IOError: if filename not found, or if this is not a stream. + """ + sid = self._find(filename) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + raise IOError("this file is not a stream") + return self._open(entry.isectStart, entry.size) + + def write_stream(self, stream_name, data): + """ + Write a stream to disk. For now, it is only possible to replace an + existing stream by data of the same size. + + :param stream_name: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :param data: bytes, data to be written, must be the same size as the original + stream. + """ + if not isinstance(data, bytes): + raise TypeError("write_stream: data must be a bytes string") + sid = self._find(stream_name) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + raise IOError("this is not a stream") + size = entry.size + if size != len(data): + raise ValueError("write_stream: data must be the same size as the existing stream") + if size < self.minisectorcutoff: + raise NotImplementedError("Writing a stream in MiniFAT is not implemented yet") + sect = entry.isectStart + # number of sectors to write + nb_sectors = (size + (self.sectorsize-1)) // self.sectorsize + debug('nb_sectors = %d' % nb_sectors) + for i in range(nb_sectors): + # try: + # self.fp.seek(offset + self.sectorsize * sect) + # except: + # debug('sect=%d, seek=%d' % + # (sect, offset+self.sectorsize*sect)) + # raise IOError('OLE sector index out of range') + # extract one sector from data, the last one being smaller: + if i < (nb_sectors-1): + data_sector = data[i*self.sectorsize:(i+1)*self.sectorsize] + #TODO: comment this if it works + assert(len(data_sector) == self.sectorsize) + else: + data_sector = data[i*self.sectorsize:] + # TODO: comment this if it works + debug('write_stream: size=%d sectorsize=%d data_sector=%d size%%sectorsize=%d' + % (size, self.sectorsize, len(data_sector), size % self.sectorsize)) + assert(len(data_sector) % self.sectorsize == size % self.sectorsize) + self.write_sect(sect, data_sector) +# self.fp.write(data_sector) + # jump to next sector in the FAT: + try: + sect = self.fat[sect] + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') + + def get_type(self, filename): + """ + Test if given filename exists as a stream or a storage in the OLE + container, and return its type. + + :param filename: path of stream in storage tree. 
(see openstream for syntax) + :returns: False if object does not exist, its entry type (>0) otherwise: + + - STGTY_STREAM: a stream + - STGTY_STORAGE: a storage + - STGTY_ROOT: the root entry + """ + try: + sid = self._find(filename) + entry = self.direntries[sid] + return entry.entry_type + except: + return False + + def getmtime(self, filename): + """ + Return modification time of a stream/storage. + + :param filename: path of stream/storage in storage tree. (see openstream for + syntax) + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + sid = self._find(filename) + entry = self.direntries[sid] + return entry.getmtime() + + def getctime(self, filename): + """ + Return creation time of a stream/storage. + + :param filename: path of stream/storage in storage tree. (see openstream for + syntax) + :returns: None if creation time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + sid = self._find(filename) + entry = self.direntries[sid] + return entry.getctime() + + def exists(self, filename): + """ + Test if given filename exists as a stream or a storage in the OLE + container. + Note: filename is case-insensitive. + + :param filename: path of stream in storage tree. (see openstream for syntax) + :returns: True if object exist, else False. + """ + try: + sid = self._find(filename) + return True + except: + return False + + def get_size(self, filename): + """ + Return size of a stream in the OLE container, in bytes. + + :param filename: path of stream in storage tree (see openstream for syntax) + :returns: size in bytes (long integer) + :exception IOError: if file not found + :exception TypeError: if this is not a stream. + """ + sid = self._find(filename) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + #TODO: Should it return zero instead of raising an exception ? + raise TypeError('object is not an OLE stream') + return entry.size + + def get_rootentry_name(self): + """ + Return root entry name. Should usually be 'Root Entry' or 'R' in most + implementations. + """ + return self.root.name + + def getproperties(self, filename, convert_time=False, no_conversion=None): + """ + Return properties described in substream. 
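+
+        A minimal usage sketch (the standard summary stream is named
+        "\\x05SummaryInformation", with a literal 0x05 prefix byte)::
+
+            props = ole.getproperties("\\x05SummaryInformation",
+                                      convert_time=True)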
+ + :param filename: path of stream in storage tree (see openstream for syntax) + :param convert_time: bool, if True timestamps will be converted to Python datetime + :param no_conversion: None or list of int, timestamps not to be converted + (for example total editing time is not a real timestamp) + + :returns: a dictionary of values indexed by id (integer) + """ + # REFERENCE: [MS-OLEPS] https://msdn.microsoft.com/en-us/library/dd942421.aspx + # make sure no_conversion is a list, just to simplify code below: + if no_conversion is None: + no_conversion = [] + # stream path as a string to report exceptions: + streampath = filename + if not isinstance(streampath, str): + streampath = '/'.join(streampath) + + fp = self.openstream(filename) + + data = {} + + try: + # header + s = fp.read(28) + clsid = _clsid(s[8:24]) + + # format id + s = fp.read(20) + fmtid = _clsid(s[:16]) + fp.seek(i32(s, 16)) + + # get section + s = b"****" + fp.read(i32(fp.read(4))-4) + # number of properties: + num_props = i32(s, 4) + except BaseException as exc: + # catch exception while parsing property header, and only raise + # a DEFECT_INCORRECT then return an empty dict, because this is not + # a fatal error when parsing the whole file + msg = 'Error while parsing properties header in stream %s: %s' % ( + repr(streampath), exc) + self.raise_defect(DEFECT_INCORRECT, msg, type(exc)) + return data + + for i in range(num_props): + try: + id = 0 # just in case of an exception + id = i32(s, 8+i*8) + offset = i32(s, 12+i*8) + type = i32(s, offset) + + debug('property id=%d: type=%d offset=%X' % (id, type, offset)) + + # test for common types first (should perhaps use + # a dictionary instead?) + + if type == VT_I2: # 16-bit signed integer + value = i16(s, offset+4) + if value >= 32768: + value = value - 65536 + elif type == VT_UI2: # 2-byte unsigned integer + value = i16(s, offset+4) + elif type in (VT_I4, VT_INT, VT_ERROR): + # VT_I4: 32-bit signed integer + # VT_ERROR: HRESULT, similar to 32-bit signed integer, + # see http://msdn.microsoft.com/en-us/library/cc230330.aspx + value = i32(s, offset+4) + elif type in (VT_UI4, VT_UINT): # 4-byte unsigned integer + value = i32(s, offset+4) # FIXME + elif type in (VT_BSTR, VT_LPSTR): + # CodePageString, see http://msdn.microsoft.com/en-us/library/dd942354.aspx + # size is a 32 bits integer, including the null terminator, and + # possibly trailing or embedded null chars + #TODO: if codepage is unicode, the string should be converted as such + count = i32(s, offset+4) + value = s[offset+8:offset+8+count-1] + # remove all null chars: + value = value.replace(b'\x00', b'') + elif type == VT_BLOB: + # binary large object (BLOB) + # see http://msdn.microsoft.com/en-us/library/dd942282.aspx + count = i32(s, offset+4) + value = s[offset+8:offset+8+count] + elif type == VT_LPWSTR: + # UnicodeString + # see http://msdn.microsoft.com/en-us/library/dd942313.aspx + # "the string should NOT contain embedded or additional trailing + # null characters." + count = i32(s, offset+4) + value = self._decode_utf16_str(s[offset+8:offset+8+count*2]) + elif type == VT_FILETIME: + value = long(i32(s, offset+4)) + (long(i32(s, offset+8)) << 32) + # FILETIME is a 64-bit int: "number of 100ns periods + # since Jan 1,1601". 
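+                    # sanity check for this conversion: a value of
+                    # 116444736000000000 (100 ns units) is 11644473600 s
+                    # after 1601-01-01, i.e. 1970-01-01, the Unix epoch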
+                    if convert_time and id not in no_conversion:
+                        debug('Converting property #%d to python datetime, value=%d=%fs'
+                              % (id, value, float(value) / 10000000))
+                        # convert FILETIME to Python datetime.datetime
+                        # inspired from http://code.activestate.com/recipes/511425-filetime-to-datetime/
+                        _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0)
+                        debug('timedelta days=%d' % (value//(10*1000000*3600*24)))
+                        value = _FILETIME_null_date + datetime.timedelta(microseconds=value//10)
+                    else:
+                        # legacy code kept for backward compatibility: returns a
+                        # number of seconds since Jan 1,1601
+                        value = value // 10000000  # seconds
+                elif type == VT_UI1:  # 1-byte unsigned integer
+                    value = i8(s[offset+4])
+                elif type == VT_CLSID:
+                    value = _clsid(s[offset+4:offset+20])
+                elif type == VT_CF:
+                    # PropertyIdentifier or ClipboardData??
+                    # see http://msdn.microsoft.com/en-us/library/dd941945.aspx
+                    count = i32(s, offset+4)
+                    value = s[offset+8:offset+8+count]
+                elif type == VT_BOOL:
+                    # VARIANT_BOOL, 16 bits bool, 0x0000=False, 0xFFFF=True
+                    # see http://msdn.microsoft.com/en-us/library/cc237864.aspx
+                    value = bool(i16(s, offset+4))
+                else:
+                    value = None  # everything else yields "None"
+                    debug('property id=%d: type=%d not implemented in parser yet' % (id, type))
+
+                # missing: VT_EMPTY, VT_NULL, VT_R4, VT_R8, VT_CY, VT_DATE,
+                # VT_DECIMAL, VT_I1, VT_I8, VT_UI8,
+                # see http://msdn.microsoft.com/en-us/library/dd942033.aspx
+
+                # FIXME: add support for VT_VECTOR
+                # VT_VECTOR is a 32 uint giving the number of items, followed by
+                # the items in sequence. The VT_VECTOR value is combined with the
+                # type of items, e.g. VT_VECTOR|VT_BSTR
+                # see http://msdn.microsoft.com/en-us/library/dd942011.aspx
+
+                # print("%08x" % id, repr(value), end=" ")
+                # print("(%s)" % VT[i32(s, offset) & 0xFFF])
+
+                data[id] = value
+            except BaseException as exc:
+                # catch exception while parsing each property, and only raise
+                # a DEFECT_INCORRECT, because parsing can go on
+                msg = 'Error while parsing property id %d in stream %s: %s' % (
+                    id, repr(streampath), exc)
+                self.raise_defect(DEFECT_INCORRECT, msg, type(exc))
+
+        return data
+
+    def get_metadata(self):
+        """
+        Parse standard properties streams, return an OleMetadata object
+        containing all the available metadata.
+        (also stored in the metadata attribute of the OleFileIO object)
+
+        new in version 0.25
+        """
+        self.metadata = OleMetadata()
+        self.metadata.parse_properties(self)
+        return self.metadata
+
+#
+# --------------------------------------------------------------------
+# This script can be used to dump the directory of any OLE2 structured
+# storage file.
+
+if __name__ == "__main__":
+
+    # [PL] display quick usage info if launched from command-line
+    if len(sys.argv) <= 1:
+        print('olefile version %s %s - %s' % (__version__, __date__, __author__))
+        print(
+"""
+Launched from the command line, this script parses OLE files and prints info.
+
+Usage: olefile.py [-d] [-c] <file> [file2 ...]
+ +Options: +-d : debug mode (displays a lot of debug information, for developers only) +-c : check all streams (for debugging purposes) + +For more information, see http://www.decalage.info/olefile +""") + sys.exit() + + check_streams = False + for filename in sys.argv[1:]: + # try: + # OPTIONS: + if filename == '-d': + # option to switch debug mode on: + set_debug_mode(True) + continue + if filename == '-c': + # option to switch check streams mode on: + check_streams = True + continue + + ole = OleFileIO(filename)#, raise_defects=DEFECT_INCORRECT) + print("-" * 68) + print(filename) + print("-" * 68) + ole.dumpdirectory() + for streamname in ole.listdir(): + if streamname[-1][0] == "\005": + print(streamname, ": properties") + props = ole.getproperties(streamname, convert_time=True) + props = sorted(props.items()) + for k, v in props: + # [PL]: avoid to display too large or binary values: + if isinstance(v, (basestring, bytes)): + if len(v) > 50: + v = v[:50] + if isinstance(v, bytes): + # quick and dirty binary check: + for c in (1, 2, 3, 4, 5, 6, 7, 11, 12, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31): + if c in bytearray(v): + v = '(binary data)' + break + print(" ", k, v) + + if check_streams: + # Read all streams to check if there are errors: + print('\nChecking streams...') + for streamname in ole.listdir(): + # print name using repr() to convert binary chars to \xNN: + print('-', repr('/'.join(streamname)), '-', end=' ') + st_type = ole.get_type(streamname) + if st_type == STGTY_STREAM: + print('size %d' % ole.get_size(streamname)) + # just try to read stream in memory: + ole.openstream(streamname) + else: + print('NOT a stream : type=%d' % st_type) + print() + +# for streamname in ole.listdir(): +# # print name using repr() to convert binary chars to \xNN: +# print('-', repr('/'.join(streamname)),'-', end=' ') +# print(ole.getmtime(streamname)) +# print() + + print('Modification/Creation times of all directory entries:') + for entry in ole.direntries: + if entry is not None: + print('- %s: mtime=%s ctime=%s' % (entry.name, + entry.getmtime(), entry.getctime())) + print() + + # parse and display metadata: + meta = ole.get_metadata() + meta.dump() + print() + # [PL] Test a few new methods: + root = ole.get_rootentry_name() + print('Root entry name: "%s"' % root) + if ole.exists('worddocument'): + print("This is a Word document.") + print("type of stream 'WordDocument':", ole.get_type('worddocument')) + print("size :", ole.get_size('worddocument')) + if ole.exists('macros/vba'): + print("This document may contain VBA macros.") + + # print parsing issues: + print('\nNon-fatal issues raised during parsing:') + if ole.parsing_issues: + for exctype, msg in ole.parsing_issues: + print('- %s: %s' % (exctype.__name__, msg)) + else: + print('None') +## except IOError as v: +## print("***", "cannot read", file, "-", v) + +# this code was developed while listening to The Wedding Present "Sea Monsters" diff --git a/server/www/packages/packages-windows/x86/PIL/PSDraw.py b/server/www/packages/packages-windows/x86/PIL/PSDraw.py new file mode 100644 index 0000000..d4e7b18 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PSDraw.py @@ -0,0 +1,235 @@ +# +# The Python Imaging Library +# $Id$ +# +# simple postscript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. 
All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import EpsImagePlugin +import sys + +## +# Simple Postscript graphics interface. + + +class PSDraw(object): + """ + Sets up printing to the given file. If **file** is omitted, + :py:attr:`sys.stdout` is assumed. + """ + + def __init__(self, fp=None): + if not fp: + fp = sys.stdout + self.fp = fp + + def _fp_write(self, to_write): + if bytes is str or self.fp == sys.stdout: + self.fp.write(to_write) + else: + self.fp.write(bytes(to_write, 'UTF-8')) + + def begin_document(self, id=None): + """Set up printing of a document. (Write Postscript DSC header.)""" + # FIXME: incomplete + self._fp_write("%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n") + # self._fp_write(ERROR_PS) # debugging! + self._fp_write(EDROFF_PS) + self._fp_write(VDI_PS) + self._fp_write("%%EndProlog\n") + self.isofont = {} + + def end_document(self): + """Ends printing. (Write Postscript DSC footer.)""" + self._fp_write("%%EndDocument\n" + "restore showpage\n" + "%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font, size): + """ + Selects which font to use. + + :param font: A Postscript font name + :param size: Size in points. + """ + if font not in self.isofont: + # reencode font + self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" % + (font, font)) + self.isofont[font] = 1 + # rough + self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font)) + + def line(self, xy0, xy1): + """ + Draws a line between the two points. Coordinates are given in + Postscript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + xy = xy0 + xy1 + self._fp_write("%d %d %d %d Vl\n" % xy) + + def rectangle(self, box): + """ + Draws a rectangle. + + :param box: A 4-tuple of integers whose order and function is currently + undocumented. + + Hint: the tuple is passed into this format string: + + .. code-block:: python + + %d %d M %d %d 0 Vr\n + """ + self._fp_write("%d %d M %d %d 0 Vr\n" % box) + + def text(self, xy, text): + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. + """ + text = "\\(".join(text.split("(")) + text = "\\)".join(text.split(")")) + xy = xy + (text,) + self._fp_write("%d %d M (%s) S\n" % xy) + + def image(self, box, im, dpi=None): + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # greyscale + # image size (on paper) + x = float(im.size[0] * 72) / dpi + y = float(im.size[1] * 72) / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self._fp_write("gsave\n%f %f translate\n" % (dx, dy)) + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self._fp_write("%f %f scale\n" % (sx, sy)) + EpsImagePlugin._save(im, self.fp, None, 0) + self._fp_write("\ngrestore\n") + +# -------------------------------------------------------------------- +# Postscript driver + +# +# EDROFF.PS -- Postscript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
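+#
+# A minimal usage sketch for the PSDraw class above (assumes "im" is an
+# already-loaded PIL image; output goes to sys.stdout by default):
+#
+#   ps = PSDraw()
+#   ps.begin_document()
+#   ps.setfont("Helvetica", 12)
+#   ps.text((72, 720), "hello")
+#   ps.image((72, 72, 540, 684), im)
+#   ps.end_document()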
+#
+
+EDROFF_PS = """\
+/S { show } bind def
+/P { moveto show } bind def
+/M { moveto } bind def
+/X { 0 rmoveto } bind def
+/Y { 0 exch rmoveto } bind def
+/E { findfont
+     dup maxlength dict begin
+        {
+            1 index /FID ne { def } { pop pop } ifelse
+        } forall
+        /Encoding exch def
+        dup /FontName exch def
+        currentdict end definefont pop
+} bind def
+/F { findfont exch scalefont dup setfont
+     [ exch /setfont cvx ] cvx bind def
+} bind def
+"""

+#
+# VDI.PS -- Postscript driver for VDI meta commands
+#
+# History:
+# 94-01-25 fl: created (edroff 2.04)
+#
+# Copyright (c) Fredrik Lundh 1994.
+#

+VDI_PS = """\
+/Vm { moveto } bind def
+/Va { newpath arcn stroke } bind def
+/Vl { moveto lineto stroke } bind def
+/Vc { newpath 0 360 arc closepath } bind def
+/Vr { exch dup 0 rlineto
+      exch dup neg 0 exch rlineto
+      exch neg 0 rlineto
+      0 exch rlineto
+      100 div setgray fill 0 setgray } bind def
+/Tm matrix def
+/Ve { Tm currentmatrix pop
+      translate scale newpath 0 0 .5 0 360 arc closepath
+      Tm setmatrix
+} bind def
+/Vf { currentgray exch setgray fill setgray } bind def
+"""

+#
+# ERROR.PS -- Error handler
+#
+# History:
+# 89-11-21 fl: created (pslist 1.10)
+#

+ERROR_PS = """\
+/landscape false def
+/errorBUF 200 string def
+/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def
+errordict begin /handleerror {
+    initmatrix /Courier findfont 10 scalefont setfont
+    newpath 72 720 moveto $error begin /newerror false def
+    (PostScript Error) show errorNL errorNL
+    (Error: ) show
+        /errorname load errorBUF cvs show errorNL errorNL
+    (Command: ) show
+        /command load dup type /stringtype ne { errorBUF cvs } if show
+        errorNL errorNL
+    (VMstatus: ) show
+        vmstatus errorBUF cvs show ( bytes available, ) show
+        errorBUF cvs show ( bytes used at level ) show
+        errorBUF cvs show errorNL errorNL
+    (Operand stack: ) show errorNL /ostack load {
+        dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+    } forall errorNL
+    (Execution stack: ) show errorNL /estack load {
+        dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+    } forall
+    end showpage
+} def end
+"""
diff --git a/server/www/packages/packages-windows/x86/PIL/PaletteFile.py b/server/www/packages/packages-windows/x86/PIL/PaletteFile.py
new file mode 100644
index 0000000..ef50fee
--- /dev/null
+++ b/server/www/packages/packages-windows/x86/PIL/PaletteFile.py
@@ -0,0 +1,55 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read simple, teragon-style palette files
+#
+# History:
+# 97-08-23 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#

+from PIL._binary import o8


+##
+# File handler for Teragon-style palette files.
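+#
+# Each data line holds integers "i r g b" (or "i v" for grey, where v is
+# used for all three channels); lines starting with "#" are skipped. An
+# illustrative sketch of such a file (a real file usually defines all
+# 256 entries):
+#
+#   # a palette file
+#   0 0 0 0
+#   255 255 255 255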
+ +class PaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [(i, i, i) for i in range(256)] + + while True: + + s = fp.readline() + + if not s: + break + if s[0:1] == b"#": + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = [int(x) for x in s.split()] + try: + [i, r, g, b] = v + except ValueError: + [i, r] = v + g = b = r + + if 0 <= i <= 255: + self.palette[i] = o8(r) + o8(g) + o8(b) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/server/www/packages/packages-windows/x86/PIL/PalmImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PalmImagePlugin.py new file mode 100644 index 0000000..4f415ff --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PalmImagePlugin.py @@ -0,0 +1,241 @@ +# +# The Python Imaging Library. +# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## + +from PIL import Image, ImageFile, _binary + +__version__ = "1.0" + +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + 
(153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) + + +# so build a prototype image to be used for palette resampling +def build_prototype_image(): + image = Image.new("L", (1, len(_Palm8BitColormapValues),)) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata = () + for i in range(len(_Palm8BitColormapValues)): + palettedata = palettedata + _Palm8BitColormapValues[i] + for i in range(256 - len(_Palm8BitColormapValues)): + palettedata = palettedata + (0, 0, 0) + image.putpalette(palettedata) + return image + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = { + "custom-colormap": 0x4000, + "is-compressed": 0x8000, + "has-transparent": 0x2000, + } + +_COMPRESSION_TYPES = { + "none": 0xFF, + "rle": 0x01, + "scanline": 0x00, + } + +o8 = _binary.o8 +o16b = _binary.o16be + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + +def _save(im, fp, filename, check=0): + + if im.mode == "P": + + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif (im.mode == "L" and + "bpp" in im.encoderinfo and + im.encoderinfo["bpp"] in (1, 2, 4)): + + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4): + + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
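+        # e.g. with bpp=2: maxval=3, so a stored value of 0 becomes 3
+        # and 3 becomes 0 (Palm grey runs from white=0 to black=maxval)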
+        bpp = im.info["bpp"]
+        im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval))
+        # we ignore the palette here
+        im.mode = "P"
+        rawmode = "P;" + str(bpp)
+        version = 1
+
+    elif im.mode == "1":
+
+        # monochrome -- write it inverted, as is the Palm standard
+        rawmode = "1;I"
+        bpp = 1
+        version = 0
+
+    else:
+
+        raise IOError("cannot write mode %s as Palm" % im.mode)
+
+    if check:
+        return check
+
+    #
+    # make sure image data is available
+    im.load()
+
+    # write header
+
+    cols = im.size[0]
+    rows = im.size[1]
+
+    rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2
+    transparent_index = 0
+    compression_type = _COMPRESSION_TYPES["none"]
+
+    flags = 0
+    if im.mode == "P" and "custom-colormap" in im.info:
+        # set the custom-colormap flag bit ("&" could never set it)
+        flags = flags | _FLAGS["custom-colormap"]
+        colormapsize = 4 * 256 + 2
+        colormapmode = im.palette.mode
+        colormap = im.getdata().getpalette()
+    else:
+        colormapsize = 0
+
+    if "offset" in im.info:
+        offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4
+    else:
+        offset = 0
+
+    fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags))
+    fp.write(o8(bpp))
+    fp.write(o8(version))
+    fp.write(o16b(offset))
+    fp.write(o8(transparent_index))
+    fp.write(o8(compression_type))
+    fp.write(o16b(0))  # reserved by Palm
+
+    # now write colormap if necessary
+
+    if colormapsize > 0:
+        fp.write(o16b(256))
+        for i in range(256):
+            fp.write(o8(i))
+            if colormapmode == 'RGB':
+                fp.write(
+                    o8(colormap[3 * i]) +
+                    o8(colormap[3 * i + 1]) +
+                    o8(colormap[3 * i + 2]))
+            elif colormapmode == 'RGBA':
+                fp.write(
+                    o8(colormap[4 * i]) +
+                    o8(colormap[4 * i + 1]) +
+                    o8(colormap[4 * i + 2]))
+
+    # now convert data to raw form
+    ImageFile._save(
+        im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))])
+
+    if hasattr(fp, "flush"):
+        fp.flush()


+#
+# --------------------------------------------------------------------

+Image.register_save("Palm", _save)

+Image.register_extension("Palm", ".palm")

+Image.register_mime("Palm", "image/palm")
diff --git a/server/www/packages/packages-windows/x86/PIL/PcdImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PcdImagePlugin.py
new file mode 100644
index 0000000..b53635a
--- /dev/null
+++ b/server/www/packages/packages-windows/x86/PIL/PcdImagePlugin.py
@@ -0,0 +1,59 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PCD file handling
+#
+# History:
+# 96-05-10 fl Created
+# 96-05-27 fl Added draft mode (128x192, 256x384)
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#


+from PIL import Image, ImageFile, _binary

+__version__ = "0.1"

+i8 = _binary.i8


+##
+# Image plugin for PhotoCD images. This plugin only reads the 768x512
+# image from the file; higher resolutions are encoded in a proprietary
+# encoding.

+class PcdImageFile(ImageFile.ImageFile):
+
+    format = "PCD"
+    format_description = "Kodak PhotoCD"
+
+    def _open(self):
+
+        # rough
+        self.fp.seek(2048)
+        s = self.fp.read(2048)
+
+        if s[:4] != b"PCD_":
+            raise SyntaxError("not a PCD file")
+
+        orientation = i8(s[1538]) & 3
+        if orientation == 1:
+            self.tile_post_rotate = 90  # hack
+        elif orientation == 3:
+            self.tile_post_rotate = -90
+
+        self.mode = "RGB"
+        self.size = 768, 512  # FIXME: not correct for rotated images!
+ self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)] + +# +# registry + +Image.register_open(PcdImageFile.format, PcdImageFile) + +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/server/www/packages/packages-windows/x86/PIL/PcfFontFile.py b/server/www/packages/packages-windows/x86/PIL/PcfFontFile.py new file mode 100644 index 0000000..c200690 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PcfFontFile.py @@ -0,0 +1,252 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library +# $Id$ +# +# portable compiled font file parser +# +# history: +# 1997-08-19 fl created +# 2003-09-13 fl fixed loading of unicode fonts +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import FontFile +from PIL import _binary + +# -------------------------------------------------------------------- +# declarations + +PCF_MAGIC = 0x70636601 # "\x01fcp" + +PCF_PROPERTIES = (1 << 0) +PCF_ACCELERATORS = (1 << 1) +PCF_METRICS = (1 << 2) +PCF_BITMAPS = (1 << 3) +PCF_INK_METRICS = (1 << 4) +PCF_BDF_ENCODINGS = (1 << 5) +PCF_SWIDTHS = (1 << 6) +PCF_GLYPH_NAMES = (1 << 7) +PCF_BDF_ACCELERATORS = (1 << 8) + +BYTES_PER_ROW = [ + lambda bits: ((bits+7) >> 3), + lambda bits: ((bits+15) >> 3) & ~1, + lambda bits: ((bits+31) >> 3) & ~3, + lambda bits: ((bits+63) >> 3) & ~7, +] + +i8 = _binary.i8 +l16 = _binary.i16le +l32 = _binary.i32le +b16 = _binary.i16be +b32 = _binary.i32be + + +def sz(s, o): + return s[o:s.index(b"\0", o)] + + +## +# Font file plugin for the X11 PCF format. + +class PcfFontFile(FontFile.FontFile): + + name = "name" + + def __init__(self, fp): + + magic = l32(fp.read(4)) + if magic != PCF_MAGIC: + raise SyntaxError("not a PCF file") + + FontFile.FontFile.__init__(self) + + count = l32(fp.read(4)) + self.toc = {} + for i in range(count): + type = l32(fp.read(4)) + self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) + + self.fp = fp + + self.info = self._load_properties() + + metrics = self._load_metrics() + bitmaps = self._load_bitmaps(metrics) + encoding = self._load_encoding() + + # + # create glyph structure + + for ch in range(256): + ix = encoding[ch] + if ix is not None: + x, y, l, r, w, a, d, f = metrics[ix] + glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix] + self.glyph[ch] = glyph + + def _getformat(self, tag): + + format, size, offset = self.toc[tag] + + fp = self.fp + fp.seek(offset) + + format = l32(fp.read(4)) + + if format & 4: + i16, i32 = b16, b32 + else: + i16, i32 = l16, l32 + + return fp, format, i16, i32 + + def _load_properties(self): + + # + # font properties + + properties = {} + + fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) + + nprops = i32(fp.read(4)) + + # read property description + p = [] + for i in range(nprops): + p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) + if nprops & 3: + fp.seek(4 - (nprops & 3), 1) # pad + + data = fp.read(i32(fp.read(4))) + + for k, s, v in p: + k = sz(data, k) + if s: + v = sz(data, v) + properties[k] = v + + return properties + + def _load_metrics(self): + + # + # font metrics + + metrics = [] + + fp, format, i16, i32 = self._getformat(PCF_METRICS) + + append = metrics.append + + if (format & 0xff00) == 0x100: + + # "compressed" metrics + for i in range(i16(fp.read(2))): + left = i8(fp.read(1)) - 128 + right = i8(fp.read(1)) - 128 + width = i8(fp.read(1)) - 128 + ascent = i8(fp.read(1)) - 128 + descent = 
i8(fp.read(1)) - 128 + xsize = right - left + ysize = ascent + descent + append( + (xsize, ysize, left, right, width, + ascent, descent, 0) + ) + + else: + + # "jumbo" metrics + for i in range(i32(fp.read(4))): + left = i16(fp.read(2)) + right = i16(fp.read(2)) + width = i16(fp.read(2)) + ascent = i16(fp.read(2)) + descent = i16(fp.read(2)) + attributes = i16(fp.read(2)) + xsize = right - left + ysize = ascent + descent + append( + (xsize, ysize, left, right, width, + ascent, descent, attributes) + ) + + return metrics + + def _load_bitmaps(self, metrics): + + # + # bitmap data + + bitmaps = [] + + fp, format, i16, i32 = self._getformat(PCF_BITMAPS) + + nbitmaps = i32(fp.read(4)) + + if nbitmaps != len(metrics): + raise IOError("Wrong number of bitmaps") + + offsets = [] + for i in range(nbitmaps): + offsets.append(i32(fp.read(4))) + + bitmapSizes = [] + for i in range(4): + bitmapSizes.append(i32(fp.read(4))) + + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 + + bitmapsize = bitmapSizes[padindex] + offsets.append(bitmapsize) + + data = fp.read(bitmapsize) + + pad = BYTES_PER_ROW[padindex] + mode = "1;R" + if bitorder: + mode = "1" + + for i in range(nbitmaps): + x, y, l, r, w, a, d, f = metrics[i] + b, e = offsets[i], offsets[i+1] + bitmaps.append( + Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x)) + ) + + return bitmaps + + def _load_encoding(self): + + # map character code to bitmap index + encoding = [None] * 256 + + fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) + + firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2)) + firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2)) + + default = i16(fp.read(2)) + + nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1) + + for i in range(nencoding): + encodingOffset = i16(fp.read(2)) + if encodingOffset != 0xFFFF: + try: + encoding[i+firstCol] = encodingOffset + except IndexError: + break # only load ISO-8859-1 glyphs + + return encoding diff --git a/server/www/packages/packages-windows/x86/PIL/PcxImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PcxImagePlugin.py new file mode 100644 index 0000000..9440d53 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PcxImagePlugin.py @@ -0,0 +1,187 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCX file handling +# +# This format was originally used by ZSoft's popular PaintBrush +# program for the IBM PC. It is also supported by many MS-DOS and +# Windows applications, including the Windows PaintBrush program in +# Windows 3. +# +# history: +# 1995-09-01 fl Created +# 1996-05-20 fl Fixed RGB support +# 1997-01-03 fl Fixed 2-bit and 4-bit support +# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) +# 1999-02-07 fl Added write support +# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust +# 2002-07-30 fl Seek from to current position, not beginning of file +# 2003-06-03 fl Extract DPI settings (info["dpi"]) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +from PIL import Image, ImageFile, ImagePalette, _binary + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 + +__version__ = "0.6" + + +def _accept(prefix): + return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5] + + +## +# Image plugin for Paintbrush images. 
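+#
+# A quick validity sketch using the header check above (the first byte
+# is the ZSoft manufacturer tag 10, the second a known version number):
+#
+#   with open("image.pcx", "rb") as f:
+#       is_pcx = _accept(f.read(128))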
+ +class PcxImageFile(ImageFile.ImageFile): + + format = "PCX" + format_description = "Paintbrush" + + def _open(self): + + # header + s = self.fp.read(128) + if not _accept(s): + raise SyntaxError("not a PCX file") + + # image + bbox = i16(s, 4), i16(s, 6), i16(s, 8)+1, i16(s, 10)+1 + if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: + raise SyntaxError("bad PCX image size") + logger.debug("BBox: %s %s %s %s", *bbox) + + # format + version = i8(s[1]) + bits = i8(s[3]) + planes = i8(s[65]) + stride = i16(s, 66) + logger.debug("PCX version %s, bits %s, planes %s, stride %s", + version, bits, planes, stride) + + self.info["dpi"] = i16(s, 12), i16(s, 14) + + if bits == 1 and planes == 1: + mode = rawmode = "1" + + elif bits == 1 and planes in (2, 4): + mode = "P" + rawmode = "P;%dL" % planes + self.palette = ImagePalette.raw("RGB", s[16:64]) + + elif version == 5 and bits == 8 and planes == 1: + mode = rawmode = "L" + # FIXME: hey, this doesn't work with the incremental loader !!! + self.fp.seek(-769, 2) + s = self.fp.read(769) + if len(s) == 769 and i8(s[0]) == 12: + # check if the palette is linear greyscale + for i in range(256): + if s[i*3+1:i*3+4] != o8(i)*3: + mode = rawmode = "P" + break + if mode == "P": + self.palette = ImagePalette.raw("RGB", s[1:]) + self.fp.seek(128) + + elif version == 5 and bits == 8 and planes == 3: + mode = "RGB" + rawmode = "RGB;L" + + else: + raise IOError("unknown PCX mode") + + self.mode = mode + self.size = bbox[2]-bbox[0], bbox[3]-bbox[1] + + bbox = (0, 0) + self.size + logger.debug("size: %sx%s", *self.size) + + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + +# -------------------------------------------------------------------- +# save PCX files + +SAVE = { + # mode: (version, bits, planes, raw mode) + "1": (2, 1, 1, "1"), + "L": (5, 8, 1, "L"), + "P": (5, 8, 1, "P"), + "RGB": (5, 8, 3, "RGB;L"), +} + +o16 = _binary.o16le + + +def _save(im, fp, filename, check=0): + + try: + version, bits, planes, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as PCX" % im.mode) + + if check: + return check + + # bytes per plane + stride = (im.size[0] * bits + 7) // 8 + # stride should be even + stride += stride % 2 + # Stride needs to be kept in sync with the PcxEncode.c version. + # Ideally it should be passed in in the state, but the bytes value + # gets overwritten. + + logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], bits, stride) + + # under windows, we could determine the current screen size with + # "Image.core.display_mode()[1]", but I think that's overkill... 
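+    # worked example for the stride computed above: a 100 px wide 1-bit
+    # image needs (100*1 + 7)//8 = 13 bytes, padded to 14 to stay even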
+ + screen = im.size + + dpi = 100, 100 + + # PCX header + fp.write( + o8(10) + o8(version) + o8(1) + o8(bits) + o16(0) + + o16(0) + o16(im.size[0]-1) + o16(im.size[1]-1) + o16(dpi[0]) + + o16(dpi[1]) + b"\0"*24 + b"\xFF"*24 + b"\0" + o8(planes) + + o16(stride) + o16(1) + o16(screen[0]) + o16(screen[1]) + + b"\0"*54 + ) + + assert fp.tell() == 128 + + ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0, + (rawmode, bits*planes))]) + + if im.mode == "P": + # colour palette + fp.write(o8(12)) + fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes + elif im.mode == "L": + # greyscale palette + fp.write(o8(12)) + for i in range(256): + fp.write(o8(i)*3) + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) + +Image.register_extension(PcxImageFile.format, ".pcx") diff --git a/server/www/packages/packages-windows/x86/PIL/PdfImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PdfImagePlugin.py new file mode 100644 index 0000000..7decf0e --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PdfImagePlugin.py @@ -0,0 +1,258 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PDF (Acrobat) file handling +# +# History: +# 1996-07-16 fl Created +# 1997-01-18 fl Fixed header +# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. +# 2004-02-24 fl Fixes for 1 and P images. +# +# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996-1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +# Image plugin for PDF images (output only). +## + +from PIL import Image, ImageFile +from PIL._binary import i8 +import io + +__version__ = "0.4" + + +# +# -------------------------------------------------------------------- + +# object ids: +# 1. catalogue +# 2. pages +# 3. image +# 4. page +# 5. page contents + +def _obj(fp, obj, **dict): + fp.write("%d 0 obj\n" % obj) + if dict: + fp.write("<<\n") + for k, v in dict.items(): + if v is not None: + fp.write("/%s %s\n" % (k, v)) + fp.write(">>\n") + + +def _endobj(fp): + fp.write("endobj\n") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +## +# (Internal) Image save plugin for the PDF format. + +def _save(im, fp, filename, save_all=False): + resolution = im.encoderinfo.get("resolution", 72.0) + + # + # make sure image data is available + im.load() + + xref = [0] + + class TextWriter(object): + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, name): + return getattr(self.fp, name) + + def write(self, value): + self.fp.write(value.encode('latin-1')) + + fp = TextWriter(fp) + + fp.write("%PDF-1.2\n") + fp.write("% created by PIL PDF driver " + __version__ + "\n") + + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits) + # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports + # Flatedecode (zip compression). 
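+    # e.g. an RGB source image is embedded as a /DCTDecode (JPEG) stream
+    # in /DeviceRGB, while a mode "P" image goes through /ASCIIHexDecode
+    # with an /Indexed palette (see the dispatch below)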
+ + bits = 8 + params = None + + if im.mode == "1": + filter = "/ASCIIHexDecode" + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + bits = 1 + elif im.mode == "L": + filter = "/DCTDecode" + # params = "<< /Predictor 15 /Columns %d >>" % (width-2) + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + elif im.mode == "P": + filter = "/ASCIIHexDecode" + colorspace = "[ /Indexed /DeviceRGB 255 <" + palette = im.im.getpalette("RGB") + for i in range(256): + r = i8(palette[i*3]) + g = i8(palette[i*3+1]) + b = i8(palette[i*3+2]) + colorspace += "%02x%02x%02x " % (r, g, b) + colorspace += "> ]" + procset = "/ImageI" # indexed color + elif im.mode == "RGB": + filter = "/DCTDecode" + colorspace = "/DeviceRGB" + procset = "/ImageC" # color images + elif im.mode == "CMYK": + filter = "/DCTDecode" + colorspace = "/DeviceCMYK" + procset = "/ImageC" # color images + else: + raise ValueError("cannot save mode %s" % im.mode) + + # + # catalogue + + xref.append(fp.tell()) + _obj( + fp, 1, + Type="/Catalog", + Pages="2 0 R") + _endobj(fp) + + # + # pages + numberOfPages = 1 + if save_all: + try: + numberOfPages = im.n_frames + except AttributeError: + # Image format does not have n_frames. It is a single frame image + pass + pages = [str(pageNumber*3+4)+" 0 R" + for pageNumber in range(0, numberOfPages)] + + xref.append(fp.tell()) + _obj( + fp, 2, + Type="/Pages", + Count=len(pages), + Kids="["+"\n".join(pages)+"]") + _endobj(fp) + + for pageNumber in range(0, numberOfPages): + im.seek(pageNumber) + + # + # image + + op = io.BytesIO() + + if filter == "/ASCIIHexDecode": + if bits == 1: + # FIXME: the hex encoder doesn't support packed 1-bit + # images; do things the hard way... + data = im.tobytes("raw", "1") + im = Image.new("L", (len(data), 1), None) + im.putdata(data) + ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "/FlateDecode": + ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/RunLengthDecode": + ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)]) + else: + raise ValueError("unsupported PDF filter (%s)" % filter) + + # + # Get image characteristics + + width, height = im.size + + xref.append(fp.tell()) + _obj( + fp, pageNumber*3+3, + Type="/XObject", + Subtype="/Image", + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Length=len(op.getvalue()), + Filter=filter, + BitsPerComponent=bits, + DecodeParams=params, + ColorSpace=colorspace) + + fp.write("stream\n") + fp.fp.write(op.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # page + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+4) + fp.write( + "<<\n/Type /Page\n/Parent 2 0 R\n" + "/Resources <<\n/ProcSet [ /PDF %s ]\n" + "/XObject << /image %d 0 R >>\n>>\n" + "/MediaBox [ 0 0 %d %d ]\n/Contents %d 0 R\n>>\n" % ( + procset, + pageNumber*3+3, + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + pageNumber*3+5)) + _endobj(fp) + + # + # page contents + + op = TextWriter(io.BytesIO()) + + op.write( + "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( + int(width * 72.0 / resolution), + int(height * 72.0 / resolution))) + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+5, Length=len(op.fp.getvalue())) + + fp.write("stream\n") + fp.fp.write(op.fp.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # trailer + startxref = fp.tell() + fp.write("xref\n0 %d\n0000000000 65535 f \n" % len(xref)) + for x in xref[1:]: + 
fp.write("%010d 00000 n \n" % x) + fp.write("trailer\n<<\n/Size %d\n/Root 1 0 R\n>>\n" % len(xref)) + fp.write("startxref\n%d\n%%%%EOF\n" % startxref) + if hasattr(fp, "flush"): + fp.flush() + +# +# -------------------------------------------------------------------- + +Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) + +Image.register_extension("PDF", ".pdf") + +Image.register_mime("PDF", "application/pdf") diff --git a/server/www/packages/packages-windows/x86/PIL/PixarImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PixarImagePlugin.py new file mode 100644 index 0000000..fd002d9 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PixarImagePlugin.py @@ -0,0 +1,71 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + +# +# helpers + +i16 = _binary.i16le + + +def _accept(prefix): + return prefix[:4] == b"\200\350\000\000" + + +## +# Image plugin for PIXAR raster images. + +class PixarImageFile(ImageFile.ImageFile): + + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self): + + # assuming a 4-byte magic label + s = self.fp.read(4) + if s != b"\200\350\000\000": + raise SyntaxError("not a PIXAR file") + + # read rest of header + s = s + self.fp.read(508) + + self.size = i16(s[418:420]), i16(s[416:418]) + + # get channel/depth descriptions + mode = i16(s[424:426]), i16(s[426:428]) + + if mode == (14, 2): + self.mode = "RGB" + # FIXME: to be continued... + + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))] + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile, _accept) + +Image.register_extension(PixarImageFile.format, ".pxr") diff --git a/server/www/packages/packages-windows/x86/PIL/PngImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PngImagePlugin.py new file mode 100644 index 0000000..18deec5 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PngImagePlugin.py @@ -0,0 +1,840 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). 
+# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +import re +import zlib +import struct + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.9" + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + +is_cid = re.compile(b"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b'^\xff*\x00\xff*$') + +# Maximum decompressed size for a iTXt or zTXt chunk. +# Eliminates decompression bombs where compressed chunks can expand 1000x +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +# Set the maximum total text chunk size. +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + +class ChunkStream(object): + + def __init__(self, fp): + + self.fp = fp + self.queue = [] + + if not hasattr(Image.core, "crc32"): + self.crc = self.crc_skip + + def read(self): + "Fetch a new chunk. Returns header information." 
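+        # layout reminder: a PNG chunk is length (4 bytes, big-endian),
+        # chunk id (4 ASCII chars), data, then a 4-byte CRC; e.g. a
+        # standard IHDR header parsed here gives cid=b'IHDR', length=13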
+ cid = None + + if self.queue: + cid, pos, length = self.queue.pop() + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + raise SyntaxError("broken PNG file (chunk %s)" % repr(cid)) + + return cid, pos, length + + def close(self): + self.queue = self.crc = self.fp = None + + def push(self, cid, pos, length): + + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + "Call the appropriate chunk handler" + + logger.debug("STREAM %s %s %s", cid, pos, length) + return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length) + + def crc(self, cid, data): + "Read and verify checksum" + + # Skip CRC checks for ancillary chunks if allowed to load truncated images + # 5th byte of first char is 1 [specs, section 5.4] + if ImageFile.LOAD_TRUNCATED_IMAGES and (i8(cid[0]) >> 5 & 1): + self.crc_skip(cid, data) + return + + try: + crc1 = Image.core.crc32(data, Image.core.crc32(cid)) + crc2 = i16(self.fp.read(2)), i16(self.fp.read(2)) + if crc1 != crc2: + raise SyntaxError("broken PNG file (bad header checksum in %s)" + % cid) + except struct.error: + raise SyntaxError("broken PNG file (incomplete checksum in %s)" + % cid) + + def crc_skip(self, cid, data): + "Read checksum. Used if the C module is not present" + + self.fp.read(4) + + def verify(self, endchunk=b"IEND"): + + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. + + cids = [] + + while True: + try: + cid, pos, length = self.read() + except struct.error: + raise IOError("truncated PNG file") + + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + @staticmethod + def __new__(cls, text, lang, tkey): + """ + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo(object): + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self): + self.chunks = [] + + def add(self, cid, data): + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + + """ + + self.chunks.append((cid, data)) + + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + + zlib.compress(value)) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + + value) + + def add_text(self, key, value, zip=0): + """Appends a text chunk. 
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, bool(zip)) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode('latin-1', 'strict') + except UnicodeError: + return self.add_itxt(key, value, zip=bool(zip)) + + if not isinstance(key, bytes): + key = key.encode('latin-1', 'strict') + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + +class PngStream(ChunkStream): + + def __init__(self, fp): + + ChunkStream.__init__(self, fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError("Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" % + self.text_memory) + + def chunk_iCCP(self, pos, length): + + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %s", s[:i]) + logger.debug("Compression method %s", i8(s[i])) + comp_method = i8(s[i]) + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in iCCP chunk" % + comp_method) + try: + icc_profile = _safe_zlib_decompress(s[i+2:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + icc_profile = None + else: + raise + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos, length): + + # image header + s = ImageFile._safe_read(self.fp, length) + self.im_size = i32(s), i32(s[4:]) + try: + self.im_mode, self.im_rawmode = _MODES[(i8(s[8]), i8(s[9]))] + except: + pass + if i8(s[12]): + self.im_info["interlace"] = 1 + if i8(s[11]): + raise SyntaxError("unknown filter category") + return s + + def chunk_IDAT(self, pos, length): + + # image data + self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)] + self.im_idat = length + raise EOFError + + def chunk_IEND(self, pos, length): + + # end of PNG image + raise EOFError + + def chunk_PLTE(self, pos, length): + + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos, length): + + # transparency + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + if _simple_palette.match(s): + # tRNS contains only one full-transparent entry, + # other entries are full opaque + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + else: + # otherwise, we have a byte string with one alpha value + # for each palette entry + self.im_info["transparency"] = s + elif self.im_mode == "L": + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:]) + return s + + def chunk_gAMA(self, pos, length): + + # gamma 
setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_pHYs(self, pos, length): + + # pixels per unit + s = ImageFile._safe_read(self.fp, length) + px, py = i32(s), i32(s[4:]) + unit = i8(s[8]) + if unit == 1: # meter + dpi = int(px * 0.0254 + 0.5), int(py * 0.0254 + 0.5) + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos, length): + + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_zTXt(self, pos, length): + + # compressed text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = i8(v[0]) + else: + comp_method = 0 + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in zTXt chunk" % + comp_method) + try: + v = _safe_zlib_decompress(v[1:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + v = b"" + else: + raise + except zlib.error: + v = b"" + + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = i8(r[0]), i8(r[1]), r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + else: + raise + except zlib.error: + return s + else: + return s + if bytes is not str: + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + +# -------------------------------------------------------------------- +# PNG reader + +def _accept(prefix): + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. + +class PngImageFile(ImageFile.ImageFile): + + format = "PNG" + format_description = "Portable network graphics" + + def _open(self): + + if self.fp.read(8) != _MAGIC: + raise SyntaxError("not a PNG file") + + # + # Parse headers up to the first IDAT chunk + + self.png = PngStream(self.fp) + + while True: + + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%s %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... 
+ # (believe me, I've tried ;-) + + self.mode = self.png.im_mode + self.size = self.png.im_size + self.info = self.png.im_info + self.text = self.png.im_text # experimental + self.tile = self.png.im_tile + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + self.__idat = length # used by load_read() + + def verify(self): + "Verify PNG file" + + if self.fp is None: + raise RuntimeError("verify must be called directly after open") + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + self.png.verify() + self.png.close() + + self.fp = None + + def load_prepare(self): + "internal: prepare to read PNG file" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes): + "internal: read more image data" + + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT"]: + self.png.push(cid, pos, length) + return b"" + + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self): + "internal: finished reading image data" + + self.png.close() + self.png = None + + +# -------------------------------------------------------------------- +# PNG writer + +o8 = _binary.o8 +o16 = _binary.o16be +o32 = _binary.o32be + +_OUTMODES = { + # supported PIL modes, and corresponding rawmodes/bits/color combinations + "1": ("1", b'\x01\x00'), + "L;1": ("L;1", b'\x01\x00'), + "L;2": ("L;2", b'\x02\x00'), + "L;4": ("L;4", b'\x04\x00'), + "L": ("L", b'\x08\x00'), + "LA": ("LA", b'\x08\x04'), + "I": ("I;16B", b'\x10\x00'), + "P;1": ("P;1", b'\x01\x03'), + "P;2": ("P;2", b'\x02\x03'), + "P;4": ("P;4", b'\x04\x03'), + "P": ("P", b'\x08\x03'), + "RGB": ("RGB", b'\x08\x02'), + "RGBA": ("RGBA", b'\x08\x06'), +} + + +def putchunk(fp, cid, *data): + "Write a PNG chunk (including CRC field)" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + fp.write(o16(hi) + o16(lo)) + + +class _idat(object): + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data): + self.chunk(self.fp, b"IDAT", data) + + +def _save(im, fp, filename, chunk=putchunk, check=0): + # save an image to disk (called by the save method) + + mode = im.mode + + if mode == "P": + + # + # attempt to minimize storage requirements for palette images + if "bits" in im.encoderinfo: + # number of bits specified by user + colors = 1 << im.encoderinfo["bits"] + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1])//3, 256), 2) + else: + colors = 256 + + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + elif colors <= 16: + bits = 4 + else: + bits = 8 + if bits != 8: + mode = "%s;%d" % (mode, bits) + + # encoder options + if "dictionary" in im.encoderinfo: + dictionary = im.encoderinfo["dictionary"] + else: + dictionary = b"" + + im.encoderconfig = ("optimize" in im.encoderinfo, + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + dictionary) + + # get the corresponding PNG mode + try: + rawmode, 
mode = _OUTMODES[mode] + except KeyError: + raise IOError("cannot write mode %s as PNG" % mode) + + if check: + return check + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk(fp, b"IHDR", + o32(im.size[0]), o32(im.size[1]), # 0: size + mode, # 8: depth/type + b'\0', # 10: compression + b'\0', # 11: filter category + b'\0') # 12: interlace flag + + if im.mode == "P": + palette_byte_number = (2 ** bits) * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b'\0' + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get('transparency', + im.info.get('transparency', None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = 2**bits + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b'\xFF' * transparency + b'\0' + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode == "L": + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. + raise IOError("cannot use transparency for this mode") + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = 2**bits + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk(fp, b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b'\x01') + + info = im.encoderinfo.get("pnginfo") + if info: + for cid, data in info.chunks: + chunk(fp, cid, data) + + icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(icc) + chunk(fp, b"iCCP", data) + + ImageFile._save(im, _idat(fp, chunk), + [("zip", (0, 0)+im.size, 0, rawmode)]) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector(object): + data = [] + + def write(self, data): + pass + + def append(self, chunk): + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + crc = o16(hi) + o16(lo) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) + +Image.register_extension(PngImageFile.format, ".png") + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/server/www/packages/packages-windows/x86/PIL/PpmImagePlugin.py 
b/server/www/packages/packages-windows/x86/PIL/PpmImagePlugin.py new file mode 100644 index 0000000..68073ca --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PpmImagePlugin.py @@ -0,0 +1,174 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +import string + +from PIL import Image, ImageFile + +__version__ = "0.2" + +# +# -------------------------------------------------------------------- + +b_whitespace = string.whitespace +try: + import locale + locale_lang, locale_enc = locale.getlocale() + if locale_enc is None: + locale_lang, locale_enc = locale.getdefaultlocale() + b_whitespace = b_whitespace.decode(locale_enc) +except: + pass +b_whitespace = b_whitespace.encode('ascii', 'ignore') + +MODES = { + # standard + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK" +} + + +def _accept(prefix): + return prefix[0:1] == b"P" and prefix[1] in b"0456y" + + +## +# Image plugin for PBM, PGM, and PPM images. + +class PpmImageFile(ImageFile.ImageFile): + + format = "PPM" + format_description = "Pbmplus image" + + def _token(self, s=b""): + while True: # read until next whitespace + c = self.fp.read(1) + if not c or c in b_whitespace: + break + if c > b'\x79': + raise ValueError("Expected ASCII value, found binary") + s = s + c + if (len(s) > 9): + raise ValueError("Expected int, got > 9 digits") + return s + + def _open(self): + + # check magic + s = self.fp.read(1) + if s != b"P": + raise SyntaxError("not a PPM file") + mode = MODES[self._token(s)] + + if mode == "1": + self.mode = "1" + rawmode = "1;I" + else: + self.mode = rawmode = mode + + for ix in range(3): + while True: + while True: + s = self.fp.read(1) + if s not in b_whitespace: + break + if s == b"": + raise ValueError("File does not extend beyond magic number") + if s != b"#": + break + s = self.fp.readline() + s = int(self._token(s)) + if ix == 0: + xsize = s + elif ix == 1: + ysize = s + if mode == "1": + break + elif ix == 2: + # maxgrey + if s > 255: + if not mode == 'L': + raise ValueError("Too many colors for band: %s" % s) + if s < 2**16: + self.mode = 'I' + rawmode = 'I;16B' + else: + self.mode = 'I' + rawmode = 'I;32B' + + self.size = xsize, ysize + self.tile = [("raw", + (0, 0, xsize, ysize), + self.fp.tell(), + (rawmode, 0, 1))] + + # ALTERNATIVE: load via builtin debug function + # self.im = Image.core.open_ppm(self.filename) + # self.mode = self.im.mode + # self.size = self.im.size + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename): + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + if im.getextrema()[1] < 2**16: + rawmode, head = "I;16B", b"P5" + else: + rawmode, head = "I;32B", b"P5" + elif im.mode == "RGB": + rawmode, head = "RGB", b"P6" + elif im.mode == "RGBA": + rawmode, head = "RGB", b"P6" + else: + raise IOError("cannot write mode %s as PPM" % im.mode) + fp.write(head + ("\n%d %d\n" % im.size).encode('ascii')) + if head == b"P6": + fp.write(b"255\n") + if head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + elif rawmode == "I;16B": + fp.write(b"65535\n") + elif rawmode 
== "I;32B": + fp.write(b"2147483648\n") + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + # ALTERNATIVE: save via builtin debug function + # im._dump(filename) + +# +# -------------------------------------------------------------------- + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_extension(PpmImageFile.format, ".pbm") +Image.register_extension(PpmImageFile.format, ".pgm") +Image.register_extension(PpmImageFile.format, ".ppm") diff --git a/server/www/packages/packages-windows/x86/PIL/PsdImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/PsdImagePlugin.py new file mode 100644 index 0000000..d06e320 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PsdImagePlugin.py @@ -0,0 +1,312 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +__version__ = "0.4" + +from PIL import Image, ImageFile, ImagePalette, _binary + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3) +} + +# +# helpers + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + + +# --------------------------------------------------------------------. +# read PSD images + +def _accept(prefix): + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. 
+ +class PsdImageFile(ImageFile.ImageFile): + + format = "PSD" + format_description = "Adobe Photoshop" + + def _open(self): + + read = self.fp.read + + # + # header + + s = read(26) + if s[:4] != b"8BPS" or i16(s[4:]) != 1: + raise SyntaxError("not a PSD file") + + psd_bits = i16(s[22:]) + psd_channels = i16(s[12:]) + psd_mode = i16(s[24:]) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + raise IOError("not enough channels") + + self.mode = mode + self.size = i32(s[18:]), i32(s[14:]) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + signature = read(4) + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if (len(data) & 1): + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self.layers = [] + + size = i32(read(4)) + if size: + end = self.fp.tell() + size + size = i32(read(4)) + if size: + self.layers = _layerinfo(self.fp) + self.fp.seek(end) + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self._fp = self.fp + self.frame = 0 + + @property + def n_frames(self): + return len(self.layers) + + @property + def is_animated(self): + return len(self.layers) > 1 + + def seek(self, layer): + # seek to given layer (1..max) + if layer == self.frame: + return + try: + if layer <= 0: + raise IndexError + name, mode, bbox, tile = self.layers[layer-1] + self.mode = mode + self.tile = tile + self.frame = layer + self.fp = self._fp + return name, bbox + except IndexError: + raise EOFError("no such layer") + + def tell(self): + # return layer number (0=image, 1..max=layers) + return self.frame + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.fill(self.mode, self.size, 0) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + +def _layerinfo(file): + # read layerinfo block + layers = [] + read = file.read + for i in range(abs(i16(read(2)))): + + # bounding box + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) + + # image info + info = [] + mode = [] + types = list(range(i16(read(2)))) + if len(types) > 4: + continue + + for i in types: + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + size = i32(read(4)) + info.append((m, size)) + + # figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + filler = read(12) + name = "" + size = i32(read(4)) + combined = 0 + if size: + length = i32(read(4)) + if length: + mask_y = i32(read(4)) + mask_x = i32(read(4)) + mask_h = i32(read(4)) - mask_y + mask_w = i32(read(4)) - mask_x + file.seek(length - 16, 1) + combined += length + 4 + + length = i32(read(4)) + if length: + file.seek(length, 1) + combined += length + 4 + + length = i8(read(1)) + if length: + # Don't know the proper 
encoding, + # Latin-1 should be a good guess + name = read(length).decode('latin-1', 'replace') + combined += length + 1 + + file.seek(size - combined, 1) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + i = 0 + for name, mode, bbox in layers: + tile = [] + for m in mode: + t = _maketile(file, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + i += 1 + + return layers + + +def _maketile(file, mode, bbox, channels): + + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize*ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append( + ("packbits", bbox, offset, layer) + ) + for y in range(ysize): + offset = offset + i16(bytecount[i:i+2]) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") diff --git a/server/www/packages/packages-windows/x86/PIL/PyAccess.py b/server/www/packages/packages-windows/x86/PIL/PyAccess.py new file mode 100644 index 0000000..c9cbd70 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/PyAccess.py @@ -0,0 +1,318 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access. +# * Does not implement the line functions, as they don't appear to be used +# * Taking only the tuple form, which is used from python. +# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# + +from __future__ import print_function + +import logging +import sys + +from cffi import FFI + + +logger = logging.getLogger(__name__) + + +defs = """ +struct Pixel_RGBA { + unsigned char r,g,b,a; +}; +struct Pixel_I16 { + unsigned char l,r; +}; +""" +ffi = FFI() +ffi.cdef(defs) + + +class PyAccess(object): + + def __init__(self, img, readonly=False): + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast('unsigned char **', vals['image8']) + self.image32 = ffi.cast('int **', vals['image32']) + self.image = ffi.cast('unsigned char **', vals['image']) + self.xsize = vals['xsize'] + self.ysize = vals['ysize'] + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self): + pass + + def __setitem__(self, xy, color): + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images + + :param xy: The pixel coordinate, given as (x, y). + :param value: The pixel value. 
+ """ + if self.readonly: + raise ValueError('Attempt to putpixel a read only image') + (x, y) = self.check_xy(xy) + return self.set_pixel(x, y, color) + + def __getitem__(self, xy): + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. + """ + + (x, y) = self.check_xy(xy) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy): + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + raise ValueError('pixel location out of range') + return xy + + +class _PyAccess32_2(PyAccess): + """ PA, LA, stored in first and last bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """ RGB and friends, stored in the first three bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + + +class _PyAccess32_4(PyAccess): + """ RGBA etc, all 4 bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """ 1, L, P, 8 bit images stored as uint8 """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """ I;16 access, native bitendian without conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('unsigned short **', self.image) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """ I;16L access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """ I;16B access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = 
ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except: + color = min(color[0], 65535) + + pixel.l = color >> 8 + pixel.r = color & 0xFF + + +class _PyAccessI32_N(PyAccess): + """ Signed Int32 access, native endian """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """ I;32L/B access, with byteswapping conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new('int *', i) + chars = ffi.cast('unsigned char *', orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \ + chars[1], chars[0] + return ffi.cast('int *', chars)[0] + + def get_pixel(self, x, y): + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """ 32 bit float access """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('float **', self.image32) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = {'1': _PyAccess8, + 'L': _PyAccess8, + 'P': _PyAccess8, + 'LA': _PyAccess32_2, + 'La': _PyAccess32_2, + 'PA': _PyAccess32_2, + 'RGB': _PyAccess32_3, + 'LAB': _PyAccess32_3, + 'HSV': _PyAccess32_3, + 'YCbCr': _PyAccess32_3, + 'RGBA': _PyAccess32_4, + 'RGBa': _PyAccess32_4, + 'RGBX': _PyAccess32_4, + 'CMYK': _PyAccess32_4, + 'F': _PyAccessF, + 'I': _PyAccessI32_N, + } + +if sys.byteorder == 'little': + mode_map['I;16'] = _PyAccessI16_N + mode_map['I;16L'] = _PyAccessI16_N + mode_map['I;16B'] = _PyAccessI16_B + + mode_map['I;32L'] = _PyAccessI32_N + mode_map['I;32B'] = _PyAccessI32_Swap +else: + mode_map['I;16'] = _PyAccessI16_L + mode_map['I;16L'] = _PyAccessI16_L + mode_map['I;16B'] = _PyAccessI16_N + + mode_map['I;32L'] = _PyAccessI32_Swap + mode_map['I;32B'] = _PyAccessI32_N + + +def new(img, readonly=False): + access_type = mode_map.get(img.mode, None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/SgiImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/SgiImagePlugin.py new file mode 100644 index 0000000..f890c7e --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/SgiImagePlugin.py @@ -0,0 +1,89 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. +# +# +# History: +# 1995-09-10 fl Created +# +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.2" + +i8 = _binary.i8 +i16 = _binary.i16be + + +def _accept(prefix): + return len(prefix) >= 2 and i16(prefix) == 474 + + +## +# Image plugin for SGI images. 
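+#
+# Fields taken from the 512-byte header in _open() below: bytes 0-1
+# magic (474), byte 2 storage flag (0 = verbatim, 1 = RLE), byte 3
+# bytes per pixel channel, 4-5 dimension, 6-7 xsize, 8-9 ysize,
+# 10-11 zsize (channel count); all values are big-endian.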
+ +class SgiImageFile(ImageFile.ImageFile): + + format = "SGI" + format_description = "SGI Image File Format" + + def _open(self): + + # HEAD + s = self.fp.read(512) + if i16(s) != 474: + raise ValueError("Not an SGI image file") + + # relevant header entries + compression = i8(s[2]) + + # bytes, dimension, zsize + layout = i8(s[3]), i16(s[4:]), i16(s[10:]) + + # determine mode from bytes/zsize + if layout == (1, 2, 1) or layout == (1, 1, 1): + self.mode = "L" + elif layout == (1, 3, 3): + self.mode = "RGB" + elif layout == (1, 3, 4): + self.mode = "RGBA" + else: + raise ValueError("Unsupported SGI image mode") + + # size + self.size = i16(s[6:]), i16(s[8:]) + + # decoder info + if compression == 0: + offset = 512 + pagesize = self.size[0]*self.size[1]*layout[0] + self.tile = [] + for layer in self.mode: + self.tile.append( + ("raw", (0, 0)+self.size, offset, (layer, 0, -1))) + offset = offset + pagesize + elif compression == 1: + raise ValueError("SGI RLE encoding not supported") + +# +# registry + +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) + +Image.register_extension(SgiImageFile.format, ".bw") +Image.register_extension(SgiImageFile.format, ".rgb") +Image.register_extension(SgiImageFile.format, ".rgba") +Image.register_extension(SgiImageFile.format, ".sgi") + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/SpiderImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/SpiderImagePlugin.py new file mode 100644 index 0000000..07f623c --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/SpiderImagePlugin.py @@ -0,0 +1,322 @@ +# +# The Python Imaging Library. +# +# SPIDER image file handling +# +# History: +# 2004-08-02 Created BB +# 2006-03-02 added save method +# 2006-03-13 added support for stack images +# +# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. +# Copyright (c) 2004 by William Baxter. +# Copyright (c) 2004 by Secret Labs AB. +# Copyright (c) 2004 by Fredrik Lundh. +# + +## +# Image plugin for the Spider image format. This format is is used +# by the SPIDER software, in processing image data from electron +# microscopy and tomography. +## + +# +# SpiderImagePlugin.py +# +# The Spider image format is used by SPIDER software, in processing +# image data from electron microscopy and tomography. +# +# Spider home page: +# http://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# http://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# + +from __future__ import print_function + +from PIL import Image, ImageFile +import os +import struct +import sys + + +def isInt(f): + try: + i = int(f) + if f-i == 0: + return 1 + else: + return 0 + except ValueError: + return 0 + except OverflowError: + return 0 + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no.of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + +def isSpiderHeader(t): + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. 
of bytes in header + lenbyt = int(h[23]) # record length in bytes + # print "labrec = %d, labbyt = %d, lenbyt = %d" % (labrec,labbyt,lenbyt) + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename): + fp = open(filename, 'rb') + f = fp.read(92) # read 23 * 4 bytes + fp.close() + t = struct.unpack('>23f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack('<23f', f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + + format = "SPIDER" + format_description = "Spider 2D image" + + def _open(self): + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack('>27f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack('<27f', f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + raise SyntaxError("not a valid Spider file") + except struct.error: + raise SyntaxError("not a valid Spider file") + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + raise SyntaxError("not a Spider 2D image") + + self.size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + raise SyntaxError("inconsistent stack header values") + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self.mode = "F" + + self.tile = [ + ("raw", (0, 0) + self.size, offset, + (self.rawmode, 0, 1))] + self.__fp = self.fp # FIXME: hack + + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self): + if self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame): + if self.istack == 0: + raise EOFError("attempt to seek in a non-stack file") + if frame >= self._nimages: + raise EOFError("attempt to seek past end of file") + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self.__fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth=255): + (minimum, maximum) = self.getextrema() + m = 1 + if maximum != minimum: + m = depth / (maximum-minimum) + b = -m * minimum + return self.point(lambda i, m=m, b=b: i * m + b).convert("L") + + # returns a ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self): + from PIL import ImageTk + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + +# -------------------------------------------------------------------- +# Image series + +# given a list of filenames, return a list of images +def loadImageSeries(filelist=None): + " create a list of Image.images for 
use in montage " + if filelist is None or len(filelist) < 1: + return + + imglist = [] + for img in filelist: + if not os.path.exists(img): + print("unable to find %s" % img) + continue + try: + im = Image.open(img).convert2byte() + except: + if not isSpiderImage(img): + print(img + " is not a Spider image file") + continue + im.info['filename'] = img + imglist.append(im) + return imglist + + +# -------------------------------------------------------------------- +# For saving images in Spider format + +def makeSpiderHeader(im): + nsam, nrow = im.size + lenbyt = nsam * 4 # There are labrec records in the header + labrec = 1024 / lenbyt + if 1024 % lenbyt != 0: + labrec += 1 + labbyt = labrec * lenbyt + hdr = [] + nvalues = int(labbyt / 4) + for i in range(nvalues): + hdr.append(0.0) + + if len(hdr) < 23: + return [] + + # NB these are Fortran indices + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line + hdr[13] = float(labrec) # number of records in file header + hdr[22] = float(labbyt) # total number of bytes in header + hdr[23] = float(lenbyt) # record length in bytes + + # adjust for Fortran indexing + hdr = hdr[1:] + hdr.append(0.0) + # pack binary data into a string + hdrstr = [] + for v in hdr: + hdrstr.append(struct.pack('f', v)) + return hdrstr + + +def _save(im, fp, filename): + if im.mode[0] != "F": + im = im.convert('F') + + hdr = makeSpiderHeader(im) + if len(hdr) < 256: + raise IOError("Error creating Spider header") + + # write the SPIDER header + try: + fp = open(filename, 'wb') + except: + raise IOError("Unable to open %s for writing" % filename) + fp.writelines(hdr) + + rawmode = "F;32NF" # 32-bit native floating point + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + fp.close() + + +def _save_spider(im, fp, filename): + # get the filename extension and register it with Image + ext = os.path.splitext(filename)[1] + Image.register_extension(SpiderImageFile.format, ext) + _save(im, fp, filename) + +# -------------------------------------------------------------------- + +Image.register_open(SpiderImageFile.format, SpiderImageFile) +Image.register_save(SpiderImageFile.format, _save_spider) + +if __name__ == "__main__": + + if not sys.argv[1:]: + print("Syntax: python SpiderImagePlugin.py [infile] [outfile]") + sys.exit() + + filename = sys.argv[1] + if not isSpiderImage(filename): + print("input image must be in Spider format") + sys.exit() + + outfile = "" + if len(sys.argv[1:]) > 1: + outfile = sys.argv[2] + + im = Image.open(filename) + print("image: " + str(im)) + print("format: " + str(im.format)) + print("size: " + str(im.size)) + print("mode: " + str(im.mode)) + print("max, min: ", end=' ') + print(im.getextrema()) + + if outfile != "": + # perform some image operation + im = im.transpose(Image.FLIP_LEFT_RIGHT) + print( + "saving a flipped version of %s as %s " % + (os.path.basename(filename), outfile)) + im.save(outfile, SpiderImageFile.format) diff --git a/server/www/packages/packages-windows/x86/PIL/SunImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/SunImagePlugin.py new file mode 100644 index 0000000..af63144 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/SunImagePlugin.py @@ -0,0 +1,81 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + +i32 = _binary.i32be + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == 0x59a66a95 + + +## +# Image plugin for Sun raster files. + +class SunImageFile(ImageFile.ImageFile): + + format = "SUN" + format_description = "Sun Raster File" + + def _open(self): + + # HEAD + s = self.fp.read(32) + if i32(s) != 0x59a66a95: + raise SyntaxError("not an SUN raster file") + + offset = 32 + + self.size = i32(s[4:8]), i32(s[8:12]) + + depth = i32(s[12:16]) + if depth == 1: + self.mode, rawmode = "1", "1;I" + elif depth == 8: + self.mode = rawmode = "L" + elif depth == 24: + self.mode, rawmode = "RGB", "BGR" + else: + raise SyntaxError("unsupported mode") + + compression = i32(s[20:24]) + + if i32(s[24:28]) != 0: + length = i32(s[28:32]) + offset = offset + length + self.palette = ImagePalette.raw("RGB;L", self.fp.read(length)) + if self.mode == "L": + self.mode = rawmode = "P" + + stride = (((self.size[0] * depth + 7) // 8) + 3) & (~3) + + if compression == 1: + self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))] + elif compression == 2: + self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)] + +# +# registry + +Image.register_open(SunImageFile.format, SunImageFile, _accept) + +Image.register_extension(SunImageFile.format, ".ras") diff --git a/server/www/packages/packages-windows/x86/PIL/TarIO.py b/server/www/packages/packages-windows/x86/PIL/TarIO.py new file mode 100644 index 0000000..4e5115b --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/TarIO.py @@ -0,0 +1,57 @@ +# +# The Python Imaging Library. +# $Id$ +# +# read files from within a tar file +# +# History: +# 95-06-18 fl Created +# 96-05-28 fl Open files in binary mode +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-96. +# +# See the README file for information on usage and redistribution. +# + +from PIL import ContainerIO + + +## +# A file object that provides read access to a given member of a TAR +# file. + +class TarIO(ContainerIO.ContainerIO): + + ## + # Create file object. + # + # @param tarfile Name of TAR file. + # @param file Name of member file. + + def __init__(self, tarfile, file): + + fh = open(tarfile, "rb") + + while True: + + s = fh.read(512) + if len(s) != 512: + raise IOError("unexpected end of tar file") + + name = s[:100].decode('utf-8') + i = name.find('\0') + if i == 0: + raise IOError("cannot find subfile") + if i > 0: + name = name[:i] + + size = int(s[124:135], 8) + + if file == name: + break + + fh.seek((size + 511) & (~511), 1) + + # Open region + ContainerIO.ContainerIO.__init__(self, fh, fh.tell(), size) diff --git a/server/www/packages/packages-windows/x86/PIL/TgaImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/TgaImagePlugin.py new file mode 100644 index 0000000..a75ce29 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/TgaImagePlugin.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# TGA file handling +# +# History: +# 95-09-01 fl created (reads 24-bit files only) +# 97-01-04 fl support more TGA versions, including compressed images +# 98-07-04 fl fixed orientation and alpha layer bugs +# 98-09-11 fl fixed orientation for runlength decoder +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + + +# +# -------------------------------------------------------------------- +# Read RGA file + +i8 = _binary.i8 +i16 = _binary.i16le + + +MODES = { + # map imagetype/depth to rawmode + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", + (2, 16): "BGR;5", + (2, 24): "BGR", + (2, 32): "BGRA", +} + + +## +# Image plugin for Targa files. + +class TgaImageFile(ImageFile.ImageFile): + + format = "TGA" + format_description = "Targa" + + def _open(self): + + # process header + s = self.fp.read(18) + + idlen = i8(s[0]) + + colormaptype = i8(s[1]) + imagetype = i8(s[2]) + + depth = i8(s[16]) + + flags = i8(s[17]) + + self.size = i16(s[12:]), i16(s[14:]) + + # validate header fields + if colormaptype not in (0, 1) or\ + self.size[0] <= 0 or self.size[1] <= 0 or\ + depth not in (1, 8, 16, 24, 32): + raise SyntaxError("not a TGA file") + + # image mode + if imagetype in (3, 11): + self.mode = "L" + if depth == 1: + self.mode = "1" # ??? + elif imagetype in (1, 9): + self.mode = "P" + elif imagetype in (2, 10): + self.mode = "RGB" + if depth == 32: + self.mode = "RGBA" + else: + raise SyntaxError("unknown TGA mode") + + # orientation + orientation = flags & 0x30 + if orientation == 0x20: + orientation = 1 + elif not orientation: + orientation = -1 + else: + raise SyntaxError("unknown TGA orientation") + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if idlen: + self.info["id_section"] = self.fp.read(idlen) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:]) + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGR;16", b"\0"*2*start + self.fp.read(2*size)) + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", b"\0"*3*start + self.fp.read(3*size)) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", b"\0"*4*start + self.fp.read(4*size)) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [("tga_rle", (0, 0)+self.size, + self.fp.tell(), (rawmode, orientation, depth))] + else: + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), (rawmode, 0, orientation))] + except KeyError: + pass # cannot decode + +# +# -------------------------------------------------------------------- +# Write TGA file + +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im, fp, filename, check=0): + + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TGA" % im.mode) + + if check: + return check + + if colormaptype: + colormapfirst, colormaplength, colormapentry = 0, 256, 24 + else: + colormapfirst, colormaplength, colormapentry = 0, 0, 0 + + if im.mode == "RGBA": + flags = 8 + else: + flags = 0 + + orientation = im.info.get("orientation", -1) + if orientation > 0: + flags = flags | 
0x20 + + fp.write(b"\000" + + o8(colormaptype) + + o8(imagetype) + + o16(colormapfirst) + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags)) + + if colormaptype: + fp.write(im.im.getpalette("RGB", "BGR")) + + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extension(TgaImageFile.format, ".tga") diff --git a/server/www/packages/packages-windows/x86/PIL/TiffImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..524d42a --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/TiffImagePlugin.py @@ -0,0 +1,1504 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import division, print_function + +from PIL import Image, ImageFile +from PIL import ImagePalette +from PIL import _binary +from PIL import TiffTags + +import collections +from fractions import Fraction +from numbers import Number, Rational + +import io +import itertools +import os +import struct +import sys +import warnings + +from .TiffTags import TYPES + + +__version__ = "1.3.5" +DEBUG = False # Needs to be merged with the new logging approach. + +# Set these to true to force use of libtiff for reading or writing. 
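+# A caller can flip them at runtime, e.g. (minimal sketch):
+#
+#     from PIL import TiffImagePlugin
+#     TiffImagePlugin.READ_LIBTIFF = True   # decode via libtiff
+#     TiffImagePlugin.WRITE_LIBTIFF = True  # encode via libtiff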
+READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +i8 = _binary.i8 +o8 = _binary.o8 + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEOFFSETS = 324 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", +} + +COMPRESSION_INFO_REV = dict([(v, k) for (k, v) in COMPRESSION_INFO.items()]) + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + + (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (II, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + + (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (2,), 1, (16,), ()): ("I;16S", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I;16BS", "I;16BS"), + + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (32,), 
()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I;32BS", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + + (II, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +PREFIXES = [b"MM\000\052", b"II\052\000", b"II\xBC\000"] + + +def _accept(prefix): + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + + +class IFDRational(Rational): + """ Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). 
Delegate as appropriate + + """ + + __slots__ = ('_numerator', '_denominator', '_val') + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + self._denominator = denominator + self._numerator = value + self._val = float(1) + + if type(value) == Fraction: + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value + + if type(value) == IFDRational: + self._denominator = value.denominator + self._numerator = value.numerator + self._val = value._val + return + + if denominator == 0: + self._val = float('nan') + return + + elif denominator == 1: + if sys.hexversion < 0x2070000 and type(value) == float: + # python 2.6 is different. + self._val = Fraction.from_float(value) + else: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(a): + return a._numerator + + @property + def denominator(a): + return a._denominator + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return (self.numerator, self.denominator) + + f = self._val.limit_denominator(max_denominator) + return (f.numerator, f.denominator) + + def __repr__(self): + return str(float(self._val)) + + def __hash__(self): + return self._val.__hash__() + + def __eq__(self, other): + return self._val == other + + def _delegate(op): + def delegate(self, *args): + return getattr(self._val, op)(*args) + return delegate + + """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', + 'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero', + 'ceil', 'floor', 'round'] + print "\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a) + """ + + __add__ = _delegate('__add__') + __radd__ = _delegate('__radd__') + __sub__ = _delegate('__sub__') + __rsub__ = _delegate('__rsub__') + __div__ = _delegate('__div__') + __rdiv__ = _delegate('__rdiv__') + __mul__ = _delegate('__mul__') + __rmul__ = _delegate('__rmul__') + __truediv__ = _delegate('__truediv__') + __rtruediv__ = _delegate('__rtruediv__') + __floordiv__ = _delegate('__floordiv__') + __rfloordiv__ = _delegate('__rfloordiv__') + __mod__ = _delegate('__mod__') + __rmod__ = _delegate('__rmod__') + __pow__ = _delegate('__pow__') + __rpow__ = _delegate('__rpow__') + __pos__ = _delegate('__pos__') + __neg__ = _delegate('__neg__') + __abs__ = _delegate('__abs__') + __trunc__ = _delegate('__trunc__') + __lt__ = _delegate('__lt__') + __gt__ = _delegate('__gt__') + __le__ = _delegate('__le__') + __ge__ = _delegate('__ge__') + __nonzero__ = _delegate('__nonzero__') + __ceil__ = _delegate('__ceil__') + __floor__ = _delegate('__floor__') + __round__ = _delegate('__round__') + + +class ImageFileDirectory_v2(collections.MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. 
+ + The tiff metadata type of each item is stored in a dictionary of + tag types in + `~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * self.tagtype = {} + + * Key: numerical tiff tag number + * Value: integer corresponding to the data type from `~PIL.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + """ + """ + Documentation: + + 'internal' data structures: + * self._tags_v2 = {} Key: numerical tiff tag number + Value: decoded data, as tuple for multiple values + * self._tagdata = {} Key: numerical tiff tag number + Value: undecoded byte string from file + * self._tags_v1 = {} Key: numerical tiff tag number + Value: decoded data in the v1 format + + Tags will be found in the private attributes self._tagdata, and in + self._tags_v2 once decoded. + + Self.legacy_api is a value for internal use, and shouldn't be + changed from outside code. In cooperation with the + ImageFileDirectory_v1 class, if legacy_api is true, then decoded + tags will be populated into both _tags_v1 and _tags_v2. _Tags_v2 + will be used if this IFD is used in the TIFF save routine. Tags + should be read from tags_v1 if legacy_api == true. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. + """ + if ifh[:4] not in PREFIXES: + raise SyntaxError("not a TIFF file (header %r not valid)" % ifh) + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self.reset() + self.next, = self._unpack("L", ifh[4:]) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def as_dict(self): + """Return a dictionary of the image's tags. + + .. deprecated:: 3.0.0 + """ + warnings.warn("as_dict() is deprecated. " + + "Please use dict(ifd) instead.", DeprecationWarning) + return dict(self) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. 
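+        For example (illustrative values; enum-coded tags such as
+        Compression stay numeric here):
+        {'ImageWidth': 800, 'ImageLength': 600, 'Compression': 1}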
+ """ + return dict((TiffTags.lookup(code).name, value) + for code, value in self.items()) + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = val, + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + if bytes is str: + def has_key(self, tag): + return tag in self + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + if bytes is str: + basetypes += unicode, + + info = TiffTags.lookup(tag) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = 7 + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = 5 + elif all(isinstance(v, int) for v in values): + if all(v < 2 ** 16 for v in values): + self.tagtype[tag] = 3 + else: + self.tagtype[tag] = 4 + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = 12 + else: + if bytes is str: + # Never treat data as binary by default on Python 2. + self.tagtype[tag] = 2 + else: + if all(isinstance(v, str) for v in values): + self.tagtype[tag] = 2 + + if self.tagtype[tag] == 7 and bytes is not str: + values = [value.encode("ascii", 'replace') if isinstance(value, str) else value] + + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + if info.length == 1: + if legacy_api and self.tagtype[tag] in [5, 10]: + values = values, + dest[tag], = values + else: + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from PIL.TiffTags import TYPES + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func + return func + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func + return func + return decorator + + def _register_basic(idx_fmt_name): + from PIL.TiffTags import TYPES + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = size, lambda self, data, legacy_api=True: ( + self._unpack("{0}{1}".format(len(data) // size, fmt), data)) + _write_dispatch[idx] = lambda self, *values: ( + b"".join(self._pack(fmt, value) for value in values)) + + list(map(_register_basic, + [(3, "H", "short"), (4, "L", "long"), + (6, "b", "signed byte"), (8, "h", "signed short"), + (9, "l", "signed long"), (11, "f", "float"), (12, "d", "double")])) + + @_register_loader(1, 1) # Basic type, except for the legacy API. + def load_byte(self, data, legacy_api=True): + return data + + @_register_writer(1) # Basic type, except for the legacy API. 
+    def write_byte(self, data):
+        return data
+
+    @_register_loader(2, 1)
+    def load_string(self, data, legacy_api=True):
+        if data.endswith(b"\0"):
+            data = data[:-1]
+        return data.decode("latin-1", "replace")
+
+    @_register_writer(2)
+    def write_string(self, value):
+        # remerge of https://github.com/python-pillow/Pillow/pull/1416
+        if sys.version_info[0] == 2:
+            value = value.decode('ascii', 'replace')
+        return b"" + value.encode('ascii', 'replace') + b"\0"
+
+    @_register_loader(5, 8)
+    def load_rational(self, data, legacy_api=True):
+        vals = self._unpack("{0}L".format(len(data) // 4), data)
+        combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b)
+        return tuple(combine(num, denom)
+                     for num, denom in zip(vals[::2], vals[1::2]))
+
+    @_register_writer(5)
+    def write_rational(self, *values):
+        return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31))
+                        for frac in values)
+
+    @_register_loader(7, 1)
+    def load_undefined(self, data, legacy_api=True):
+        return data
+
+    @_register_writer(7)
+    def write_undefined(self, value):
+        return value
+
+    @_register_loader(10, 8)
+    def load_signed_rational(self, data, legacy_api=True):
+        vals = self._unpack("{0}l".format(len(data) // 4), data)
+        combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b)
+        return tuple(combine(num, denom)
+                     for num, denom in zip(vals[::2], vals[1::2]))
+
+    @_register_writer(10)
+    def write_signed_rational(self, *values):
+        return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30))
+                        for frac in values)
+
+    def _ensure_read(self, fp, size):
+        ret = fp.read(size)
+        if len(ret) != size:
+            raise IOError("Corrupt EXIF data. " +
+                          "Expecting to read %d bytes but only got %d. " %
+                          (size, len(ret)))
+        return ret
+
+    def load(self, fp):
+
+        self.reset()
+        self._offset = fp.tell()
+
+        try:
+            for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]):
+                tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12))
+                if DEBUG:
+                    tagname = TiffTags.lookup(tag).name
+                    typname = TYPES.get(typ, "unknown")
+                    print("tag: %s (%d) - type: %s (%d)" %
+                          (tagname, tag, typname, typ), end=" ")
+
+                try:
+                    unit_size, handler = self._load_dispatch[typ]
+                except KeyError:
+                    if DEBUG:
+                        print("- unsupported type", typ)
+                    continue # ignore unsupported type
+                size = count * unit_size
+                if size > 4:
+                    here = fp.tell()
+                    offset, = self._unpack("L", data)
+                    if DEBUG:
+                        print("Tag Location: %s - Data Location: %s" %
+                              (here, offset), end=" ")
+                    fp.seek(offset)
+                    data = ImageFile._safe_read(fp, size)
+                    fp.seek(here)
+                else:
+                    data = data[:size]
+
+                if len(data) != size:
+                    warnings.warn("Possibly corrupt EXIF data. "
+                                  "Expecting to read %d bytes but only got %d. "
+                                  "Skipping tag %s" % (size, len(data), tag))
+                    continue
+
+                self._tagdata[tag] = data
+                self.tagtype[tag] = typ
+
+                if DEBUG:
+                    if size > 32:
+                        print("- value: <table: %d bytes>" % size)
+                    else:
+                        print("- value:", self[tag])
+
+            self.next, = self._unpack("L", self._ensure_read(fp, 4))
+        except IOError as msg:
+            warnings.warn(str(msg))
+            return
+
+    def save(self, fp):
+
+        if fp.tell() == 0: # skip TIFF header on subsequent pages
+            # tiff header -- PIL always starts the first IFD at offset 8
+            fp.write(self._prefix + self._pack("HL", 42, 8))
+
+        # FIXME What about tagdata?
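+        # (Layout note, matching the reader above: an IFD on disk is a
+        # 2-byte entry count, then that many 12-byte entries -- tag, type,
+        # value count, value-or-offset -- then a 4-byte offset to the next
+        # IFD, with 0 terminating the chain. Values longer than 4 bytes
+        # are written after the entry table and referenced by offset,
+        # which is what the passes below compute.)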
+        fp.write(self._pack("H", len(self._tags_v2)))
+
+        entries = []
+        offset = fp.tell() + len(self._tags_v2) * 12 + 4
+        stripoffsets = None
+
+        # pass 1: convert tags to binary format
+        # always write tags in ascending order
+        for tag, value in sorted(self._tags_v2.items()):
+            if tag == STRIPOFFSETS:
+                stripoffsets = len(entries)
+            typ = self.tagtype.get(tag)
+            if DEBUG:
+                print("Tag %s, Type: %s, Value: %s" % (tag, typ, value))
+            values = value if isinstance(value, tuple) else (value,)
+            data = self._write_dispatch[typ](self, *values)
+            if DEBUG:
+                tagname = TiffTags.lookup(tag).name
+                typname = TYPES.get(typ, "unknown")
+                print("save: %s (%d) - type: %s (%d)" %
+                      (tagname, tag, typname, typ), end=" ")
+                if len(data) >= 16:
+                    print("- value: <table: %d bytes>" % len(data))
+                else:
+                    print("- value:", values)
+
+            # count is sum of lengths for string and arbitrary data
+            count = len(data) if typ in [2, 7] else len(values)
+            # figure out if data fits into the entry
+            if len(data) <= 4:
+                entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
+            else:
+                entries.append((tag, typ, count, self._pack("L", offset), data))
+                offset += (len(data) + 1) // 2 * 2 # pad to word
+
+        # update strip offset data to point beyond auxiliary data
+        if stripoffsets is not None:
+            tag, typ, count, value, data = entries[stripoffsets]
+            if data:
+                raise NotImplementedError(
+                    "multistrip support not yet implemented")
+            value = self._pack("L", self._unpack("L", value)[0] + offset)
+            entries[stripoffsets] = tag, typ, count, value, data
+
+        # pass 2: write entries to file
+        for tag, typ, count, value, data in entries:
+            if DEBUG > 1:
+                print(tag, typ, count, repr(value), repr(data))
+            fp.write(self._pack("HHL4s", tag, typ, count, value))
+
+        # -- overwrite here for multi-page --
+        fp.write(b"\0\0\0\0") # end of entries
+
+        # pass 3: write auxiliary data to file
+        for tag, typ, count, value, data in entries:
+            fp.write(data)
+            if len(data) & 1:
+                fp.write(b"\0")
+
+        return offset
+
+ImageFileDirectory_v2._load_dispatch = _load_dispatch
+ImageFileDirectory_v2._write_dispatch = _write_dispatch
+for idx, name in TYPES.items():
+    name = name.replace(" ", "_")
+    setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1])
+    setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx])
+del _load_dispatch, _write_dispatch, idx, name
+
+
+# Legacy ImageFileDirectory support.
+class ImageFileDirectory_v1(ImageFileDirectory_v2):
+    """This class represents the **legacy** interface to a TIFF tag directory.
+
+    Exposes a dictionary interface of the tags in the directory::
+
+        ifd = ImageFileDirectory_v1()
+        ifd[key] = 'Some Data'
+        ifd.tagtype[key] = 2
+        print ifd[key]
+        ('Some Data',)
+
+    Also contains a dictionary of tag types as read from the tiff image file,
+    `~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.
+
+    Values are returned as a tuple.
+
+    .. deprecated:: 3.0.0
+    """
+    def __init__(self, *args, **kwargs):
+        ImageFileDirectory_v2.__init__(self, *args, **kwargs)
+        self._legacy_api = True
+
+    tags = property(lambda self: self._tags_v1)
+    tagdata = property(lambda self: self._tagdata)
+
+    @classmethod
+    def from_v2(cls, original):
+        """ Returns an
+        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+        instance with the same data as is contained in the original
+        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+        instance.
+ + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = val, + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. + +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + + def _open(self): + "Open the first image in a TIFF file" + + # Header + ifh = self.fp.read(8) + + # image file directory (tag dictionary) + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy tag/ifd entries will be filled in later + self.tag = self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self.__fp = self.fp + self._frame_pos = [] + self._n_frames = None + self._is_animated = None + + if DEBUG: + print("*** TiffImageFile._open ***") + print("- __first:", self.__first) + print("- ifh: ", ifh) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + "Select a given frame as current image" + self._seek(max(frame, 0)) # Questionable backwards compatibility. + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. 
+ Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self.__fp + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + if DEBUG: + print("Seeking to frame %s, on frame %s, " + "__next %s, location: %s" % + (frame, self.__frame, self.__next, self.fp.tell())) + # reset python3 buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + if DEBUG: + print("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + "Return the current frame number" + return self.__frame + + def _decoder(self, rawmode, layer, tile=None): + "Setup decoder contexts" + + args = None + if rawmode == "RGB" and self._planar_configuration == 2: + rawmode = rawmode[layer] + compression = self._compression + if compression == "raw": + args = (rawmode, 0, 1) + elif compression == "jpeg": + args = rawmode, "" + if JPEGTABLES in self.tag_v2: + # Hack to handle abbreviated JPEG headers + # FIXME This will fail with more than one value + self.tile_prefix, = self.tag_v2[JPEGTABLES] + elif compression == "packbits": + args = rawmode + elif compression == "tiff_lzw": + args = rawmode + if PREDICTOR in self.tag_v2: + # Section 14: Differencing Predictor + self.decoderconfig = (self.tag_v2[PREDICTOR],) + + if ICCPROFILE in self.tag_v2: + self.info['icc_profile'] = self.tag_v2[ICCPROFILE] + + return args + + def load(self): + if self.use_load_libtiff: + return self._load_libtiff() + return super(TiffImageFile, self).load() + + def _load_libtiff(self): + """ Overload method triggered when we detect a compressed tiff + Calls out to libtiff """ + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.load_prepare() + + if not len(self.tile) == 1: + raise IOError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = self.tile[0][3] + (self.tag_v2.offset,) + decoder = Image._getdecoder(self.mode, 'libtiff', args, + self.decoderconfig) + try: + decoder.setimage(self.im, extents) + except ValueError: + raise IOError("Couldn't set the image") + + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an IOError if there's no underlying fp. Easier to + # deal with here by reordering. + if DEBUG: + print("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif hasattr(self.fp, "fileno"): + # we've got a actual file on disk, pass in the fp. 
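+            # (Note: when _setup() found a usable file descriptor it packed
+            # the dup'd fd into the tile args, so the libtiff decoder reads
+            # from that fd directly; the byte string handed to decode()
+            # below is only a placeholder buffer.)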
+ if DEBUG: + print("have fileno, calling fileno version of the decoder.") + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + if DEBUG: + print("don't have fileno or getvalue. just reading") + # UNDONE -- so much for that buffer size thing. + n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + # libtiff closed the fp in a, we need to close self.fp, if possible + if hasattr(self.fp, 'close'): + if not self.__next: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise IOError(err) + + self.load_end() + + return Image.Image.load(self) + + def _setup(self): + "Setup this image object based on current tags" + + if 0xBC01 in self.tag_v2: + raise IOError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + fillorder = self.tag_v2.get(FILLORDER, 1) + + if DEBUG: + print("*** Summary ***") + print("- compression:", self._compression) + print("- photometric_interpretation:", photo) + print("- planar_configuration:", self._planar_configuration) + print("- fill_order:", fillorder) + + # size + xsize = self.tag_v2.get(IMAGEWIDTH) + ysize = self.tag_v2.get(IMAGELENGTH) + self.size = xsize, ysize + + if DEBUG: + print("- size:", self.size) + + sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if (len(sampleFormat) > 1 + and max(sampleFormat) == min(sampleFormat) == 1): + # SAMPLEFORMAT is properly per band, so an RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + sampleFormat = (1,) + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, photo, sampleFormat, fillorder, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError: + if DEBUG: + print("- unsupported format") + raise SyntaxError("unknown pixel mode") + + if DEBUG: + print("- raw mode:", rawmode) + print("- pil mode:", self.mode) + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION, 1) + yres = self.tag_v2.get(Y_RESOLUTION, 1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT, 1) + if resunit == 2: # dots per inch + self.info["dpi"] = xres, yres + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = xres * 2.54, yres * 2.54 + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = l = 0 + self.tile = [] + self.use_load_libtiff = False + if STRIPOFFSETS in self.tag_v2: + # striped image + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + if READ_LIBTIFF or self._compression in ["tiff_ccitt", "group3", + "group4", "tiff_jpeg", + "tiff_adobe_deflate", + "tiff_thunderscan", + "tiff_deflate", + "tiff_sgilog", + "tiff_sgilog24", + "tiff_raw_16"]: + # if DEBUG: + # print "Activating g4 compression for whole file" + + # Decoder expects entire file as one tile. 
+ # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # use the _load_libtiff function. + + self.use_load_libtiff = True + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + + # libtiff closes the file descriptor, so pass in a dup. + try: + fp = hasattr(self.fp, "fileno") and \ + os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell for py3 + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except IOError: + # io.BytesIO have a fileno, but returns an IOError if + # it doesn't use a file descriptor. + fp = False + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + key = ( + self.tag_v2.prefix, photo, sampleFormat, 1, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if self.mode in ('I;16B', 'I;16') and 'I;16' in rawmode: + rawmode = 'I;16N' + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, fp) + self.tile.append( + (self._compression, + (0, 0, w, ysize), + 0, a)) + a = None + + else: + for i in range(len(offsets)): + a = self._decoder(rawmode, l, i) + self.tile.append( + (self._compression, + (0, min(y, ysize), w, min(y+h, ysize)), + offsets[i], a)) + if DEBUG: + print("tiles: ", self.tile) + y = y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + elif TILEOFFSETS in self.tag_v2: + # tiled image + w = self.tag_v2.get(322) + h = self.tag_v2.get(323) + a = None + for o in self.tag_v2[TILEOFFSETS]: + if not a: + a = self._decoder(rawmode, l) + # FIXME: this doesn't work if the image size + # is not a multiple of the tile size... 
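+                # (Each entry appended to self.tile is a standard ImageFile
+                # tile descriptor: (decoder name, bounding box, file offset,
+                # decoder args).)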
+ self.tile.append( + (self._compression, + (x, y, x+w, y+h), + o, a)) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + else: + if DEBUG: + print("- unsupported data organization") + raise SyntaxError("unknown data organization") + + # fixup palette descriptor + + if self.mode == "P": + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TIFF" % im.mode) + + ifd = ImageFileDirectory_v2(prefix=prefix) + + compression = im.encoderinfo.get('compression', + im.info.get('compression', 'raw')) + + libtiff = WRITE_LIBTIFF or compression != 'raw' + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + info = im.encoderinfo.get("tiffinfo", {}) + if DEBUG: + print("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except: + pass # might not be an IFD, Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, 'tag_v2'): + # preserve tags from original TIFF image file + for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, + IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype.get(key, None) + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + if "icc_profile" in im.info: + ifd[ICCPROFILE] = im.info["icc_profile"] + + for key, name in [(IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright")]: + name_with_spaces = name.replace("_", " ") + if "_" in name and 
name_with_spaces in im.encoderinfo: + warnings.warn("%r is deprecated; use %r instead" % + (name_with_spaces, name), DeprecationWarning) + ifd[key] = im.encoderinfo[name.replace("_", " ")] + if name in im.encoderinfo: + ifd[key] = im.encoderinfo[name] + + dpi = im.encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + ifd[PHOTOMETRIC_INTERPRETATION] = photo + + if im.mode == "P": + lut = im.im.getpalette("RGB", "RGB;L") + ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) + + # data orientation + stride = len(bits) * ((im.size[0]*bits[0]+7)//8) + ifd[ROWSPERSTRIP] = im.size[1] + ifd[STRIPBYTECOUNTS] = stride * im.size[1] + ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if libtiff: + if DEBUG: + print("Saving using libtiff encoder") + print("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS] + atts = {} + # bits per sample is a single short in the tiff directory, not a list. + atts[BITSPERSAMPLE] = bits[0] + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + legacy_ifd = {} + if hasattr(im, 'tag'): + legacy_ifd = im.tag.to_v2() + for tag, value in itertools.chain(ifd.items(), + getattr(im, 'tag_v2', {}).items(), + legacy_ifd.items()): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. It will segfault if it attempts + # to add a custom tag without the dictionary entry + # + # UNDONE -- add code for the custom dictionary + if tag not in TiffTags.LIBTIFF_CORE: + continue + if tag not in atts and tag not in blocklist: + if isinstance(value, unicode if bytes is str else str): + atts[tag] = value.encode('ascii', 'replace') + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + if DEBUG: + print("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. 
+ if im.mode in ('I;16B', 'I;16'): + rawmode = 'I;16N' + + a = (rawmode, compression, _fp, filename, atts) + # print(im.mode, compression, a, im.encoderconfig) + e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) + e.setimage(im.im, (0, 0)+im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16*1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + + else: + offset = ifd.save(fp) + + ImageFile._save(im, fp, [ + ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1)) + ]) + + # -- helper for multi-page save -- + if "_debug_multipage" in im.encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) + +Image.register_extension(TiffImageFile.format, ".tif") +Image.register_extension(TiffImageFile.format, ".tiff") + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/server/www/packages/packages-windows/x86/PIL/TiffTags.py b/server/www/packages/packages-windows/x86/PIL/TiffTags.py new file mode 100644 index 0000000..ecc63ba --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/TiffTags.py @@ -0,0 +1,442 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. +## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", type=None, length=0, enum=None): + return super(TagInfo, cls).__new__( + cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + return self.enum.get(value, value) + + +def lookup(tag): + """ + :param tag: Integer tag number + :returns: Taginfo namedtuple, From the TAGS_V2 info if possible, + otherwise just populating the value and name from TAGS. + If the tag is not recognized, "unknown" is returned for the name + + """ + + return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, 'unknown'))) + + +## +# Map tag numbers to tag info. +# +# id: (Name, Type, Length, enum_values) +# +# The length here differs from the length in the tiff spec. For +# numbers, the tiff spec is for the number of fields returned. We +# agree here. For string-like types, the tiff spec uses the length of +# field in bytes. In Pillow, we are using the number of expected +# fields, in general 1 for string-like types. 
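+#
+# A short usage sketch of the lookup helper defined above (the values come
+# from the TAGS_V2 table below):
+#
+#     >>> from PIL.TiffTags import lookup
+#     >>> info = lookup(256)
+#     >>> info.name, info.type, info.length
+#     ('ImageWidth', 4, 1)
+#     >>> lookup(60123).name   # unregistered tags fall back to 'unknown'
+#     'unknown'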
+ + +BYTE = 1 +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 +UNDEFINED = 7 +SIGNED_RATIONAL = 10 +DOUBLE = 12 + +TAGS_V2 = { + + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ("Compression", SHORT, 1, + {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4, + "LZW": 5, "JPEG": 6, "PackBits": 32773}), + + 262: ("PhotometricInterpretation", SHORT, 1, + {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RGB Palette": 3, + "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892}), # Adobe DNG + 263: ("Threshholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellLength", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + + 280: ("MinSampleValue", LONG, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), + 297: ("PageNumber", SHORT, 2), + + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticities", SHORT, 6), + + 320: ("ColorMap", SHORT, 0), + 321: ("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + + 340: ("SMinSampleValue", DOUBLE, 0), + 341: ("SMaxSampleValue", DOUBLE, 0), + 342: ("TransferRange", SHORT, 6), + + # obsolete JPEG tags + 512: ("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", LONG, 0), + + 33432: ("Copyright", ASCII, 1), + + # FIXME add more tags here + 34665: ("ExifIFD", SHORT, 1), + 34675: ('ICCProfile', UNDEFINED, 1), + 34853: ('GPSInfoIFD', BYTE, 1), + + # MPInfo + 45056: ("MPFVersion", UNDEFINED, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", UNDEFINED, 1), + 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check + 
45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: ("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1), + 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1), + 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1), + 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1), + 45579: ("YawAngle", SIGNED_RATIONAL, 1), + 45580: ("PitchAngle", SIGNED_RATIONAL, 1), + 45581: ("RollAngle", SIGNED_RATIONAL, 1), + + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 1), + 50839: ("ImageJMetaData", UNDEFINED, 1) +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = {347: 'JPEGTables', + 700: 'XMP', + + # Additional Exif Info + 32932: 'Wang Annotation', + 33434: 'ExposureTime', + 33437: 'FNumber', + 33445: 'MD FileTag', + 33446: 'MD ScalePixel', + 33447: 'MD ColorTable', + 33448: 'MD LabName', + 33449: 'MD SampleInfo', + 33450: 'MD PrepDate', + 33451: 'MD PrepTime', + 33452: 'MD FileUnits', + 33550: 'ModelPixelScaleTag', + 33723: 'IptcNaaInfo', + 33918: 'INGR Packet Data Tag', + 33919: 'INGR Flag Registers', + 33920: 'IrasB Transformation Matrix', + 33922: 'ModelTiepointTag', + 34264: 'ModelTransformationTag', + 34377: 'PhotoshopInfo', + 34735: 'GeoKeyDirectoryTag', + 34736: 'GeoDoubleParamsTag', + 34737: 'GeoAsciiParamsTag', + 34850: 'ExposureProgram', + 34852: 'SpectralSensitivity', + 34855: 'ISOSpeedRatings', + 34856: 'OECF', + 34864: 'SensitivityType', + 34865: 'StandardOutputSensitivity', + 34866: 'RecommendedExposureIndex', + 34867: 'ISOSpeed', + 34868: 'ISOSpeedLatitudeyyy', + 34869: 'ISOSpeedLatitudezzz', + 34908: 'HylaFAX FaxRecvParams', + 34909: 'HylaFAX FaxSubAddress', + 34910: 'HylaFAX FaxRecvTime', + 36864: 'ExifVersion', + 36867: 'DateTimeOriginal', + 36868: 'DateTImeDigitized', + 37121: 'ComponentsConfiguration', + 37122: 'CompressedBitsPerPixel', + 37724: 'ImageSourceData', + 37377: 'ShutterSpeedValue', + 37378: 'ApertureValue', + 37379: 'BrightnessValue', + 37380: 'ExposureBiasValue', + 37381: 'MaxApertureValue', + 37382: 'SubjectDistance', + 37383: 'MeteringMode', + 37384: 'LightSource', + 37385: 'Flash', + 37386: 'FocalLength', + 37396: 'SubjectArea', + 37500: 'MakerNote', + 37510: 'UserComment', + 37520: 'SubSec', + 37521: 'SubSecTimeOriginal', + 37522: 'SubsecTimeDigitized', + 40960: 'FlashPixVersion', + 40961: 'ColorSpace', + 40962: 'PixelXDimension', + 40963: 'PixelYDimension', + 40964: 'RelatedSoundFile', + 40965: 'InteroperabilityIFD', + 41483: 'FlashEnergy', + 41484: 'SpatialFrequencyResponse', + 41486: 'FocalPlaneXResolution', + 41487: 'FocalPlaneYResolution', + 41488: 'FocalPlaneResolutionUnit', + 41492: 'SubjectLocation', + 41493: 'ExposureIndex', + 41495: 'SensingMethod', + 41728: 'FileSource', + 41729: 'SceneType', + 41730: 'CFAPattern', + 41985: 'CustomRendered', + 41986: 'ExposureMode', + 41987: 'WhiteBalance', + 41988: 'DigitalZoomRatio', + 41989: 'FocalLengthIn35mmFilm', + 41990: 'SceneCaptureType', + 41991: 'GainControl', + 41992: 'Contrast', + 41993: 'Saturation', + 41994: 'Sharpness', + 41995: 'DeviceSettingDescription', + 41996: 'SubjectDistanceRange', + 42016: 'ImageUniqueID', + 42032: 'CameraOwnerName', + 42033: 'BodySerialNumber', + 42034: 'LensSpecification', + 
42035: 'LensMake', + 42036: 'LensModel', + 42037: 'LensSerialNumber', + 42112: 'GDAL_METADATA', + 42113: 'GDAL_NODATA', + 42240: 'Gamma', + 50215: 'Oce Scanjob Description', + 50216: 'Oce Application Selector', + 50217: 'Oce Identification Number', + 50218: 'Oce ImageLogic Characteristics', + + # Adobe DNG + 50706: 'DNGVersion', + 50707: 'DNGBackwardVersion', + 50708: 'UniqueCameraModel', + 50709: 'LocalizedCameraModel', + 50710: 'CFAPlaneColor', + 50711: 'CFALayout', + 50712: 'LinearizationTable', + 50713: 'BlackLevelRepeatDim', + 50714: 'BlackLevel', + 50715: 'BlackLevelDeltaH', + 50716: 'BlackLevelDeltaV', + 50717: 'WhiteLevel', + 50718: 'DefaultScale', + 50719: 'DefaultCropOrigin', + 50720: 'DefaultCropSize', + 50721: 'ColorMatrix1', + 50722: 'ColorMatrix2', + 50723: 'CameraCalibration1', + 50724: 'CameraCalibration2', + 50725: 'ReductionMatrix1', + 50726: 'ReductionMatrix2', + 50727: 'AnalogBalance', + 50728: 'AsShotNeutral', + 50729: 'AsShotWhiteXY', + 50730: 'BaselineExposure', + 50731: 'BaselineNoise', + 50732: 'BaselineSharpness', + 50733: 'BayerGreenSplit', + 50734: 'LinearResponseLimit', + 50735: 'CameraSerialNumber', + 50736: 'LensInfo', + 50737: 'ChromaBlurRadius', + 50738: 'AntiAliasStrength', + 50740: 'DNGPrivateData', + 50778: 'CalibrationIlluminant1', + 50779: 'CalibrationIlluminant2', + 50784: 'Alias Layer Metadata' + } + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. 
+# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +LIBTIFF_CORE = set([255, 256, 257, 258, 259, 262, 263, 266, 274, 277, + 278, 280, 281, 340, 341, 282, 283, 284, 286, 287, + 296, 297, 321, 320, 338, 32995, 322, 323, 32998, + 32996, 339, 32997, 330, 531, 530, 301, 532, 333, + # as above + 269 # this has been in our tests forever, and works + ]) + +LIBTIFF_CORE.remove(320) # Array of short, crashes +LIBTIFF_CORE.remove(301) # Array of short, crashes +LIBTIFF_CORE.remove(532) # Array of long, crashes + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for tiled images in libtiff +LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. diff --git a/server/www/packages/packages-windows/x86/PIL/WalImageFile.py b/server/www/packages/packages-windows/x86/PIL/WalImageFile.py new file mode 100644 index 0000000..0cbd1ca --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/WalImageFile.py @@ -0,0 +1,128 @@ +# encoding: utf-8 +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# NOTE: This format cannot be automatically recognized, so the reader +# is not registered for use with Image.open(). To open a WAL file, use +# the WalImageFile.open() function instead. + +# This reader is based on the specification available from: +# http://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +# and has been tested with a few sample files found using google. + +from __future__ import print_function + +from PIL import Image, _binary + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +i32 = _binary.i32le + + +## +# Load texture from a Quake2 WAL texture file. +#

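+# Typical use (the file name here is purely illustrative): +# +#     im = WalImageFile.open("arrow0.wal") +#     im.save("arrow0.png") +#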
    +# By default, a Quake2 standard palette is attached to the texture. +# To override the palette, use the putpalette method. +# +# @param filename WAL file name, or an opened file handle. +# @return An image instance. + +def open(filename): + # FIXME: modify to return a WalImageFile instance instead of + # plain Image object ? + + if hasattr(filename, "read"): + fp = filename + else: + fp = builtins.open(filename, "rb") + + # read header fields + header = fp.read(32+24+32+12) + size = i32(header, 32), i32(header, 36) + offset = i32(header, 40) + + # load pixel data + fp.seek(offset) + + im = Image.frombytes("P", size, fp.read(size[0] * size[1])) + im.putpalette(quake2palette) + + im.format = "WAL" + im.format_description = "Quake2 Texture" + + # strings are null-terminated + im.info["name"] = header[:32].split(b"\0", 1)[0] + next_name = header[56:56+32].split(b"\0", 1)[0] + if next_name: + im.info["next_name"] = next_name + + return im + + +quake2palette = ( + # default palette taken from piffo 0.93 by Hans H盲ggstr枚m + b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" + b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" + b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" + b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" + b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" + b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" + b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" + b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" + b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" + b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" + b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" + b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" + b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" + b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" + b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" + b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" + b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" + b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" + b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" + b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" + b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" + b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" + b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" + b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" + b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" + b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" + b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" + b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" + b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" + b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" + b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" + b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" + b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" + b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" + b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" + 
b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/server/www/packages/packages-windows/x86/PIL/WebPImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/WebPImagePlugin.py new file mode 100644 index 0000000..6837b53 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/WebPImagePlugin.py @@ -0,0 +1,80 @@ +from PIL import Image +from PIL import ImageFile +from io import BytesIO +from PIL import _webp + + +_VALID_WEBP_MODES = { + "RGB": True, + "RGBA": True, + } + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless + } + + +def _accept(prefix): + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + return is_riff_file_format and is_webp_file and is_valid_vp8_mode + + +class WebPImageFile(ImageFile.ImageFile): + + format = "WEBP" + format_description = "WebP image" + + def _open(self): + data, width, height, self.mode, icc_profile, exif = \ + _webp.WebPDecode(self.fp.read()) + + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + + self.size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + + def _getexif(self): + from PIL.JpegImagePlugin import _getexif + return _getexif(self) + + +def _save(im, fp, filename): + image_mode = im.mode + if im.mode not in _VALID_WEBP_MODES: + raise IOError("cannot write mode %s as WEBP" % image_mode) + + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + icc_profile = im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + float(quality), + im.mode, + icc_profile, + exif + ) + if data is None: + raise IOError("cannot write file as WEBP (encoder returned None)") + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +Image.register_save(WebPImageFile.format, _save) + +Image.register_extension(WebPImageFile.format, ".webp") +Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/server/www/packages/packages-windows/x86/PIL/WmfImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/WmfImagePlugin.py new file mode 100644 index 0000000..3163210 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/WmfImagePlugin.py @@ -0,0 +1,173 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret 
Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, _binary + +__version__ = "0.2" + +_handler = None + +if str != bytes: + long = int + + +## +# Install application-specific WMF image handler. +# +# @param handler Handler object. + +def register_handler(handler): + global _handler + _handler = handler + +if hasattr(Image.core, "drawwmf"): + # install default handler (windows only) + + class WmfHandler(object): + + def open(self, im): + im.mode = "RGB" + self.bbox = im.info["wmf_bbox"] + + def load(self, im): + im.fp.seek(0)  # rewind + return Image.frombytes( + "RGB", im.size, + Image.core.drawwmf(im.fp.read(), im.size, self.bbox), + "raw", "BGR", (im.size[0]*3 + 3) & -4, -1 + ) + + register_handler(WmfHandler()) + +# -------------------------------------------------------------------- + +word = _binary.i16le + + +def short(c, o=0): + v = word(c, o) + if v >= 32768: + v -= 65536 + return v + +dword = _binary.i32le + + +# +# -------------------------------------------------------------------- +# Read WMF file + +def _accept(prefix): + return ( + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or + prefix[:4] == b"\x01\x00\x00\x00" + ) + + +## +# Image plugin for Windows metafiles. + +class WmfStubImageFile(ImageFile.StubImageFile): + + format = "WMF" + format_description = "Windows Metafile" + + def _open(self): + + # check placeable header + s = self.fp.read(80) + + if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": + + # placeable windows metafile + + # get units per inch + inch = word(s, 14) + + # get bounding box + x0 = short(s, 6) + y0 = short(s, 8) + x1 = short(s, 10) + y1 = short(s, 12) + + # normalize size to 72 dots per inch + size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + self.info["dpi"] = 72 + + # print self.mode, self.size, self.info + + # sanity check (standard metafile header) + if s[22:26] != b"\x01\x00\t\x00": + raise SyntaxError("Unsupported WMF file format") + + elif dword(s) == 1 and s[40:44] == b" EMF": + # enhanced metafile + + # get bounding box + x0 = dword(s, 8) + y0 = dword(s, 12) + x1 = dword(s, 16) + y1 = dword(s, 20) + + # get frame (in 0.01 millimeter units) + frame = dword(s, 24), dword(s, 28), dword(s, 32), dword(s, 36) + + # normalize size to 72 dots per inch + size = x1 - x0, y1 - y0 + + # calculate dots per inch from bbox and frame + xdpi = 2540 * (x1 - x0) // (frame[2] - frame[0]) + ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1]) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + if xdpi == ydpi: + self.info["dpi"] = xdpi + else: + self.info["dpi"] = xdpi, ydpi + + else: + raise SyntaxError("Unsupported file format") + + self.mode = "RGB" + self.size = size + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr(_handler, "save"): + raise IOError("WMF save handler not installed") + _handler.save(im, fp, filename) + +# +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) +Image.register_save(WmfStubImageFile.format, _save) + +Image.register_extension(WmfStubImageFile.format, ".wmf") +Image.register_extension(WmfStubImageFile.format, ".emf")
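+ +# A rough sanity check of the placeable-header math above, using illustrative +# numbers (not from any real file): with inch == 1440 twips and a bounding box +# 2880 twips wide, size[0] == 2880 * 72 // 1440 == 144 pixels at the +# normalized 72 dpi.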
diff --git a/server/www/packages/packages-windows/x86/PIL/XVThumbImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/XVThumbImagePlugin.py new file mode 100644 index 0000000..9fe9ca1 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/XVThumbImagePlugin.py @@ -0,0 +1,79 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XV Thumbnail file handler by Charles E. "Gene" Cash +# (gcash@magicnet.net) +# +# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, +# available from ftp://ftp.cis.upenn.edu/pub/xv/ +# +# history: +# 98-08-15 cec created (b/w only) +# 98-12-09 cec added color palette +# 98-12-28 fl added to PIL (with only a few very minor modifications) +# +# To do: +# FIXME: make save work (this requires quantization support) +# + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.1" + +o8 = _binary.o8 + +_MAGIC = b"P7 332" + +# standard color palette for thumbnails (RGB332) +PALETTE = b"" +for r in range(8): + for g in range(8): + for b in range(4): + PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3)) + +def _accept(prefix): + return prefix[:6] == _MAGIC + + +## +# Image plugin for XV thumbnail images. + +class XVThumbImageFile(ImageFile.ImageFile): + + format = "XVThumb" + format_description = "XV thumbnail image" + + def _open(self): + + # check magic + if self.fp.read(6) != _MAGIC: + raise SyntaxError("not an XV thumbnail file") + + # Skip to beginning of next line + self.fp.readline() + + # skip info comments + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("Unexpected EOF reading XV thumbnail file") + if s[:1] != b'#': + break + + # parse header line (already read) + s = s.strip().split() + + self.mode = "P" + self.size = int(s[0]), int(s[1]) + + self.palette = ImagePalette.raw("RGB", PALETTE) + + self.tile = [ + ("raw", (0, 0)+self.size, + self.fp.tell(), (self.mode, 0, 1) + )] + +# -------------------------------------------------------------------- + +Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept) diff --git a/server/www/packages/packages-windows/x86/PIL/XbmImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/XbmImagePlugin.py new file mode 100644 index 0000000..bca8828 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/XbmImagePlugin.py @@ -0,0 +1,96 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XBM File handling +# +# History: +# 1995-09-08 fl Created +# 1996-11-01 fl Added save support +# 1997-07-07 fl Made header parser more tolerant +# 1997-07-22 fl Fixed yet another parser bug +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) +# 2004-02-24 fl Allow some whitespace before first #define +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re +from PIL import Image, ImageFile + +__version__ = "0.6" + +# XBM header +xbm_head = re.compile( + b"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+" + b"#define[ \t]+.*_height[ \t]+(?P<height>[0-9]+)[\r\n]+" + b"(?P<hotspot>" + b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+" + b"#define[ \t]+[^_]*_y_hot[ \t]+(?P<yhot>[0-9]+)[\r\n]+" + b")?" + b"[\\000-\\377]*_bits\\[\\]" +) + + +def _accept(prefix): + return prefix.lstrip()[:7] == b"#define" + + +## +# Image plugin for X11 bitmaps.
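+# +# For illustration, the xbm_head pattern above matches a header of this +# (hypothetical) shape: +# +#     #define test_width 16 +#     #define test_height 16 +#     static char test_bits[] = {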
+ +class XbmImageFile(ImageFile.ImageFile): + + format = "XBM" + format_description = "X11 Bitmap" + + def _open(self): + + m = xbm_head.match(self.fp.read(512)) + + if m: + + xsize = int(m.group("width")) + ysize = int(m.group("height")) + + if m.group("hotspot"): + self.info["hotspot"] = ( + int(m.group("xhot")), int(m.group("yhot")) + ) + + self.mode = "1" + self.size = xsize, ysize + + self.tile = [("xbm", (0, 0)+self.size, m.end(), None)] + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as XBM" % im.mode) + + fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii')) + fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii')) + + hotspot = im.encoderinfo.get("hotspot") + if hotspot: + fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii')) + fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii')) + + fp.write(b"static char im_bits[] = {\n") + + ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)]) + + fp.write(b"};\n") + + +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) + +Image.register_extension(XbmImageFile.format, ".xbm") + +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/server/www/packages/packages-windows/x86/PIL/XpmImagePlugin.py b/server/www/packages/packages-windows/x86/PIL/XpmImagePlugin.py new file mode 100644 index 0000000..556adb8 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/XpmImagePlugin.py @@ -0,0 +1,130 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XPM File handling +# +# History: +# 1996-12-29 fl Created +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re +from PIL import Image, ImageFile, ImagePalette +from PIL._binary import i8, o8 + +__version__ = "0.2" + +# XPM header +xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") + + +def _accept(prefix): + return prefix[:9] == b"/* XPM */" + + +## +# Image plugin for X11 pixel maps. 
+ +class XpmImageFile(ImageFile.ImageFile): + + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self): + + if not _accept(self.fp.read(9)): + raise SyntaxError("not an XPM file") + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("broken XPM file") + m = xpm_head.match(s) + if m: + break + + self.size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + raise ValueError("cannot read this XPM file") + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for i in range(pal): + + s = self.fp.readline() + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] in b'\r\n': + s = s[:-1] + + c = i8(s[1]) + s = s[2:-2].split() + + for i in range(0, len(s), 2): + + if s[i] == b"c": + + # process colour key + rgb = s[i+1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[0:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = (o8((rgb >> 16) & 255) + + o8((rgb >> 8) & 255) + + o8(rgb & 255)) + else: + # unknown colour + raise ValueError("cannot read this XPM file") + break + + else: + + # missing colour key + raise ValueError("cannot read this XPM file") + + self.mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, bytes): + + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [None] * ysize + + for i in range(ysize): + s[i] = self.fp.readline()[1:xsize+1].ljust(xsize) + + self.fp = None + + return b"".join(s) + +# +# Registry + +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/server/www/packages/packages-windows/x86/PIL/__init__.py b/server/www/packages/packages-windows/x86/PIL/__init__.py new file mode 100644 index 0000000..e5dcf43 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/__init__.py @@ -0,0 +1,60 @@ +# +# The Python Imaging Library. +# $Id$ +# +# package placeholder +# +# Copyright (c) 1999 by Secret Labs AB. +# +# See the README file for information on usage and redistribution. 
+# + +# ;-) + +VERSION = '1.1.7' # PIL version +PILLOW_VERSION = '3.3.0' # Pillow + +_plugins = ['BmpImagePlugin', + 'BufrStubImagePlugin', + 'CurImagePlugin', + 'DcxImagePlugin', + 'DdsImagePlugin', + 'EpsImagePlugin', + 'FitsStubImagePlugin', + 'FliImagePlugin', + 'FpxImagePlugin', + 'FtexImagePlugin', + 'GbrImagePlugin', + 'GifImagePlugin', + 'GribStubImagePlugin', + 'Hdf5StubImagePlugin', + 'IcnsImagePlugin', + 'IcoImagePlugin', + 'ImImagePlugin', + 'ImtImagePlugin', + 'IptcImagePlugin', + 'JpegImagePlugin', + 'Jpeg2KImagePlugin', + 'McIdasImagePlugin', + 'MicImagePlugin', + 'MpegImagePlugin', + 'MpoImagePlugin', + 'MspImagePlugin', + 'PalmImagePlugin', + 'PcdImagePlugin', + 'PcxImagePlugin', + 'PdfImagePlugin', + 'PixarImagePlugin', + 'PngImagePlugin', + 'PpmImagePlugin', + 'PsdImagePlugin', + 'SgiImagePlugin', + 'SpiderImagePlugin', + 'SunImagePlugin', + 'TgaImagePlugin', + 'TiffImagePlugin', + 'WebPImagePlugin', + 'WmfImagePlugin', + 'XbmImagePlugin', + 'XpmImagePlugin', + 'XVThumbImagePlugin'] diff --git a/server/www/packages/packages-windows/x86/PIL/_binary.py b/server/www/packages/packages-windows/x86/PIL/_binary.py new file mode 100644 index 0000000..2f5e8ff --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/_binary.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. +# + +from struct import unpack, pack + +if bytes is str: + def i8(c): + return ord(c) + + def o8(i): + return chr(i & 255) +else: + def i8(c): + return c if c.__class__ is int else c[0] + + def o8(i): + return bytes((i & 255,)) + + +# Input, le = little endian, be = big endian +# TODO: replace with more readable struct.unpack equivalent +def i16le(c, o=0): + """ + Converts a 2-bytes (16 bits) string to an integer. 
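+ + For example (illustrative values): i16le(b"\x34\x12") == 0x1234.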
+ + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return unpack("<H", c[o:o+2])[0] + + +def i32le(c, o=0): + """ + Converts a 4-bytes (32 bits) string to an integer. + + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return unpack("<I", c[o:o+4])[0] + + +def i16be(c, o=0): + return unpack(">H", c[o:o+2])[0] + + +def i32be(c, o=0): + return unpack(">I", c[o:o+4])[0] + + +# Output, le = little endian, be = big endian +def o16le(i): + return pack("<H", i) + + +def o32le(i): + return pack("<I", i) + + +def o16be(i): + return pack(">H", i) + + +def o32be(i): + return pack(">I", i) + +# End of file diff --git a/server/www/packages/packages-windows/x86/PIL/_imaging.pyd b/server/www/packages/packages-windows/x86/PIL/_imaging.pyd new file mode 100644 index 0000000..cecd80d Binary files /dev/null and b/server/www/packages/packages-windows/x86/PIL/_imaging.pyd differ diff --git a/server/www/packages/packages-windows/x86/PIL/_imagingcms.pyd b/server/www/packages/packages-windows/x86/PIL/_imagingcms.pyd new file mode 100644 index 0000000..6ed1456 Binary files /dev/null and b/server/www/packages/packages-windows/x86/PIL/_imagingcms.pyd differ diff --git a/server/www/packages/packages-windows/x86/PIL/_imagingft.pyd b/server/www/packages/packages-windows/x86/PIL/_imagingft.pyd new file mode 100644 index 0000000..3617f51 Binary files /dev/null and b/server/www/packages/packages-windows/x86/PIL/_imagingft.pyd differ diff --git a/server/www/packages/packages-windows/x86/PIL/_imagingmath.pyd b/server/www/packages/packages-windows/x86/PIL/_imagingmath.pyd new file mode 100644 index 0000000..845a52f Binary files /dev/null and b/server/www/packages/packages-windows/x86/PIL/_imagingmath.pyd differ diff --git a/server/www/packages/packages-windows/x86/PIL/_imagingmorph.pyd b/server/www/packages/packages-windows/x86/PIL/_imagingmorph.pyd new file mode 100644 index 0000000..f15966b Binary files /dev/null and b/server/www/packages/packages-windows/x86/PIL/_imagingmorph.pyd differ diff --git a/server/www/packages/packages-windows/x86/PIL/_imagingtk.pyd b/server/www/packages/packages-windows/x86/PIL/_imagingtk.pyd new file mode 100644 index 0000000..7f02fe0 Binary files /dev/null and b/server/www/packages/packages-windows/x86/PIL/_imagingtk.pyd differ diff --git a/server/www/packages/packages-windows/x86/PIL/_tkinter_finder.py b/server/www/packages/packages-windows/x86/PIL/_tkinter_finder.py new file mode 100644 index 0000000..df41591 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/_tkinter_finder.py @@ -0,0 +1,20 @@ +""" Find compiled module linking to Tcl / Tk libraries +""" +import sys + +if sys.version_info[0] > 2: + from tkinter import _tkinter as tk +else: + from Tkinter import tkinter as tk + +if hasattr(sys, 'pypy_find_executable'): + # Tested with packages at https://bitbucket.org/pypy/pypy/downloads. + # PyPies 1.6, 2.0 do not have tkinter built in. PyPy3-2.3.1 gives an + # OSError trying to import tkinter. Otherwise: + try:  # PyPy 5.1, 4.0.0, 2.6.1, 2.6.0 + TKINTER_LIB = tk.tklib_cffi.__file__ + except AttributeError: + # PyPy3 2.4, 2.1-beta1; PyPy 2.5.1, 2.5.0, 2.4.0, 2.3, 2.2, 2.1 + TKINTER_LIB = tk.tkffi.verifier.modulefilename +else: + TKINTER_LIB = tk.__file__ diff --git a/server/www/packages/packages-windows/x86/PIL/_util.py b/server/www/packages/packages-windows/x86/PIL/_util.py new file mode 100644 index 0000000..51c6f68 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/_util.py @@ -0,0 +1,27 @@ +import os + +if bytes is str: + def isStringType(t): + return isinstance(t, basestring) + + def isPath(f): + return isinstance(f, basestring) +else: + def isStringType(t): + return isinstance(t, str) + + def isPath(f): + return isinstance(f, (bytes, str)) + + +# Checks if an object is a string, and that it points to a directory.
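+# For instance (illustrative), isDirectory("/tmp") is True on a typical POSIX +# system, while isDirectory(42) is False because 42 is not a path string.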
+def isDirectory(f): + return isPath(f) and os.path.isdir(f) + + +class deferred_error(object): + def __init__(self, ex): + self.ex = ex + + def __getattr__(self, elt): + raise self.ex diff --git a/server/www/packages/packages-windows/x86/PIL/_webp.pyd b/server/www/packages/packages-windows/x86/PIL/_webp.pyd new file mode 100644 index 0000000..cbaab26 Binary files /dev/null and b/server/www/packages/packages-windows/x86/PIL/_webp.pyd differ diff --git a/server/www/packages/packages-windows/x86/PIL/features.py b/server/www/packages/packages-windows/x86/PIL/features.py new file mode 100644 index 0000000..fd87f09 --- /dev/null +++ b/server/www/packages/packages-windows/x86/PIL/features.py @@ -0,0 +1,67 @@ +from PIL import Image + +modules = { + "pil": "PIL._imaging", + "tkinter": "PIL._imagingtk", + "freetype2": "PIL._imagingft", + "littlecms2": "PIL._imagingcms", + "webp": "PIL._webp", + "transp_webp": ("WEBP", "WebPDecoderBuggyAlpha") +} + + +def check_module(feature): + if feature not in modules: + raise ValueError("Unknown module %s" % feature) + + module = modules[feature] + + method_to_call = None + if type(module) is tuple: + module, method_to_call = module + + try: + imported_module = __import__(module) + except ImportError: + # If a method is being checked, None means that + # rather than the method failing, the module required for the method + # failed to be imported first + return None if method_to_call else False + + if method_to_call: + method = getattr(imported_module, method_to_call) + return method() is True + else: + return True + + +def get_supported_modules(): + supported_modules = [] + for feature in modules: + if check_module(feature): + supported_modules.append(feature) + return supported_modules + +codecs = { + "jpg": "jpeg", + "jpg_2000": "jpeg2k", + "zlib": "zip", + "libtiff": "libtiff" +} + + +def check_codec(feature): + if feature not in codecs: + raise ValueError("Unknown codec %s" % feature) + + codec = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def get_supported_codecs(): + supported_codecs = [] + for feature in codecs: + if check_codec(feature): + supported_codecs.append(feature) + return supported_codecs diff --git a/server/www/teleport/.idea/teleport.iml b/server/www/teleport/.idea/teleport.iml new file mode 100644 index 0000000..a57301b --- /dev/null +++ b/server/www/teleport/.idea/teleport.iml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/server/www/teleport/.idea/webResources.xml b/server/www/teleport/.idea/webResources.xml new file mode 100644 index 0000000..79efac6 --- /dev/null +++ b/server/www/teleport/.idea/webResources.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/server/www/teleport/app/__init__.py b/server/www/teleport/app/__init__.py new file mode 100644 index 0000000..633f866 --- /dev/null +++ b/server/www/teleport/app/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- + diff --git a/server/www/teleport/app/eom_app/__init__.py b/server/www/teleport/app/eom_app/__init__.py new file mode 100644 index 0000000..633f866 --- /dev/null +++ b/server/www/teleport/app/eom_app/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- + diff --git a/server/www/teleport/app/eom_app/app/__init__.py b/server/www/teleport/app/eom_app/app/__init__.py new file mode 100644 index 0000000..ba13bf5 --- /dev/null +++ b/server/www/teleport/app/eom_app/app/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- + +from .core import SwxCore +from 
eom_common.eomcore.logger import * + +__all__ = ['run'] + + +def run(options): + log.initialize() + # log.set_attribute(min_level=LOG_VERBOSE, log_datetime=False, trace_error=TRACE_ERROR_NONE) + # log.set_attribute(min_level=LOG_VERBOSE, trace_error=TRACE_ERROR_NONE) + log.set_attribute(min_level=LOG_DEBUG, trace_error=TRACE_ERROR_FULL) + + _app = SwxCore() + if not _app.init(options): + return 1 + + return _app.run() diff --git a/server/www/teleport/app/eom_app/app/configs.py b/server/www/teleport/app/eom_app/app/configs.py new file mode 100644 index 0000000..1299ccc --- /dev/null +++ b/server/www/teleport/app/eom_app/app/configs.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +import os +import sys + +from eom_common.eomcore.logger import log + +__all__ = ['app_cfg'] + + +class SwxDict(dict): + """ + 鍙互鍍忓睘鎬т竴鏍疯闂瓧鍏哥殑 Key锛寁ar.key 绛夊悓浜 var['key'] + """ + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + # print(self.__class__.__name__) + raise + + def __setattr__(self, name, val): + self[name] = val + + +def swx_dict(obj): + """ + 灏嗕竴涓璞′腑鐨刣ict杞彉涓篍omDict绫诲瀷 + """ + if isinstance(obj, dict): + ret = SwxDict() + for k in obj: + # ret[k] = obj[k] + if isinstance(obj[k], dict): + ret[k] = swx_dict(obj[k]) + else: + ret[k] = obj[k] + else: + ret = obj + return ret + + +class ConfigFile(SwxDict): + def __init__(self, **kwargs): + super().__init__(**kwargs) + # self.__file_name = None + # self.__save_indent = 0 + # self.__loaded = False + + def load_str(self, module, code): + m = type(sys)(module) + m.__module_class__ = type(sys) + m.__file__ = module + + try: + exec(compile(code, module, 'exec'), m.__dict__) + except Exception as e: + log.e('%s\n' % str(e)) + # print(str(e)) + # if eom_dev_conf.debug: + # raise + return False + + for y in m.__dict__: + if '__' == y[:2]: + continue + if isinstance(m.__dict__[y], dict): + self[y] = SwxDict() + self._assign_dict(m.__dict__[y], self[y]) + else: + self[y] = m.__dict__[y] + + return True + + def load(self, full_path, must_exists=True): + try: + f = open(full_path, encoding='utf8') + code = f.read() + f.close() + self.__loaded = True + except IOError: + if must_exists: + log.e('Can not load config file: %s\n' % full_path) + return False + + module = os.path.basename(full_path) + if not self.load_str(module, code): + return False + + self.__file_name = full_path + return True + + """ + def save(self, filename=None): + if filename is None and not self.__loaded: + # log.w('Can not save config file without file name.\n') + return False + _file_name = filename + if _file_name is None: + _file_name = self.__file_name + if _file_name is None: + log.e('Do not known which file to save to.\n') + return False + + # 鎺掑簭鍚庝繚瀛 + m = [(k, self[k]) for k in sorted(self.keys())] + + self.__save_indent = 0 + s = self._save(m) + + # 灏濊瘯鍔犺浇鐢熸垚鐨勮淇濆瓨鐨勯厤缃瓧绗︿覆锛屽鏋滃姞杞芥垚鍔燂紝鍒欎繚瀛樺埌鏂囦欢锛屽惁鍒欐姤閿 + x = ConfigFile() + if not x.load_str('_eom_tmp_cfg_data_', s): + log.e('Cannot generate config for save.\n') + return False + + f = open(_file_name, 'w') + f.write('# -*- coding: utf-8 -*-\n\n') + f.write(s) + f.close() + return True + + def _save(self, var): + s = '' + for (k, v) in var: + if self.__save_indent == 0 and k.find('_ConfigFile__') == 0: + # 鏈被鐨勬垚鍛樺彉閲忎笉鐢ㄤ繚瀛 + continue + + if isinstance(v, dict): + if self.__save_indent > 0: + s += "\n%s'%s' : {\n" % ('\t' * self.__save_indent, k) + else: + s += "\n%s%s = {\n" % ('\t' * self.__save_indent, k) + + self.__save_indent += 1 + m = [(x, v[x]) for x in sorted(v.keys())] + s += self._save(m) + 
self.__save_indent -= 1 + if self.__save_indent > 0: + s += "%s},\n\n" % ('\t' * self.__save_indent) + else: + s += "%s}\n\n" % ('\t' * self.__save_indent) + + else: + if isinstance(v, str): + val = "'%s'" % v.replace("'", "\\'") + else: + val = v + + if self.__save_indent > 0: + s += "%s'%s' : %s,\n" % ('\t' * self.__save_indent, k, val) + else: + s += "%s%s = %s\n" % ('\t' * self.__save_indent, k, val) + return s + """ + + def _assign_dict(self, _from, _to): + for y in _from: + if isinstance(_from[y], dict): + _to[y] = SwxDict() + self._assign_dict(_from[y], _to[y]) + else: + _to[y] = _from[y] + + +_cfg = ConfigFile() +del ConfigFile + + +def app_cfg(): + global _cfg + return _cfg + + +if __name__ == '__main__': + cfg = ConfigFile() diff --git a/server/www/teleport/app/eom_app/app/core.py b/server/www/teleport/app/eom_app/app/core.py new file mode 100644 index 0000000..aa320e4 --- /dev/null +++ b/server/www/teleport/app/eom_app/app/core.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +import os +import sys + +import tornado.httpserver +import tornado.ioloop +import tornado.netutil +import tornado.process +import tornado.web +# from eom_app.controller import controllers + +# from eom_common.eomcore.eom_mysql import get_mysql_pool +from eom_common.eomcore.eom_sqlite import get_sqlite_pool +import eom_common.eomcore.utils as utils +from eom_common.eomcore.logger import log +from .configs import app_cfg +from .session import swx_session +cfg = app_cfg() + + +class SwxCore: + def __init__(self): + # self._cfg = ConfigFile() + pass + + def init(self, options): + cfg.debug = False + + cfg.dev_mode = options['dev_mode'] + + if 'log_path' not in options: + return False + else: + cfg.log_path = options['log_path'] + + if not os.path.exists(cfg.log_path): + utils.make_dir(cfg.log_path) + if not os.path.exists(cfg.log_path): + log.e('Can not create log path.\n') + return False + + if 'app_path' not in options: + return False + else: + cfg.app_path = options['app_path'] + + if not self._load_config(options): + return False + + if 'static_path' in options: + cfg.static_path = options['static_path'] + else: + cfg.static_path = os.path.join(options['app_path'], 'static') + + if 'data_path' in options: + cfg.data_path = options['data_path'] + else: + cfg.data_path = os.path.join(options['app_path'], 'data') + + if 'template_path' in options: + cfg.template_path = options['template_path'] + else: + cfg.template_path = os.path.join(options['app_path'], 'view') + + if 'res_path' in options: + cfg.res_path = options['res_path'] + else: + cfg.res_path = os.path.join(options['app_path'], 'res') + + if not swx_session().init(): + return False + + # get_mysql_pool().init(cfg.mysql_ip, cfg.mysql_port, cfg.mysql_user, cfg.mysql_pass) + # db_path = os.path.join(cfg.data_path, 'ts_db.db') + get_sqlite_pool().init(cfg.data_path) + + var_js = os.path.join(cfg.static_path, 'js', 'var.js') + try: + # if not os.path.exists(var_js): + f = open(var_js, 'w') + f.write("\"use strict\";\nvar teleport_ip = \"{}\";\n".format(get_sqlite_pool().get_config_server_ip())) + f.close() + except Exception: + log.e('can not load config: server_ip.\n') + return False + + return True + + def _load_config(self, options): + if 'cfg_path' in options: + _cfg_path = options['cfg_path'] + else: + _cfg_path = os.path.join(options['app_path'], 'conf') + + _cfg_file = os.path.join(_cfg_path, 'web.conf') + if not cfg.load(_cfg_file): + return False + + cfg.cfg_path = _cfg_path + + return True + + @staticmethod + def _daemon(): + # fork for daemon. 
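+ # This is the classic Unix double-fork (explanatory note): the first fork + # lets the parent return while the child is re-parented to init; setsid() + # then detaches the child from its controlling terminal; the second fork + # gives up session leadership so the daemon can never re-acquire one.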
+ if sys.platform == 'win32': + # log.v('os.fork() not support Windows, operation ignored.\n') + return True + + try: + pid = os.fork() + if pid > 0: + # log.w('parent #1 exit.\n') + # return False + os._exit(0) + except OSError: + log.e('fork #1 failed.\n') + os._exit(1) + + # Detach from parent env. + os.chdir('/') + os.umask(0) + os.setsid() + + # Second fork. + try: + pid = os.fork() + if pid > 0: + # log.w('parent #2 exit.\n') + # return False + os._exit(0) + except OSError: + log.e('fork #2 failed.\n') + # return False + os._exit(1) + + # OK I'm daemon now. + for f in sys.stdout, sys.stderr: + f.flush() + si = open('/dev/null', 'r') + so = open('/dev/null', 'a+') + se = open('/dev/null', 'a+') + os.dup2(si.fileno(), sys.stdin.fileno()) + os.dup2(so.fileno(), sys.stdout.fileno()) + os.dup2(se.fileno(), sys.stderr.fileno()) + + # test print() not raise exception. + # print('good.') + + return True + + def run(self): + + settings = { + # + 'cookie_secret': '8946svdABGD345fg98uhIaefEBePIfegOIakjFH43oETzK', + + 'login_url': '/auth/login', + + # 鎸囧畾闈欐佹枃浠剁殑璺緞锛岄〉闈㈡ā鏉夸腑鍙互鐢 {{ static_url('css/main.css') }} 鐨勬柟寮忚皟鐢 + 'static_path': cfg.static_path, + + # 鎸囧畾妯℃澘鏂囦欢鐨勮矾寰 + 'template_path': cfg.template_path, + + # 闃叉璺ㄧ珯浼犺姹傦紝鍙傝 http://old.sebug.net/paper/books/tornado/#_7 + 'xsrf_cookies': False, + + 'autoescape': 'xhtml_escape', + + # 'ui_modules': ui_modules, + # 'debug': True, + + # Debug Mode. + 'compiled_template_cache': False, + 'static_hash_cache': False, + } + + # if cfg.debug: + # settings['compiled_template_cache'] = False + # settings['static_hash_cache'] = False + # settings['compiled_template_cache'] = False + # settings['static_hash_cache'] = False + + from eom_app.controller import controllers + web_app = tornado.web.Application(controllers, **settings) + + # if sys.platform == 'win32': + # web_app.listen(cfg.server_port) + # log.v('Web Server start on http://127.0.0.1:{}\n'.format(cfg.server_port)) + # tornado.ioloop.IOLoop.instance().start() + # else: + # if not cfg.debug: + # if not self._daemon(): + # return False + # # 杩涘叆daemon妯″紡浜嗭紝涓嶅啀鍏佽杈撳嚭淇℃伅鍒版帶鍒跺彴浜 + # log.set_attribute(console=False, filename='/var/log/eom/ts-backend.log') + # log.v('\n=====================================\n') + # + # def _run(port): + # log.v('Web Server start on http://127.0.0.1:{}\n'.format(port)) + # web_app.listen(port) + # tornado.ioloop.IOLoop.instance().start() + # log.w('a tornado io-loop exit.\n') + # + # jobs = list() + # port = cfg.server_port + # for x in range(cfg.server_worker): + # p = multiprocessing.Process(target=_run, args=(port,)) + # jobs.append(p) + # p.start() + # port = port + 1 + # + # else: + # # sockets = tornado.netutil.bind_sockets(cfg.server_port) + # # tornado.process.fork_processes(2) + # # server = tornado.httpserver.HTTPServer(web_app) + # # server.add_sockets(sockets) + # web_app.listen(cfg.server_port) + # log.v('Web Server start on http://127.0.0.1:{}\n'.format(cfg.server_port)) + # tornado.ioloop.IOLoop.instance().start() + + # server = tornado.httpserver.HTTPServer(web_app, ssl_options={ + # 'certfile': os.path.join(cfg.cfg_path, 'ssl', 'server.pem'), + # 'keyfile': os.path.join(cfg.cfg_path, 'ssl', 'server.key') + # }) + # if sys.platform == 'win32': + # log.set_attribute(console=False, filename='/var/log/eom_ts/ts-backend.log') + # else: + # log.set_attribute(console=False, filename='/var/log/eom_ts/ts-backend.log') + + log.v('Web Server start on http://127.0.0.1:{}\n'.format(cfg.server_port)) + + server = tornado.httpserver.HTTPServer(web_app) + try: + 
server.listen(cfg.server_port) + except: + log.e('Can not listen on port {}, maybe it is already in use by another application.\n'.format(cfg.server_port)) + return 0 + + if not cfg.dev_mode: + log_file = os.path.join(cfg.log_path, 'ts-web.log') + log.set_attribute(console=False, filename=log_file) + + tornado.ioloop.IOLoop.instance().start() + return 0 diff --git a/server/www/teleport/app/eom_app/app/session.py b/server/www/teleport/app/eom_app/app/session.py new file mode 100644 index 0000000..8d6c0a2 --- /dev/null +++ b/server/www/teleport/app/eom_app/app/session.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- + +import pickle + +from pymemcache.client.base import Client as mem_client +from .configs import app_cfg + +cfg = app_cfg() + +SESSION_EXPIRE = 3600  # 60*60 + + +# SESSION_EXPIRE = 1800  # 30*60 +# SESSION_EXPIRE = 30 + +class SwxSession: + """ + :type _mem_client: pymemcache.client.base.Client + """ + + def __init__(self): + import builtins + if '__swx_session__' in builtins.__dict__: + raise RuntimeError('SwxSession object exists, you cannot create more than one instance.') + self._session_dict = dict() + + def init(self): + return True + + def add(self, s_id, value): + self._session_dict[s_id] = value + + def set(self, s_id, value): + self._session_dict[s_id] = value + + def get(self, s_id, _default=None): + if s_id in self._session_dict: + v = self._session_dict[s_id] + else: + v = _default + return v + + +def swx_session(): + """ + Get the singleton SwxSession instance. + + :rtype : SwxSession + """ + + import builtins + if '__swx_session__' not in builtins.__dict__: + builtins.__dict__['__swx_session__'] = SwxSession() + return builtins.__dict__['__swx_session__'] diff --git a/server/www/teleport/app/eom_app/controller/__init__.py b/server/www/teleport/app/eom_app/controller/__init__.py new file mode 100644 index 0000000..6691186 --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/__init__.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- + +import os +from . import auth +from . import host +from . import cert +from . import user +from . import pwd +from . import set +from . import group +from . import index +import tornado.web + +from eom_app.app.configs import app_cfg +# from . import error +# from . 
import support +cfg = app_cfg() + +__all__ = ['controllers'] + +controllers = [ + (r'/', index.IndexHandler), + + (r'/auth/login', auth.LoginHandler), + (r'/auth/verify-user', auth.VerifyUser), + (r'/auth/logout', auth.LogoutHandler), + (r'/auth/get-captcha', auth.GetCaptchaHandler), + (r'/auth/verify-captcha', auth.VerifyCaptchaHandler), + (r'/auth/verify-ticket', auth.VerifyTicketHandler), + (r'/auth/modify-pwd', auth.ModifyPwd), + + (r'/group/list', group.GetListHandler), + (r'/group/', group.IndexHandler), + (r'/group', group.IndexHandler), + + (r'/cert/list', cert.GetListHandler), + (r'/cert/', cert.IndexHandler), + (r'/cert', cert.IndexHandler), + + (r'/pwd', pwd.IndexHandler), + (r'/user', user.IndexHandler), + (r'/user/list', user.GetListHandler), + + # add another path to static-path + (r"/log/replay/(.*)", tornado.web.StaticFileHandler, {"path": os.path.join(cfg.data_path, 'replay')}), + (r'/log/list', user.LogList), + (r'/log/record/(.*)/(.*)', user.RecordHandler), + (r'/log/command-log/(.*)/(.*)', user.ComandLogHandler), + (r'/log/get-record-header', user.RecordGetHeader), + (r'/log/get-record-file-info', user.RecordGetInfo), + (r'/log/delete-log', user.DeleteLog), + (r'/log/play-rdp/(.*)/(.*)', user.PlayRdpHandler), + (r'/log/', user.LogHandler), + (r'/log', user.LogHandler), + + (r'/exit', auth.LogoutHandler), + + (r'/user/delete-user', user.DeleteUser), + (r'/user/modify-user', user.ModifyUser), + (r'/user/add-user', user.AddUser), + (r'/user/lock-user', user.LockUser), + (r'/user/reset-user', user.ResetUser), + (r'/user/host-list', user.HostList), + (r'/user/alloc-host', user.AllocHost), + (r'/user/alloc-host-user', user.AllocHostUser), + (r'/user/delete-host', user.DeleteHost), + (r'/user/delete-host-user', user.DeleteHostUser), + (r'/user/auth/(.*)', user.AuthHandler), + + + (r'/host/list', host.GetListHandler), + (r'/host/add-host', host.AddHost), + (r'/host/lock-host', host.LockHost), + (r'/host/delete-host', host.DeleteHost), + (r'/host/export-host', host.ExportHost), + (r'/host/get-cert-list', host.GetCertList), + (r'/host/add-cert', host.AddCert), + (r'/host/delete-cert', host.DeleteCert), + (r'/host/update-cert', host.UpdateCert), + (r'/host/get-group-list', host.GetGrouplist), + (r'/host/add-group', host.AddGroup), + (r'/host/update-group', host.UpdateGroup), + (r'/host/delete-group', host.DeleteGroup), + (r'/host/add-host-to-group', host.AddHostToGroup), + (r'/host/get-host-extend-info', host.GetHostExtendInfo), + (r'/host/update-host-extend-info', host.UpdateHostExtendInfo), + (r'/host/update', host.UpdateHandler), + (r'/host/load-file', host.LoadFile), + (r'/host/', host.IndexHandler), + (r'/host', host.IndexHandler), + (r'/host/get-session-id', host.GetSessionId), + (r'/host/admin-get-session-id', host.AdminGetSessionId), + (r'/host/admin-fast-get-session-id', host.AdminFastGetSessionId), + + (r'/host/sys-user/list', host.SysUserList), + (r'/host/sys-user/add', host.SysUserAdd), + (r'/host/sys-user/update', host.SysUserUpdate), + (r'/host/sys-user/delete', host.SysUserDelete), + + (r'/set/update-config', set.UpdateConfig), + # (r'/set/os-operator', set.OsOperator), + (r'/set/', set.IndexHandler), + (r'/set', set.IndexHandler), + + # (r'/dl/', index.DownloadHandler), + # (r'/dl', index.DownloadHandler), + + (r'/EXIT-4E581FEFD7AB497D833D71A51C61D898', index.ExitHandler), + +] diff --git a/server/www/teleport/app/eom_app/controller/auth.py b/server/www/teleport/app/eom_app/controller/auth.py new file mode 100644 index 0000000..f395dd8 --- /dev/null +++ 
b/server/www/teleport/app/eom_app/controller/auth.py @@ -0,0 +1,276 @@ +# -*- coding: utf-8 -*- + +import json +import random +from random import Random + +from eom_app.module import user +from eom_common.eomcore.logger import * +from .base import SwxBaseHandler, SwxJsonpHandler, SwxAuthJsonHandler +from .helper.captcha import gen_captcha + + +class LoginHandler(SwxBaseHandler): + def get(self): + ref = self.get_argument('ref', '/') + + self.render('auth/login.mako', reference=ref, captcha_random=random.random()) + + +class VerifyUser(SwxJsonpHandler): + def get(self): + code = self.get_session('captcha') + if code is None: + self.write_jsonp(-1) + return + + captcha = self.get_argument('captcha', None) + username = self.get_argument('username', None) + userpwd = self.get_argument('userpwd', None) + + if captcha is None or username is None: + self.write_jsonp(-1) + return + if code.lower() != captcha.lower(): + self.write_jsonp(-1) + return + + self.del_session('captcha') + + # log.v('try to set-session.\n') + try: + user_id, account_type, nickname = user.verify_user(username, userpwd) + if user_id == 0: + self.write_jsonp(-1) + return + + _user = self.get_session('user') + if _user is None: + _user = dict() + _user['id'] = 0 + _user['name'] = 'guest' + _user['nick_name'] = '璁垮' + _user['status'] = 0 + _user['phone_num'] = '110' + _user['type'] = 0 + _user['permission'] = 0 + _user['is_login'] = False + + _user['id'] = user_id + _user['is_login'] = True + _user['name'] = username + _user['nick_name'] = nickname + _user['type'] = account_type + + self.set_session('user', _user) + # log.v('set session ok.\n') + return self.write_jsonp(0) + + except: + log.e('can not set session.') + self.write_jsonp(-1) + + +class LogoutHandler(SwxBaseHandler): + def get(self): + user = self.get_current_user() + user['is_login'] = False + self.set_session('user', user) + + # self.render('login/login.mako', captcha_random=random.random()) + self.redirect('/auth/login') + + +class GetCaptchaHandler(SwxBaseHandler): + def get(self): + code, img_data = gen_captcha() + self.set_session('captcha', code) + self.set_header('Content-Type', 'image/jpeg') + self.write(img_data) + + +class VerifyCaptchaHandler(SwxJsonpHandler): + def get(self): + code = self.get_session('captcha') + if code is None: + self.write_jsonp(-1) + return + + captcha = self.get_argument('captcha', None) + if captcha is None: + self.write_jsonp(-1) + return + + if code.lower() != captcha.lower(): + self.write_jsonp(-1) + return + + self.write_jsonp(0) + + +class VerifyTicketHandler(SwxJsonpHandler): + def get(self): + # print('verify-ticket') + + code = self.get_session('captcha') + if code is None: + self.write_jsonp(-1) + return + + captcha = self.get_argument('captcha', None) + username = self.get_argument('username', None) + user_id = self.get_argument('user_id', None) + ticket = self.get_argument('ticket', None) + + if captcha is None or username is None or ticket is None: + self.write_jsonp(-1) + return + + if code.lower() != captcha.lower(): + self.write_jsonp(-1) + return + + self.del_session('captcha') + + # if not self.is_ticket_valid(username, ticket): + # self.write_jsonp(-1) + # return + + # log.v('try to set-session.\n') + try: + _user = user.get_user_by_id(user_id) + if _user is None: + self.write_jsonp(-1) + return + + # _user = dict() + # _user['id'] = user_id + # # user['account'] = username # login-name + # _user['name'] = username # real-name + _user['is_login'] = True + + self.set_session('user', _user) + # log.v('set 
session ok.\n') + + self.write_jsonp(0) + except: + log.e('can not set session.') + self.write_jsonp(-1) + + +# +# class QuickLoginHandler(SwxJsonpHandler): +# def get(self): +# # code = self.get_session('captcha') +# # if code is None: +# # self.write_jsonp(-1) +# # return +# +# # captcha = self.get_argument('captcha', None) +# # username = self.get_argument('username', None) +# user_id = self.get_argument('uid', None) +# ticket = self.get_argument('ticket', None) +# +# # if captcha is None or username is None or ticket is None: +# # self.write_jsonp(-1) +# # return +# # +# # if code.lower() != captcha.lower(): +# # self.write_jsonp(-1) +# # return +# +# # self.del_session('captcha') +# +# if not self.is_ticket_valid(ticket): +# self.write_jsonp(-1) +# return +# +# _user = user.get_user_by_id(user_id) +# if _user is None: +# self.write_jsonp(-1) +# return +# +# # _user = dict() +# # _user['id'] = user_id +# # # user['account'] = username # login-name +# # _user['name'] = username # real-name +# _user['is_login'] = True +# +# log.v('quick login ok, try to set session.\n') +# try: +# self.set_session('user', _user) +# log.v('set session ok.\n') +# self.write_jsonp(0) +# except: +# log.v('set session failed.\n') +# self.write_jsonp(1) + + +class ModifyPwd(SwxAuthJsonHandler): + def post(self): + # print('verify-ticket') + + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + _old_pwd = args['o_pwd'] + _new_pwd = args['n_pwd'] + + if _old_pwd is None or _new_pwd is None: + self.write_json(-1) + return + + user_info = self.get_current_user() + try: + ret = user.modify_pwd(_old_pwd, _new_pwd, user_info['id']) + code = dict() + code['code'] = ret + self.write_json(0, data=code) + except: + log.e('can not set session.') + self.write_json(-1) + + +# +# class GetEncData(SwxAuthJsonHandler): +# def post(self): +# # print('verify-ticket') +# +# args = self.get_argument('args', None) +# if args is not None: +# args = json.loads(args) +# # print('args', args) +# else: +# # ret = {'code':-1} +# self.write_json(-1) +# return +# _pwd = args['pwd'] +# +# if _pwd is None: +# self.write_json(-1) +# return +# +# try: +# ret, data = user.get_enc_data_helper(_pwd) +# code = dict() +# code['code'] = ret +# code['data'] = data +# self.write_json(0, data=code) +# except: +# log.e('can not set session.') +# self.write_json(-1) + + +def random_str(randomlength=8): + _str = '' + chars = 'AaBbCcDdEeFfGgHhIiJjKkLlMmNnOoPpQqRrSsTtUuVvWwXxYyZz0123456789' + length = len(chars) - 1 + _random = Random() + for i in range(randomlength): + _str += chars[_random.randint(0, length)] + return _str diff --git a/server/www/teleport/app/eom_app/controller/base.py b/server/www/teleport/app/eom_app/controller/base.py new file mode 100644 index 0000000..c4ea6f5 --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- + +import binascii +import os +import time +from urllib.parse import quote + +import mako.lookup +import mako.template +import tornado.web +from tornado.escape import json_encode + +from eom_app.app.session import swx_session + + +class SwxBaseHandler(tornado.web.RequestHandler): + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + self._s_id = None + self._s_val = dict() + + def initialize(self): + template_path = self.get_template_path() + self.lookup = 
mako.lookup.TemplateLookup(directories=[template_path], input_encoding='utf-8', output_encoding='utf-8') + + def render_string(self, template_name, **kwargs): + template = self.lookup.get_template(template_name) + namespace = self.get_template_namespace() + namespace.update(kwargs) + return template.render(**namespace) + + def render(self, template_path, **kwargs): + self.finish(self.render_string(template_path, **kwargs)) + + def prepare(self): + + if self.application.settings.get("xsrf_cookies"): + x = self.xsrf_token + + self._s_id = self.get_cookie('_sid') + if self._s_id is None: + self._s_id = 'ywl_{}_{}'.format(int(time.time()), binascii.b2a_hex(os.urandom(8)).decode()) + self.set_cookie('_sid', self._s_id) + swx_session().add(self._s_id, self._s_val) + else: + # print('sid:', self._s_id) + self._s_val = swx_session().get(self._s_id) + if self._s_val is None: + self._s_val = dict() + swx_session().add(self._s_id, self._s_val) + + def set_session(self, name, value): + self._s_val[name] = value + swx_session().set(self._s_id, self._s_val) + + def get_session(self, name, default=None): + if name in self._s_val: + return self._s_val[name] + else: + return default + + def del_session(self, name): + if name in self._s_val: + del self._s_val[name] + + def get_current_user(self): + # return self.get_secure_cookie('user') + user = self.get_session('user') + if user is None: + user = dict() + user['id'] = 0 + user['name'] = 'guest' + user['nick_name'] = 'guest' + user['status'] = 0 + user['phone_num'] = '110' + user['type'] = 0 + user['permission'] = 0 + user['is_login'] = False + + return user + + +class SwxJsonpHandler(SwxBaseHandler): + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + self._js_callback = '' + + def prepare(self): + super().prepare() + + self._js_callback = self.get_argument('callback', None) + if self._js_callback is None: + raise RuntimeError('no callback in URL param.') + + def write_jsonp(self, err_code, data=None): + + self.write(self._js_callback) + self.write('({code:') + self.write('{}'.format(err_code)) + + if data is None: + self.write('})') + return + + if not isinstance(data, dict): + raise RuntimeError('jsonp data should be dict.') + + self.write(',data:') + self.write(json_encode(data)) + self.write('})') + + +class SwxJsonHandler(SwxBaseHandler): + """ + All controllers that return JSON data inherit from this class. The response always contains three fields: code/msg/data. + code: 0 = success, anything else = failure + msg: a string, normally used when code is non-zero to explain the error + data: the business data returned by a successful operation + """ + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + def write_json(self, code, message='', data=None): + if not isinstance(code, int): + raise RuntimeError('`code` must be an integer.') + if not isinstance(message, str): + raise RuntimeError('`msg` must be a string.') + + if data is None: + data = list() + + _ret = {'code': code, 'message': message, 'data': data} + + self.set_header("Content-Type", "application/json") + self.write(json_encode(_ret)) + + def write_raw_json(self, data=None): + + if data is None: + data = list() + + self.set_header("Content-Type", "application/json") + self.write(json_encode(data)) + + +class SwxAuthHandler(SwxBaseHandler): + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + def prepare(self): + super().prepare() + + reference = self.request.uri + + user = self.get_current_user() + if not user['is_login']: + if reference != '/auth/login': + x = 
quote(reference) + self.redirect('/auth/login?ref={}'.format(x)) + else: + self.redirect('/auth/login') + + +class SwxAdminHandler(SwxBaseHandler): + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + def prepare(self): + super().prepare() + + reference = self.request.uri + + user = self.get_current_user() + if user['type'] != 100: + if reference != '/auth/login': + x = quote(reference) + self.redirect('/auth/login?ref={}'.format(x)) + else: + self.redirect('/auth/login') + + +class SwxAuthJsonpHandler(SwxBaseHandler): + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + +class SwxAuthJsonHandler(SwxJsonHandler): + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + def prepare(self): + super().prepare() + + reference = self.request.uri + + user = self.get_current_user() + if not user['is_login']: + if reference != '/auth/login': + x = quote(reference) + self.redirect('/auth/login?ref={}'.format(x)) + else: + self.redirect('/auth/login') + + +class SwxAdminJsonHandler(SwxJsonHandler): + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + + def prepare(self): + super().prepare() + + reference = self.request.uri + + user = self.get_current_user() + if user['type'] != 100: + if reference != '/auth/login': + self.write_json(-99) diff --git a/server/www/teleport/app/eom_app/controller/cert.py b/server/www/teleport/app/eom_app/controller/cert.py new file mode 100644 index 0000000..2da31a0 --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/cert.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +from eom_app.app.configs import app_cfg +from eom_app.module import host +from .base import SwxAdminHandler, SwxAdminJsonHandler + +cfg = app_cfg() + + +class IndexHandler(SwxAdminHandler): + def get(self): + self.render('cert/index.mako') + + +class GetListHandler(SwxAdminJsonHandler): + def post(self): + _certs = host.get_cert_list() + ret = dict() + ret['page_index'] = 10 + ret['total'] = len(_certs) + ret['data'] = _certs + self.write_json(0, data=ret) diff --git a/server/www/teleport/app/eom_app/controller/group.py b/server/www/teleport/app/eom_app/controller/group.py new file mode 100644 index 0000000..34b2c3d --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/group.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +from eom_app.app.configs import app_cfg +from eom_app.module import host +from .base import SwxAdminHandler, SwxAdminJsonHandler + +cfg = app_cfg() + + +class IndexHandler(SwxAdminHandler): + def get(self): + self.render('group/index.mako') + + +class GetListHandler(SwxAdminJsonHandler): + def post(self): + group_list = host.get_group_list() + ret = dict() + ret['page_index'] = 10 + ret['total'] = len(group_list) + ret['data'] = group_list + self.write_json(0, data=ret) diff --git a/server/www/teleport/app/eom_app/controller/helper/__init__.py b/server/www/teleport/app/eom_app/controller/helper/__init__.py new file mode 100644 index 0000000..633f866 --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/helper/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- + diff --git a/server/www/teleport/app/eom_app/controller/helper/captcha.py b/server/www/teleport/app/eom_app/controller/helper/captcha.py new file mode 100644 index 0000000..205246f --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/helper/captcha.py @@ -0,0 +1,53 @@ +# -*- 
diff --git a/server/www/teleport/app/eom_app/controller/helper/captcha.py b/server/www/teleport/app/eom_app/controller/helper/captcha.py
new file mode 100644
index 0000000..205246f
--- /dev/null
+++ b/server/www/teleport/app/eom_app/controller/helper/captcha.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+import random
+from io import BytesIO
+from os import path
+
+from eom_app.app.configs import app_cfg
+from wheezy.captcha.image import background
+from wheezy.captcha.image import captcha
+from wheezy.captcha.image import curve
+from wheezy.captcha.image import noise
+from wheezy.captcha.image import offset
+from wheezy.captcha.image import rotate
+from wheezy.captcha.image import smooth
+from wheezy.captcha.image import text
+from wheezy.captcha.image import warp
+
+cfg = app_cfg()
+_chars = 'ACDEFHJKLMNPQRTVWXY34679'  # visually unambiguous characters only
+
+
+def gen_captcha():
+    _font_dir = path.join(cfg.res_path, 'fonts')
+    captcha_image_t = captcha(
+        width=136,
+        height=36,
+        drawings=[
+            background(color='#eeeeee'),
+            text(fonts=[
+                path.join(_font_dir, '001.ttf')
+            ],
+                font_sizes=(28, 34, 36, 32),
+                color='#63a8f5',
+                squeeze_factor=1.1,
+                drawings=[
+                    warp(dx_factor=0.05, dy_factor=0.05),
+                    rotate(angle=15),
+                    offset()
+                ]),
+            curve(color='#af6fff', width=2, number=9),
+            noise(),
+            smooth()
+        ])
+
+    chars_t = random.sample(_chars, 4)
+    image = captcha_image_t(chars_t)
+
+    out = BytesIO()
+    image.save(out, "jpeg", quality=90)
+    return ''.join(chars_t), out.getvalue()
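gen_captcha() hands back the plaintext characters together with the rendered JPEG, which pairs naturally with the session helpers in base.py. A sketch of the serving side (an editor's illustration; the handler and route are not part of the patch):

    from eom_app.controller.helper.captcha import gen_captcha

    class CaptchaHandler(SwxBaseHandler):
        def get(self):
            chars, jpeg_bytes = gen_captcha()
            # remember the answer for the login check; lower-cased so the
            # comparison can be case-insensitive
            self.set_session('captcha', chars.lower())
            self.set_header('Content-Type', 'image/jpeg')
            self.write(jpeg_bytes)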
绌鸿璺宠繃锛屾暟鎹牸寮忎笉姝g‘鐨勮烦杩囥 + """ + ret = dict() + ret['code'] = 0 + ret['msg'] = list() # 璁板綍璺宠繃鐨勮锛堟牸寮忎笉姝g‘锛屾垨鑰呮暟鎹噸澶嶇瓑锛 + csv_filename = '' + + try: + upload_path = os.path.join(os.path.dirname(__file__), 'csv-files') # 鏂囦欢鐨勬殏瀛樿矾寰 + if not os.path.exists(upload_path): + os.mkdir(upload_path) + file_metas = self.request.files['csvfile'] # 鎻愬彇琛ㄥ崟涓榥ame鈥欎负鈥榝ile鈥欑殑鏂囦欢鍏冩暟鎹 + for meta in file_metas: + now = time.localtime(time.time()) + tmp_name = 'upload-{:04d}{:02d}{:02d}{:02d}{:02d}{:02d}.csv'.format(now.tm_year, now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min, now.tm_sec) + csv_filename = os.path.join(upload_path, tmp_name) + with open(csv_filename, 'wb') as up: + up.write(meta['body']) + + # file encode maybe utf8 or gbk... check it out. + file_encode = None + with open(csv_filename, encoding='gbk') as up: + try: + up.readlines() + file_encode = 'gbk' + except: + log.e('open file:{} -1\n'.format(csv_filename)) + + if file_encode is None: + with open(csv_filename, encoding='utf8') as up: + try: + up.readlines() + file_encode = 'utf8' + except: + log.e('open file:{} -2\n'.format(csv_filename)) + + if file_encode is None: + os.remove(csv_filename) + self.write_json(-2) + log.e('file {} unknown encode.\n'.format(csv_filename)) + return + + with open(csv_filename, encoding=file_encode) as up: + csv_reader = csv.reader(up) + is_first_line = True + for csv_recorder in csv_reader: + # 璺宠繃绗竴琛岋紝閭f槸鏍煎紡璇存槑 + if is_first_line: + is_first_line = False + continue + + # 绌鸿鍒欏拷鐣 + if len(csv_recorder) <= 1: + continue + + # 鏍煎紡閿欒鍒欒褰曞湪妗堬紝鐒跺悗缁х画 + if len(csv_recorder) != 13: + ret['msg'].append({'reason':'鏍煎紡閿欒', 'line':', '.join(csv_recorder)}) + continue + + # pro_type = int(line[6]) + # host_port = int(line[3]) + + host_args = dict() + user_args = dict() + # 鍒嗙粍ID, 鎿嶄綔绯荤粺, IP鍦板潃, 绔彛, 鍗忚, 鐘舵, 鎻忚堪, 绯荤粺鐢ㄦ埛, 绯荤粺瀵嗙爜, 鏄惁鍔犲瘑,闄勫姞鍙傛暟, 瀵嗛挜ID, 璁よ瘉绫诲瀷 + + host_args['group_id'] = int(csv_recorder[0]) + host_args['host_sys_type'] = int(csv_recorder[1]) + host_args['host_ip'] = csv_recorder[2] + host_args['host_port'] = csv_recorder[3] + host_args['protocol'] = csv_recorder[4] + host_args['host_lock'] = csv_recorder[5] + host_args['host_desc'] = csv_recorder[6] + # 鍔犲叆涓涓富鏈猴紙濡傛灉宸茬粡瀛樺湪锛屽垯鐩存帴杩斿洖宸插瓨鍦ㄧ殑鏉$洰鐨刪ost_id锛 + host_id = host.add_host(host_args, must_not_exists=False) + if host_id < 0: + ret['msg'].append({'reason': '娣诲姞涓绘満澶辫触锛屾搷浣滄暟鎹簱澶辫触', 'line': ', '.join(csv_recorder)}) + continue + + user_args['host_id'] = host_id + user_args['user_name'] = csv_recorder[7] + user_pswd = csv_recorder[8] + is_encrpty = int(csv_recorder[9]) + user_args['user_param'] = csv_recorder[10].replace('\\n', '\n') + user_args['cert_id'] = int(csv_recorder[11]) + auth_mode = int(csv_recorder[12]) + user_args['auth_mode'] = auth_mode + user_args['user_pswd'] = '' + ret_code = 0 + if auth_mode == 0: + pass + elif auth_mode == 1: + try: + if is_encrpty == 0: + ret_code, tmp_pswd = get_enc_data(user_pswd) + else: + tmp_pswd = user_pswd + user_args['user_pswd'] = tmp_pswd + except Exception: + ret_code = -1 + log.e('get_enc_data() failed.\n') + + if 0 != ret_code: + ret['msg'].append({'reason': '鍔犲瘑鐢ㄦ埛瀵嗙爜澶辫触锛屽彲鑳藉師鍥狅細Teleport鏍稿績鏈嶅姟鏈惎鍔', 'line': ', '.join(csv_recorder)}) + log.e('get_enc_data() failed, error={}\n'.format(ret_code)) + continue + + elif auth_mode == 2: + pass + # user_args['cert_id'] = int(csv_recorder[7]) + else: + ret['msg'].append({'reason': '鏈煡鐨勮璇佹ā寮', 'line': ', '.join(csv_recorder)}) + log.e('auth_mode unknown\n') + continue + + uid = host.sys_user_add(user_args) + if uid < 0: + if uid == -100: + 
+
+
+class GetListHandler(SwxAuthJsonHandler):
+    def post(self):
+        _user = self.get_session('user')
+        if _user is None:
+            return self.write(-1)
+
+        _type = _user['type']
+        _uname = _user['name']
+
+        filter = dict()
+        order = dict()
+        order['name'] = 'host_id'
+        order['asc'] = True
+        limit = dict()
+        limit['page_index'] = 0
+        limit['per_page'] = 25
+
+        args = self.get_argument('args', None)
+        if args is not None:
+            args = json.loads(args)
+
+        # drop filter entries that mean "no filtering" (0 for the enum
+        # filters, an empty string for the search box)
+        tmp = list()
+        _filter = args['filter']
+        for i in _filter:
+            if i == 'host_sys_type' and _filter[i] == 0:
+                tmp.append(i)
+                continue
+            if i == 'host_group' and _filter[i] == 0:
+                tmp.append(i)
+                continue
+            if i == 'search':
+                _x = _filter[i].strip()
+                if len(_x) == 0:
+                    tmp.append(i)
+                continue
+
+        for i in tmp:
+            del _filter[i]
+
+        filter.update(_filter)
+
+        # clamp paging to sane bounds
+        _limit = args['limit']
+        if _limit['page_index'] < 0:
+            _limit['page_index'] = 0
+        if _limit['per_page'] < 10:
+            _limit['per_page'] = 10
+        if _limit['per_page'] > 100:
+            _limit['per_page'] = 100
+
+        limit.update(_limit)
+
+        _order = args['order']
+        if _order is not None:
+            order['name'] = _order['k']
+            order['asc'] = _order['v']
+
+        # administrators (type 100) see every host; everyone else sees only
+        # the hosts assigned to their account
+        if _type == 100:
+            _total, _hosts = host.get_all_host_info_list(filter, order, limit)
+        else:
+            filter['account_name'] = _uname
+            _total, _hosts = host.get_host_info_list_by_user(filter, order, limit)
+
+        ret = dict()
+        ret['page_index'] = limit['page_index']
+        ret['total'] = _total
+        ret['data'] = _hosts
+        self.write_json(0, data=ret)
+
+
+class GetGrouplist(SwxAuthJsonHandler):
+    def post(self):
+        group_list = host.get_group_list()
+        self.write_json(0, data=group_list)
+
+
+class UpdateHandler(SwxAuthJsonHandler):
+    def post(self):
+        args = self.get_argument('args', None)
+        if args is not None:
+            args = json.loads(args)
+        else:
+            self.write_json(-1)
+            return
+
+        if 'host_id' not in args or 'kv' not in args:
+            self.write_json(-2)
+            return
+
+        _ret = host.update(args['host_id'], args['kv'])
+        if _ret:
+            self.write_json(0)
+        else:
+            self.write_json(-1)
+
+
+class AddHost(SwxAuthJsonHandler):
+    def post(self):
+        args = self.get_argument('args', None)
+        if args is not None:
+            args = json.loads(args)
+        else:
+            self.write_json(-1)
+            return
+
+        try:
+            ret = host.add_host(args)
+            if ret > 0:
+                self.write_json(0)
+            else:
+                self.write_json(ret)
+        except:
+            self.write_json(-1)
+
+
+class LockHost(SwxAuthJsonHandler):
+    def post(self):
+        args = self.get_argument('args', None)
+        if args is not None:
+            args = json.loads(args)
+        else:
+            self.write_json(-1)
+            return
+
+        host_id = args['host_id']
+        lock = args['lock']
+        try:
+            ret = host.lock_host(host_id, lock)
+            if ret:
+                self.write_json(0)
+            else:
+                self.write_json(-1)
+        except:
+            self.write_json(-1)
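The list endpoints above all accept the same `args` envelope. A sketch of the client-side payload (field names are taken from the parsing code; the values are invented):

    args = {
        'filter': {
            'host_sys_type': 0,   # 0 means "any OS" and is dropped server-side
            'host_group': 2,      # keep only hosts in group 2
            'search': '192.168',  # fuzzy match against host_ip / host_desc
        },
        'order': {'k': 'host_id', 'v': True},        # sort column and ascending flag
        'limit': {'page_index': 0, 'per_page': 25},  # per_page is clamped to 10..100
    }
    # posted as a form field: args=json.dumps(args)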
+
+
+class DeleteHost(SwxAuthJsonHandler):
+    def post(self):
+        args = self.get_argument('args', None)
+        if args is not None:
+            args = json.loads(args)
+        else:
+            self.write_json(-1)
+            return
+
+        host_list = args['host_list']
+        try:
+            ret = host.delete_host(host_list)
+            if ret:
+                self.write_json(0)
+            else:
+                self.write_json(-1)
+        except:
+            self.write_json(-1)
+
+
+class ExportHost(SwxAuthJsonHandler):
+    def post(self):
+        args = self.get_argument('args', None)
+        if args is not None:
+            args = json.loads(args)
+        else:
+            self.write_json(-1)
+            return
+
+        order = dict()
+        order['name'] = 'host_id'
+        order['asc'] = True
+
+        limit = dict()
+        limit['page_index'] = 0
+        limit['per_page'] = 999999
+        _total, _hosts = host.get_all_host_info_list(dict(), order, limit, True)
+        export_file = os.path.join(cfg.static_path, 'download', 'export_csv_data.csv')
+        if os.path.exists(export_file):
+            os.remove(export_file)
+        try:
+            csv_file = open(export_file, 'w', encoding='utf8')
+        except Exception:
+            log.e('can not open {} for writing.\n'.format(export_file))
+            self.write_json(-1)
+            return
+
+        # the header row mirrors the import format and is skipped on import
+        csv_header = "group_id, os_type, ip, port, protocol, lock, description, " \
+                     "user_name, password, encrypted, extra_params, cert_id, auth_mode"
+        csv_file.write(csv_header)
+        csv_file.write('\n')
+
+        try:
+            for h in _hosts:
+                auth_list = h['auth_list']
+                # one exported row per (host, login account) pair
+                for j in auth_list:
+                    row_string = ''
+                    row_string += str(h['group_id'])
+                    row_string += ','
+                    row_string += str(h['host_sys_type'])
+                    row_string += ','
+                    row_string += h['host_ip']
+                    row_string += ','
+                    row_string += str(h['host_port'])
+                    row_string += ','
+                    row_string += str(h['protocol'])
+                    row_string += ','
+                    row_string += str(h['host_lock'])
+                    row_string += ','
+                    row_string += h['host_desc']
+                    row_string += ','
+                    row_string += j['user_name']
+                    row_string += ','
+                    row_string += j['user_pswd']
+                    row_string += ','
+                    row_string += '1'
+                    row_string += ','
+                    user_param = j['user_param']
+                    if len(user_param) > 0:
+                        user_param = user_param.replace('\n', '\\n')
+                    row_string += user_param
+                    row_string += ','
+                    row_string += str(j['cert_id'])
+                    row_string += ','
+                    row_string += str(j['auth_mode'])
+                    csv_file.write(row_string)
+                    csv_file.write('\n')
+        except Exception:
+            log.e('failed to export host list.\n')
+        finally:
+            csv_file.close()
+        url = '/static/download/export_csv_data.csv'
+        ret = dict()
+        ret['url'] = url
+        self.write_json(0, data=ret)
+
+
+class GetCertList(SwxAuthJsonHandler):
+    def post(self):
+        args = self.get_argument('args', None)
+        if args is not None:
+            args = json.loads(args)
+        else:
+            self.write_json(-1)
+            return
+        _certs = 
host.get_cert_list() + if _certs is None: + self.write_json(-1) + return + else: + self.write_json(0, data=_certs) + return + + +class AddCert(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + + cert_pub = args['cert_pub'] + cert_pri = args['cert_pri'] + cert_name = args['cert_name'] + + if len(cert_pri) == 0: + self.write_json(-1) + return + try: + ret_code, cert_pri = get_enc_data(cert_pri) + except Exception as e: + self.write_json(-100) + return + if 0 != ret_code: + self.write_json(ret_code) + return + + try: + ret = host.add_cert(cert_pub, cert_pri, cert_name) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class DeleteCert(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + cert_id = args['cert_id'] + try: + ret = host.delete_cert(cert_id) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class UpdateCert(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + cert_id = args['cert_id'] + cert_pub = args['cert_pub'] + cert_pri = args['cert_pri'] + cert_name = args['cert_name'] + + if len(cert_pri) > 0: + try: + ret_code, cert_pri = get_enc_data(cert_pri) + except Exception as e: + self.write_json(-100) + return + if 0 != ret_code: + self.write_json(ret_code) + return + + try: + ret = host.update_cert(cert_id, cert_pub, cert_pri, cert_name) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class AddGroup(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + group_name = args['group_name'] + try: + ret = host.add_group(group_name) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class UpdateGroup(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + group_id = args['group_id'] + group_name = args['group_name'] + try: + ret = host.update_group(group_id, group_name) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class DeleteGroup(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + group_id = args['group_id'] + try: + ret = host.delete_group(group_id) + if ret == 0: + self.write_json(0) + else: + self.write_json(ret) + return + except: + self.write_json(-1) + return + + +class AddHostToGroup(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + 
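# Editor's note (not part of the patch): AddCert and UpdateCert above never
# store the private key as received -- cert_pri passes through get_enc_data(),
# i.e. the Teleport core service, before it reaches the ts_cert table. The
# client-side payload is plain text; field names are from the handlers above,
# the key material is fake:
args = {
    'cert_name': 'build-server key',
    'cert_pub': 'ssh-rsa AAAA... user@example',
    'cert_pri': '-----BEGIN RSA PRIVATE KEY-----\n...\n-----END RSA PRIVATE KEY-----',
}
# UpdateCert treats an empty cert_pri as "keep the stored key" and only
# re-encrypts when a new one is supplied.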
return + host_list = args['host_list'] + group_id = args['group_id'] + try: + ret = host.add_host_to_group(host_list, group_id) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class GetHostExtendInfo(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + try: + host_id = args['host_id'] + _host = host.get_host_extend_info(host_id) + self.write_json(0, data=_host) + return + except: + self.write_json(-1) + return + + +class UpdateHostExtendInfo(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + host_id = args['host_id'] + + if args['host_auth_mode'] == 1: + if len(args['user_pwd']) > 0: + try: + ret_code, tmp_pswd = get_enc_data(args['user_pwd']) + except Exception as e: + self.write_json(-100) + return + if 0 != ret_code: + self.write_json(ret_code) + return + + args['user_pwd'] = tmp_pswd + + # ip = args['ip'] + # port = args['port'] + # user_name = args['user_name'] + # user_pwd = args['user_pwd'] + # cert_id = args['cert_id'] + # pro_type = args['pro_type'] + ret = host.update_host_extend_info(host_id, args) + if ret: + self.write_json(0) + else: + self.write_json(-1) + + +def post_http(url, values): + try: + # log.v('post_http(), url={}\n'.format(url)) + + user_agent = 'Mozilla/4.0 (compatible;MSIE 5.5; Windows NT)' + # values = { + # 'act': 'login', + # 'login[email]': 'yzhang@i9i8.com', + # 'login[password]': '123456' + # } + values = json.dumps(values) + data = urllib.parse.quote(values).encode('utf-8') + headers = {'User-Agent': user_agent} + req = urllib.request.Request(url=url, data=data, headers=headers) + response = urllib.request.urlopen(req, timeout=3) + the_page = response.read() + info = response.info() + _zip = info.get('Content-Encoding') + if _zip == 'gzip': + the_page = gzip.decompress(the_page) + else: + pass + the_page = the_page.decode() + # print(the_page) + return the_page + except: + return None + + +class GetSessionId(SwxAuthJsonHandler): + def post(self, *args, **kwargs): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + values = dict() + if 'auth_id' not in args: + self.write_json(-1) + return + auth_id = args['auth_id'] + + config_list = host.get_config_list() + ts_server_rpc_ip = '127.0.0.1' + + if 'ts_server_rpc_ip' in config_list: + ts_server_rpc_ip = config_list['ts_server_rpc_ip'] + ts_server_rpc_port = 52080 + if 'ts_server_rpc_port' in config_list: + ts_server_rpc_port = config_list['ts_server_rpc_port'] + + url = 'http://{}:{}/request_session'.format(ts_server_rpc_ip, ts_server_rpc_port) + values['auth_id'] = auth_id + return_data = post_http(url, values) + if return_data is None: + return self.write_json(-1) + return_data = json.loads(return_data) + if 'code' not in return_data: + return self.write_json(-1) + _code = return_data['code'] + if _code != 0: + return self.write_json(_code) + try: + session_id = return_data['data']['sid'] + except: + return self.write_json(-1) + + data = dict() + data['session_id'] = session_id + + return self.write_json(0, data=data) + + +class AdminGetSessionId(SwxAuthJsonHandler): + def post(self, *args, 
**kwargs): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + self.write_json(-1) + return + + if 'host_auth_id' not in args: + self.write_json(-1) + return + host_auth_id = args['host_auth_id'] + + values = host.get_host_auth_info(host_auth_id) + if values is None: + self.write_json(-1) + return + values['account'] = 'admin' + + config_list = host.get_config_list() + ts_server_rpc_ip = '127.0.0.1' + + if 'ts_server_rpc_ip' in config_list: + ts_server_rpc_ip = config_list['ts_server_rpc_ip'] + ts_server_rpc_port = 52080 + if 'ts_server_rpc_port' in config_list: + ts_server_rpc_port = config_list['ts_server_rpc_port'] + + url = 'http://{}:{}/request_session'.format(ts_server_rpc_ip, ts_server_rpc_port) + # values['auth_id'] = auth_id + return_data = post_http(url, values) + if return_data is None: + return self.write_json(-1) + return_data = json.loads(return_data) + if 'code' not in return_data: + return self.write_json(-1) + _code = return_data['code'] + if _code != 0: + return self.write_json(_code) + try: + session_id = return_data['data']['sid'] + except: + return self.write_json(-1) + + data = dict() + data['session_id'] = session_id + + return self.write_json(0, data=data) + + +class AdminFastGetSessionId(SwxAuthJsonHandler): + def post(self, *args, **kwargs): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + self.write_json(-1) + return + + try: + host_ip = args['host_ip'] + host_port = args['host_port'] + sys_type = args['sys_type'] + user_name = args['user_name'] + user_pswd = args['user_pswd'] + host_auth_id = args['host_auth_id'] + cert_id = args['cert_id'] + auth_mode = args['auth_mode'] + protocol = args['protocol'] + user_param = args['user_param'] + except Exception as e: + self.write_json(-2) + return + + values = dict() + values['ip'] = host_ip + values['port'] = int(host_port) + values['systype'] = int(sys_type) + + values['uname'] = user_name + values['uparam'] = user_param + values['authmode'] = int(auth_mode) + + values['protocol'] = int(protocol) + values['enc'] = 1 + + if auth_mode == 1: + if len(user_pswd) == 0: + h = host.get_host_auth_info(host_auth_id) + tmp_pswd = h['uauth'] + else: + ret_code, tmp_pswd = get_enc_data(user_pswd) + if ret_code != 0: + self.write_json(-99) + return + values['uauth'] = tmp_pswd + elif auth_mode == 2: + uauth = host.get_cert_info(int(cert_id)) + if uauth is None: + self.write_json(-100) + return + values['uauth'] = uauth + elif auth_mode == 0: + values['uauth'] = '' + else: + self.write_json(-101) + return + + values['account'] = 'admin' + + config_list = host.get_config_list() + ts_server_rpc_ip = '127.0.0.1' + + if 'ts_server_rpc_ip' in config_list: + ts_server_rpc_ip = config_list['ts_server_rpc_ip'] + ts_server_rpc_port = 52080 + if 'ts_server_rpc_port' in config_list: + ts_server_rpc_port = config_list['ts_server_rpc_port'] + + url = 'http://{}:{}/request_session'.format(ts_server_rpc_ip, ts_server_rpc_port) + # values['auth_id'] = auth_id + return_data = post_http(url, values) + if return_data is None: + return self.write_json(-1) + return_data = json.loads(return_data) + if 'code' not in return_data: + return self.write_json(-1) + _code = return_data['code'] + if _code != 0: + return self.write_json(_code) + try: + session_id = return_data['data']['sid'] + except: + return self.write_json(-1) + + data = dict() + data['session_id'] = session_id + + return self.write_json(0, data=data) + + +class SysUserList(SwxAuthJsonHandler): + 
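# Editor's illustration (not part of the patch): the core-service RPC that all
# three *GetSessionId handlers above funnel into. Field names come from
# AdminFastGetSessionId; the values are invented.
url = 'http://127.0.0.1:52080/request_session'  # ts_server_rpc_ip : ts_server_rpc_port
values = {
    'ip': '192.168.1.10', 'port': 22, 'systype': 1, 'protocol': 1,
    'uname': 'root', 'uauth': '<ciphertext from get_enc_data>', 'authmode': 1,
    'uparam': '', 'enc': 1, 'account': 'admin',
}
reply = json.loads(post_http(url, values))
# on success reply == {'code': 0, 'data': {'sid': '...'}}; the handler returns
# data['sid'] to the browser as session_id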
def post(self, *args, **kwargs): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + self.write_json(-1) + return + try: + host_id = args['host_id'] + except Exception as e: + self.write_json(-2) + return + + data = host.sys_user_list(host_id) + return self.write_json(0, data=data) + + +class SysUserAdd(SwxAuthJsonHandler): + def post(self, *args, **kwargs): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + self.write_json(-1) + return + + try: + auth_mode = args['auth_mode'] + user_pswd = args['user_pswd'] + cert_id = args['cert_id'] + except Exception as e: + self.write_json(-2) + return + + if auth_mode == 1: + if 0 == len(args['user_pswd']): + self.write_json(-1) + return + try: + ret_code, tmp_pswd = get_enc_data(user_pswd) + except Exception as e: + self.write_json(ret_code) + return + if 0 != ret_code: + self.write_json(ret_code) + return + + args['user_pswd'] = tmp_pswd + + if host.sys_user_add(args) < 0: + return self.write_json(-1) + + return self.write_json(0) + + +class SysUserUpdate(SwxAuthJsonHandler): + def post(self, *args, **kwargs): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + + if 'host_auth_id' not in args or 'kv' not in args: + # ret = {'code':-2} + self.write_json(-2) + return + + kv = args['kv'] + if 'auth_mode' not in kv or 'user_pswd' not in kv or 'cert_id' not in kv: + self.write_json(-3) + return + + auth_mode = kv['auth_mode'] + if 'user_pswd' in kv: + user_pswd = kv['user_pswd'] + if 0 == len(user_pswd): + args['kv'].pop('user_pswd') + user_pswd = None + else: + user_pswd = None + + cert_id = kv['cert_id'] + if auth_mode == 1 and user_pswd is not None: + try: + ret_code, tmp_pswd = get_enc_data(user_pswd) + except Exception as e: + self.write_json(-100) + return + if 0 != ret_code: + self.write_json(ret_code) + return + + args['kv']['user_pswd'] = tmp_pswd + + if host.sys_user_update(args['host_auth_id'], args['kv']): + return self.write_json(0) + + return self.write_json(-1) + + +class SysUserDelete(SwxAuthJsonHandler): + def post(self, *args, **kwargs): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + self.write_json(-2) + return + try: + host_auth_id = args['host_auth_id'] + except Exception as e: + self.write_json(-2) + return + + if host.sys_user_delete(host_auth_id): + return self.write_json(0) + + return self.write_json(-1) + diff --git a/server/www/teleport/app/eom_app/controller/index.py b/server/www/teleport/app/eom_app/controller/index.py new file mode 100644 index 0000000..bee94ac --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/index.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +import sys +import tornado.ioloop +from .base import SwxBaseHandler, SwxAuthHandler + + +class IndexHandler(SwxAuthHandler): + def get(self): + self.redirect('/host') + + +class ExitHandler(SwxBaseHandler): + def get(self): + self.write('exit ok') + tornado.ioloop.IOLoop.instance().stop() + # sys.exit(0) diff --git a/server/www/teleport/app/eom_app/controller/pwd.py b/server/www/teleport/app/eom_app/controller/pwd.py new file mode 100644 index 0000000..cd6b5ef --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/pwd.py @@ -0,0 +1,15 @@ +import json +import urllib +import gzip +import os +# from .configs import app_cfg +from eom_app.app.configs import app_cfg +from eom_app.module import host +from 
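# Editor's illustration (not part of the patch): the `args` payload for
# SysUserUpdate above. An empty user_pswd is popped from kv, which leaves the
# stored (already encrypted) password untouched; a non-empty one is run
# through get_enc_data() first. IDs are invented.
args = {
    'host_auth_id': 17,
    'kv': {
        'auth_mode': 1,   # 0 = none, 1 = password, 2 = key (via cert_id)
        'user_pswd': '',  # empty: keep the current password
        'cert_id': 0,
    },
}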
.base import SwxJsonHandler, SwxAuthHandler + +cfg = app_cfg() + + +class IndexHandler(SwxAuthHandler): + def get(self): + self.render('pwd/index.mako') \ No newline at end of file diff --git a/server/www/teleport/app/eom_app/controller/set.py b/server/www/teleport/app/eom_app/controller/set.py new file mode 100644 index 0000000..a581d0b --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/set.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- + +import json +import os +import platform +import re +import socket +import subprocess +import threading +import time + +from eom_app.app.configs import app_cfg +from eom_app.module import host +from eom_app.module import set +from .base import SwxAdminHandler, SwxAdminJsonHandler + +cfg = app_cfg() + + +def get_local_ip(): + iplist = [] + PLATFORM = platform.system().lower() + try: + if PLATFORM == "windows": + ip_info = socket.gethostbyname_ex(socket.gethostname()) + return ip_info[2] + else: + ipstr = '([0-9]{1,3}\.){3}[0-9]{1,3}' + ipconfig_process = subprocess.Popen("ifconfig", stdout=subprocess.PIPE) + output = ipconfig_process.stdout.read() + ip_pattern = re.compile('(inet addr:%s)' % ipstr) + pattern = re.compile(ipstr) + + for ipaddr in re.finditer(ip_pattern, str(output)): + ip = pattern.search(ipaddr.group()) + if ip.group() != "127.0.0.1": + iplist.append(ip.group()) + return iplist + except Exception: + return iplist + + +class IndexHandler(SwxAdminHandler): + def get(self): + # static_path = cfg.static_path + # var_js = os.path.join(static_path, 'js', 'var.js') + + # f = None + + try: + config_list = host.get_config_list() + ts_server = dict() + ts_server['ip'] = config_list['ts_server_ip'] + ts_server['ssh_port'] = config_list['ts_server_ssh_port'] + ts_server['rdp_port'] = config_list['ts_server_rdp_port'] + ts_server['telnet_port'] = config_list['ts_server_telnet_port'] + # f = open(var_js, 'w') + # f.write("\"use strict\";\nvar teleport_ip = \"{}\";\n".format(ts_server['ip'])) + except Exception: + return self.write(-1) + finally: + # if f is not None: + # f.close() + pass + + config_list = set.get_config_list() + if 'ts_server_ip' in config_list: + ip_list = get_local_ip() + if not isinstance(ip_list, list): + ip_list = [ip_list, ] + + # ip_list.append(config_list['ts_server_ip']) + if config_list['ts_server_ip'] not in ip_list: + ip_list.append(config_list['ts_server_ip']) + + # if isinstance(temp, list): + # ip_list.extend(temp) + + config_list['_ip_list'] = ip_list + + self.render('set/index.mako', config_list=config_list) + + +def _restart_func(): + time.sleep(1) + + PLATFORM = platform.system().lower() + + if PLATFORM == 'windows': + sf = os.path.join(cfg.app_path, 'tools', 'restart.bat') + os.system('cmd.exe /c "{}"'.format(sf)) + else: + # sf = os.path.join(cfg.app_path, 'tools', 'restart.sh') + # os.system(sf) + os.system('service eom_ts restart') + + # os.system(sf) + + +def restart_service(): + # todo: 浣跨敤eom_ts.exe杩愯鑴氭湰鐨勬柟寮忥紙鏂拌繘绋嬶級鏉ラ噸鍚湇鍔★紝閬垮厤姝e湪杩愯鐨勬湰鏈嶅姟鏈鍑虹殑褰卞搷 + + t = threading.Thread(target=_restart_func) + t.start() + + +class UpdateConfig(SwxAdminJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + self.write_json(-1) + return + + change_list = args['cfg'] + reboot = args['reboot'] + + try: + ret = set.set_config(change_list) + if ret: + for i in range(len(change_list)): + if change_list[i]['name'] == 'ts_server_ip': + # static_path = cfg.static_path + var_js = os.path.join(cfg.static_path, 'js', 'var.js') + f = None + try: + f = 
open(var_js, 'w') + # config_list = host.get_config_list() + # ts_server = dict() + # ts_server['ip'] = config_list['ts_server_ip'] + # ts_server['ssh_port'] = config_list['ts_server_ssh_port'] + # ts_server['rdp_port'] = config_list['ts_server_rdp_port'] + # f.write("\"use strict\";\nvar teleport_ip = \"{}\";\n".format(ts_server['ip'])) + f.write("\"use strict\";\nvar teleport_ip = \"{}\";\n".format(change_list[i]['value'])) + break + except Exception: + return self.write(-1) + finally: + if f is not None: + f.close() + + if reboot: + restart_service() + + self.write_json(0) + else: + self.write_json(-1) + except: + self.write_json(-2) + +# class OsOperator(SwxAuthJsonHandler): +# def post(self): +# args = self.get_argument('args', None) +# if args is not None: +# args = json.loads(args) +# else: +# self.write_json(-1) +# return +# _OP = int(args['OP']) +# try: +# if _OP == 1: +# os.system('reboot') +# else: +# os.system('shutdown -h now') +# # 閲嶆柊鍚姩 +# self.write_json(0) +# except: +# self.write_json(-2) +# diff --git a/server/www/teleport/app/eom_app/controller/user.py b/server/www/teleport/app/eom_app/controller/user.py new file mode 100644 index 0000000..6fe8b20 --- /dev/null +++ b/server/www/teleport/app/eom_app/controller/user.py @@ -0,0 +1,480 @@ +# -*- coding: utf-8 -*- +import ctypes +import json +import os +import platform + +from eom_app.app.configs import app_cfg +from eom_app.module import host +from eom_app.module import record +from eom_app.module import user +from .base import SwxAuthJsonHandler, SwxAdminHandler, SwxAdminJsonHandler + +cfg = app_cfg() + + +def get_free_space_mb(folder): + """ Return folder/drive free space (in bytes) + """ + if platform.system() == 'Windows': + free_bytes = ctypes.c_ulonglong(0) + total_bytes = ctypes.c_ulonglong(0) + ctypes.windll.kernel32.GetDiskFreeSpaceExW(None, None, ctypes.pointer(total_bytes), ctypes.pointer(free_bytes)) + return total_bytes.value / 1024 / 1024 / 1024, free_bytes.value / 1024 / 1024 / 1024 + else: + st = os.statvfs(folder) + return st.f_blocks * st.f_frsize / 1024 / 1024 / 1024, st.f_bavail * st.f_frsize / 1024 / 1024 / 1024 + + +class IndexHandler(SwxAdminHandler): + def get(self): + self.render('user/index.mako') + + +class LogHandler(SwxAdminHandler): + def get(self): + # + user_list = user.get_user_list() + total_size, free_size = get_free_space_mb(cfg.data_path) + + config_list = host.get_config_list() + ts_server = dict() + ts_server['ip'] = config_list['ts_server_ip'] + ts_server['port'] = cfg.server_port + + self.render('log/index.mako', user_list=user_list, total_size=int(total_size), free_size=int(free_size), ts_server=ts_server) + + +class RecordHandler(SwxAdminHandler): + def get(self, protocol, record_id): + protocol = int(protocol) + if protocol == 1: + return + elif protocol == 2: + self.render('log/record.mako', record_id=record_id) + return + pass + +class PlayRdpHandler(SwxAdminHandler): + def get(self, ip, record_id): + # protocol = int(protocol) + # if protocol == 1: + # return + # elif protocol == 2: + # self.render('log/record.mako', record_id=record_id) + # return + # pass + filename = os.path.join(cfg.data_path, 'replay', 'rdp', '{}'.format(record_id), 'tp-rdp.tpr') + + +class ComandLogHandler(SwxAdminHandler): + def get(self, protocol, record_id): + protocol = int(protocol) + if protocol == 1: + return + elif protocol == 2: + record_path = os.path.join(cfg.data_path, 'replay', 'ssh', '{}'.format(record_id)) + file_info = os.path.join(record_path, 'command_list.log') + # file_info = 
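# Editor's note (not part of the patch): despite its name and docstring,
# get_free_space_mb() above returns (total, free) in gigabytes -- both
# branches divide by 1024 three times -- and LogHandler renders those values
# directly. The Windows branch also passes None as the directory to
# GetDiskFreeSpaceExW, so it reports the current drive rather than `folder`.
# Typical use:
total_gb, free_gb = get_free_space_mb(cfg.data_path)
print('replay storage: {:.1f} GB free of {:.1f} GB'.format(free_gb, total_gb))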
r"E:\GitWork\teleport\share\data\replay\ssh\108\0.ts" + try: + file = open(file_info, 'r') + data = file.read() + except: + self.write('open file error {}'.format(file_info)) + return + # "Content-Type": "text/html; charset=UTF-8", + self.set_header('Content-Type', 'text/plain; charset=UTF-8') + if len(data) == 0: + self.write('璇ョ敤鎴锋病鏈夋搷浣') + else: + self.write(data) + return + + +class RecordGetHeader(SwxAdminJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + record_id = args['id'] + header = record.read_record_head(record_id) + if header is None: + return self.write_json(-1) + term = record.read_record_term(record_id) + if term is None: + return self.write_json(-1) + ret = dict() + ret['header'] = header + ret['term'] = term + self.write_json(0, data=ret) + + +class RecordGetInfo(SwxAdminJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + record_id = args['id'] + file_id = args['file_id'] + data = record.read_record_info(record_id, file_id) + if data is None: + return self.write_json(-1) + self.write_json(0, data=data) + + +class DeleteLog(SwxAdminJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + log_list = args['log_list'] + data = record.delete_log(log_list) + if data is None: + return self.write_json(-1) + self.write_json(0, data=data) + + +class LogList(SwxAdminJsonHandler): + def post(self): + filter = dict() + order = dict() + order['name'] = 'host_id' + order['asc'] = True + limit = dict() + limit['page_index'] = 0 + limit['per_page'] = 25 + + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + + tmp = list() + _filter = args['filter'] + if _filter is not None: + for i in _filter: + if i == 'user_name': + _x = _filter[i].strip() + if _x == '鍏ㄩ儴': + tmp.append(i) + + if i == 'search': + _x = _filter[i].strip() + if len(_x) == 0: + tmp.append(i) + continue + + for i in tmp: + del _filter[i] + + filter.update(_filter) + + _limit = args['limit'] + if _limit['page_index'] < 0: + _limit['page_index'] = 0 + if _limit['per_page'] < 10: + _limit['per_page'] = 10 + if _limit['per_page'] > 100: + _limit['per_page'] = 100 + + limit.update(_limit) + + _order = args['order'] + if _order is not None: + order['name'] = _order['k'] + order['asc'] = _order['v'] + + total, log_list = user.get_log_list(filter, _limit) + ret = dict() + ret['page_index'] = limit['page_index'] + ret['total'] = total + ret['data'] = log_list + + self.write_json(0, data=ret) + + +class AuthHandler(SwxAdminHandler): + def get(self, user_name): + group_list = host.get_group_list() + cert_list = host.get_cert_list() + self.render('user/auth.mako', + group_list=group_list, + cert_list=cert_list, user_name=user_name) + + +class GetListHandler(SwxAdminJsonHandler): + def post(self): + user_list = user.get_user_list() + ret = dict() + ret['page_index'] = 10 + ret['total'] = len(user_list) + ret['data'] = user_list + self.write_json(0, data=ret) + + +class DeleteUser(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + user_id = args['user_id'] + try: + ret = user.delete_user(user_id) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class 
ModifyUser(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + + user_id = args['user_id'] + user_desc = args['user_desc'] + + try: + ret = user.modify_user(user_id, user_desc) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + + +class AddUser(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + user_name = args['user_name'] + user_pwd = '123456' + user_desc = args['user_desc'] + if user_desc is None: + user_desc = '' + try: + ret = user.add_user(user_name, user_pwd, user_desc) + self.write_json(ret) + return + except: + self.write_json(-1) + return + + +class LockUser(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + user_id = args['user_id'] + lock_status = args['lock_status'] + + try: + ret = user.lock_user(user_id, lock_status) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + +class ResetUser(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + user_id = args['user_id'] + # lock_status = args['lock_status'] + + try: + ret = user.reset_user(user_id) + if ret: + self.write_json(0) + else: + self.write_json(-1) + return + except: + self.write_json(-1) + return + +class HostList(SwxAuthJsonHandler): + def post(self): + filter = dict() + # user = self.get_current_user() + order = dict() + order['name'] = 'host_id' + order['asc'] = True + limit = dict() + limit['page_index'] = 0 + limit['per_page'] = 25 + + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + + tmp = list() + _filter = args['filter'] + for i in _filter: + if i == 'host_sys_type' and _filter[i] == 0: + tmp.append(i) + continue + if i == 'host_group' and _filter[i] == 0: + tmp.append(i) + continue + if i == 'search': + _x = _filter[i].strip() + if len(_x) == 0: + tmp.append(i) + continue + + for i in tmp: + del _filter[i] + + filter.update(_filter) + # print('filter', filter) + + _limit = args['limit'] + if _limit['page_index'] < 0: + _limit['page_index'] = 0 + if _limit['per_page'] < 10: + _limit['per_page'] = 10 + if _limit['per_page'] > 100: + _limit['per_page'] = 100 + + limit.update(_limit) + + _order = args['order'] + if _order is not None: + order['name'] = _order['k'] + order['asc'] = _order['v'] + # filter['account_name'] = user['name'] + _total, _hosts = host.get_host_info_list_by_user(filter, order, limit) + + ret = dict() + ret['page_index'] = limit['page_index'] + ret['total'] = _total + ret['data'] = _hosts + self.write_json(0, data=ret) + + +class AllocHost(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + user_name = args['user_name'] + host_list = args['host_list'] + try: + ret = user.alloc_host(user_name, host_list) + 
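# Editor's illustration (not part of the patch): the grant payloads for the
# handlers in this block. AllocHost assigns whole hosts to a user, while
# AllocHostUser assigns individual login accounts (host_auth_id values); the
# Delete* counterparts below take the same shapes. IDs are invented.
args_alloc_host = {'user_name': 'alice', 'host_list': [3, 5, 8]}  # host_id list
args_alloc_user = {'user_name': 'alice', 'host_list': [21, 22]}   # host_auth_id list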
if ret: + self.write_json(0) + else: + self.write_json(-1) + except: + self.write_json(-2) + + +class AllocHostUser(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + user_name = args['user_name'] + host_auth_id_list = args['host_list'] + try: + ret = user.alloc_host_user(user_name, host_auth_id_list) + if ret: + self.write_json(0) + else: + self.write_json(-1) + except: + self.write_json(-2) + + +class DeleteHost(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + # print('args', args) + else: + # ret = {'code':-1} + self.write_json(-1) + return + user_name = args['user_name'] + host_list = args['host_list'] + try: + ret = user.delete_host(user_name, host_list) + if ret: + self.write_json(0) + else: + self.write_json(-1) + except: + self.write_json(-2) + + +class DeleteHostUser(SwxAuthJsonHandler): + def post(self): + args = self.get_argument('args', None) + if args is not None: + args = json.loads(args) + else: + self.write_json(-1) + return + user_name = args['user_name'] + auth_id_list = args['auth_id_list'] + try: + ret = user.delete_host_user(user_name, auth_id_list) + if ret: + self.write_json(0) + else: + self.write_json(-1) + except: + self.write_json(-2) + diff --git a/server/www/teleport/app/eom_app/module/__init__.py b/server/www/teleport/app/eom_app/module/__init__.py new file mode 100644 index 0000000..633f866 --- /dev/null +++ b/server/www/teleport/app/eom_app/module/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- + diff --git a/server/www/teleport/app/eom_app/module/common.py b/server/www/teleport/app/eom_app/module/common.py new file mode 100644 index 0000000..95232b6 --- /dev/null +++ b/server/www/teleport/app/eom_app/module/common.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- + +import gzip +import json +import urllib.parse +import urllib.request + +import eom_common.eomcore.eom_mysql as mysql +import eom_common.eomcore.eom_sqlite as sqlite +from eom_app.app.configs import app_cfg +from eom_app.module import set + +cfg = app_cfg() + + +class DbItem(dict): + def load(self, db_item, db_fields): + if len(db_fields) != len(db_item): + raise RuntimeError('!=') + for i in range(len(db_item)): + self[db_fields[i]] = db_item[i] + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + raise + + +def get_db_con(): + if False: + sql_exec = mysql.get_mysql_pool().get_tssqlcon() + else: + sql_exec = sqlite.get_sqlite_pool().get_tssqlcon() + return sql_exec + + +def post_http(url, values): + try: + # log.v('post_http(), url={}\n'.format(url)) + + user_agent = 'Mozilla/4.0 (compatible;MSIE 5.5; Windows NT)' + # values = { + # 'act': 'login', + # 'login[email]': 'yzhang@i9i8.com', + # 'login[password]': '123456' + # } + values = json.dumps(values) + data = urllib.parse.quote(values).encode('utf-8') + # data = urllib.parse.urlencode(values).encode() + headers = {'User-Agent': user_agent} + # url = 'http://www.baidu.com' + req = urllib.request.Request(url=url, data=data, headers=headers) + response = urllib.request.urlopen(req, timeout=3) + the_page = response.read() + info = response.info() + _zip = info.get('Content-Encoding') + if _zip == 'gzip': + the_page = gzip.decompress(the_page) + else: + pass + the_page = the_page.decode() + return the_page + except: + return None + + +def get_enc_data(data): + # url = 
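# Editor's illustration (not part of the patch): how the query helpers below
# use DbItem. A row tuple from ExecProcQuery is zipped with a field-name list,
# after which columns read as attributes; the 'a_' prefix mirrors the SQL
# table alias.
row = (7, '192.168.1.10')
x = DbItem()
x.load(row, ['a_host_id', 'a_host_ip'])
print(x.a_host_id, x.a_host_ip)  # -> 7 192.168.1.10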
cfg.ts_enc_url + config_list = set.get_config_list() + rpc_port = 52080 + if 'ts_server_rpc_port' in config_list: + rpc_port = int(config_list['ts_server_rpc_port']) + ts_server_rpc_ip = '127.0.0.1' + if 'ts_server_rpc_ip' in config_list: + ts_server_rpc_ip = config_list['ts_server_rpc_ip'] + + url = 'http://{}:{}/enc'.format(ts_server_rpc_ip, rpc_port) + + values = dict() + if not isinstance(data, str): + data = "{}".format(data) + + values['p'] = data + return_data = post_http(url, values) + if return_data is None: + return -2, '' + + if return_data is not None: + return_data = json.loads(return_data) + else: + return -3, '' + + ret_code = return_data['code'] + if ret_code != 0: + return ret_code, '' + if 'data' not in return_data: + return -5, '' + + data = return_data['data'] + if 'c' not in data: + return -6, '' + + decry_data = data['c'] + + return 0, decry_data diff --git a/server/www/teleport/app/eom_app/module/host.py b/server/www/teleport/app/eom_app/module/host.py new file mode 100644 index 0000000..ae8106e --- /dev/null +++ b/server/www/teleport/app/eom_app/module/host.py @@ -0,0 +1,766 @@ +# -*- coding: utf-8 -*- + +# from eom_common.eomcore.utils import * +from .common import * +import time + + +# import eom_common.eomcore.eom_mysql as mysql +# import sqlite3 + + +# 鑾峰彇涓绘満鍒楄〃锛屽寘鎷富鏈虹殑鍩烘湰淇℃伅锛圕PU鍨嬪彿銆佸唴瀛樺ぇ灏忋佺鐩樺ぇ灏忥紝鏈縺娲荤殑涓绘満缂栧彿涔熶細杩斿洖锛 +def get_all_host_info_list(filter, order, limit, with_pwd=False): + sql_exec = get_db_con() + + _where = '' + + if len(filter) > 0: + _where = 'WHERE ( ' + + need_and = False + for k in filter: + if k == 'host_group': + if need_and: + _where += ' AND' + _where += ' b.group_id={}'.format(filter[k]) + need_and = True + elif k == 'host_sys_type': + if need_and: + _where += ' AND' + _where += ' a.host_sys_type={}'.format(filter[k]) + need_and = True + elif k == 'search': + # 鏌ユ壘锛岄檺浜庝富鏈篒D鍜孖P鍦板潃锛屽墠鑰呮槸鏁板瓧锛屽彧鑳界簿纭煡鎵撅紝鍚庤呭彲浠ユā绯婂尮閰 + # 鍥犳锛屽厛鍒ゆ柇鎼滅储椤硅兘鍚﹁浆鎹负鏁板瓧銆 + + if need_and: + _where += ' AND ' + + _where += '(' + _where += 'a.host_ip LIKE "%{}%" OR a.host_desc LIKE "%{}%" )'.format(filter[k], filter[k], filter[k]) + need_and = True + _where += ')' + + # http://www.jb51.net/article/46015.htm + field_a = ['host_id', 'host_lock', 'host_ip','host_port', 'protocol', 'host_desc', 'group_id', 'host_sys_type'] + field_b = ['group_name'] + + # field_c = ['id', 'auth_mode', 'user_name'] + + str_sql = 'SELECT COUNT(*) ' \ + 'FROM ts_host_info AS a ' \ + 'LEFT JOIN ts_group AS b ON a.group_id = b.group_id ' \ + '{};'.format(_where) + + db_ret = sql_exec.ExecProcQuery(str_sql) + total_count = db_ret[0][0] + + # 淇鍒嗛〉鏁版嵁 + _limit = '' + if len(limit) > 0: + _page_index = limit['page_index'] + _per_page = limit['per_page'] + _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) + + if _page_index * _per_page >= total_count: + _page_index = int(total_count / _per_page) + _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) + + # 鐢熸垚鎺掑簭瑙勫垯 + _order = '' + if order is not None: + _order = 'ORDER BY ' + if 'host_id' == order['name']: + _order += 'a.host_id' + elif 'ip' == order['name']: + _order += 'a.host_ip' + else: + _order = '' + + if not order['asc'] and len(_order) > 0: + _order += ' DESC' + + str_sql = 'SELECT {},{} ' \ + 'FROM ts_host_info AS a ' \ + 'LEFT JOIN ts_group AS b ON a.group_id = b.group_id ' \ + '{} {} {};'.format( + ','.join(['a.{}'.format(i) for i in field_a]), + ','.join(['b.{}'.format(i) for i in field_b]), + _where, _order, _limit) + + # print(str_sql) + db_ret = 
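# Editor's note (not part of the patch): SQLite's `LIMIT x,y` takes an offset
# and a row count, so the expression above requests (page_index + 1) * per_page
# rows from each offset -- correct on page 0, over-fetching on later pages.
# A conventional offset/count pair would be:
_limit = 'LIMIT {},{}'.format(_page_index * _per_page, _per_page)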
sql_exec.ExecProcQuery(str_sql) + if db_ret is None: + return 0, None + ret = list() + host = dict() + for item in db_ret: + x = DbItem() + x.load(item, ['a_{}'.format(i) for i in field_a] + + ['b_{}'.format(i) for i in field_b]) + + h = dict() + h['host_id'] = x.a_host_id + h['host_port'] = x.a_host_port + h['protocol'] = x.a_protocol + h['host_lock'] = x.a_host_lock + h['host_ip'] = x.a_host_ip + h['host_desc'] = x.a_host_desc + h['group_id'] = x.a_group_id + h['host_sys_type'] = x.a_host_sys_type + group_name = '榛樿鍒嗙粍' + if x.b_group_name is not None: + group_name = x.b_group_name + h['group_name'] = group_name + + # h['auth_list'] = list() + # auth_list = h['auth_list'] + h['auth_list'] = sys_user_list(x.a_host_id, with_pwd) + # auth = dict() + # auth['host_auth_id'] = x.c_id + # auth['auth_mode'] = x.c_auth_mode + # auth['user_name'] = x.c_user_name + # auth_list.append(auth) + + ret.append(h) + return total_count, ret + + +def get_host_info_list_by_user(filter, order, limit): + sql_exec = get_db_con() + + _where = '' + + # _where = '' + + # _where = 'WHERE ( a.account_name=\'{}\' '.format(uname) + + if len(filter) > 0: + _where = 'WHERE ( ' + + need_and = False + for k in filter: + if k == 'host_group': + if need_and: + _where += ' AND' + _where += ' b.group_id={}'.format(filter[k]) + need_and = True + elif k == 'host_sys_type': + if need_and: + _where += ' AND' + _where += ' b.host_sys_type={}'.format(filter[k]) + need_and = True + + elif k == 'account_name': + if need_and: + _where += ' AND' + _where += ' a.account_name=\'{}\''.format(filter[k]) + need_and = True + + elif k == 'search': + # 鏌ユ壘锛岄檺浜庝富鏈篒D鍜孖P鍦板潃锛屽墠鑰呮槸鏁板瓧锛屽彧鑳界簿纭煡鎵撅紝鍚庤呭彲浠ユā绯婂尮閰 + # 鍥犳锛屽厛鍒ゆ柇鎼滅储椤硅兘鍚﹁浆鎹负鏁板瓧銆 + + if need_and: + _where += ' AND ' + + _where += '(' + _where += 'b.host_ip LIKE "%{}%" OR b.host_desc LIKE "%{}%" )'.format(filter[k], filter[k], filter[k]) + need_and = True + + _where += ')' + + # http://www.jb51.net/article/46015.htm + field_a = ['auth_id', 'host_id', 'account_name', 'host_auth_id'] + field_b = ['host_id', 'host_lock', 'host_ip', 'protocol', 'host_port', 'host_desc', 'group_id', 'host_sys_type'] + field_c = ['group_name'] + field_d = ['auth_mode', 'user_name'] + str_sql = 'SELECT COUNT(DISTINCT a.host_id) ' \ + 'FROM ts_auth AS a ' \ + 'LEFT JOIN ts_host_info AS b ON a.host_id = b.host_id '\ + '{};'.format(_where) + + db_ret = sql_exec.ExecProcQuery(str_sql) + total_count = db_ret[0][0] + + # 淇鍒嗛〉鏁版嵁 + _limit = '' + if len(limit) > 0: + _page_index = limit['page_index'] + _per_page = limit['per_page'] + _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) + + if _page_index * _per_page >= total_count: + _page_index = int(total_count / _per_page) + _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) + + # 鐢熸垚鎺掑簭瑙勫垯 + _order = '' + # log.d(order['name']) + if order is not None: + _order = 'ORDER BY ' + if 'host_id' == order['name']: + _order += 'b.host_id' + elif 'ip' == order['name']: + _order += 'b.host_ip' + else: + _order = '' + + if not order['asc'] and len(_order) > 0: + _order += ' DESC' + + str_sql = 'SELECT {}, {},{},{} ' \ + 'FROM ts_auth AS a ' \ + 'LEFT JOIN ts_host_info AS b ON a.host_id=b.host_id ' \ + 'LEFT JOIN ts_group AS c ON b.group_id = c.group_id ' \ + 'LEFT JOIN ts_auth_info AS d ON d.id = a.host_auth_id ' \ + '{} {} {};'.format( + ','.join(['a.{}'.format(i) for i in field_a]), + ','.join(['b.{}'.format(i) for i in field_b]), + ','.join(['c.{}'.format(i) for i in field_c]), + ','.join(['d.{}'.format(i) 
for i in field_d]), + _where, _order, _limit) + + db_ret = sql_exec.ExecProcQuery(str_sql) + ret = list() + temp = dict() + for item in db_ret: + x = DbItem() + x.load(item, ['a_{}'.format(i) for i in field_a] + ['b_{}'.format(i) for i in field_b] + ['c_{}'.format(i) for i in field_c] + ['d_{}'.format(i) for i in field_d]) + + host_ip = x.b_host_ip + protocol = x.b_protocol + key = '{}-{}'.format(host_ip, protocol) + temp_auth = None + extend_auth_list = sys_user_list(x.b_host_id, False, x.a_host_auth_id) + if extend_auth_list is not None and len(extend_auth_list) > 0: + auth = extend_auth_list[0] + auth['auth_id'] = x.a_auth_id + temp_auth = auth + add = False + if key in temp: + h = temp[key] + auth_list = h['auth_list'] + auth_list.append(temp_auth) + h['auth_list'] = auth_list + else: + h = dict() + h['host_id'] = x.b_host_id + h['host_lock'] = x.b_host_lock + h['host_ip'] = host_ip + h['host_port'] = x.b_host_port + h['host_desc'] = x.b_host_desc + h['group_id'] = x.b_group_id + h['host_sys_type'] = x.b_host_sys_type + h['protocol'] = x.b_protocol + group_name = '榛樿鍒嗙粍' + if x.c_group_name is not None: + group_name = x.c_group_name + h['group_name'] = group_name + add = True + temp[key] = h + h['auth_list'] = list() + auth_list = h['auth_list'] + auth_list.append(temp_auth) + h['auth_list'] = auth_list + + if add: + ret.append(h) + + return total_count, ret + + +def get_group_list(): + field_a = ['group_id', 'group_name'] + sql_exec = get_db_con() + str_sql = 'SELECT {} ' \ + 'FROM ts_group AS a; ' \ + .format(','.join(['a.{}'.format(i) for i in field_a])) + db_ret = sql_exec.ExecProcQuery(str_sql) + ret = list() + for item in db_ret: + x = DbItem() + x.load(item, ['a_{}'.format(i) for i in field_a]) + h = dict() + + h['id'] = x.a_group_id + h['group_name'] = x.a_group_name + ret.append(h) + return ret + + +def get_config_list(): + try: + sql_exec = get_db_con() + field_a = ['name', 'value'] + string_sql = 'SELECT {} FROM ts_config as a ;'.format(','.join(['a.{}'.format(i) for i in field_a])) + db_ret = sql_exec.ExecProcQuery(string_sql) + h = dict() + for item in db_ret: + x = DbItem() + x.load(item, ['a_{}'.format(i) for i in field_a]) + h[x.a_name] = x.a_value + + return h + except: + return None + + +def update(host_id, kv): + if len(kv) == 0: + return False + + _val = '' + for k in kv: + if len(_val) > 0: + _val += ',' + if k == 'desc': + _val += 'host_desc="{}"'.format(kv[k]) + elif k == 'pro_port': + temp = json.dumps(kv[k]) + _val += '{}=\'{}\''.format(k, temp) + else: + _val += '{}="{}"'.format(k, kv[k]) + + str_sql = 'UPDATE ts_host_info SET {} ' \ + 'WHERE host_id={};'.format(_val, host_id) + + sql_exec = get_db_con() + db_ret = sql_exec.ExecProcNonQuery(str_sql) + return db_ret + + +def get_cert_list(): + sql_exec = get_db_con() + + # http://www.jb51.net/article/46015.htm + field_a = ['cert_id', 'cert_name', 'cert_pub', 'cert_pri', 'cert_desc'] + + str_sql = 'SELECT {} ' \ + 'FROM ts_cert as a '.format(','.join(['a.{}'.format(i) for i in field_a])) + + db_ret = sql_exec.ExecProcQuery(str_sql) + + if db_ret is None: + return None + ret = list() + for item in db_ret: + x = DbItem() + + x.load(item, ['a_{}'.format(i) for i in field_a]) + h = dict() + + h['cert_id'] = x.a_cert_id + if x.a_cert_name is None: + x.a_cert_name = '' + + h['cert_name'] = x.a_cert_name + h['cert_pub'] = x.a_cert_pub + + h['cert_pri'] = x.a_cert_pri + if x.a_cert_desc is None: + x.a_cert_desc = '' + h['cert_desc'] = x.a_cert_desc + ret.append(h) + return ret + + +def add_host(args, 
must_not_exists=True): + sql_exec = get_db_con() + + protocol = args['protocol'] + host_port = args['host_port'] + host_ip = args['host_ip'] + + str_sql = 'SELECT host_id FROM ts_host_info WHERE (host_ip=\'{}\' and protocol={} and host_port={});'\ + .format(host_ip, protocol, host_port) + db_ret = sql_exec.ExecProcQuery(str_sql) + if db_ret is not None and len(db_ret) > 0: + if not must_not_exists: + return db_ret[0][0] + else: + return -100 + + group_id = args['group_id'] + host_sys_type = args['host_sys_type'] + # pro_port = args['pro_port'] + # pro_port = json.dumps(pro_port) + # host_user_name = args['user_name'] + # host_user_pwd = args['user_pwd'] + # host_pro_type = args['pro_type'] + # cert_id = args['cert_id'] + # host_encrypt = 1 + # host_auth_mode = args['host_auth_mode'] + host_desc = args['host_desc'] + if len(host_desc) == 0: + host_desc = '鎻忚堪鏈~鍐' + host_lock = 0 + + # + str_sql = 'INSERT INTO ts_host_info (group_id, host_sys_type, host_ip, ' \ + 'host_port, protocol, host_lock, host_desc) ' \ + 'VALUES ({},{},\'{}\',' \ + '{},{},{},' \ + '\'{}\')'.format(group_id, host_sys_type, host_ip, + host_port, protocol, host_lock, host_desc) + + ret = sql_exec.ExecProcNonQuery(str_sql) + if not ret: + return -101 + + str_sql = 'select last_insert_rowid()' + db_ret = sql_exec.ExecProcQuery(str_sql) + if db_ret is None: + return -102 + host_id = db_ret[0][0] + return host_id + + +def lock_host(host_id, lock): + sql_exec = get_db_con() + # + str_sql = 'UPDATE ts_host_info SET host_lock = {} ' \ + ' WHERE host_id = {}'.format(lock, host_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def delete_host(host_list): + sql_exec = get_db_con() + # + for item in host_list: + host_id = int(item) + str_sql = 'DELETE FROM ts_host_info WHERE host_id = {} '.format(host_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + + str_sql = 'DELETE FROM ts_auth_info WHERE host_id = {} '.format(host_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + + str_sql = 'DELETE FROM ts_auth WHERE host_id = {} '.format(host_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + return True + + +def add_cert(cert_pub, cert_pri, cert_name): + sql_exec = get_db_con() + # + str_sql = 'INSERT INTO ts_cert (cert_pub, cert_pri, cert_name) VALUES (\'{}\',\'{}\',\'{}\')'.format(cert_pub, cert_pri, cert_name) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def delete_cert(cert_id): + sql_exec = get_db_con() + # + str_sql = 'DELETE FROM ts_cert WHERE cert_id = {} '.format(cert_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def update_cert(cert_id, cert_pub, cert_pri, cert_name): + sql_exec = get_db_con() + # + + if 0 == len(cert_pri): + str_sql = 'UPDATE ts_cert SET cert_pub = \'{}\', ' \ + 'cert_name = \'{}\'' \ + ' WHERE cert_id = {}'.format(cert_pub, cert_name, cert_id) + else: + str_sql = 'UPDATE ts_cert SET cert_pub = \'{}\', ' \ + 'cert_pri = \'{}\', cert_name = \'{}\'' \ + ' WHERE cert_id = {}'.format(cert_pub, cert_pri, cert_name, cert_id) + + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def add_group(group_name): + sql_exec = get_db_con() + # + str_sql = 'INSERT INTO ts_group (group_name) VALUES (\'{}\')'.format(group_name) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def delete_group(group_id): + sql_exec = get_db_con() + # + string_sql = 'SELECT host_id FROM ts_host_info WHERE group_id = {};'.format(group_id) + db_ret = sql_exec.ExecProcQuery(string_sql) + if len(db_ret) != 0: + return -2 + + str_sql = 'DELETE FROM ts_group WHERE group_id = {} 
+def delete_group(group_id):
+    sql_exec = get_db_con()
+    #
+    string_sql = 'SELECT host_id FROM ts_host_info WHERE group_id = {};'.format(group_id)
+    db_ret = sql_exec.ExecProcQuery(string_sql)
+    if len(db_ret) != 0:
+        return -2
+
+    str_sql = 'DELETE FROM ts_group WHERE group_id = {} '.format(group_id)
+    ret = sql_exec.ExecProcNonQuery(str_sql)
+    if ret:
+        return 0
+    return -3
+
+
+def update_group(group_id, group_name):
+    sql_exec = get_db_con()
+    str_sql = 'UPDATE ts_group SET group_name = \'{}\' ' \
+              ' WHERE group_id = {}'.format(group_name, group_id)
+    ret = sql_exec.ExecProcNonQuery(str_sql)
+    return ret
+
+
+def add_host_to_group(host_list, group_id):
+    sql_exec = get_db_con()
+    for item in host_list:
+        host_id = item
+        str_sql = 'UPDATE ts_host_info SET ' \
+                  'group_id = {}' \
+                  ' WHERE host_id = {}'.format(group_id, host_id)
+        ret = sql_exec.ExecProcNonQuery(str_sql)
+    return ret
+
+
+def get_host_auth_info(host_auth_id):
+    sql_exec = get_db_con()
+
+    field_a = ['id', 'auth_mode', 'user_name', 'user_pswd', 'user_param', 'cert_id', 'encrypt']
+    field_b = ['host_id', 'host_lock', 'host_ip', 'host_port', 'host_desc', 'group_id', 'host_sys_type', 'protocol']
+
+    str_sql = 'SELECT {},{} ' \
+              'FROM ts_auth_info AS a ' \
+              'LEFT JOIN ts_host_info AS b ON a.host_id=b.host_id ' \
+              'WHERE a.id = {}'.format(
+        ','.join(['a.{}'.format(i) for i in field_a]),
+        ','.join(['b.{}'.format(i) for i in field_b]), host_auth_id)
+    # print(str_sql)
+    db_ret = sql_exec.ExecProcQuery(str_sql)
+
+    if db_ret is None or len(db_ret) != 1:
+        return None
+    x = DbItem()
+    x.load(db_ret[0], ['a_{}'.format(i) for i in field_a] + ['b_{}'.format(i) for i in field_b])
+    h = dict()
+
+    h['ip'] = x.b_host_ip
+    h['systype'] = x.b_host_sys_type
+    h['authmode'] = x.a_auth_mode
+    h['uname'] = x.a_user_name
+    h['protocol'] = x.b_protocol
+
+    if x.a_encrypt is None:
+        h['enc'] = 1
+    else:
+        h['enc'] = x.a_encrypt
+
+    if x.a_user_param is None:
+        h['uparam'] = ''
+    else:
+        h['uparam'] = x.a_user_param
+
+    h['uauth'] = x.a_user_pswd
+    h['port'] = int(x.b_host_port)
+
+    # user_auth = x.a_user_auth
+    if x.a_auth_mode == 1:
+        h['uauth'] = x.a_user_pswd
+    elif x.a_auth_mode == 2:
+        if x.a_cert_id is None:
+            cert_id = 0
+        else:
+            cert_id = int(x.a_cert_id)  # int(user_auth)
+        str_sql = 'SELECT cert_pri FROM ts_cert WHERE cert_id = {}'.format(cert_id)
+        db_ret = sql_exec.ExecProcQuery(str_sql)
+        if db_ret is not None and len(db_ret) == 1:
+            (cert_pri,) = db_ret[0]
+            h['uauth'] = cert_pri
+        else:
+            return None
+    elif x.a_auth_mode == 0:
+        h['uauth'] = ''
+    else:
+        return None
+
+    return h
+
+
+def update_host_extend_info(host_id, args):
+    sql_exec = get_db_con()
+
+    ip = args['ip']
+    port = int(args['port'])
+    user_name = args['user_name']
+    user_pwd = args['user_pwd']
+    cert_id = int(args['cert_id'])
+    pro_type = int(args['pro_type'])
+    sys_type = int(args['sys_type'])
+    group_id = args['group_id']
+    host_desc = args['desc']
+    host_auth_mode = int(args['host_auth_mode'])
+    host_encrypt = 1
+
+    # if len(user_pwd) == 0 and 0 == cert_id:
+    #     return False
+    if 0 == len(user_pwd):
+        str_sql = 'UPDATE ts_host_info SET host_ip = \'{}\', ' \
+                  'host_pro_port = {}, host_user_name = \'{}\', ' \
+                  'cert_id = {}, host_pro_type = {},host_sys_type={}, group_id={},host_auth_mode={},host_encrypt={}, ' \
+                  'host_desc=\'{}\' WHERE host_id = {}'.format(
+            ip, port, user_name, cert_id, pro_type, sys_type, group_id, host_auth_mode, host_encrypt, host_desc, host_id)
+
+    else:
+        str_sql = 'UPDATE ts_host_info SET host_ip = \'{}\', ' \
+                  'host_pro_port = {}, host_user_name = \'{}\', host_user_pwd = \'{}\', ' \
+                  'cert_id = {}, host_pro_type = {},host_sys_type={}, group_id={},host_auth_mode={},host_encrypt={}, ' \
+                  'host_desc=\'{}\' WHERE host_id = {}'.format(
+            ip, port, user_name, user_pwd, cert_id, pro_type, sys_type, group_id,
+            host_auth_mode, host_encrypt, host_desc, host_id)
+
+    ret = sql_exec.ExecProcNonQuery(str_sql)
+    return ret
+
+
+def get_cert_info(cert_id):
+    sql_exec = get_db_con()
+    str_sql = 'SELECT cert_pri FROM ts_cert WHERE cert_id = {}'.format(cert_id)
+    db_ret = sql_exec.ExecProcQuery(str_sql)
+    if db_ret is not None and len(db_ret) == 1:
+        (cert_pri,) = db_ret[0]
+        return cert_pri
+    else:
+        return None
+
+
+def sys_user_list(host_id, with_pwd=True, host_auth_id=0):
+    sql_exec = get_db_con()
+
+    field_a = ['id', 'host_id', 'auth_mode', 'user_name', 'user_pswd', 'user_param', 'cert_id', 'log_time']
+    if host_auth_id == 0:
+        str_sql = 'SELECT {} ' \
+                  'FROM ts_auth_info AS a ' \
+                  'WHERE a.host_id = {};'.format(','.join(['a.{}'.format(i) for i in field_a]), host_id)
+    else:
+        str_sql = 'SELECT {} ' \
+                  'FROM ts_auth_info AS a ' \
+                  'WHERE a.id = {} and a.host_id = {};'.format(','.join(['a.{}'.format(i) for i in field_a]),
+                                                               host_auth_id, host_id)
+
+    db_ret = sql_exec.ExecProcQuery(str_sql)
+
+    if db_ret is None:
+        return None
+    ret = list()
+    for item in db_ret:
+        x = DbItem()
+        x.load(item, ['a_{}'.format(i) for i in field_a])
+
+        h = dict()
+        # h['id'] = x.a_id
+
+        h['host_auth_id'] = x.a_id
+        h['host_id'] = x.a_host_id
+        # h['pro_type'] = x.a_pro_type
+        h['auth_mode'] = x.a_auth_mode
+        h['user_name'] = x.a_user_name
+        if with_pwd:
+            h['user_pswd'] = x.a_user_pswd
+
+        if x.a_user_param is None:
+            h['user_param'] = ''
+        else:
+            h['user_param'] = x.a_user_param
+
+        h['cert_id'] = x.a_cert_id
+        h['log_time'] = x.a_log_time
+        # if x.a_auth_mode == 2:
+        #     h['user_auth'] = x.a_user_auth
+        # else:
+        #     h['user_auth'] = "******"
+        ret.append(h)
+
+    return ret
+
+
+def GetNowTime():
+    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
+
+
+def sys_user_add(args):
+    host_id = args['host_id']
+    auth_mode = args['auth_mode']
+    user_name = args['user_name']
+    user_pswd = args['user_pswd']
+    cert_id = args['cert_id']
+
+    if 'user_param' in args:
+        user_param = args['user_param']
+    else:
+        # Matches both "login:"/"Login:" and "password:"/"Password:" prompts
+        # by omitting the case-sensitive first letter.
+        user_param = 'ogin:\nassword:'
+
+    encrypt = 1
+
+    sql_exec = get_db_con()
+
+    # If this login account already exists, report an error.
+    str_sql = 'SELECT id FROM ts_auth_info WHERE (host_id={} and auth_mode={} and user_name=\'{}\');'\
+        .format(host_id, auth_mode, user_name)
+    db_ret = sql_exec.ExecProcQuery(str_sql)
+    if db_ret is not None and len(db_ret) > 0:
+        return -100
+
+    log_time = GetNowTime()
+
+    if auth_mode == 1:
+        str_sql = 'INSERT INTO ts_auth_info (host_id, auth_mode, user_name, user_pswd, user_param,' \
+                  'encrypt, cert_id, log_time) ' \
+                  'VALUES ({},{},\'{}\',\'{}\',\'{}\',{}, {},\'{}\')'.format(host_id, auth_mode, user_name, user_pswd, user_param, encrypt, 0, log_time)
+    elif auth_mode == 2:
+        str_sql = 'INSERT INTO ts_auth_info (host_id, auth_mode, user_name,user_param, ' \
+                  'user_pswd,cert_id, encrypt, log_time) ' \
+                  'VALUES ({},{},\'{}\',\'{}\',\'{}\',{},{}, \'{}\')'.format(host_id, auth_mode, user_name, user_param,
+                                                                             '', cert_id, encrypt, log_time)
+    elif auth_mode == 0:
+        str_sql = 'INSERT INTO ts_auth_info (host_id, auth_mode, user_name,user_param, ' \
+                  'user_pswd,cert_id, encrypt, log_time) ' \
+                  'VALUES ({},{},\'{}\',\'{}\',\'{}\',{},{}, \'{}\')'.format(host_id, auth_mode, user_name, user_param,
+                                                                             '', 0, encrypt, log_time)
+    # print(str_sql)
+    ret = sql_exec.ExecProcNonQuery(str_sql)
+    if not ret:
+        return -101
+
+
+    str_sql = 'select last_insert_rowid()'
+    db_ret = sql_exec.ExecProcQuery(str_sql)
+    if db_ret is None:
+        return -102
+    user_id = db_ret[0][0]
+    return user_id
+
+
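add_host() and sys_user_add() both recover the new row id with a second `select last_insert_rowid()` query; on a connection shared between requests another INSERT can slip in between the two statements. A minimal sketch of the race-free form, assuming the sqlite3 module (`insert_auth` is a hypothetical helper with a trimmed column list):

import sqlite3

def insert_auth(conn, host_id, user_name):
    # cursor.lastrowid is bound to this cursor's own INSERT,
    # so no second round-trip (and no race) is needed.
    cur = conn.execute(
        'INSERT INTO ts_auth_info (host_id, user_name) VALUES (?, ?)',
        (int(host_id), user_name),
    )
    conn.commit()
    return cur.lastrowid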
def sys_user_update(_id, kv):
+    if len(kv) == 0:
+        return False
+
+    _val = ''
+    for k in kv:
+        if len(_val) > 0:
+            _val += ','
+
+        _val += '{}="{}"'.format(k, kv[k])
+
+    str_sql = 'UPDATE ts_auth_info SET {} ' \
+              'WHERE id={};'.format(_val, _id)
+
+    sql_exec = get_db_con()
+    db_ret = sql_exec.ExecProcNonQuery(str_sql)
+    return db_ret
+
+
+def sys_user_delete(id):
+    sql_exec = get_db_con()
+    try:
+        str_sql = 'DELETE FROM ts_auth_info WHERE id = {} '.format(id)
+        ret = sql_exec.ExecProcNonQuery(str_sql)
+
+        str_sql = 'DELETE FROM ts_auth WHERE host_auth_id = {} '.format(id)
+        ret = sql_exec.ExecProcNonQuery(str_sql)
+    except Exception as e:
+        return False
+
+    return True
\ No newline at end of file
diff --git a/server/www/teleport/app/eom_app/module/record.py b/server/www/teleport/app/eom_app/module/record.py
new file mode 100644
index 0000000..5bad223
--- /dev/null
+++ b/server/www/teleport/app/eom_app/module/record.py
@@ -0,0 +1,200 @@
+# -*- coding: utf-8 -*-
+import os
+import shutil
+import struct
+
+from .common import *
+
+
+# cfg = app_cfg()
+
+
+def read_record_head(record_id):
+    record_path = os.path.join(cfg.data_path, 'replay', 'ssh', '{}'.format(record_id))
+    header_file_path = os.path.join(record_path, 'head.init')
+    # header_file_path = r"E:\GitWork\teleport\share\data\replay\ssh\99\head.init"
+    file = None
+    try:
+        file = open(header_file_path, 'rb')
+        data = file.read()
+        x = len(data)
+        offset = 0
+        # data = data.decode()
+        ID, = struct.unpack_from('16s', data, offset)
+        ID = ID.decode()
+        offset += 16
+
+        Version, = struct.unpack_from('16s', data, offset)
+        Version = Version.decode()
+        offset += 16
+
+        total_size, = struct.unpack_from('B', data, offset)
+        offset += 1
+
+        total_file_count, = struct.unpack_from('B', data, offset)
+        offset += 1
+
+        total_time, = struct.unpack_from('I', data, offset)
+        offset += 4
+        time_list = list()
+        for i in range(total_file_count):
+            time, = struct.unpack_from('I', data, offset)
+            time_list.append(time)
+            offset += 4
+
+    except Exception as e:
+        return None
+    finally:
+        if file is not None:
+            file.close()
+
+    header = dict()
+    header['id'] = ID
+    header['ver'] = Version
+    header['t_size'] = total_size
+    header['t_count'] = total_file_count
+    header['t_time'] = total_time
+    header['t_list'] = time_list
+    return header
+
+
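read_record_head() above implies a fixed layout for head.init: two 16-byte strings, two single-byte counters, a 4-byte total time, then one 4-byte duration per file. A minimal writer sketch that mirrors that parser (`write_record_head` is a hypothetical helper; `'='` keeps native byte order without padding, matching the parser's explicit offsets):

import struct

def write_record_head(path, rec_id, ver, total_size, durations, total_time):
    # Layout mirrored from read_record_head():
    #   16s id | 16s version | B total_size | B file_count | I total_time
    # followed by one I per data file.
    head = struct.pack('=16s16sBBI',
                       rec_id.encode(), ver.encode(),
                       total_size, len(durations), total_time)
    body = b''.join(struct.pack('=I', t) for t in durations)
    with open(path, 'wb') as f:
        f.write(head + body)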
+def read_record_term(record_id):
+    record_path = os.path.join(cfg.data_path, 'replay', 'ssh', '{}'.format(record_id))
+    term_file_path = os.path.join(record_path, 'term.init')
+    # term_file_path = r"E:\GitWork\teleport\share\data\replay\ssh\103\term.init"
+
+    file = None
+    try:
+        file = open(term_file_path, 'rb')
+        data = file.read()
+        x = len(data)
+        offset = 0
+        # data = data.decode()
+        ID, = struct.unpack_from('16s', data, offset)
+        ID = ID.decode()
+        offset += 16
+
+        Version, = struct.unpack_from('16s', data, offset)
+        Version = Version.decode()
+        offset += 16
+
+        t_count, = struct.unpack_from('I', data, offset)
+        offset += 4
+        term_list = list()
+        for i in range(t_count):
+            # _term, = struct.unpack_from('16s', data, offset)
+            # _term = _term.decode()
+            # offset += 16
+            _time, = struct.unpack_from('I', data, offset)
+            offset += 4
+
+            x, = struct.unpack_from('I', data, offset)
+            offset += 4
+
+            y, = struct.unpack_from('I', data, offset)
+            offset += 4
+
+            # px, = struct.unpack_from('I', data, offset)
+            # offset += 4
+            #
+            # py, = struct.unpack_from('I', data, offset)
+            # offset += 4
+            #
+            # _time, = struct.unpack_from('I', data, offset)
+            # offset += 4
+            temp = dict()
+            # temp['term'] = _term
+            temp['t'] = _time
+            temp['w'] = x
+            temp['h'] = y
+            # temp['px'] = px
+            # temp['py'] = py
+
+            term_list.append(temp)
+
+    except Exception as e:
+        return None
+    finally:
+        if file is not None:
+            file.close()
+
+    header = dict()
+    header['id'] = ID
+    header['ver'] = Version
+    header['count'] = t_count
+    header['term_list'] = term_list
+    return header
+
+
+def read_record_info(record_id, file_id):
+    record_path = os.path.join(cfg.data_path, 'replay', 'ssh', '{}'.format(record_id))
+    file_info = os.path.join(record_path, '{}.ts'.format(file_id))
+    # file_info = r"E:\GitWork\teleport\share\data\replay\ssh\108\0.ts"
+    file = None
+    try:
+        file = open(file_info, 'rb')
+        data = file.read()
+        total_size = len(data)
+        offset = 0
+        data_list = list()
+        while True:
+            action, = struct.unpack_from('B', data, offset)
+            offset += 1
+
+            _time, = struct.unpack_from('I', data, offset)
+            offset += 4
+
+            _size, = struct.unpack_from('I', data, offset)
+            offset += 4
+
+            _format = '{}s'.format(_size)
+            _data, = struct.unpack_from(_format, data, offset)
+            _data = _data.decode()
+            offset += _size
+
+            temp = dict()
+            temp['a'] = action
+            temp['t'] = _time
+            temp['d'] = _data
+
+            data_list.append(temp)
+            if offset == total_size:
+                break
+
+    except Exception as e:
+        return None
+    finally:
+        if file is not None:
+            file.close()
+    return data_list
+
+
+# if __name__ == '__main__':
+#     read_record_info(94,1)
+#     pass
+#     db_path = os.path.join(cfg.data_path, 'ts_db.db')
+
+
+def delete_log(log_list):
+    try:
+        sql_exec = get_db_con()
+        for item in log_list:
+            log_id = int(item)
+            str_sql = 'DELETE FROM ts_log WHERE id={}'.format(log_id)
+            ret = sql_exec.ExecProcNonQuery(str_sql)
+            if not ret:
+                return False
+            # Remove the session recording files as well.
+            try:
+                record_path = os.path.join(cfg.data_path, 'replay', 'ssh', '{}'.format(log_id))
+                if os.path.exists(record_path):
+                    shutil.rmtree(record_path)
+                record_path = os.path.join(cfg.data_path, 'replay', 'rdp', '{}'.format(log_id))
+                if os.path.exists(record_path):
+                    shutil.rmtree(record_path)
+            except Exception as e:
+                pass
+
+        return True
+    except Exception:
+        return False
diff --git a/server/www/teleport/app/eom_app/module/set.py b/server/www/teleport/app/eom_app/module/set.py
new file mode 100644
index 0000000..859487b
--- /dev/null
+++ b/server/www/teleport/app/eom_app/module/set.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+def get_config_list():
+    try:
+        from eom_app.module.common import get_db_con
+        from eom_app.module.common import DbItem
+        sql_exec = get_db_con()
+        field_a = ['name', 'value']
+        string_sql = 'SELECT {} FROM ts_config as a ;'.format(','.join(['a.{}'.format(i) for i in field_a]))
+        db_ret = sql_exec.ExecProcQuery(string_sql)
+        h = dict()
+        for item in db_ret:
+            x = DbItem()
+            x.load(item, ['a_{}'.format(i) for i in field_a])
+            h[x.a_name] = x.a_value
+
+        return h
+    except Exception as e:
+        return None
+
+
+def set_config(change_list):
+    from eom_app.module.common import get_db_con
+    sql_exec = get_db_con()
+    #
+    for item in change_list:
+        name = item['name']
+        value = item['value']
+        str_sql = 'UPDATE ts_config SET value = \'{}\' ' \
+                  ' WHERE name = \'{}\''.format(value, name)
+        ret = sql_exec.ExecProcNonQuery(str_sql)
+
+    return ret
diff --git a/server/www/teleport/app/eom_app/module/user.py b/server/www/teleport/app/eom_app/module/user.py
new file mode 100644
index 0000000..cde3f51
--- /dev/null
+++ b/server/www/teleport/app/eom_app/module/user.py
@@ -0,0 +1,310 @@
+# -*- coding: utf-8 -*-
+import hashlib
+
+from .common import *
+
+
+def verify_user(username, userpwd):
+    sql_exec = get_db_con()
+    userpwd =
hashlib.sha256(userpwd.encode()).hexdigest() + + string_sql = 'select account_id, account_type, ' \ + 'account_name FROM ts_account WHERE account_name =\'{}\' AND account_pwd = \'{}\''.format(username, userpwd) + db_ret = sql_exec.ExecProcQuery(string_sql) + if len(db_ret) != 1: + return 0, 0, '' + user_id, account_type, username = db_ret[0] + return user_id, account_type, username + + +def modify_pwd(old_pwd, new_pwd, user_id): + sql_exec = get_db_con() + new_pwd = hashlib.sha256(new_pwd.encode()).hexdigest() + old_pwd = hashlib.sha256(old_pwd.encode()).hexdigest() + + string_sql = 'SELECT account_id FROM ts_account WHERE account_pwd = \'{}\' AND account_id = {};'.format(old_pwd, int(user_id)) + db_ret = sql_exec.ExecProcQuery(string_sql) + if len(db_ret) != 1: + return -2 + string_sql = 'UPDATE ts_account SET account_pwd = \'{}\' WHERE account_pwd = \'{}\' AND account_id = {}'.format(new_pwd, old_pwd, int(user_id)) + + ret = sql_exec.ExecProcNonQuery(string_sql) + if ret: + return 0 + return -3 + + +def get_user_list(): + sql_exec = get_db_con() + field_a = ['account_id', 'account_type', 'account_name', 'account_status', 'account_lock', 'account_desc'] + string_sql = 'SELECT {} FROM ts_account as a WHERE account_type<100;'.format(','.join(['a.{}'.format(i) for i in field_a])) + db_ret = sql_exec.ExecProcQuery(string_sql) + ret = list() + for item in db_ret: + x = DbItem() + x.load(item, ['a_{}'.format(i) for i in field_a]) + h = dict() + h['user_id'] = x.a_account_id + h['user_type'] = x.a_account_type + h['user_name'] = x.a_account_name + h['user_status'] = x.a_account_status + h['user_lock'] = x.a_account_lock + h['user_desc'] = x.a_account_desc + ret.append(h) + return ret + + +def delete_user(user_id): + sql_exec = get_db_con() + # + str_sql = 'DELETE FROM ts_account WHERE account_id = {} '.format(user_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def lock_user(user_id, lock_status): + sql_exec = get_db_con() + # + str_sql = 'UPDATE ts_account SET account_lock = {} ' \ + ' WHERE account_id = {}'.format(lock_status, user_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def reset_user(user_id): + sql_exec = get_db_con() + # + user_pwd = hashlib.sha256("123456".encode()).hexdigest() + str_sql = 'UPDATE ts_account SET account_pwd = "{}" ' \ + ' WHERE account_id = {}'.format(user_pwd, user_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def modify_user(user_id, user_desc): + sql_exec = get_db_con() + # + str_sql = 'UPDATE ts_account SET account_desc = \'{}\' ' \ + ' WHERE account_id = {}'.format(user_desc, user_id) + ret = sql_exec.ExecProcNonQuery(str_sql) + return ret + + +def add_user(user_name, user_pwd, user_desc): + sql_exec = get_db_con() + # + user_pwd = hashlib.sha256(user_pwd.encode()).hexdigest() + string_sql = 'SELECT account_id FROM ts_account WHERE account_name = \'{}\';'.format(user_name) + db_ret = sql_exec.ExecProcQuery(string_sql) + if len(db_ret) != 0: + return -2 + + str_sql = 'INSERT INTO ts_account (account_type, account_name, account_pwd, account_status,' \ + 'account_lock,account_desc) VALUES (1,\'{}\',\'{}\',0,0,\'{}\')'.format(user_name, user_pwd, user_desc) + ret = sql_exec.ExecProcNonQuery(str_sql) + if ret: + return 0 + return -3 + + +def alloc_host(user_name, host_list): + sql_exec = get_db_con() + field_a = ['host_id'] + string_sql = 'SELECT {} FROM ts_auth as a WHERE account_name=\'{}\';'.format(','.join(['a.{}'.format(i) for i in field_a]), user_name) + db_ret = sql_exec.ExecProcQuery(string_sql) + ret = 
dict()
+    for item in db_ret:
+        x = DbItem()
+        x.load(item, ['a_{}'.format(i) for i in field_a])
+        host_id = int(x.a_host_id)
+        ret[host_id] = host_id
+
+    a_list = list()
+    for item in host_list:
+        if item in ret:
+            pass
+        else:
+            a_list.append(item)
+    try:
+        for item in a_list:
+            host_id = int(item)
+            str_sql = 'INSERT INTO ts_auth (account_name, host_id) VALUES (\'{}\', {})'.format(user_name, host_id)
+            ret = sql_exec.ExecProcNonQuery(str_sql)
+            if not ret:
+                return False
+        return True
+    except Exception:
+        return False
+
+
+def alloc_host_user(user_name, host_auth_dict):
+    sql_exec = get_db_con()
+    field_a = ['host_id', 'host_auth_id']
+    string_sql = 'SELECT {} FROM ts_auth as a WHERE account_name=\'{}\';'.format(','.join(['a.{}'.format(i) for i in field_a]), user_name)
+    db_ret = sql_exec.ExecProcQuery(string_sql)
+    ret = dict()
+    for item in db_ret:
+        x = DbItem()
+        x.load(item, ['a_{}'.format(i) for i in field_a])
+        host_id = int(x.a_host_id)
+        host_auth_id = int(x.a_host_auth_id)
+        if host_id not in ret:
+            ret[host_id] = dict()
+
+        temp = ret[host_id]
+        temp[host_auth_id] = host_id
+        ret[host_id] = temp
+
+    add_dict = dict()
+    for k, v in host_auth_dict.items():
+        host_id = int(k)
+        auth_id_list = v
+        for item in auth_id_list:
+            host_auth_id = int(item)
+            if host_id not in ret:
+                add_dict[host_auth_id] = host_id
+                continue
+            temp = ret[host_id]
+            if host_auth_id not in temp:
+                add_dict[host_auth_id] = host_id
+                continue
+
+    try:
+        for k, v in add_dict.items():
+            host_auth_id = int(k)
+            host_id = int(v)
+            str_sql = 'INSERT INTO ts_auth (account_name, host_id, host_auth_id) VALUES (\'{}\', {}, {})'.format(user_name, host_id, host_auth_id)
+            ret = sql_exec.ExecProcNonQuery(str_sql)
+            if not ret:
+                return False
+        return True
+    except Exception:
+        return False
+
+
+def delete_host(user_name, host_list):
+    try:
+        sql_exec = get_db_con()
+        for item in host_list:
+            host_id = int(item)
+            str_sql = 'DELETE FROM ts_auth WHERE account_name = \'{}\' AND host_id={}'.format(user_name, host_id)
+            ret = sql_exec.ExecProcNonQuery(str_sql)
+            if not ret:
+                return False
+        return True
+    except Exception:
+        return False
+
+
+def delete_host_user(user_name, auth_id_list):
+    try:
+        sql_exec = get_db_con()
+        for item in auth_id_list:
+            auth_id = int(item)
+            str_sql = 'DELETE FROM ts_auth WHERE account_name = \'{}\' AND auth_id={}'.format(user_name, auth_id)
+            ret = sql_exec.ExecProcNonQuery(str_sql)
+            if not ret:
+                return False
+        return True
+    except Exception:
+        return False
+
+
+def get_enc_data_helper(data):
+    try:
+        ret_code, data = get_enc_data(data)
+    except Exception as e:
+        return -100, ''
+
+    return ret_code, data
+
+
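get_log_list() below assembles its WHERE and LIMIT clauses by hand. Note that SQLite's `LIMIT x,y` means offset x and row count y, so the count argument should be the page size, not the end index. A minimal sketch of that arithmetic (`paged_query` is a hypothetical helper, assuming sqlite3 and only the `ts_log` columns used below):

import sqlite3

def paged_query(conn, page_index, per_page):
    total, = conn.execute('SELECT COUNT(*) FROM ts_log').fetchone()
    # Clamp the page index so an out-of-range request shows the last page.
    if page_index * per_page >= total:
        page_index = max(0, (total - 1) // per_page)
    rows = conn.execute(
        'SELECT id FROM ts_log ORDER BY begin_time DESC LIMIT ? OFFSET ?',
        (per_page, page_index * per_page),  # count first in LIMIT ? OFFSET ?
    ).fetchall()
    return total, rows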
+def get_log_list(filter, limit):
+    sql_exec = get_db_con()
+
+    _where = ''
+
+    if len(filter) > 0:
+        _where = 'WHERE ( '
+
+        need_and = False
+        for k in filter:
+            if k == 'account_name':
+                if need_and:
+                    _where += ' AND'
+                _where += ' a.account_name=\'{}\''.format(filter[k])
+                need_and = True
+
+            if k == 'user_name':
+                if need_and:
+                    _where += ' AND'
+                _where += ' a.account_name=\'{}\''.format(filter[k])
+                need_and = True
+
+            elif k == 'search':
+                # Search is limited to the host ID and the IP address. The former
+                # is numeric and can only be matched exactly; the latter can be
+                # matched fuzzily. So first check whether the search term can be
+                # converted to a number.
+
+                if need_and:
+                    _where += ' AND '
+
+                _where += '('
+                _where += 'a.host_ip LIKE "%{}%" )'.format(filter[k])
+                need_and = True
+        _where += ')'
+
+    # http://www.jb51.net/article/46015.htm
+    field_a = ['id', 'session_id', 'account_name', 'host_ip', 'host_port', 'auth_type', 'sys_type', 'user_name', 'ret_code',
+               'begin_time', 'end_time', 'log_time', 'protocol']
+
+    str_sql = 'SELECT COUNT(*) ' \
+              'FROM ts_log AS a ' \
+              '{};'.format(_where)
+
+    db_ret = sql_exec.ExecProcQuery(str_sql)
+    total_count = db_ret[0][0]
+    # Fix up the pagination window.
+    _limit = ''
+    if len(limit) > 0:
+        _page_index = limit['page_index']
+        _per_page = limit['per_page']
+        # SQLite's LIMIT x,y takes (offset, row count), so the second argument
+        # is the page size.
+        _limit = 'LIMIT {},{}'.format(_page_index * _per_page, _per_page)
+
+        if _page_index * _per_page >= total_count:
+            _page_index = int(total_count / _per_page)
+            # log.d(_page_index)
+            _limit = 'LIMIT {},{}'.format(_page_index * _per_page, _per_page)
+
+    string_sql = 'SELECT {} FROM ts_log as a {} ORDER BY begin_time DESC {};'.format(','.join(['a.{}'.format(i) for i in field_a]), _where, _limit)
+    db_ret = sql_exec.ExecProcQuery(string_sql)
+
+    ret = list()
+    for item in db_ret:
+        x = DbItem()
+        x.load(item, ['a_{}'.format(i) for i in field_a])
+        h = dict()
+        h['id'] = x.a_id
+        h['session_id'] = x.a_session_id
+        h['account_name'] = x.a_account_name
+        h['host_ip'] = x.a_host_ip
+        h['host_port'] = x.a_host_port
+        h['auth_type'] = x.a_auth_type
+        h['sys_type'] = x.a_sys_type
+        h['user_name'] = x.a_user_name
+        h['ret_code'] = x.a_ret_code
+        cost_time = (x.a_end_time - x.a_begin_time)
+        if cost_time < 0:
+            cost_time = 0
+        h['cost_time'] = cost_time
+        h['log_time'] = x.a_log_time
+        if x.a_protocol is not None:
+            h['protocol'] = x.a_protocol
+        else:
+            if x.a_sys_type == 1:
+                h['protocol'] = 1
+            else:
+                h['protocol'] = 2
+
+        ret.append(h)
+
+    return total_count, ret
diff --git a/server/www/teleport/app/eom_common/__init__.py b/server/www/teleport/app/eom_common/__init__.py
new file mode 100644
index 0000000..633f866
--- /dev/null
+++ b/server/www/teleport/app/eom_common/__init__.py
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+
diff --git a/server/www/teleport/app/eom_common/alg/__init__.py b/server/www/teleport/app/eom_common/alg/__init__.py
new file mode 100644
index 0000000..c5179f5
--- /dev/null
+++ b/server/www/teleport/app/eom_common/alg/__init__.py
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+
+"""EOM Algorithm Package."""
+
+__version__ = '1.0.1'
+
diff --git a/server/www/teleport/app/eom_common/alg/alg.dll b/server/www/teleport/app/eom_common/alg/alg.dll
new file mode 100644
index 0000000..175c916
Binary files /dev/null and b/server/www/teleport/app/eom_common/alg/alg.dll differ
diff --git a/server/www/teleport/app/eom_common/alg/des.py b/server/www/teleport/app/eom_common/alg/des.py
new file mode 100644
index 0000000..95789c1
--- /dev/null
+++ b/server/www/teleport/app/eom_common/alg/des.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+
+"""DES"""
+
+import os
+from ctypes import *
+
+use_alg = True
+
+try:
+    # Try to locate the .so file in the same directory as this file
+    _file = 'alg.dll'
+    _path = os.path.join(*(os.path.split(__file__)[:-1] + (_file,)))
+    _mod = cdll.LoadLibrary(_path)
+
+    _des3_cbc_encrypt = _mod.des3_cbc_encrypt
+    _des3_cbc_encrypt.argtypes = (c_void_p, c_int, c_void_p, c_int, c_void_p)
+    _des3_cbc_encrypt.restype = c_int
+
+    _des3_cbc_decrypt = _mod.des3_cbc_decrypt
+    _des3_cbc_decrypt.argtypes = (c_void_p, c_int, c_void_p, c_int, c_void_p)
+    _des3_cbc_decrypt.restype = c_int
+
+    _free_buffer = _mod.free_buffer
+    _free_buffer.argtypes = (c_void_p,)
+
+except OSError as e:
+    use_alg = False
+    from eom_common.eomcore.algorithm import pyDes
+    # raise RuntimeError('kx')
+    # print('xxxxxxxxxx')
+    # pass
+
+
+def print_bin(data):
+    for i in range(len(data)):
+        print('%02X ' % data[i], end='')
+        if (i + 1) % 16 == 0:
+            print('')
+    print('')
+
+
+def des3_cbc_encrypt(key, plain_data):
+    if use_alg:
+
+        out =
POINTER(c_ubyte)() + out_len = _des3_cbc_encrypt(key, len(key), plain_data, len(plain_data), byref(out)) + if out_len < 0: + return None + + ret = bytes(cast(out, POINTER(c_ubyte * out_len)).contents) + _free_buffer(out) + + return ret + else: + try: + return pyDes.triple_des(key, pyDes.CBC, b'\x00'*8).encrypt(plain_data, None, pyDes.PAD_PKCS5) + except Exception: + return None + + +def des3_cbc_decrypt(key, enc_data): + if use_alg: + + out = POINTER(c_ubyte)() + out_len = _des3_cbc_decrypt(key, len(key), enc_data, len(enc_data), byref(out)) + if out_len < 0: + return None + + ret = bytes(cast(out, POINTER(c_ubyte * out_len)).contents) + _free_buffer(out) + + return ret + else: + try: + return pyDes.triple_des(key, pyDes.CBC, b'\x00'*8).decrypt(enc_data, None, pyDes.PAD_PKCS5) + except Exception: + return None + + +if __name__ == '__main__': + key = b'\x00' * 24 + plain_data = os.urandom(110) + print_bin(plain_data) + + try: + enc = des3_cbc_encrypt(key, plain_data) + print('==========================') + print_bin(enc) + print('==========================') + dec = des3_cbc_decrypt(key, enc) + print('==========================') + print_bin(dec) + print('==========================') + except Exception: + print('error') + raise diff --git a/server/www/teleport/app/eom_common/eomcore/__init__.py b/server/www/teleport/app/eom_common/eomcore/__init__.py new file mode 100644 index 0000000..2b99267 --- /dev/null +++ b/server/www/teleport/app/eom_common/eomcore/__init__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- + +"""EOM Core Package.""" + +__version__ = '1.0.0.1' + +# import eomcore.logger as eom_log diff --git a/server/www/teleport/app/eom_common/eomcore/algorithm/__init__.py b/server/www/teleport/app/eom_common/eomcore/algorithm/__init__.py new file mode 100644 index 0000000..3f4b87a --- /dev/null +++ b/server/www/teleport/app/eom_common/eomcore/algorithm/__init__.py @@ -0,0 +1 @@ +__author__ = 'apex' diff --git a/server/www/teleport/app/eom_common/eomcore/algorithm/pyDes.py b/server/www/teleport/app/eom_common/eomcore/algorithm/pyDes.py new file mode 100644 index 0000000..ef13137 --- /dev/null +++ b/server/www/teleport/app/eom_common/eomcore/algorithm/pyDes.py @@ -0,0 +1,853 @@ +############################################################################# +# Documentation # +############################################################################# + +# Author: Todd Whiteman +# Date: 16th March, 2009 +# Verion: 2.0.0 +# License: Public Domain - free to do as you wish +# Homepage: http://twhiteman.netfirms.com/des.html +# +# This is a pure python implementation of the DES encryption algorithm. +# It's pure python to avoid portability issues, since most DES +# implementations are programmed in C (for performance reasons). +# +# Triple DES class is also implemented, utilising the DES base. Triple DES +# is either DES-EDE3 with a 24 byte key, or DES-EDE2 with a 16 byte key. +# +# See the README.txt that should come with this python module for the +# implementation methods used. +# +# Thanks to: +# * David Broadwell for ideas, comments and suggestions. +# * Mario Wolff for pointing out and debugging some triple des CBC errors. +# * Santiago Palladino for providing the PKCS5 padding technique. +# * Shaya for correcting the PAD_PKCS5 triple des CBC errors. +# +"""A pure python implementation of the DES and TRIPLE DES encryption algorithms. 
+ +Class initialization +-------------------- +pyDes.des(key, [mode], [IV], [pad], [padmode]) +pyDes.triple_des(key, [mode], [IV], [pad], [padmode]) + +key -> Bytes containing the encryption key. 8 bytes for DES, 16 or 24 bytes + for Triple DES +mode -> Optional argument for encryption type, can be either + pyDes.ECB (Electronic Code Book) or pyDes.CBC (Cypher Block Chaining) +IV -> Optional Initial Value bytes, must be supplied if using CBC mode. + Length must be 8 bytes. +pad -> Optional argument, set the pad character (PAD_NORMAL) to use during + all encrypt/decrpt operations done with this instance. +padmode -> Optional argument, set the padding mode (PAD_NORMAL or PAD_PKCS5) + to use during all encrypt/decrpt operations done with this instance. + +I recommend to use PAD_PKCS5 padding, as then you never need to worry about any +padding issues, as the padding can be removed unambiguously upon decrypting +data that was encrypted using PAD_PKCS5 padmode. + +Common methods +-------------- +encrypt(data, [pad], [padmode]) +decrypt(data, [pad], [padmode]) + +data -> Bytes to be encrypted/decrypted +pad -> Optional argument. Only when using padmode of PAD_NORMAL. For + encryption, adds this characters to the end of the data block when + data is not a multiple of 8 bytes. For decryption, will remove the + trailing characters that match this pad character from the last 8 + bytes of the unencrypted data block. +padmode -> Optional argument, set the padding mode, must be one of PAD_NORMAL + or PAD_PKCS5). Defaults to PAD_NORMAL. + + +Example +------- +from pyDes import * + +data = "Please encrypt my data" +k = des("DESCRYPT", CBC, "\0\0\0\0\0\0\0\0", pad=None, padmode=PAD_PKCS5) +# For Python3, you'll need to use bytes, i.e.: +# data = b"Please encrypt my data" +# k = des(b"DESCRYPT", CBC, b"\0\0\0\0\0\0\0\0", pad=None, padmode=PAD_PKCS5) +d = k.encrypt(data) +print "Encrypted: %r" % d +print "Decrypted: %r" % k.decrypt(d) +assert k.decrypt(d, padmode=PAD_PKCS5) == data + + +See the module source (pyDes.py) for more examples of use. +You can also run the pyDes.py file without and arguments to see a simple test. + +Note: This code was not written for high-end systems needing a fast + implementation, but rather a handy portable solution with small usage. + +""" + +import sys + +# _pythonMajorVersion is used to handle Python2 and Python3 differences. +_pythonMajorVersion = sys.version_info[0] + +# Modes of crypting / cyphering +ECB = 0 +CBC = 1 + +# Modes of padding +PAD_NORMAL = 1 +PAD_PKCS5 = 2 + + +# PAD_PKCS5: is a method that will unambiguously remove all padding +# characters after decryption, when originally encrypted with +# this padding mode. +# For a good description of the PKCS5 padding technique, see: +# http://www.faqs.org/rfcs/rfc1423.html + +# The base class shared by des and triple des. +class _baseDes(object): + def __init__(self, mode=ECB, IV=None, pad=None, padmode=PAD_NORMAL): + if IV: + IV = self._guardAgainstUnicode(IV) + if pad: + pad = self._guardAgainstUnicode(pad) + self.block_size = 8 + # Sanity checking of arguments. 
+ if pad and padmode == PAD_PKCS5: + raise ValueError("Cannot use a pad character with PAD_PKCS5") + if IV and len(IV) != self.block_size: + raise ValueError("Invalid Initial Value (IV), must be a multiple of " + str(self.block_size) + " bytes") + + # Set the passed in variables + self._mode = mode + self._iv = IV + self._padding = pad + self._padmode = padmode + + def getKey(self): + """getKey() -> bytes""" + return self.__key + + def setKey(self, key): + """Will set the crypting key for this object.""" + key = self._guardAgainstUnicode(key) + self.__key = key + + def getMode(self): + """getMode() -> pyDes.ECB or pyDes.CBC""" + return self._mode + + def setMode(self, mode): + """Sets the type of crypting mode, pyDes.ECB or pyDes.CBC""" + self._mode = mode + + def getPadding(self): + """getPadding() -> bytes of length 1. Padding character.""" + return self._padding + + def setPadding(self, pad): + """setPadding() -> bytes of length 1. Padding character.""" + if pad is not None: + pad = self._guardAgainstUnicode(pad) + self._padding = pad + + def getPadMode(self): + """getPadMode() -> pyDes.PAD_NORMAL or pyDes.PAD_PKCS5""" + return self._padmode + + def setPadMode(self, mode): + """Sets the type of padding mode, pyDes.PAD_NORMAL or pyDes.PAD_PKCS5""" + self._padmode = mode + + def getIV(self): + """getIV() -> bytes""" + return self._iv + + def setIV(self, IV): + """Will set the Initial Value, used in conjunction with CBC mode""" + if not IV or len(IV) != self.block_size: + raise ValueError("Invalid Initial Value (IV), must be a multiple of " + str(self.block_size) + " bytes") + IV = self._guardAgainstUnicode(IV) + self._iv = IV + + def _padData(self, data, pad, padmode): + # Pad data depending on the mode + if padmode is None: + # Get the default padding mode. + padmode = self.getPadMode() + if pad and padmode == PAD_PKCS5: + raise ValueError("Cannot use a pad character with PAD_PKCS5") + + if padmode == PAD_NORMAL: + if len(data) % self.block_size == 0: + # No padding required. + return data + + if not pad: + # Get the default padding. + pad = self.getPadding() + if not pad: + raise ValueError("Data must be a multiple of " + str(self.block_size) + " bytes in length. Use padmode=PAD_PKCS5 or set the pad character.") + data += (self.block_size - (len(data) % self.block_size)) * pad + + elif padmode == PAD_PKCS5: + pad_len = 8 - (len(data) % self.block_size) + if _pythonMajorVersion < 3: + data += pad_len * chr(pad_len) + else: + data += bytes([pad_len] * pad_len) + + return data + + def _unpadData(self, data, pad, padmode): + # Unpad data depending on the mode. + if not data: + return data + if pad and padmode == PAD_PKCS5: + raise ValueError("Cannot use a pad character with PAD_PKCS5") + if padmode is None: + # Get the default padding mode. + padmode = self.getPadMode() + + if padmode == PAD_NORMAL: + if not pad: + # Get the default padding. + pad = self.getPadding() + if pad: + data = data[:-self.block_size] + \ + data[-self.block_size:].rstrip(pad) + + elif padmode == PAD_PKCS5: + if _pythonMajorVersion < 3: + pad_len = ord(data[-1]) + else: + pad_len = data[-1] + data = data[:-pad_len] + + return data + + def _guardAgainstUnicode(self, data): + # Only accept byte strings or ascii unicode values, otherwise + # there is no way to correctly decode the data into bytes. + if _pythonMajorVersion < 3: + if isinstance(data, unicode): + raise ValueError("pyDes can only work with bytes, not Unicode strings.") + else: + if isinstance(data, str): + # Only accept ascii unicode values. 
+ try: + return data.encode('ascii') + except UnicodeEncodeError: + pass + raise ValueError("pyDes can only work with encoded strings, not Unicode.") + return data + + +############################################################################# +# DES # +############################################################################# +class des(_baseDes): + """DES encryption/decrytpion class + + Supports ECB (Electronic Code Book) and CBC (Cypher Block Chaining) modes. + + pyDes.des(key,[mode], [IV]) + + key -> Bytes containing the encryption key, must be exactly 8 bytes + mode -> Optional argument for encryption type, can be either pyDes.ECB + (Electronic Code Book), pyDes.CBC (Cypher Block Chaining) + IV -> Optional Initial Value bytes, must be supplied if using CBC mode. + Must be 8 bytes in length. + pad -> Optional argument, set the pad character (PAD_NORMAL) to use + during all encrypt/decrpt operations done with this instance. + padmode -> Optional argument, set the padding mode (PAD_NORMAL or + PAD_PKCS5) to use during all encrypt/decrpt operations done + with this instance. + """ + + + # Permutation and translation tables for DES + __pc1 = [56, 48, 40, 32, 24, 16, 8, + 0, 57, 49, 41, 33, 25, 17, + 9, 1, 58, 50, 42, 34, 26, + 18, 10, 2, 59, 51, 43, 35, + 62, 54, 46, 38, 30, 22, 14, + 6, 61, 53, 45, 37, 29, 21, + 13, 5, 60, 52, 44, 36, 28, + 20, 12, 4, 27, 19, 11, 3 + ] + + # number left rotations of pc1 + __left_rotations = [ + 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1 + ] + + # permuted choice key (table 2) + __pc2 = [ + 13, 16, 10, 23, 0, 4, + 2, 27, 14, 5, 20, 9, + 22, 18, 11, 3, 25, 7, + 15, 6, 26, 19, 12, 1, + 40, 51, 30, 36, 46, 54, + 29, 39, 50, 44, 32, 47, + 43, 48, 38, 55, 33, 52, + 45, 41, 49, 35, 28, 31 + ] + + # initial permutation IP + __ip = [57, 49, 41, 33, 25, 17, 9, 1, + 59, 51, 43, 35, 27, 19, 11, 3, + 61, 53, 45, 37, 29, 21, 13, 5, + 63, 55, 47, 39, 31, 23, 15, 7, + 56, 48, 40, 32, 24, 16, 8, 0, + 58, 50, 42, 34, 26, 18, 10, 2, + 60, 52, 44, 36, 28, 20, 12, 4, + 62, 54, 46, 38, 30, 22, 14, 6 + ] + + # Expansion table for turning 32 bit blocks into 48 bits + __expansion_table = [ + 31, 0, 1, 2, 3, 4, + 3, 4, 5, 6, 7, 8, + 7, 8, 9, 10, 11, 12, + 11, 12, 13, 14, 15, 16, + 15, 16, 17, 18, 19, 20, + 19, 20, 21, 22, 23, 24, + 23, 24, 25, 26, 27, 28, + 27, 28, 29, 30, 31, 0 + ] + + # The (in)famous S-boxes + __sbox = [ + # S1 + [14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7, + 0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8, + 4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0, + 15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13], + + # S2 + [15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10, + 3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5, + 0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15, + 13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9], + + # S3 + [10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8, + 13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1, + 13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7, + 1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12], + + # S4 + [7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15, + 13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9, + 10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4, + 3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14], + + # S5 + [2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9, + 14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6, + 4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14, + 11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 
3], + + # S6 + [12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11, + 10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8, + 9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6, + 4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13], + + # S7 + [4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1, + 13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6, + 1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2, + 6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12], + + # S8 + [13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7, + 1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2, + 7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8, + 2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11], + ] + + + # 32-bit permutation function P used on the output of the S-boxes + __p = [ + 15, 6, 19, 20, 28, 11, + 27, 16, 0, 14, 22, 25, + 4, 17, 30, 9, 1, 7, + 23, 13, 31, 26, 2, 8, + 18, 12, 29, 5, 21, 10, + 3, 24 + ] + + # final permutation IP^-1 + __fp = [ + 39, 7, 47, 15, 55, 23, 63, 31, + 38, 6, 46, 14, 54, 22, 62, 30, + 37, 5, 45, 13, 53, 21, 61, 29, + 36, 4, 44, 12, 52, 20, 60, 28, + 35, 3, 43, 11, 51, 19, 59, 27, + 34, 2, 42, 10, 50, 18, 58, 26, + 33, 1, 41, 9, 49, 17, 57, 25, + 32, 0, 40, 8, 48, 16, 56, 24 + ] + + # Type of crypting being done + ENCRYPT = 0x00 + DECRYPT = 0x01 + + # Initialisation + def __init__(self, key, mode=ECB, IV=None, pad=None, padmode=PAD_NORMAL): + # Sanity checking of arguments. + if len(key) != 8: + raise ValueError("Invalid DES key size. Key must be exactly 8 bytes long.") + _baseDes.__init__(self, mode, IV, pad, padmode) + self.key_size = 8 + + self.L = [] + self.R = [] + self.Kn = [[0] * 48] * 16 # 16 48-bit keys (K1 - K16) + self.final = [] + + self.setKey(key) + + def setKey(self, key): + """Will set the crypting key for this object. Must be 8 bytes.""" + _baseDes.setKey(self, key) + self.__create_sub_keys() + + def __String_to_BitList(self, data): + """Turn the string data, into a list of bits (1, 0)'s""" + if _pythonMajorVersion < 3: + # Turn the strings into integers. Python 3 uses a bytes + # class, which already has this behaviour. 
+ data = [ord(c) for c in data] + l = len(data) * 8 + result = [0] * l + pos = 0 + for ch in data: + i = 7 + while i >= 0: + if ch & (1 << i) != 0: + result[pos] = 1 + else: + result[pos] = 0 + pos += 1 + i -= 1 + + return result + + def __BitList_to_String(self, data): + """Turn the list of bits -> data, into a string""" + result = [] + pos = 0 + c = 0 + while pos < len(data): + c += data[pos] << (7 - (pos % 8)) + if (pos % 8) == 7: + result.append(c) + c = 0 + pos += 1 + + if _pythonMajorVersion < 3: + return ''.join([chr(c) for c in result]) + else: + return bytes(result) + + def __permutate(self, table, block): + """Permutate this block with the specified table""" + return list(map(lambda x: block[x], table)) + + # Transform the secret key, so that it is ready for data processing + # Create the 16 subkeys, K[1] - K[16] + def __create_sub_keys(self): + """Create the 16 subkeys K[1] to K[16] from the given key""" + key = self.__permutate(des.__pc1, self.__String_to_BitList(self.getKey())) + i = 0 + # Split into Left and Right sections + self.L = key[:28] + self.R = key[28:] + while i < 16: + j = 0 + # Perform circular left shifts + while j < des.__left_rotations[i]: + self.L.append(self.L[0]) + del self.L[0] + + self.R.append(self.R[0]) + del self.R[0] + + j += 1 + + # Create one of the 16 subkeys through pc2 permutation + self.Kn[i] = self.__permutate(des.__pc2, self.L + self.R) + + i += 1 + + # Main part of the encryption algorithm, the number cruncher :) + def __des_crypt(self, block, crypt_type): + """Crypt the block of data through DES bit-manipulation""" + block = self.__permutate(des.__ip, block) + self.L = block[:32] + self.R = block[32:] + + # Encryption starts from Kn[1] through to Kn[16] + if crypt_type == des.ENCRYPT: + iteration = 0 + iteration_adjustment = 1 + # Decryption starts from Kn[16] down to Kn[1] + else: + iteration = 15 + iteration_adjustment = -1 + + i = 0 + while i < 16: + # Make a copy of R[i-1], this will later become L[i] + tempR = self.R[:] + + # Permutate R[i - 1] to start creating R[i] + self.R = self.__permutate(des.__expansion_table, self.R) + + # Exclusive or R[i - 1] with K[i], create B[1] to B[8] whilst here + self.R = list(map(lambda x, y: x ^ y, self.R, self.Kn[iteration])) + B = [self.R[:6], self.R[6:12], self.R[12:18], self.R[18:24], self.R[24:30], self.R[30:36], self.R[36:42], self.R[42:]] + # Optimization: Replaced below commented code with above + # j = 0 + # B = [] + # while j < len(self.R): + # self.R[j] = self.R[j] ^ self.Kn[iteration][j] + # j += 1 + # if j % 6 == 0: + # B.append(self.R[j-6:j]) + + # Permutate B[1] to B[8] using the S-Boxes + j = 0 + Bn = [0] * 32 + pos = 0 + while j < 8: + # Work out the offsets + m = (B[j][0] << 1) + B[j][5] + n = (B[j][1] << 3) + (B[j][2] << 2) + (B[j][3] << 1) + B[j][4] + + # Find the permutation value + v = des.__sbox[j][(m << 4) + n] + + # Turn value into bits, add it to result: Bn + Bn[pos] = (v & 8) >> 3 + Bn[pos + 1] = (v & 4) >> 2 + Bn[pos + 2] = (v & 2) >> 1 + Bn[pos + 3] = v & 1 + + pos += 4 + j += 1 + + # Permutate the concatination of B[1] to B[8] (Bn) + self.R = self.__permutate(des.__p, Bn) + + # Xor with L[i - 1] + self.R = list(map(lambda x, y: x ^ y, self.R, self.L)) + # Optimization: This now replaces the below commented code + # j = 0 + # while j < len(self.R): + # self.R[j] = self.R[j] ^ self.L[j] + # j += 1 + + # L[i] becomes R[i - 1] + self.L = tempR + + i += 1 + iteration += iteration_adjustment + + # Final permutation of R[16]L[16] + self.final = self.__permutate(des.__fp, self.R + 
self.L) + return self.final + + # Data to be encrypted/decrypted + def crypt(self, data, crypt_type): + """Crypt the data in blocks, running it through des_crypt()""" + + # Error check the data + if not data: + return '' + if len(data) % self.block_size != 0: + if crypt_type == des.DECRYPT: # Decryption must work on 8 byte blocks + raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n.") + if not self.getPadding(): + raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n. Try setting the optional padding character") + else: + data += (self.block_size - (len(data) % self.block_size)) * self.getPadding() + # print "Len of data: %f" % (len(data) / self.block_size) + + if self.getMode() == CBC: + if self.getIV(): + iv = self.__String_to_BitList(self.getIV()) + else: + raise ValueError("For CBC mode, you must supply the Initial Value (IV) for ciphering") + + # Split the data into blocks, crypting each one seperately + i = 0 + dict = {} + result = [] + # cached = 0 + # lines = 0 + while i < len(data): + # Test code for caching encryption results + # lines += 1 + # if dict.has_key(data[i:i+8]): + # print "Cached result for: %s" % data[i:i+8] + # cached += 1 + # result.append(dict[data[i:i+8]]) + # i += 8 + # continue + + block = self.__String_to_BitList(data[i:i + 8]) + + # Xor with IV if using CBC mode + if self.getMode() == CBC: + if crypt_type == des.ENCRYPT: + block = list(map(lambda x, y: x ^ y, block, iv)) + # j = 0 + # while j < len(block): + # block[j] = block[j] ^ iv[j] + # j += 1 + + processed_block = self.__des_crypt(block, crypt_type) + + if crypt_type == des.DECRYPT: + processed_block = list(map(lambda x, y: x ^ y, processed_block, iv)) + # j = 0 + # while j < len(processed_block): + # processed_block[j] = processed_block[j] ^ iv[j] + # j += 1 + iv = block + else: + iv = processed_block + else: + processed_block = self.__des_crypt(block, crypt_type) + + + # Add the resulting crypted block to our list + # d = self.__BitList_to_String(processed_block) + # result.append(d) + result.append(self.__BitList_to_String(processed_block)) + # dict[data[i:i+8]] = d + i += 8 + + # print "Lines: %d, cached: %d" % (lines, cached) + + # Return the full crypted string + if _pythonMajorVersion < 3: + return ''.join(result) + else: + return bytes.fromhex('').join(result) + + def encrypt(self, data, pad=None, padmode=None): + """encrypt(data, [pad], [padmode]) -> bytes + + data : Bytes to be encrypted + pad : Optional argument for encryption padding. Must only be one byte + padmode : Optional argument for overriding the padding mode. + + The data must be a multiple of 8 bytes and will be encrypted + with the already specified key. Data does not have to be a + multiple of 8 bytes if the padding character is supplied, or + the padmode is set to PAD_PKCS5, as bytes will then added to + ensure the be padded data is a multiple of 8 bytes. + """ + data = self._guardAgainstUnicode(data) + if pad is not None: + pad = self._guardAgainstUnicode(pad) + data = self._padData(data, pad, padmode) + return self.crypt(data, des.ENCRYPT) + + def decrypt(self, data, pad=None, padmode=None): + """decrypt(data, [pad], [padmode]) -> bytes + + data : Bytes to be encrypted + pad : Optional argument for decryption padding. Must only be one byte + padmode : Optional argument for overriding the padding mode. + + The data must be a multiple of 8 bytes and will be decrypted + with the already specified key. 
In PAD_NORMAL mode, if the + optional padding character is supplied, then the un-encrypted + data will have the padding characters removed from the end of + the bytes. This pad removal only occurs on the last 8 bytes of + the data (last data block). In PAD_PKCS5 mode, the special + padding end markers will be removed from the data after decrypting. + """ + data = self._guardAgainstUnicode(data) + if pad is not None: + pad = self._guardAgainstUnicode(pad) + data = self.crypt(data, des.DECRYPT) + return self._unpadData(data, pad, padmode) + + +############################################################################# +# Triple DES # +############################################################################# +class triple_des(_baseDes): + """Triple DES encryption/decrytpion class + + This algorithm uses the DES-EDE3 (when a 24 byte key is supplied) or + the DES-EDE2 (when a 16 byte key is supplied) encryption methods. + Supports ECB (Electronic Code Book) and CBC (Cypher Block Chaining) modes. + + pyDes.des(key, [mode], [IV]) + + key -> Bytes containing the encryption key, must be either 16 or +bytes long + mode -> Optional argument for encryption type, can be either pyDes.ECB + (Electronic Code Book), pyDes.CBC (Cypher Block Chaining) + IV -> Optional Initial Value bytes, must be supplied if using CBC mode. + Must be 8 bytes in length. + pad -> Optional argument, set the pad character (PAD_NORMAL) to use + during all encrypt/decrpt operations done with this instance. + padmode -> Optional argument, set the padding mode (PAD_NORMAL or + PAD_PKCS5) to use during all encrypt/decrpt operations done + with this instance. + """ + + def __init__(self, key, mode=ECB, IV=None, pad=None, padmode=PAD_NORMAL): + _baseDes.__init__(self, mode, IV, pad, padmode) + self.setKey(key) + + def setKey(self, key): + """Will set the crypting key for this object. Either 16 or 24 bytes long.""" + self.key_size = 24 # Use DES-EDE3 mode + if len(key) != self.key_size: + if len(key) == 16: # Use DES-EDE2 mode + self.key_size = 16 + else: + raise ValueError("Invalid triple DES key size. Key must be either 16 or 24 bytes long") + if self.getMode() == CBC: + if not self.getIV(): + # Use the first 8 bytes of the key + self._iv = key[:self.block_size] + if len(self.getIV()) != self.block_size: + raise ValueError("Invalid IV, must be 8 bytes in length") + self.__key1 = des(key[:8], self._mode, self._iv, + self._padding, self._padmode) + self.__key2 = des(key[8:16], self._mode, self._iv, + self._padding, self._padmode) + if self.key_size == 16: + self.__key3 = self.__key1 + else: + self.__key3 = des(key[16:], self._mode, self._iv, + self._padding, self._padmode) + _baseDes.setKey(self, key) + + # Override setter methods to work on all 3 keys. + + def setMode(self, mode): + """Sets the type of crypting mode, pyDes.ECB or pyDes.CBC""" + _baseDes.setMode(self, mode) + for key in (self.__key1, self.__key2, self.__key3): + key.setMode(mode) + + def setPadding(self, pad): + """setPadding() -> bytes of length 1. 
Padding character.""" + _baseDes.setPadding(self, pad) + for key in (self.__key1, self.__key2, self.__key3): + key.setPadding(pad) + + def setPadMode(self, mode): + """Sets the type of padding mode, pyDes.PAD_NORMAL or pyDes.PAD_PKCS5""" + _baseDes.setPadMode(self, mode) + for key in (self.__key1, self.__key2, self.__key3): + key.setPadMode(mode) + + def setIV(self, IV): + """Will set the Initial Value, used in conjunction with CBC mode""" + _baseDes.setIV(self, IV) + for key in (self.__key1, self.__key2, self.__key3): + key.setIV(IV) + + def encrypt(self, data, pad=None, padmode=None): + """encrypt(data, [pad], [padmode]) -> bytes + + data : bytes to be encrypted + pad : Optional argument for encryption padding. Must only be one byte + padmode : Optional argument for overriding the padding mode. + + The data must be a multiple of 8 bytes and will be encrypted + with the already specified key. Data does not have to be a + multiple of 8 bytes if the padding character is supplied, or + the padmode is set to PAD_PKCS5, as bytes will then added to + ensure the be padded data is a multiple of 8 bytes. + """ + ENCRYPT = des.ENCRYPT + DECRYPT = des.DECRYPT + data = self._guardAgainstUnicode(data) + if pad is not None: + pad = self._guardAgainstUnicode(pad) + # Pad the data accordingly. + data = self._padData(data, pad, padmode) + if self.getMode() == CBC: + self.__key1.setIV(self.getIV()) + self.__key2.setIV(self.getIV()) + self.__key3.setIV(self.getIV()) + i = 0 + result = [] + while i < len(data): + block = self.__key1.crypt(data[i:i + 8], ENCRYPT) + block = self.__key2.crypt(block, DECRYPT) + block = self.__key3.crypt(block, ENCRYPT) + self.__key1.setIV(block) + self.__key2.setIV(block) + self.__key3.setIV(block) + result.append(block) + i += 8 + if _pythonMajorVersion < 3: + return ''.join(result) + else: + return bytes.fromhex('').join(result) + else: + data = self.__key1.crypt(data, ENCRYPT) + data = self.__key2.crypt(data, DECRYPT) + return self.__key3.crypt(data, ENCRYPT) + + def decrypt(self, data, pad=None, padmode=None): + """decrypt(data, [pad], [padmode]) -> bytes + + data : bytes to be encrypted + pad : Optional argument for decryption padding. Must only be one byte + padmode : Optional argument for overriding the padding mode. + + The data must be a multiple of 8 bytes and will be decrypted + with the already specified key. In PAD_NORMAL mode, if the + optional padding character is supplied, then the un-encrypted + data will have the padding characters removed from the end of + the bytes. This pad removal only occurs on the last 8 bytes of + the data (last data block). In PAD_PKCS5 mode, the special + padding end markers will be removed from the data after + decrypting, no pad character is required for PAD_PKCS5. 
+        """
+        ENCRYPT = des.ENCRYPT
+        DECRYPT = des.DECRYPT
+        data = self._guardAgainstUnicode(data)
+        if pad is not None:
+            pad = self._guardAgainstUnicode(pad)
+        if self.getMode() == CBC:
+            self.__key1.setIV(self.getIV())
+            self.__key2.setIV(self.getIV())
+            self.__key3.setIV(self.getIV())
+            i = 0
+            result = []
+            while i < len(data):
+                iv = data[i:i + 8]
+                block = self.__key3.crypt(iv, DECRYPT)
+                block = self.__key2.crypt(block, ENCRYPT)
+                block = self.__key1.crypt(block, DECRYPT)
+                self.__key1.setIV(iv)
+                self.__key2.setIV(iv)
+                self.__key3.setIV(iv)
+                result.append(block)
+                i += 8
+            if _pythonMajorVersion < 3:
+                data = ''.join(result)
+            else:
+                data = bytes.fromhex('').join(result)
+        else:
+            data = self.__key3.crypt(data, DECRYPT)
+            data = self.__key2.crypt(data, ENCRYPT)
+            data = self.__key1.crypt(data, DECRYPT)
+        return self._unpadData(data, pad, padmode)
diff --git a/server/www/teleport/app/eom_common/eomcore/env.py b/server/www/teleport/app/eom_common/eomcore/env.py
new file mode 100644
index 0000000..7d973d1
--- /dev/null
+++ b/server/www/teleport/app/eom_common/eomcore/env.py
@@ -0,0 +1,203 @@
+# -*- coding: utf-8 -*-
+
+"""
+Runtime environment detection.
+"""
+
+import os
+import sys
+import platform
+from eom_common.common.const import *
+from eom_common.eomcore.logger import log
+from . import utils
+
+
+class EomEnvBase(object):
+    def __init__(self):
+        self._os_type = OS_UNKNOWN
+        self._linux_dist = OS_LINUX_UNKNOWN
+
+        self._os_name = 'unknown'
+        self._os_id = 0  # 0 = unknown, 1 = windows, 200=Linux, 201=Ubuntu...
+
+        # # Is this script run by Python, or by our own host program?
+        self._is_self_exec = False
+        #
+        # Are we running as administrator?
+        self._is_run_as_root = False
+
+        #
+        self._app_path = ''
+        self._conf_path = ''
+        self._log_path = ''
+        self._data_path = ''
+
+        self._check()
+
+    # def init(self, app_path):
+    #     self._app_path = app_path
+    #     self._conf_path = os.path.join(self._app_path, 'conf')
+    #     # self._log_path = os.path.join(self._app_path, 'log')
+
+    def _check(self):
+        # Detect the operating system.
+        if 'win32' == sys.platform:
+            self._os_type = OS_WIN32
+            self._os_name = 'windows'
+            self._os_id = 1
+        elif 'linux' == sys.platform:
+            self._os_type = OS_LINUX
+            self._os_name = 'linux'
+            self._os_id = 200
+        elif 'darwin' == sys.platform:
+            self._os_type = OS_MAC
+            self._os_name = 'macos'
+            self._os_id = 300
+        else:
+            log.e('[ERROR] Can not detect system type.\n')
+            return
+
+        # On Linux, work out which distribution this is.
+        if OS_LINUX == self._os_type:
+            (dist, ver, sys_id) = platform.dist()
+            dist = dist.lower()
+            if 'centos' == dist:
+                self._linux_dist = OS_LINUX_CENTOS
+                self._os_id = 201
+            elif 'ubuntu' == dist:
+                self._linux_dist = OS_LINUX_UBUNTU
+                self._os_id = 202
+            elif 'debian' == dist:
+                self._linux_dist = OS_LINUX_DEBIAN
+                self._os_id = 203
+            elif 'redhat' == dist:
+                self._linux_dist = OS_LINUX_REDHAT
+                self._os_id = 204
+            elif 'gentoo' == dist:
+                self._linux_dist = OS_LINUX_GENTOO
+                self._os_id = 205
+            else:
+                log.w('[WARNING] Can not detect linux distribution, try default settings.\n')
+                self._linux_dist = OS_LINUX_DEFAULT
+
+        # Determine whether the host program is python or our own executable.
+        exec_names = os.path.split(sys.executable)
+        # print(sys.executable)
+        if 'python3' == exec_names[1] or 'python.exe' == exec_names[1]:
+            self._is_self_exec = False
+        else:
+            self._is_self_exec = True
+
+        # Determine whether we are running as root/administrator.
+        if self._os_type == OS_WIN32:
+            # On Windows there is no direct way to check this; try to create a
+            # file in a privileged directory and judge by whether it succeeds.
+            tmp_file = '%s\\System32\\6A5D77DDFCFB40CEB26A8444EEC5757E_%s.tmp' % (os.getenv('SystemRoot'), utils.gen_random(4))
+            try:
+                f = open(tmp_file, 'w')
+                f.close()
os.remove(tmp_file) + self._is_run_as_root = True + except IOError: + pass + else: + if 0 == os.getuid(): + self._is_run_as_root = True + + # # 纭畾璺緞 + # tmp = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + # if tmp[-4:] == '.zip': + # self._app_path = os.path.abspath(os.path.join(tmp, '..', '..')) + # else: + # self._app_path = os.path.abspath(os.path.join(tmp, '..')) + + # self._conf_path = os.path.join(self._app_path, 'conf') + + def is_self_exec(self): + return self._is_self_exec + + def is_root(self): + return self._is_run_as_root + + def get_os_type(self): + return self._os_type + + def get_os_name(self): + return self._os_name + + def get_os_id(self): + return self._os_id + + def is_windows(self): + return True if self._os_type == OS_WIN32 else False + + def is_macos(self): + return True if self._os_type == OS_MAC else False + + def is_linux(self): + return True if self._os_type == OS_LINUX else False + + def get_linux_dist(self): + return self._linux_dist + + def is_ubuntu(self): + return True if self._linux_dist == OS_LINUX_UBUNTU else False + + def is_centos(self): + return True if self._linux_dist == OS_LINUX_CENTOS else False + + def is_debian(self): + return True if self._linux_dist == OS_LINUX_DEBIAN else False + + def is_redhat(self): + return True if self._linux_dist == OS_LINUX_REDHAT else False + + def is_gentoo(self): + return True if self._linux_dist == OS_LINUX_GENTOO else False + + # def get_log_file_path(self): + # if self.is_windows(): + # path = os.path.join(self.app_path, 'log', 'eom-agent') + # elif self.is_macos(): + # path = '/var/log/eom-agent' + # else: + # path = '/var/log/eom-agent' + # + # return path + + def log_path(self): + return self._log_path + + @property + def app_path(self): + """ + 杩斿洖鐨勮矾寰勬槸app鑴氭湰鏂囦欢鎵鍦ㄨ矾寰勭殑涓婁竴绾ц矾寰勶紝鍙敤浜庡悎鎴恖og銆乧onf绛夎矾寰勩 + + :rtype : str + """ + return self._app_path + + @property + def conf_path(self): + return self._conf_path + + @property + def data_path(self): + return self._data_path + +# +# +# # eom_env = EomEnv() +# # del EomEnv +# eom_env = None +# +# +# def get_env(): +# """ +# +# :rtype : EomEnv +# """ +# global eom_env +# if eom_env is None: +# eom_env = EomEnv() +# # del EomEnv +# return eom_env diff --git a/server/www/teleport/app/eom_common/eomcore/eom_mysql.py b/server/www/teleport/app/eom_common/eomcore/eom_mysql.py new file mode 100644 index 0000000..3249f5d --- /dev/null +++ b/server/www/teleport/app/eom_common/eomcore/eom_mysql.py @@ -0,0 +1,254 @@ +# coding=utf-8 +# !/usr/bin/env python +# ------------------------------------------------------------------------------- +# Name: pymssqlTest.py +# Purpose: 娴嬭瘯 pymssql搴擄紝璇ュ簱鍒拌繖閲屼笅杞斤細http://www.lfd.uci.edu/~gohlke/pythonlibs/#pymssql +# +# Author: scott +# +# Created: 04/02/2012 +# ------------------------------------------------------------------------------- + +import pymysql +import threading +from .logger import * +# import logic.config_file + +# eom_comm_conf = logic.config_file.get_comm_conf() +mysql_pool = None + + +def get_mysql_pool(): + global mysql_pool + if mysql_pool is None: + mysql_pool = MySqlPool() + return mysql_pool + + +class MySQL: + + def __init__(self, host, user, pwd, db, port=3306): + self.host = host + self.port = port + self.user = user + self.pwd = pwd + self.db = db + self.login_timeout = 3 + self.conn = None + + def connect(self): + """ + 寰楀埌杩炴帴淇℃伅 + 杩斿洖: conn.cursor() + """ + if not self.db: + raise (NameError, "娌℃湁璁剧疆鏁版嵁搴撲俊鎭") + # self.conn = pymysql.connect(host=self.host, port=self.port, user=self.user, 
password=self.pwd, + # login_timeout=self.login_timeout, database=self.db, charset="utf8") + try: + if self.conn is not None: + self.conn.ping() + else: + self.conn = pymysql.connect(host=self.host, + user=self.user, + passwd=self.pwd, + db=self.db, + port=self.port, + connect_timeout=self.login_timeout, + charset='utf8') + except pymysql.err.OperationalError: + log.e('pymsql 杩炴帴鏁版嵁搴撳け璐%s:%d]\n' % (self.host, self.port)) + return None + except Exception as e: + log.e('con 杩炴帴鏁版嵁搴撳け璐%s:%d]\n' % (self.host, self.port)) + return None + + cur = self.conn.cursor() + if not cur: + log.e('cur 杩炴帴鏁版嵁搴撳け璐%s:%d]\n' % (self.host, self.port)) + raise (NameError, "杩炴帴鏁版嵁搴撳け璐") + else: + return cur + + # 璋冪敤瀹炰緥 ms.ExecProcQuery('exec P_Agent_Cmd_Get @CmdGroupId=7') + def ExecProcQuery(self, sql): + try: + if self.connect() is None: + self.conn = None + return None + + cur = self.conn.cursor() + + cur.execute(sql) + + resList = cur.fetchall() + self.conn.commit() + except pymysql.OperationalError as e: + if self.conn is not None: + self.conn.close() + log.e('ExecProcQuery[%s,%s]\n' % (sql, str(e))) + return None + except Exception as e: + if self.conn is not None: + self.conn.close() + log.e('ExecProcQuery[%s,%s]\n' % (sql, str(e))) + return None + return resList + + def ExecProcNonQuery(self, sql): + try: + + if self.connect() is None: + self.conn = None + return None + + cur = self.conn.cursor() + cur.execute(sql) + self.conn.commit() + return True + except pymysql.OperationalError as e: + # self.conn.close() + if self.conn is not None: + self.conn.close() + log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) + return False + except Exception as e: + if self.conn is not None: + self.conn.close() + log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) + return False + + @staticmethod + def ExecNonQuery(mysql, sql): + try: + if mysql.connect() is None: + mysql.conn = None + return False + + cur = mysql.conn.cursor() + cur.execute(sql) + # self.conn.commit() + return True + except pymysql.OperationalError as e: + # self.conn.close() + if mysql.conn is not None: + mysql.conn.close() + log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) + return False + except Exception as e: + if mysql.conn is not None: + mysql.conn.close() + log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) + return False + + @staticmethod + def EndExecNonQuery(mysql): + try: + if mysql is None or mysql.conn is None: + return False + mysql.conn.commit() + return True + except pymysql.OperationalError as e: + # self.conn.close() + if mysql.conn is not None: + mysql.conn.close() + return False + except Exception as e: + if mysql.conn is not None: + mysql.conn.close() + return False + + def CallProc(self, proc_name, in_args, out_in_args=None): + sql = '' + ret_code = list() + try: + # print(in_args) + result = list() + + self.connect() + + cur = self.conn.cursor() + cur.callproc(proc_name, in_args) + # + + data_set = cur.fetchall() + result.append(data_set) + while True: + has_set = cur.nextset() + if not has_set: + break + data_set = cur.fetchall() + result.append(data_set) + + cur.execute('select 0;') + self.conn.commit() + + if out_in_args is not None: + sql = 'select ' + for item in out_in_args: + str_item = '@_{0}_{1},'.format(proc_name, item) + sql += str_item + + sql = sql[:-1] + code = cur.execute(sql) + # code = cur.execute('select @_p_test_1_2,@_p_test_1_3,@_p_test_1_4') + # ret_code = list() + if code == 1: + (data_set,) = cur.fetchall() + length = len(data_set) + for i in range(length): + ret_code.append(data_set[i]) + return 
result, ret_code + + except pymysql.OperationalError as e: + if self.conn is not None: + self.conn.close() + log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) + return None + except Exception as e: + if self.conn is not None: + self.conn.close() + log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) + return None + + +class MySqlPool: + def __init__(self): + self._conn_log = dict() + self._conn_sys = dict() + self._conn_common = dict() + self._db_ip = '' + self._db_port = 0 + self._db_user = '' + self._db_pass = '' + self._locker_log = threading.RLock() + self._locker_sys1 = threading.RLock() + self._locker_sys2 = threading.RLock() + + def init(self, db_ip, db_port, db_user, db_pass): + self._db_ip = db_ip + self._db_port = db_port + self._db_user = db_user + self._db_pass = db_pass + + def get_websqlcon(self): + with self._locker_log: + thread_id = threading.get_ident() + if thread_id not in self._conn_log: + my_sql = MySQL(self._db_ip, self._db_user, self._db_pass, 'ts_web', self._db_port) + self._conn_log[thread_id] = my_sql + return my_sql + + my_sql = self._conn_log[thread_id] + return my_sql + + def get_tssqlcon(self): + with self._locker_sys1: + thread_id = threading.get_ident() + if thread_id not in self._conn_sys: + my_sql = MySQL(self._db_ip, self._db_user, self._db_pass, 'ts_db', self._db_port) + self._conn_sys[thread_id] = my_sql + return my_sql + + my_sql = self._conn_sys[thread_id] + return my_sql diff --git a/server/www/teleport/app/eom_common/eomcore/eom_sqlite.py b/server/www/teleport/app/eom_common/eomcore/eom_sqlite.py new file mode 100644 index 0000000..1c78de5 --- /dev/null +++ b/server/www/teleport/app/eom_common/eomcore/eom_sqlite.py @@ -0,0 +1,154 @@ +# coding=utf-8 +# +# Created: 04/02/2012 +# ------------------------------------------------------------------------------- + +import os +import sqlite3 +import threading + +from .logger import * + +sqlite_pool = None + + +def get_sqlite_pool(): + global sqlite_pool + if sqlite_pool is None: + sqlite_pool = SqlitePool() + return sqlite_pool + + +class eom_sqlite: + """ + """ + + def __init__(self, path): + self._db_file = path + self._conn = None + + def connect(self): + try: + self._conn = sqlite3.connect(self._db_file) + except: + self._conn = None + raise RuntimeError('can not open database.') + return self._conn + + # 璋冪敤瀹炰緥 ms.ExecProcQuery('exec P_Agent_Cmd_Get @CmdGroupId=7') + def ExecProcQuery(self, sql): + if self._conn is None: + if self.connect() is None: + return None + cursor = self._conn.cursor() + try: + + cursor.execute(sql) + db_ret = cursor.fetchall() + return db_ret + except Exception as e: + return None + finally: + cursor.close() + + # return None + + def ExecProcNonQuery(self, sql): + if self._conn is None: + if self.connect() is None: + return False + + cursor = self._conn.cursor() + try: + cursor.execute(sql) + self._conn.commit() + except Exception as e: + log.e('can not create/open database.\n') + return False + finally: + cursor.close() + + return True + + def ExecManyProcNonQuery(self, sql): + if self._conn is None: + if self.connect() is None: + return False + + cursor = self._conn.cursor() + try: + cursor.executescript(sql) + # print(sql) + self._conn.commit() + cursor.close() + except Exception as e: + log.e('can not create/open database.\n') + return False + + return True + + def close(self): + self._conn.close() + self._conn = None + + +class SqlitePool: + def __init__(self): + self._conn_sys = dict() + self._path = '' + self._locker_sys = threading.RLock() + self._config_server_ip = '' 
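+
+    # A minimal usage sketch (paths are hypothetical; the pool keeps one
+    # connection per thread, keyed by threading.get_ident()):
+    #
+    #   pool = get_sqlite_pool()
+    #   if pool.init('/path/to/share/data'):   # opens <path>/ts_db.db
+    #       con = pool.get_tssqlcon()          # per-thread eom_sqlite handle
+    #       rows = con.ExecProcQuery('SELECT value FROM ts_config;')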
+
+    def init(self, path):
+        self._conn_sys.clear()
+        self._path = os.path.join(path, 'ts_db.db')
+        if not os.path.exists(self._path):
+            return False
+
+        try:
+            sql_con = self.get_tssqlcon()
+            str_sql = 'SELECT value FROM ts_config WHERE name=\"ts_server_ip\";'
+            db_ret = sql_con.ExecProcQuery(str_sql)
+            self._config_server_ip = db_ret[0][0]
+        except Exception:
+            self._config_server_ip = '127.0.0.1'
+        return True
+
+    def init_full_path(self, full_path):
+        self._conn_sys.clear()
+        self._path = full_path
+        if not os.path.exists(self._path):
+            return False
+
+        try:
+            sql_con = self.get_tssqlcon()
+            str_sql = 'SELECT value FROM ts_config WHERE name=\"ts_server_ip\";'
+            db_ret = sql_con.ExecProcQuery(str_sql)
+            self._config_server_ip = db_ret[0][0]
+        except Exception:
+            self._config_server_ip = '127.0.0.1'
+        return True
+
+    def get_config_server_ip(self):
+        return self._config_server_ip
+
+    def get_tssqlcon(self):
+        with self._locker_sys:
+            thread_id = threading.get_ident()
+            if thread_id not in self._conn_sys:
+                _eom_sqlite = eom_sqlite(self._path)
+                self._conn_sys[thread_id] = _eom_sqlite
+            else:
+                _eom_sqlite = self._conn_sys[thread_id]
+
+            return _eom_sqlite
+
+    def close(self):
+        with self._locker_sys:
+            thread_id = threading.get_ident()
+            if thread_id not in self._conn_sys:
+                return
+            else:
+                _eom_sqlite = self._conn_sys[thread_id]
+                self._conn_sys.pop(thread_id)
+                _eom_sqlite.close()
diff --git a/server/www/teleport/app/eom_common/eomcore/logger.py b/server/www/teleport/app/eom_common/eomcore/logger.py
new file mode 100644
index 0000000..e4d8555
--- /dev/null
+++ b/server/www/teleport/app/eom_common/eomcore/logger.py
@@ -0,0 +1,620 @@
+# -*- coding: utf-8 -*-
+
+import atexit
+import sys
+import threading
+import time
+import traceback
+
+__all__ = ['log',
+           'CR_DEBUG', 'CR_VERBOSE', 'CR_INFO', 'CR_WARN', 'CR_ERROR',
+           'LOG_DEBUG', 'LOG_VERBOSE', 'LOG_INFO', 'LOG_WARN', 'LOG_ERROR', 'TRACE_ERROR_NONE', 'TRACE_ERROR_FULL']
+
+LOG_DEBUG = 1
+LOG_VERBOSE = 10
+LOG_INFO = 20
+LOG_WARN = 30
+LOG_ERROR = 99
+
+TRACE_ERROR_NONE = 0
+TRACE_ERROR_FULL = 999999
+
+# ======================================
+# Colors
+# ======================================
+CR_NORMAL = 0         # restore normal - light gray
+# BOLD = "[1m"        # highlight
+# UNDERSCORE = "[4m"  # underline
+# REVERSE = "[7m"     # reverse video
+CR_BLACK = 1          # black
+CR_LIGHT_GRAY = 2     # light gray - normal text
+CR_GRAY = 3           # dark gray - captured command output
+CR_WHITE = 4          # white
+CR_RED = 5            # red
+CR_GREEN = 6          # green
+CR_YELLOW = 7         # yellow - Windows calls this color "Brown"
+CR_BLUE = 8           # blue
+CR_MAGENTA = 9        # magenta
+CR_CYAN = 10          # cyan
+CR_LIGHT_RED = 11     # light red - failure
+CR_LIGHT_GREEN = 12   # light green - success
+CR_LIGHT_YELLOW = 13  # light yellow - important
+CR_LIGHT_BLUE = 14    # light blue - still rather dark on a black background
+CR_LIGHT_MAGENTA = 15 # light magenta - warning
+CR_LIGHT_CYAN = 16    # light cyan
+
+CR_DEBUG = CR_GRAY
+CR_VERBOSE = CR_LIGHT_GRAY
+CR_INFO = CR_GREEN
+CR_WARN = CR_LIGHT_MAGENTA
+CR_ERROR = CR_LIGHT_RED
+
+COLORS = {
+    # constant              Linux color   WinConsole color
+    CR_NORMAL: ('[0m', 7),             # 7 = light gray - normal text
+    CR_BLACK: ('[0;30m', 0),           # 0 = black
+    CR_RED: ("[0;31m", 4),             # red
+    CR_GREEN: ("[0;32m", 2),           # green
+    CR_YELLOW: ("[0;33m", 6),          # yellow - Windows calls this color "Brown"
+    CR_BLUE: ("[0;34m", 1),            # blue
+    CR_MAGENTA: ("[0;35m", 5),         # magenta
+    CR_CYAN: ("[0;36m", 3),            # cyan
+    CR_LIGHT_GRAY: ('[0;37m', 7),      # light gray - normal text
+    CR_GRAY: ("[1;30m", 8),            # dark gray - captured command output
+    CR_LIGHT_RED: ("[1;31m", 12),      # light red - failure
+    CR_LIGHT_GREEN: ("[1;32m", 10),    # light green - success
+    CR_LIGHT_YELLOW: ("[1;33m", 14),   # light yellow - important
+    CR_LIGHT_BLUE: ("[1;34m", 9),      # light blue - still rather dark on a black background
+    CR_LIGHT_MAGENTA: ("[1;35m", 13),  # light magenta - warning
+    CR_LIGHT_CYAN: ("[1;36m", 11),     # light cyan
+    CR_WHITE: ("[1;37m", 15)           # white
+}
+
+
+# env = eomcore.env.get_env()
+
+
+class EomLogger:
+    """
+    Logging module; supports output to the console and to a file.
+
+    :type _file_handle : file
+    :type _win_color : Win32ColorConsole
+    """
+
+    def __init__(self):
+        self._locker = threading.RLock()
+
+        self._min_level = LOG_INFO  # only log messages at or above this level
+        self._trace_error = TRACE_ERROR_NONE  # when logging an error, whether to append the call stack
+        self._log_datetime = True  # whether to prefix log lines with the date and time
+        self._file_handle = None  # handle of the log file; None means do not log to a file
+        self._log_console = self._console_default  # console writer; None means do not log to the console
+
+        self._win_color = None
+
+        self.d = self._func_debug
+        self.v = self._func_verbose
+        self.i = self._func_info
+        self.w = self._func_warn
+        self.e = self._func_error
+
+        self._set_console(True)
+        self._set_level(self._min_level)
+
+        atexit.register(self.finalize)
+
+    def initialize(self):
+        pass
+
+    def finalize(self):
+        if self._file_handle is not None:
+            self._file_handle.close()
+
+    def set_attribute(self, min_level=None, console=None, log_datetime=None, trace_error=None, filename=None):
+        """
+        Adjust the logger attributes; parameters left as None are skipped and
+        stay unchanged. Setting filename to '' (an empty string, not None)
+        turns file logging off.
+        :type filename: str
+        :type trace_error: int
+        :type log_datetime: bool
+        :type min_level: int
+        """
+        if min_level is not None:
+            self._set_level(min_level)
+
+        if console is not None:
+            self._set_console(console)
+
+        if log_datetime is not None:
+            self._log_datetime = log_datetime
+
+        if trace_error is not None:
+            self._trace_error = trace_error
+
+        if filename is not None:
+            if not self._set_filename(filename):
+                return False
+
+        return True
+
+    def _set_level(self, level):
+        self.d = self._func_debug
+        self.v = self._func_verbose
+        self.i = self._func_info
+        self.w = self._func_warn
+        # self.e = self._func_error
+
+        if LOG_DEBUG == level:
+            pass
+        elif LOG_VERBOSE == level:
+            self.d = self._func_pass
+        elif LOG_INFO == level:
+            self.d = self._func_pass
+            self.v = self._func_pass
+        elif LOG_WARN == level:
+            self.d = self._func_pass
+            self.v = self._func_pass
+            self.i = self._func_pass
+        elif LOG_ERROR == level:
+            self.d = self._func_pass
+            self.v = self._func_pass
+            self.i = self._func_pass
+            self.w = self._func_pass
+            pass
+        else:
+            pass
+
+        self._min_level = level
+
+    def _set_console(self, is_enabled):
+        if not is_enabled:
+            self._log_console = self._func_pass
+            return
+
+        if sys.platform == 'linux' or sys.platform == 'darwin':
+            self._log_console = self._console_linux
+        elif sys.platform == 'win32':
+
+            if sys.stdout is None:
+                self._dbg_view = Win32DebugView()
+                if self._dbg_view.available():
+                    self._log_console = self._dbg_view.output
+
+                self.log('use DebugView as logger output.\n')
+                # self._log_console = self._func_pass
+
+            else:
+                # if 'TERM' in os.environ and 'emacs' != os.environ['TERM']:
+                #     self._log_console = self._console_linux
+
+                # self._win_color = Win32ColorConsole()
+                # if self._win_color.available():
+                #     self._log_console = self._console_win
+                # else:
+                #     self._log_console = self._console_linux
+
+                self._log_console = self._console_linux
+
+    def _set_filename(self, base_filename):
+
+        if len(base_filename) == 0:
+            if self._file_handle is not None:
+                self._file_handle.close()
+                self._file_handle = None
+            return True
+
+        log_filename = base_filename.strip()
+        if 0 == len(log_filename):
+            self.e('invalid log file name.')
+            return False
+
+        try:
+            self._file_handle = open(log_filename, 'a+', encoding='utf8')
+        except IOError:
+            self._file_handle = None
+            self.e('Can not open log file for write.\n')
+            return False
+
+        return True
+
+    def log(self, msg, color=None):
+        """
+        Write to the console only (never to the log file), in the given color
+        and without the timestamp prefix.
+        """
+        self._do_log(msg, color=color, show_datetime=False)
+
+    def _func_pass(self, msg, color=None):
+        # do nothing.
+        pass
+
+    def _func_debug(self, msg):
+        # debug output; not emitted during normal operation
+        self._do_log(msg, CR_DEBUG)
+
+    # ordinary log data
+    def _func_verbose(self, msg):
+        # pass
+        self._do_log(msg, None)
+
+    # important information
+    def _func_info(self, msg):
+        self._do_log(msg, CR_INFO)
+
+    # warnings
+    def _func_warn(self, msg):
+        self._do_log(msg, CR_WARN)
+
+    def _func_error(self, msg):
+        """errors
+        """
+        self._do_log('[ERROR] %s' % msg, CR_ERROR)
+
+        if self._trace_error == TRACE_ERROR_NONE:
+            return
+
+        s = traceback.extract_stack()
+        c = len(s)
+        for i in range(c - 1):
+            if i >= self._trace_error:
+                break
+            if s[c - 2 - i][0].startswith('<'):
+                continue
+            self._do_log('  %s(%d)\n' % (s[c - 2 - i][0], s[c - 2 - i][1]), CR_ERROR)
+
+    def bin(self, msg, data):
+        # dump binary data as a hex view; only at debug level
+        if self._min_level > LOG_DEBUG:
+            return
+        # console only; never written to the log file
+        if self._log_console is None:
+            return
+
+        m = msg.rstrip(' \r\n\t')
+        if bytes != type(data) and bytearray != type(data):
+            self.w('%s [NOT BINARY]\n' % m)
+            return
+        data_size = len(data)
+        self.d('%s [%d/0x%X B]\n' % (m, data_size, data_size))
+        if data_size == 0:
+            return
+
+        x = 0
+        loop = int(data_size / 16)
+        last_line = data_size % 16
+
+        for x in range(loop):
+            m = '%08X ' % (x * 16)
+
+            for y in range(16):
+                if 8 == y:
+                    m += ' -'
+                m += ' %02X' % data[x * 16 + y]
+
+            m += ' '
+
+            for y in range(16):
+                ch = data[x * 16 + y]
+                if 32 <= ch <= 126:
+                    m += '%c' % data[x * 16 + y]
+                else:
+                    m += '.'
+
+            m += '\n'
+            self.log(m, CR_DEBUG)
+
+        if loop > 0:
+            x += 1
+
+        if last_line > 0:
+            padding_size = (16 - last_line) * 3
+            if last_line <= 8:
+                padding_size += 2
+
+            m = '%08X ' % (x * 16)
+
+            for y in range(last_line):
+                if 8 == y:
+                    m += ' -'
+                m += ' %02X' % data[x * 16 + y]
+
+            m += ' ' * (padding_size + 3)
+
+            for y in range(last_line):
+                ch = data[x * 16 + y]
+                if 32 <= ch <= 126:
+                    m += '%c' % data[x * 16 + y]
+                else:
+                    m += '.'
+
+            m += '\n'
+            self.log(m, CR_DEBUG)
+
+    def _do_log(self, msg, color=None, show_datetime=True):
+        with self._locker:
+            now = time.localtime(time.time())
+            _log_time = '[{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}] '.format(now.tm_year, now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min, now.tm_sec)
+
+            try:
+                if show_datetime and self._log_datetime:
+                    msg = '{}{}'.format(_log_time, msg)
+                    self._log_console(msg, color)
+                else:
+                    self._log_console(msg, color)
+                    msg = '{}{}'.format(_log_time, msg)
+
+                self._log_file(msg)
+
+            except IOError:
+                pass
+
+    def _console_default(self, msg, color=None):
+        """
+        Log to console without color.
+ """ + if not self._log_console: + return + if msg is None: + return + + sys.stdout.writelines(msg) + sys.stdout.flush() + + def _console_win(self, msg, color=None): + if not self._log_console: + return + if msg is None: + msg = '' + + # 杩欓噷鐨勯棶棰樺緢澶嶆潅锛屾棩甯镐娇鐢ㄦ病鏈夐棶棰橈紝浣嗘槸褰撳湪宸ヤ綔鏈轰笂浣跨敤鏃讹紝閮ㄥ垎鍐呭鏄崟鑾峰彟涓涓剼鏈墽琛岀殑缁撴灉鍐嶈緭鍑 + # 濡傛灉缁撴灉涓湁涓枃锛岃繖閲屽氨浼氭樉绀轰贡鐮併傚鏋滃皾璇曠紪鐮佽浆鎹紝浼氭姏鍑哄紓甯搞傜洰鍓嶆殏鏃堕噰鐢ㄦ樉绀轰贡鐮佺殑鏂瑰紡浜嗐 + + # if CONSOLE_WIN_CMD == self.console_type: + # try: + # _msg = unicode(msg, 'utf-8') + # except: + # _msg = msg + # else: + # _msg = msg + # _msg = None + # if isinstance(msg, unicode): + # _msg = msg + # else: + # # _msg = unicode(msg, 'utf-8') + # try: + # _msg = unicode(msg, 'utf-8') + # except: + # _msg = unicode(msg, 'gb2312') + # # _msg = msg + # + # # if CONSOLE_WIN_CMD == self.console_type: + # # sys.stdout.writelines(msg.encode('gb2312')) + # # else: + # # sys.stdout.writelines(msg.encode('utf-8')) + # + # + # # try: + # # _msg = unicode(msg, 'utf-8') + # # except: + # _msg = msg + + if color is None: + sys.stdout.writelines(msg) + else: + self._win_color.set_color(COLORS[color][1]) + sys.stdout.writelines(msg) + sys.stdout.flush() + self._win_color.set_color(COLORS[CR_NORMAL][1]) + + sys.stdout.flush() + + def _console_linux(self, msg, cr=None): + if not self._log_console: + return + if msg is None: + return + + if cr is None: + sys.stdout.writelines(msg) + else: + sys.stdout.writelines('\x1B%s%s\x1B[0m' % (COLORS[cr][0], msg)) + # sys.stdout.writelines('\[%s%s\[[0m' % (COLORS[cr][0], msg)) + + sys.stdout.flush() + + def _log_file(self, msg): + if self._file_handle is None: + return + + # 淇濆瓨鍒版枃浠舵椂锛屾绘槸灏嗗瓧绗︿覆鎸 utf-8 鏍煎紡淇濆瓨 + # self._file_handle.write(msg.encode('utf-8')) + self._file_handle.write(msg) + self._file_handle.flush() + + def _log_print(self, *args, **kwargs): + sep = kwargs['sep'] if 'sep' in kwargs else ' ' + end = kwargs['end'] if 'end' in kwargs else '\n' + + show_datetime = self._log_datetime + first = True + for x in args: + if not first: + log._do_log(sep, show_datetime=show_datetime) + + first = False + if isinstance(x, str): + log._do_log(x, show_datetime=show_datetime) + show_datetime = False + continue + + else: + log._do_log(x.__str__(), show_datetime=show_datetime) + show_datetime = False + + log._do_log(end, show_datetime=show_datetime) + + # s = traceback.extract_stack() + # c = len(s) + # for i in range(c - 1): + # if i >= self._trace_error: + # break + # if s[c - 2 - i][0].startswith('. + # codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None) + + # Make Unicode console output work independently of the current code page. + # This also fixes . + # Credit to Michael Kaplan + # and TZOmegaTZIOY + # . 
+ try: + # + # HANDLE WINAPI GetStdHandle(DWORD nStdHandle); + # returns INVALID_HANDLE_VALUE, NULL, or a valid handle + # + # + # DWORD WINAPI GetFileType(DWORD hFile); + # + # + # BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode); + + STD_OUTPUT_HANDLE = DWORD(-11) + INVALID_HANDLE_VALUE = DWORD(-1).value + + GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(("GetStdHandle", windll.kernel32)) + + self.__SetConsoleTextAttribute = WINFUNCTYPE(BOOL, HANDLE, WORD)(("SetConsoleTextAttribute", windll.kernel32)) + + self.__stdout = GetStdHandle(STD_OUTPUT_HANDLE) + if self.__stdout == INVALID_HANDLE_VALUE: + self.__stdout = None + + except Exception as e: + self.__stdout = None + self._complain("exception %r while fixing up sys.stdout and sys.stderr\n" % (str(e),)) + + # If any exception occurs in this code, we'll probably try to print it on stderr, + # which makes for frustrating debugging if stderr is directed to our wrapper. + # So be paranoid about catching errors and reporting them to original_stderr, + # so that we can at least see them. + @staticmethod + def _complain(message): + # print >> self.__original_stderr, message if isinstance(message, str) else repr(message) + sys.stderr.writelines(message) + + def available(self): + if self.__stdout is None or self.__SetConsoleTextAttribute is None: + return False + else: + return True + + def set_color(self, color): + # if not self.available(): + # return + self.__SetConsoleTextAttribute(self.__stdout, color) + + +log = EomLogger() +del EomLogger + +import builtins + +builtins.__dict__['print'] = log._log_print diff --git a/server/www/teleport/app/eom_common/eomcore/sysexec.py b/server/www/teleport/app/eom_common/eomcore/sysexec.py new file mode 100644 index 0000000..ef75cc2 --- /dev/null +++ b/server/www/teleport/app/eom_common/eomcore/sysexec.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- + +""" +鎵ц绯荤粺鍛戒护鐨勬ā鍧 +""" + +import subprocess + +from .logger import * + + +class SysExec: + def __init__(self, cmd, line_processor=None): + self.__cmd = cmd + self.__process_ret = 0 + self.__console_output = bytes() + self.__line_processor = line_processor + + def get_exec_ret(self): + return self.__process_ret + + def get_console_output(self): + return self.__console_output + + def run(self, direct_output=False, output_codec=None): + # 娉ㄦ剰锛歰utput_codec鍦╳indows榛樿涓篻b2312锛屽叾浠栧钩鍙伴粯璁tf8 + if output_codec is None: + if env.is_windows(): + output_codec = 'gb2312' + else: + output_codec = 'utf8' + + p = None + """type: subprocess.Popen""" + + if env.is_windows(): + try: + p = subprocess.Popen(self.__cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) + except WindowsError as e: + self.__process_ret = e.errno + msg = 'Unknown error.' + if 2 == e.errno: + msg = 'The system cannot find the file specified.' + elif 3 == e.errno: + msg = 'The system cannot find the path specified.' + elif 5 == e.errno: + msg = 'Access is denied.' + elif 13 == e.errno: + msg = 'The process cannot access the file because it is being used by another process.' + + self.__console_output = msg.encode(output_codec) + return + + except: + msg = 'Unknown error.' + self.__process_ret = 999 + self.__console_output = msg.encode(output_codec) + return + + else: + try: + # Test under Mac, shell must be True. + p = subprocess.Popen(self.__cmd, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) + except: + msg = 'Unknown error.' 
+                self.__process_ret = 999
+                self.__console_output = msg.encode(output_codec)
+
+        f = p.stdout
+        while True:
+            line = f.readline()
+            if 0 == len(line):
+                break
+
+            # if '\n' == line[-1]:
+            #     line = line[:-1]
+
+            if line[-2:] == b'\r\n':
+                line = line[:-2]
+                line += b'\n'
+            if line[-1:] == b'\r':
+                line = line[:-1]
+                line += b'\n'
+
+            if self.__line_processor is not None:
+                self.__line_processor(line)
+
+            if direct_output:
+                log.d(line.decode(output_codec))
+
+            self.__console_output += line
+
+        # # The captured output is converted to unicode for internal use, then
+        # # re-encoded as needed: always utf-8 for storage, gb2312 for display
+        # # on the Win platform and utf-8 on the other platforms.
+        # _line = None
+        # if const.CONSOLE_WIN_CMD == self.console_type:
+        #     _line = unicode(line, output_codec)
+        # else:
+        #     log.v('tab\n')
+        #     _line = utf8_coder.decode(line)[0]  # unicode(line, 'utf-8')
+
+        # if direct_output == True:
+        #     log.cap(_line)
+
+        # strOutput += utf8_coder.encode(_line)[0]  # _line.encode('utf-8')
+
+        self.__process_ret = p.wait()
+
+        # if bCompareRet:
+        #     if retWanted != ret:
+        #         self.error("\nExecute command returned %d, but we wanted %d.\n\n" % (ret, retWanted))
+
+        # print(self.__console_output.decode(output_codec))
+        return
+
+    def start(self):
+        # Start a command and return; do not wait for it to end.
+
+        if env.is_windows():
+            try:
+                subprocess.Popen(self.__cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+            except OSError as e:
+                self.__process_ret = e.errno
+                # msg = 'Unknown error.'
+                # if 2 == e.errno:
+                #     msg = 'The system cannot find the file specified.'
+                # elif 3 == e.errno:
+                #     msg = 'The system cannot find the path specified.'
+                # elif 5 == e.errno:
+                #     msg = 'Access is denied.'
+                # elif 13 == e.errno:
+                #     msg = 'The process cannot access the file because it is being used by another process.'
+                #
+                # self.__console_output = msg.encode(output_codec)
+                return False
+
+            except:
+                # msg = 'Unknown error.'
+                self.__process_ret = 999
+                # self.__console_output = msg.encode(output_codec)
+                return False
+
+        else:
+            try:
+                subprocess.Popen(self.__cmd, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
+            except:
+                # msg = 'Unknown error.'
+                self.__process_ret = 999
+                # self.__console_output = msg.encode(output_codec)
+                return False
+
+        return True
diff --git a/server/www/teleport/app/eom_common/eomcore/utils.py b/server/www/teleport/app/eom_common/eomcore/utils.py
new file mode 100644
index 0000000..33020d3
--- /dev/null
+++ b/server/www/teleport/app/eom_common/eomcore/utils.py
@@ -0,0 +1,170 @@
+# -*- coding: utf-8 -*-
+
+import os
+import threading
+import time
+import datetime
+import hashlib
+import stat
+
+
+def make_dir(path):
+    """
+    Create a directory.
+
+    Missing parent directories are created as well, so the whole hierarchy is
+    guaranteed to exist. If the target directory already exists, that counts
+    as success (the goal is simply that the directory exists).
+
+    :param path: str
+    :return: boolean
+    """
+    abs_path = os.path.abspath(path)
+
+    if os.path.exists(abs_path):
+        if os.path.isdir(abs_path):
+            return True
+        else:
+            # log.e(u'An object named "%s" already exists. Can not create such directory.\n' % abs_path)
+            return False
+
+    base_name = os.path.basename(abs_path)
+    parent_path = abs_path[:len(abs_path) - len(base_name)]
+    if parent_path == path:
+        return False
+
+    if not os.path.exists(parent_path):
+        # log.v('make_dir: %s\n' % parent_path)
+        if not make_dir(parent_path):
+            return False
+        os.mkdir(abs_path)
+        # os.mkdir(abs_path, 0o777)
+        os.chmod(abs_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+    else:
+        if os.path.isdir(parent_path):
+            os.mkdir(abs_path)
+            # os.mkdir(abs_path, 0o777)
+            os.chmod(abs_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+        else:
+            # log.e(u'An object named "%s" already exists. Can not create such directory.\n' % parent_path)
+            return False
+
+    return True
+
+
+def gen_random(n):
+    """
+    Generate n random bytes and return them as a hex string.
+
+    :param n: int
+    :return : str
+    """
+    ret = ''
+    data = os.urandom(n)
+    for i in data:
+        ret += '%02X' % i
+    return ret
+
+
+def bytes2human(n):
+    """
+    Convert a byte count into a human-readable string.
+
+    http://code.activestate.com/recipes/578019
+    bytes2human(10000) '9.8K'
+    bytes2human(100001221) '95.4M'
+
+    :type n: int
+    :rtype : str
+    """
+    symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+    prefix = {}
+    for i, s in enumerate(symbols):
+        prefix[s] = 1 << (i + 1) * 10
+    for s in reversed(symbols):
+        if n >= prefix[s]:
+            value = float(n) / prefix[s]
+            return '%.1f%s' % (value, s)
+    return "%sB" % n
+
+
+def second2human(n):
+    """
+    Convert an elapsed time in seconds into a human-readable string.
+
+    :type n: int
+    :rtype : str
+    """
+    _sec = n
+
+    ret = ''
+    d = int(_sec / 86400)  # 86400 = 24*60*60 seconds, one day
+    if d > 0:
+        ret = '%dd' % d
+
+    _sec %= 86400
+    h = int(_sec / 3600)  # 3600 = 60*60 seconds, one hour
+    if h > 0:
+        if len(ret) > 0:
+            ret = '%s %dh' % (ret, h)
+        else:
+            ret = '%dh' % h
+
+    _sec %= 3600
+    m = int(_sec / 60)  # 60 seconds, one minute
+    if len(ret) > 0:
+        ret = '%s %dm' % (ret, m)
+    elif m > 0:
+        ret = '%dm' % m
+
+    _sec %= 60
+    if len(ret) > 0:
+        ret = '%s %ds' % (ret, _sec)
+    else:
+        ret = '%ds' % _sec
+
+    return ret
+
+
+def timestamp_local_to_utc(t):
+    return int(datetime.datetime.utcfromtimestamp(time.mktime(time.localtime(t))).timestamp())
+
+
+def bytes_to_string(b, encode='utf8'):
+    l = len(b)
+    for c in range(l):
+        if b[c] == 0:
+            ret = b[0:c].decode(encode)
+            return ret
+
+    return b.decode(encode)
+
+
+def md5file(file_name):
+    if not os.path.exists(file_name) or not os.path.isfile(file_name):
+        raise ValueError
+
+    f = open(file_name, 'rb')
+    m = hashlib.md5()
+
+    while 1:
+        x = f.read(4096)
+        m.update(x)
+        if len(x) < 4096:
+            break
+
+    f.close()
+    return m.hexdigest()
+
+
+class UniqueId:
+    def __init__(self):
+        self._id = int(datetime.datetime.utcnow().timestamp())
+        self._locker = threading.RLock()
+
+    def generate(self):
+        with self._locker:
+            self._id += 1
+            return self._id
+
+unique_id = UniqueId()
+del UniqueId
+
diff --git a/server/www/teleport/app/eom_common/eomcore/win_api.py b/server/www/teleport/app/eom_common/eomcore/win_api.py
new file mode 100644
index 0000000..26bd495
--- /dev/null
+++ b/server/www/teleport/app/eom_common/eomcore/win_api.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+
+import ctypes
+import os
+import struct
+from ctypes import windll, wintypes
+
+# FSCTL_GET_REPARSE_POINT = 0x900a8
+#
+# FILE_ATTRIBUTE_READONLY = 0x0001
+# FILE_ATTRIBUTE_HIDDEN = 0x0002
+# FILE_ATTRIBUTE_DIRECTORY = 0x0010
+# FILE_ATTRIBUTE_NORMAL = 0x0080
+FILE_ATTRIBUTE_REPARSE_POINT = 0x0400
+#
+# GENERIC_READ = 0x80000000
+# GENERIC_WRITE = 0x40000000
+# OPEN_EXISTING = 3
+# FILE_READ_ATTRIBUTES = 0x80
+# FILE_FLAG_OPEN_REPARSE_POINT =
0x00200000 +# INVALID_HANDLE_VALUE = wintypes.HANDLE(-1).value +# +# INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF +# +# # FILE_FLAG_OPEN_REPARSE_POINT = 2097152 +# FILE_FLAG_BACKUP_SEMANTICS = 33554432 +# # FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTI +# FILE_FLAG_REPARSE_BACKUP = 35651584 + +GetFileAttributes = windll.kernel32.GetFileAttributesW +# _CreateFileW = windll.kernel32.CreateFileW +# _DevIoCtl = windll.kernel32.DeviceIoControl +# _DevIoCtl.argtypes = [ +# wintypes.HANDLE, # HANDLE hDevice +# wintypes.DWORD, # DWORD dwIoControlCode +# wintypes.LPVOID, # LPVOID lpInBuffer +# wintypes.DWORD, # DWORD nInBufferSize +# wintypes.LPVOID, # LPVOID lpOutBuffer +# wintypes.DWORD, # DWORD nOutBufferSize +# ctypes.POINTER(wintypes.DWORD), # LPDWORD lpBytesReturned +# wintypes.LPVOID] # LPOVERLAPPED lpOverlapped +# _DevIoCtl.restype = wintypes.BOOL + + +def islink(path): + assert os.path.isdir(path), path + if GetFileAttributes(path) & FILE_ATTRIBUTE_REPARSE_POINT: + return True + else: + return False + +# +# def DeviceIoControl(hDevice, ioControlCode, input, output): +# # DeviceIoControl Function +# # http://msdn.microsoft.com/en-us/library/aa363216(v=vs.85).aspx +# if input: +# input_size = len(input) +# else: +# input_size = 0 +# if isinstance(output, int): +# output = ctypes.create_string_buffer(output) +# output_size = len(output) +# assert isinstance(output, ctypes.Array) +# bytesReturned = wintypes.DWORD() +# status = _DevIoCtl(hDevice, ioControlCode, input, +# input_size, output, output_size, bytesReturned, None) +# print("status(%d)" % status) +# if status != 0: +# return output[:bytesReturned.value] +# else: +# return None +# +# +# def CreateFile(path, access, sharemode, creation, flags): +# return _CreateFileW(path, access, sharemode, None, creation, flags, None) +# +# +# SymbolicLinkReparseFormat = "LHHHHHHL" +# SymbolicLinkReparseSize = struct.calcsize(SymbolicLinkReparseFormat); +# +# +# def readlink(path): +# """ Windows readlink implementation. """ +# # This wouldn't return true if the file didn't exist, as far as I know. +# assert islink(path) +# assert type(path) == unicode +# +# # Open the file correctly depending on the string type. +# hfile = CreateFile(path, GENERIC_READ, 0, OPEN_EXISTING, +# FILE_FLAG_REPARSE_BACKUP) +# # MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 16384 = (16*1024) +# buffer = DeviceIoControl(hfile, FSCTL_GET_REPARSE_POINT, None, 16384) +# CloseHandle(hfile) +# +# # Minimum possible length (assuming length of the target is bigger than 0) +# if not buffer or len(buffer) < 9: +# return None +# +# # Parse and return our result. 
+# # typedef struct _REPARSE_DATA_BUFFER { +# # ULONG ReparseTag; +# # USHORT ReparseDataLength; +# # USHORT Reserved; +# # union { +# # struct { +# # USHORT SubstituteNameOffset; +# # USHORT SubstituteNameLength; +# # USHORT PrintNameOffset; +# # USHORT PrintNameLength; +# # ULONG Flags; +# # WCHAR PathBuffer[1]; +# # } SymbolicLinkReparseBuffer; +# # struct { +# # USHORT SubstituteNameOffset; +# # USHORT SubstituteNameLength; +# # USHORT PrintNameOffset; +# # USHORT PrintNameLength; +# # WCHAR PathBuffer[1]; +# # } MountPointReparseBuffer; +# # struct { +# # UCHAR DataBuffer[1]; +# # } GenericReparseBuffer; +# # } DUMMYUNIONNAME; +# # } REPARSE_DATA_BUFFER, *PREPARSE_DATA_BUFFER; +# +# # Only handle SymbolicLinkReparseBuffer +# (tag, dataLength, reserver, SubstituteNameOffset, SubstituteNameLength, +# PrintNameOffset, PrintNameLength, +# Flags) = struct.unpack(SymbolicLinkReparseFormat, +# buffer[:SymbolicLinkReparseSize]) +# print(tag, dataLength, reserver, SubstituteNameOffset, SubstituteNameLength) +# start = SubstituteNameOffset + SymbolicLinkReparseSize +# actualPath = buffer[start: start + SubstituteNameLength].decode("utf-16") +# # This utf-16 string is null terminated +# index = actualPath.find(u"\0") +# assert index > 0 +# if index > 0: +# actualPath = actualPath[:index] +# if actualPath.startswith(u"?\\"): +# return actualPath[2:] +# else: +# return actualPath + diff --git a/server/www/teleport/app/eom_common/eomcore/zip.py b/server/www/teleport/app/eom_common/eomcore/zip.py new file mode 100644 index 0000000..d03d6f6 --- /dev/null +++ b/server/www/teleport/app/eom_common/eomcore/zip.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +import zipfile +import os + + +# class ZFile(object): +# def __init__(self, filename, mode='r', basedir=''): +# self.filename = filename +# self.mode = mode +# if self.mode in ('w', 'a'): +# self.zfile = zipfile.ZipFile(filename, self.mode, compression=zipfile.ZIP_DEFLATED) +# else: +# self.zfile = zipfile.ZipFile(filename, self.mode) +# self.basedir = basedir +# if not self.basedir: +# self.basedir = os.path.dirname(filename) +# +# def addfile(self, path, arcname=None): +# path = path.replace('//', '/') +# if not arcname: +# if path.startswith(self.basedir): +# arcname = path[len(self.basedir):] +# else: +# arcname = '' +# self.zfile.write(path, arcname) +# +# def addfiles(self, paths): +# for path in paths: +# if isinstance(path, tuple): +# self.addfile(*path) +# else: +# self.addfile(path) +# +# def close(self): +# self.zfile.close() +# +# def extract_to(self, path): +# for p in self.zfile.namelist(): +# self.extract(p, path) +# +# def extract(self, filename, path): +# if not filename.endswith('/'): +# strtemp = type(filename) +# # filename = filename.encode() +# # filename = filename.decode('gbk') +# # filename. 
+#             if sys.getfilesystemencoding() == 'mbcs':
+#                 filename = filename.decode('mbcs')
+#             f = os.path.join(path, filename)
+#             dir = os.path.dirname(f)
+#             if not os.path.exists(dir):
+#                 os.makedirs(dir)
+#             # file(f, 'wb').write(self.zfile.read(filename))
+#             file_object = open(f, 'wb')
+#             file_object.write(self.zfile.read(filename))
+#             file_object.close()
+#
+#
+# def create(zfile, files):
+#     z = ZFile(zfile, 'w')
+#     z.addfiles(files)
+#     z.close()
+#
+#
+# def extract(zfile, path):
+#     z = ZFile(zfile)
+#     z.extract_to(path)
+#     z.close()
+
+
+def zip_dir(dirname, zipfilename):
+    ret = False
+    filelist = []
+    if os.path.isfile(dirname):
+        item = os.path.split(dirname)
+        dirname = item[0]
+        root = dirname
+        name = item[1]
+        filelist.append(os.path.join(root, name))
+    else:
+        for root, dirs, files in os.walk(dirname):
+            for name in files:
+                filelist.append(os.path.join(root, name))
+
+    if os.path.exists(zipfilename):
+        os.remove(zipfilename)
+
+    zf = zipfile.ZipFile(zipfilename, "w", zipfile.ZIP_DEFLATED)
+    try:
+        for tar in filelist:
+            arcname = tar[len(dirname):]
+            # print(arcname)
+            zf.write(tar, arcname)
+        ret = True
+    except Exception as e:
+        ret = False
+    finally:
+        zf.close()
+    return ret
+
+
+def unzip_file(zipfilename, unziptodir):
+    ret = False
+    if not os.path.exists(unziptodir):
+        os.makedirs(unziptodir, 0o777)
+
+    zfobj = zipfile.ZipFile(zipfilename)
+    try:
+        for name in zfobj.namelist():
+            name = name.replace('\\', '/')
+
+            if name.endswith('/'):
+                os.mkdir(os.path.join(unziptodir, name))
+            else:
+                ext_filename = os.path.join(unziptodir, name)
+                ext_dir = os.path.dirname(ext_filename)
+                if not os.path.exists(ext_dir):
+                    os.mkdir(ext_dir, 0o777)
+                outfile = open(ext_filename, 'wb')
+                outfile.write(zfobj.read(name))
+                outfile.close()
+        ret = True
+    except Exception as e:
+        ret = False
+    finally:
+        zfobj.close()
+    return ret
+
+# if __name__ == '__main__':
+#     try:
+#         # create('d:\\5.zip', 'd:\\1\\2.txt')
+#         # zip_dir('d:\\1\\', 'd:\\5.zip')
+#         # zip_dir('d:\\1\\2.txt', 'd:\\5.zip')
+#         unzip_file('d:\\5.zip', 'c:\\3\\3\\')
+#         # temp = sys.getfilesystemencoding()
+#         # extract('d:\\1.zip','c:\\')
+#     except Exception as e:
+#         temp = str(e)
+#         pass
diff --git a/server/www/teleport/app/eom_env.py b/server/www/teleport/app/eom_env.py
new file mode 100644
index 0000000..d28bb3e
--- /dev/null
+++ b/server/www/teleport/app/eom_env.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+
+# Detect the runtime environment and locate file paths relative to it.
+
+import os
+import platform
+import sys
+
+__all__ = ['PATH_APP_ROOT', 'PATH_LOG', 'PATH_CONF', 'PATH_DATA', 'DEV_MODE']
+
+PATH_LOG = ''
+PATH_CONF = ''
+PATH_DATA = ''
+DEV_MODE = False
+
+# Remove the extension libraries of the local Python installation from
+# sys.path; otherwise development and release could depend on different
+# library files and the shipped version might fail to run.
+x = []
+for p in sys.path:
+    if p.find('site-packages') != -1 or p.find('dist-packages') != -1:
+        x.append(p)
+for p in x:
+    sys.path.remove(p)
+
+PLATFORM = platform.system().lower()
+if PLATFORM not in ['windows', 'linux']:
+    sys.exit(1)
+
+BITS = 'x64'
+if '32bit' == platform.architecture()[0]:
+    BITS = 'x86'
+
+path_of_this_file = os.path.abspath(os.path.dirname(__file__))
+PATH_APP_ROOT = os.path.abspath(os.path.join(path_of_this_file, '..'))
+
+# If not packaged, this may be a development checkout or a source release;
+# look further to tell which.
+if os.path.exists(os.path.join(PATH_APP_ROOT, '..', 'packages', 'packages-common')):
+    DEV_MODE = True
+else:
+    DEV_MODE = False
+
+if DEV_MODE:
+    # development / debugging mode
+    _ext_path = os.path.abspath(os.path.join(PATH_APP_ROOT, '..', 'packages', 'packages-common'))
+    if _ext_path not in sys.path:
+        sys.path.append(_ext_path)
+
+    _ext_path = os.path.abspath(
+        os.path.join(PATH_APP_ROOT, '..', 'packages', 'packages-{}'.format(PLATFORM), BITS))
+    if _ext_path not in sys.path:
+        sys.path.append(_ext_path)
+
+    PATH_LOG = os.path.abspath(os.path.join(PATH_APP_ROOT, '..', '..', 'share', 'log'))
+    PATH_CONF = os.path.abspath(os.path.join(PATH_APP_ROOT, '..', '..', 'share', 'etc'))
+    PATH_DATA = os.path.abspath(os.path.join(PATH_APP_ROOT, '..', '..', 'share', 'data'))
+
+else:
+    # un-packaged release layout (source release):
+    # web_root
+    #  |- app
+    #  |   |- eom_common
+    #  |   \- eom_app
+    #  |- static
+    #  |- view
+    #  \- packages
+    #     |- packages-common
+    #     \- packages-windows or packages-linux
+    # --------------------------------------------------------------
+    # packaged release layout:
+    # web_root
+    #  |- app.zip
+    #  |- static
+    #  |- view
+    #  \- packages
+    #     |- packages-common
+    #     \- packages-windows or packages-linux
+
+    _ext_path = os.path.abspath(os.path.join(PATH_APP_ROOT, 'packages', 'packages-common'))
+    if _ext_path not in sys.path:
+        sys.path.append(_ext_path)
+        # print('add path: ', _ext_path)
+
+    _ext_path = os.path.abspath(os.path.join(PATH_APP_ROOT, 'packages', 'packages-{}'.format(PLATFORM), BITS))
+    if _ext_path not in sys.path:
+        sys.path.append(_ext_path)
+        # print('add path: ', _ext_path)
+
+    PATH_LOG = os.path.abspath(os.path.join(PATH_APP_ROOT, '..', '..', 'log'))
+    PATH_CONF = os.path.abspath(os.path.join(PATH_APP_ROOT, '..', '..', 'etc'))
+    PATH_DATA = os.path.abspath(os.path.join(PATH_APP_ROOT, '..', '..', 'data'))
+
+# if PLATFORM == 'linux':
+#     PATH_LOG = '/var/log/eom/teleport'
diff --git a/server/www/teleport/app/eom_main.py b/server/www/teleport/app/eom_main.py
new file mode 100644
index 0000000..a70b82d
--- /dev/null
+++ b/server/www/teleport/app/eom_main.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+from eom_env import *
+import eom_app.app as app
+from eom_common.eomcore.logger import *
+
+log.set_attribute(min_level=LOG_DEBUG, trace_error=TRACE_ERROR_FULL)
+
+
+def main():
+    options = {
+        # app_path: path of the web application code, used internally to
+        # compose controller and model paths; required.
+        'app_path': PATH_APP_ROOT,
+
+        # cfg_path: path of the site configuration files; defaults to
+        # $_root_path$/conf if not given.
+        'cfg_path': PATH_CONF,
+
+        # log_path: path of the runtime log files; defaults to
+        # $_root_path$/log if not given.
+        'log_path': PATH_LOG,
+
+        # static_path: path of the static files; defaults to
+        # $_root_path$/static if not given.
+        'static_path': os.path.join(PATH_APP_ROOT, 'static'),
+
+        # data_path: path of the site data files; defaults to
+        # $_root_path$/data if not given.
+        'data_path': PATH_DATA,
+
+        # template_path: path of the template files; defaults to
+        # $_root_path$/template if not given.
+        'template_path': os.path.join(PATH_APP_ROOT, 'view'),
+
+        # res_path: path of resource files such as fonts; defaults to
+        # $_root_path$/res.
+        'res_path': os.path.join(PATH_APP_ROOT, 'res'),
+
+        'dev_mode': DEV_MODE,
+    }
+
+    return app.run(options)
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/server/www/teleport/app/eom_upgrade.py b/server/www/teleport/app/eom_upgrade.py
new file mode 100644
index 0000000..19b2754
--- /dev/null
+++ b/server/www/teleport/app/eom_upgrade.py
@@ -0,0 +1,617 @@
+# -*- coding: utf-8 -*-
+
+import json
+import os
+import shutil
+import sys
+
+from eom_env import *
+from eom_common.eomcore.eom_sqlite import get_sqlite_pool
+from eom_common.eomcore.logger import *
+
+log.set_attribute(min_level=LOG_DEBUG, log_datetime=False, trace_error=TRACE_ERROR_FULL)
+
+db_file = os.path.join(PATH_DATA, 'ts_db.db')
+
+
+def main():
+    if not os.path.exists(db_file):
+        log.v('\n')
+        log.v('Teleport Server Database Creation\n')
+
+        # The database file does not exist yet, so create it from scratch.
+        get_sqlite_pool().init(PATH_DATA)
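+        # Note: init() returns False here (the file is missing), which is
+        # fine; create_base_db() below creates the schema.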
+        if not create_base_db():
+            return 1
+
+    else:
+        log.v('\n')
+        log.v('Teleport Server Upgrade\n')
+
+        if not get_sqlite_pool().init(PATH_DATA):
+            log.e('upgrade failed.\n')
+            return 1
+
+        if not upgrade_to_1_2_102_3():
+            log.e('failed to upgrade database to version 1.2.102.3 ...\n')
+            return 1
+        if not upgrade_to_1_5_217_9():
+            log.e('failed to upgrade database to version 1.5.217.9 ...\n')
+            return 1
+
+        if not upgrade_to_1_6_224_3():
+            log.e('failed to upgrade database to version 1.6.224.3 ...\n')
+            return 1
+
+    return 0
+
+
+def create_base_db():
+    try:
+        # f = open(db_file, 'w')
+        # f.close()
+        sql_file = os.path.join(PATH_DATA, 'main.sql')
+        if not os.path.exists(sql_file):
+            log.e("sql file not exists.\n")
+            return False
+
+        f = open(sql_file, 'r', encoding='utf-8')
+        sql = f.read()
+        f.close()
+        sql_con = get_sqlite_pool().get_tssqlcon()
+        sql_con.ExecManyProcNonQuery(sql)
+
+    except Exception:
+        return False
+
+    return True
+
+
+def upgrade_to_1_2_102_3():
+    # In server version 1.2.102.3 the admin console and the ordinary-user
+    # console were merged, and the database changed slightly.
+    try:
+        sql_con = get_sqlite_pool().get_tssqlcon()
+
+        # If a table named ts_sys_user exists, this is an old version that
+        # needs upgrading.
+        str_sql = 'SELECT COUNT(*) FROM sqlite_master where type="table" and name="ts_sys_user";'
+        db_ret = sql_con.ExecProcQuery(str_sql)
+        if db_ret[0][0] == 0:
+            return True
+
+        log.v('upgrade database to version 1.2.102.3 ...\n')
+        bak_file = '{}.before-1.2.102.3'.format(db_file)
+        if not os.path.exists(bak_file):
+            shutil.copy(db_file, bak_file)
+
+        # Change the account_type of existing ordinary users from 0 to 1.
+        str_sql = 'UPDATE ts_account SET account_type=1 WHERE account_type=0;'
+        sql_con.ExecProcNonQuery(str_sql)
+
+        # Merge the old administrators into the account table.
+        str_sql = 'SELECT * FROM ts_sys_user;'
+        db_ret = sql_con.ExecProcQuery(str_sql)
+        if db_ret is None:
+            return True
+
+        for i in range(len(db_ret)):
+            user_name = db_ret[i][1]
+            user_pwd = db_ret[i][2]
+            str_sql = 'INSERT INTO ts_account (account_type, account_name, account_pwd, account_status, ' \
+                      'account_lock, account_desc) VALUES (100,"{}","{}",0,0,"{}");'.format(user_name, user_pwd, '超级管理员')
+            ret = sql_con.ExecProcNonQuery(str_sql)
+            if not ret:
+                log.e('can not create super admin account.\n')
+                return False
+
+        # Retire the old table (rename it for now instead of really dropping it).
+        str_sql = 'ALTER TABLE ts_sys_user RENAME TO _bak_ts_sys_user;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not rename table `ts_sys_user`.\n')
+            return False
+
+    except:
+        return False
+
+    return True
+
+
+def upgrade_to_1_5_217_9():
+    # In server version 1.5.217.9 the schema changed substantially to support
+    # multiple users and multiple protocols per host.
+    try:
+        sql_con = get_sqlite_pool().get_tssqlcon()
+
+        # If there is no table named ts_host_info, this is an old version that
+        # needs upgrading.
+        str_sql = 'SELECT COUNT(*) FROM sqlite_master where type="table" and name="ts_host_info";'
+        db_ret = sql_con.ExecProcQuery(str_sql)
+        if db_ret[0][0] == 1:
+            return True
+
+        log.v('upgrade database to version 1.5.217.9 ...\n')
+        bak_file = '{}.before-1.5.217.9'.format(db_file)
+        if not os.path.exists(bak_file):
+            shutil.copy(db_file, bak_file)
+
+        # Add a column to the existing ts_auth table.
+        str_sql = 'ALTER TABLE ts_auth ADD host_auth_id INTEGER;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not modify table `ts_auth`.\n')
+            return False
+
+        # Populate the new column.
+        str_sql = 'UPDATE ts_auth SET host_auth_id=host_id;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        # print(ret)
+        if not ret:
+            log.e('can not update table `ts_auth`.\n')
+            return False
+
+        # Create two new tables that split the old ts_host table.
+        str_sql = '''CREATE TABLE "ts_host_info" (
+"host_id" integer PRIMARY KEY
AUTOINCREMENT, +"group_id" int(11) DEFAULT 0, +"host_sys_type" int(11) DEFAULT 1, +"host_ip" varchar(32) DEFAULT '', +"pro_port" varchar(256) NULL, +"host_lock" int(11) DEFAULT 0, +"host_desc" varchar(128) DEFAULT '' +);''' + + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not create table `ts_host_info`.\n') + return False + + str_sql = '''CREATE TABLE "ts_auth_info" ( +"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, +"host_id" INTEGER, +"pro_type" INTEGER, +"auth_mode" INTEGER, +"user_name" varchar(256), +"user_pswd" varchar(256), +"cert_id" INTEGER, +"encrypt" INTEGER, +"log_time" varchar(60) +);''' + + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not create table `ts_auth_info`.\n') + return False + + # 灏嗗師鏉ョ殑 ts_host 琛ㄦ敼鍚 + str_sql = 'ALTER TABLE ts_host RENAME TO _bak_ts_host;' + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not rename table `ts_host`.\n') + return False + + # 浠庡師鏉 ts_host 琛ㄤ腑鏌ヨ鍑烘墍鏈夋暟鎹 + str_sql = 'SELECT * FROM _bak_ts_host;' + db_ret = sql_con.ExecProcQuery(str_sql) + if db_ret is not None: + for i in range(len(db_ret)): + host_id = db_ret[i][0] + group_id = db_ret[i][1] + host_sys_type = db_ret[i][2] + host_ip = db_ret[i][3] + host_pro_port = db_ret[i][4] + host_user_name = db_ret[i][5] + host_user_pwd = db_ret[i][6] + host_pro_type = db_ret[i][7] + cert_id = db_ret[i][8] + host_lock = db_ret[i][9] + host_encrypt = db_ret[i][10] + host_auth_mode = db_ret[i][11] + host_desc = db_ret[i][12] + + _pro_port = {} + _pro_port['ssh'] = {} + _pro_port['ssh']['enable'] = 0 + _pro_port['ssh']['port'] = 22 + _pro_port['rdp'] = {} + _pro_port['rdp']['enable'] = 0 + _pro_port['rdp']['port'] = 3389 + + if (host_pro_type == 1): + _pro_port['rdp']['enable'] = 1 + _pro_port['rdp']['port'] = host_pro_port + elif (host_pro_type == 2): + _pro_port['ssh']['enable'] = 1 + _pro_port['ssh']['port'] = host_pro_port + pro_port = json.dumps(_pro_port) + + str_sql = 'INSERT INTO ts_host_info (host_id, group_id, host_sys_type, host_ip, pro_port, host_lock, host_desc) ' \ + 'VALUES ({}, {}, {}, \'{}\', \'{}\', {}, \'{}\');'.format(host_id, group_id, host_sys_type, host_ip, pro_port, host_lock, host_desc) + # print(str_sql) + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not insert item into `ts_host_info`.\n') + return False + + str_sql = 'INSERT INTO ts_auth_info (host_id, pro_type, auth_mode, user_name, user_pswd, cert_id, encrypt, log_time) ' \ + 'VALUES ({}, {}, {}, \'{}\', \'{}\', {}, {}, \'{}\');'.format(host_id, host_pro_type, host_auth_mode, host_user_name, host_user_pwd, cert_id, host_encrypt, '1') + # print(str_sql) + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not insert item into `ts_auth_info`.\n') + return False + + str_sql = 'ALTER TABLE ts_log add protocol INTEGER;' + # print(str_sql) + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not upgrade database table `ts_log`.\n') + return False + + str_sql = 'UPDATE ts_log SET protocol=1 WHERE sys_type=1;' + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not fix database table `ts_log`.\n') + return False + + str_sql = 'UPDATE ts_log SET protocol=2 WHERE sys_type=2;' + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not fix database table `ts_log`.\n') + return False + + str_sql = 'UPDATE ts_log SET ret_code=9999 WHERE ret_code=0;' + ret = sql_con.ExecProcNonQuery(str_sql) + if not ret: + log.e('can not fix database table `ts_log`.\n') + return False + + + except: + 
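+        # On any failure, the pre-upgrade backup made above
+        # (ts_db.db.before-1.5.217.9) is left on disk for manual recovery.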
return False + + return True + + +# def upgrade_to_1_6_224_3(): +# # 鏈嶅姟绔崌绾у埌鐗堟湰1.6.224.3鏃讹紝鍔犲叆telnet鏀寔锛屾暟鎹簱鏈夎皟鏁 +# try: +# sql_con = get_sqlite_pool().get_tssqlcon() +# +# # # 濡傛灉ts_config琛ㄤ腑娌℃湁ts_server_telnet_port椤癸紝鍒欏鍔犻粯璁ゅ52389 +# # str_sql = 'SELECT * FROM ts_config WHERE name="ts_server_telnet_port";' +# # db_ret = sql_con.ExecProcQuery(str_sql) +# # if len(db_ret) == 0: +# # log.v('upgrade database to version 1.6.224.3 ...\n') +# # +# # str_sql = 'INSERT INTO ts_config (name, value) VALUES (\'ts_server_telnet_port\', \'52389\');' +# # db_ret = sql_con.ExecProcNonQuery(str_sql) +# # if not db_ret: +# # log.e('can not add telnet default port into `ts_config`.\n') +# # return False +# # +# +# # 濡傛灉ts_host_info琛ㄤ腑杩樻湁pro_port瀛楁锛岃鏄庢槸鏃х増鏈紝闇瑕佸鐞 +# str_sql = 'SELECT pro_port FROM ts_host_info LIMIT 0;' +# db_ret = sql_con.ExecProcQuery(str_sql) +# if db_ret is not None: +# # 鍙戠幇鏃х増鏈 +# +# log.v('upgrade database to version 1.6.224.3 ...\n') +# bak_file = '{}.before-1.6.224.3'.format(db_file) +# if not os.path.exists(bak_file): +# shutil.copy(db_file, bak_file) +# +# # 鍒犻櫎鎵鏈夌殑琛紝閲嶅缓鏂扮殑 +# # os.remove(db_file) +# str_sql = ''' +# ALTER TABLE ts_account RENAME TO __bak_ts_account; +# ALTER TABLE ts_auth RENAME TO __bak_ts_auth; +# ALTER TABLE ts_cert RENAME TO __bak_ts_cert; +# ALTER TABLE ts_config RENAME TO __bak_ts_config; +# ALTER TABLE ts_group RENAME TO __bak_ts_group; +# ALTER TABLE ts_host_info RENAME TO __bak_ts_host_info; +# ALTER TABLE ts_auth_info RENAME TO __bak_ts_auth_info; +# ALTER TABLE ts_log RENAME TO __bak_ts_log; +# ''' +# sql_con.ExecManyProcNonQuery(str_sql) +# +# return create_base_db() +# +# +# except: +# log.e('failed.\n') +# return False +# +# return True + +def upgrade_to_1_6_224_3(): + # 鏈嶅姟绔崌绾у埌鐗堟湰1.6.224.3鏃讹紝鍔犲叆telnet鏀寔锛屾暟鎹簱鏈夎皟鏁 + try: + sql_con = get_sqlite_pool().get_tssqlcon() + + # 濡傛灉ts_config琛ㄤ腑娌℃湁ts_server_telnet_port椤癸紝鍒欏鍔犻粯璁ゅ52389 + str_sql = 'SELECT * FROM ts_config WHERE name="ts_server_telnet_port";' + db_ret = sql_con.ExecProcQuery(str_sql) + if len(db_ret) == 0: + # log.v('upgrade database to version 1.6.224.3 ...\n') + + str_sql = 'INSERT INTO ts_config (name, value) VALUES (\'ts_server_telnet_port\', \'52389\');' + db_ret = sql_con.ExecProcNonQuery(str_sql) + if not db_ret: + log.e('can not add telnet default port into `ts_config`.\n') + return False + + # 濡傛灉ts_host_info琛ㄤ腑杩樻湁pro_port瀛楁锛岃鏄庢槸鏃х増鏈紝闇瑕佸鐞 + str_sql = 'SELECT pro_port FROM ts_host_info LIMIT 0;' + db_ret = sql_con.ExecProcQuery(str_sql) + if db_ret is None: + return True + + # 鍙戠幇鏃х増鏈 + log.v('upgrade database to version 1.6.224.3 ...\n') + bak_file = '{}.before-1.6.224.3'.format(db_file) + if not os.path.exists(bak_file): + shutil.copy(db_file, bak_file) + + str_sql = 'SELECT id, host_id, pro_type, auth_mode, user_name, user_pswd, cert_id, encrypt, log_time FROM ts_auth_info;' + auth_info_ret = sql_con.ExecProcQuery(str_sql) + # if auth_info_ret is not None: + # for i in range(len(auth_info_ret)): + # #host_id = db_ret[i][0] + # print(auth_info_ret[i]) + + str_sql = 'SELECT auth_id, account_name, host_id, host_auth_id FROM ts_auth;' + auth_ret = sql_con.ExecProcQuery(str_sql) + # if auth_ret is not None: + # for i in range(len(auth_ret)): + # #host_id = db_ret[i][0] + # print(auth_ret[i]) + + max_host_id = 0 + new_host_info = [] + new_auth_info = [] + new_auth = [] + + # 浠庡師鏉ョ殑琛ㄤ腑鏌ヨ鏁版嵁 + str_sql = 'SELECT host_id, group_id, host_sys_type, host_ip, pro_port, host_lock, host_desc FROM ts_host_info;' + host_info_ret = sql_con.ExecProcQuery(str_sql) + if host_info_ret is 
not None: + # 鍏堟壘鍑烘渶澶х殑host_id锛岃繖鏍峰鏋滆鎷嗗垎涓涓猦ost锛屽氨鐭ラ亾鏂扮殑host_id搴旇鏄灏戜簡 + for i in range(len(host_info_ret)): + # print(host_info_ret[i]) + #j = json.loads(host_info_ret[i][4]) + if host_info_ret[i][0] > max_host_id: + max_host_id = host_info_ret[i][0] + max_host_id += 1 + + # 鐒跺悗鏋勫缓鏂扮殑host鍒楄〃 + for i in range(len(host_info_ret)): + host_info = {} + host_info_alt = None + + protocol = json.loads(host_info_ret[i][4]) + host_info['host_id'] = host_info_ret[i][0] + host_info['group_id'] = host_info_ret[i][1] + host_info['host_sys_type'] = host_info_ret[i][2] + host_info['host_ip'] = host_info_ret[i][3] + host_info['host_lock'] = host_info_ret[i][5] + host_info['host_desc'] = host_info_ret[i][6] + host_info['_old_host_id'] = host_info_ret[i][0] + host_info['host_port'] = 0 + host_info['protocol'] = 0 + + have_rdp = False + have_ssh = False + if auth_info_ret is not None: + for j in range(len(auth_info_ret)): + if auth_info_ret[j][1] == host_info['host_id']: + if auth_info_ret[j][2] == 1: # 鐢ㄥ埌浜嗘涓绘満鐨凴DP + have_rdp = True + elif auth_info_ret[j][2] == 2: # 鐢ㄥ埌浜嗘涓绘満鐨凷SH + have_ssh = True + + if have_rdp and have_ssh: + # 闇瑕佹媶鍒 + host_info['protocol'] = 1 + host_info['host_port'] = protocol['rdp']['port'] + + host_info_alt = {} + host_info_alt['host_id'] = max_host_id + max_host_id += 1 + host_info_alt['group_id'] = host_info_ret[i][1] + host_info_alt['host_sys_type'] = host_info_ret[i][2] + host_info_alt['host_ip'] = host_info_ret[i][3] + host_info_alt['host_lock'] = host_info_ret[i][5] + host_info_alt['host_desc'] = host_info_ret[i][6] + host_info_alt['_old_host_id'] = host_info_ret[i][0] + host_info_alt['host_port'] = protocol['ssh']['port'] + host_info_alt['protocol'] = 2 + elif have_rdp: + host_info['protocol'] = 1 + host_info['host_port'] = protocol['rdp']['port'] + elif have_ssh: + host_info['host_port'] = protocol['ssh']['port'] + host_info['protocol'] = 2 + + new_host_info.append(host_info) + if host_info_alt is not None: + new_host_info.append(host_info_alt) + + # print('=====================================') + # for i in range(len(new_host_info)): + # print(new_host_info[i]) + + # 鐜板湪鏈変簡鏂扮殑ts_host_info琛紝閲嶆瀯ts_auth_info琛 + # 'SELECT id, host_id, pro_type, auth_mode, user_name, user_pswd, cert_id, encrypt, log_time FROM ts_auth_info;' + if auth_info_ret is not None: + for i in range(len(auth_info_ret)): + auth_info = {} + auth_info['id'] = auth_info_ret[i][0] + auth_info['auth_mode'] = auth_info_ret[i][3] + auth_info['user_name'] = auth_info_ret[i][4] + auth_info['user_pswd'] = auth_info_ret[i][5] + auth_info['cert_id'] = auth_info_ret[i][6] + auth_info['encrypt'] = auth_info_ret[i][7] + auth_info['log_time'] = auth_info_ret[i][8] + auth_info['user_param'] = 'ogin:\nassword:' + found = False + for j in range(len(new_host_info)): + if auth_info_ret[i][1] == new_host_info[j]['_old_host_id'] and auth_info_ret[i][2] == new_host_info[j]['protocol']: + found = True + auth_info['host_id'] = new_host_info[j]['host_id'] + auth_info['_old_host_id'] = new_host_info[j]['_old_host_id'] + break + if found: + new_auth_info.append(auth_info) + + # for i in range(len(new_auth_info)): + # print(new_auth_info[i]) + + # 鏈鍚庨噸鏋則s_auth琛 + if auth_ret is not None: + for i in range(len(auth_ret)): + auth = {} + auth['auth_id'] = auth_ret[i][0] + auth['account_name'] = auth_ret[i][1] + found = False + for j in range(len(new_auth_info)): + if auth_ret[i][2] == new_auth_info[j]['_old_host_id'] and auth_ret[i][3] == new_auth_info[j]['id']: + found = True + auth['host_id'] = new_auth_info[j]['host_id'] + 
+
+        # Now that the new host list exists, rebuild the ts_auth_info table.
+        # 'SELECT id, host_id, pro_type, auth_mode, user_name, user_pswd, cert_id, encrypt, log_time FROM ts_auth_info;'
+        if auth_info_ret is not None:
+            for i in range(len(auth_info_ret)):
+                auth_info = {}
+                auth_info['id'] = auth_info_ret[i][0]
+                auth_info['auth_mode'] = auth_info_ret[i][3]
+                auth_info['user_name'] = auth_info_ret[i][4]
+                auth_info['user_pswd'] = auth_info_ret[i][5]
+                auth_info['cert_id'] = auth_info_ret[i][6]
+                auth_info['encrypt'] = auth_info_ret[i][7]
+                auth_info['log_time'] = auth_info_ret[i][8]
+                auth_info['user_param'] = 'ogin:\nassword:'  # presumably the telnet prompt hints, matching 'login:'/'Login:' and 'password:'/'Password:'
+                found = False
+                for j in range(len(new_host_info)):
+                    if auth_info_ret[i][1] == new_host_info[j]['_old_host_id'] and auth_info_ret[i][2] == new_host_info[j]['protocol']:
+                        found = True
+                        auth_info['host_id'] = new_host_info[j]['host_id']
+                        auth_info['_old_host_id'] = new_host_info[j]['_old_host_id']
+                        break
+                if found:
+                    new_auth_info.append(auth_info)
+
+        # for i in range(len(new_auth_info)):
+        #     print(new_auth_info[i])
+
+        # Finally rebuild the ts_auth table.
+        if auth_ret is not None:
+            for i in range(len(auth_ret)):
+                auth = {}
+                auth['auth_id'] = auth_ret[i][0]
+                auth['account_name'] = auth_ret[i][1]
+                found = False
+                for j in range(len(new_auth_info)):
+                    if auth_ret[i][2] == new_auth_info[j]['_old_host_id'] and auth_ret[i][3] == new_auth_info[j]['id']:
+                        found = True
+                        auth['host_id'] = new_auth_info[j]['host_id']
+                        auth['host_auth_id'] = new_auth_info[j]['id']
+                        break
+                if found:
+                    new_auth.append(auth)
+
+        # for i in range(len(new_auth)):
+        #     print(new_auth[i])
+
+        # Write the cleaned-up data into new temporary tables.
+        # First create the three temporary tables.
+        str_sql = '''CREATE TABLE "ts_auth_tmp" (
+            "auth_id" INTEGER PRIMARY KEY AUTOINCREMENT,
+            "account_name" varchar(256),
+            "host_id" INTEGER,
+            "host_auth_id" int(11) NOT NULL
+        );'''
+
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not create table `ts_auth_tmp`.\n')
+            return False
+
+        str_sql = '''CREATE TABLE "ts_host_info_tmp" (
+            "host_id" integer PRIMARY KEY AUTOINCREMENT,
+            "group_id" int(11) DEFAULT 0,
+            "host_sys_type" int(11) DEFAULT 1,
+            "host_ip" varchar(32) DEFAULT '',
+            "host_port" int(11) DEFAULT 0,
+            "protocol" int(11) DEFAULT 0,
+            "host_lock" int(11) DEFAULT 0,
+            "host_desc" DEFAULT ''
+        );'''
+
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not create table `ts_host_info_tmp`.\n')
+            return False
+
+        str_sql = '''CREATE TABLE "ts_auth_info_tmp" (
+            "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+            "host_id" INTEGER,
+            "auth_mode" INTEGER,
+            "user_name" varchar(256),
+            "user_pswd" varchar(256),
+            "user_param" varchar(256),
+            "cert_id" INTEGER,
+            "encrypt" INTEGER,
+            "log_time" varchar(60)
+        );'''
+
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not create table `ts_auth_info_tmp`.\n')
+            return False
+
+        for i in range(len(new_host_info)):
+            str_sql = 'INSERT INTO ts_host_info_tmp (host_id, group_id, host_sys_type, host_ip, host_port, protocol, host_lock, host_desc) ' \
+                      'VALUES ({}, {}, {}, \'{}\', {}, {}, {}, \'{}\');'.format(
+                new_host_info[i]['host_id'], new_host_info[i]['group_id'], new_host_info[i]['host_sys_type'],
+                new_host_info[i]['host_ip'], new_host_info[i]['host_port'], new_host_info[i]['protocol'],
+                new_host_info[i]['host_lock'], new_host_info[i]['host_desc']
+            )
+            ret = sql_con.ExecProcNonQuery(str_sql)
+            if not ret:
+                log.e('can not insert item into `ts_host_info`.\n')
+                return False
+
+        for i in range(len(new_auth_info)):
+            str_sql = 'INSERT INTO ts_auth_info_tmp (id, host_id, auth_mode, user_name, user_pswd, user_param, cert_id, encrypt, log_time) ' \
+                      'VALUES ({}, {}, {}, \'{}\', \'{}\', \'{}\', {}, {}, \'{}\');'.format(
+                new_auth_info[i]['id'], new_auth_info[i]['host_id'], new_auth_info[i]['auth_mode'],
+                new_auth_info[i]['user_name'], new_auth_info[i]['user_pswd'], new_auth_info[i]['user_param'],
+                new_auth_info[i]['cert_id'], new_auth_info[i]['encrypt'], '1'  # note: log_time is written as '1'; the original value is not carried over
+            )
+            # print(str_sql)
+            ret = sql_con.ExecProcNonQuery(str_sql)
+            if not ret:
+                log.e('can not insert item into `ts_auth_info`.\n')
+                return False
+
+        for i in range(len(new_auth)):
+            str_sql = 'INSERT INTO ts_auth_tmp (auth_id, account_name, host_id, host_auth_id) ' \
+                      'VALUES ({}, \'{}\', {}, {});'.format(
+                new_auth[i]['auth_id'], new_auth[i]['account_name'], new_auth[i]['host_id'], new_auth[i]['host_auth_id']
+            )
+            # print(str_sql)
+            ret = sql_con.ExecProcNonQuery(str_sql)
+            if not ret:
+                log.e('can not insert item into `ts_auth`.\n')
+                return False
+
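+        # The three *_tmp tables now hold the migrated data; the swap below
+        # keeps the originals as __bak_* (alongside the db-file copy made
+        # earlier) so a failed upgrade can still be rolled back by hand. The
+        # renames could also be batched through ExecManyProcNonQuery, as the
+        # commented-out draft above does (sketch only):
+        #
+        #     sql_con.ExecManyProcNonQuery('''
+        #         ALTER TABLE ts_auth RENAME TO __bak_ts_auth;
+        #         ALTER TABLE ts_auth_info RENAME TO __bak_ts_auth_info;
+        #         ALTER TABLE ts_host_info RENAME TO __bak_ts_host_info;
+        #         ALTER TABLE ts_auth_tmp RENAME TO ts_auth;
+        #         ALTER TABLE ts_auth_info_tmp RENAME TO ts_auth_info;
+        #         ALTER TABLE ts_host_info_tmp RENAME TO ts_host_info;
+        #     ''')
+        #
+        # The step-by-step form used here reports exactly which rename failed.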
+
+        # Rename the tables.
+        str_sql = 'ALTER TABLE ts_auth RENAME TO __bak_ts_auth;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not rename table `ts_auth` to `__bak_ts_auth`.\n')
+            return False
+
+        str_sql = 'ALTER TABLE ts_auth_info RENAME TO __bak_ts_auth_info;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not rename table `ts_auth_info` to `__bak_ts_auth_info`.\n')
+            return False
+
+        str_sql = 'ALTER TABLE ts_host_info RENAME TO __bak_ts_host_info;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not rename table `ts_host_info` to `__bak_ts_host_info`.\n')
+            return False
+
+        str_sql = 'ALTER TABLE ts_auth_tmp RENAME TO ts_auth;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not rename table `ts_auth_tmp` to `ts_auth`.\n')
+            return False
+
+        str_sql = 'ALTER TABLE ts_auth_info_tmp RENAME TO ts_auth_info;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not rename table `ts_auth_info_tmp` to `ts_auth_info`.\n')
+            return False
+
+        str_sql = 'ALTER TABLE ts_host_info_tmp RENAME TO ts_host_info;'
+        ret = sql_con.ExecProcNonQuery(str_sql)
+        if not ret:
+            log.e('can not rename table `ts_host_info_tmp` to `ts_host_info`.\n')
+            return False
+
+
+    except:
+        log.e('failed.\n')
+        return False
+
+    return True
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/server/www/teleport/app/eom_ver.py b/server/www/teleport/app/eom_ver.py
new file mode 100644
index 0000000..5d6f265
--- /dev/null
+++ b/server/www/teleport/app/eom_ver.py
@@ -0,0 +1,4 @@
+# -*- coding: utf8 -*-
+TS_VER = "1.6.225.1"
+TP_ASSIST_LAST_VER = "1.5.46.5"
+TP_ASSIST_REQUIRE = "1.5.46.5"
diff --git a/server/www/teleport/res/fonts/001.ttf b/server/www/teleport/res/fonts/001.ttf
new file mode 100644
index 0000000..c034793
Binary files /dev/null and b/server/www/teleport/res/fonts/001.ttf differ
diff --git a/server/www/teleport/static/css/auth.css b/server/www/teleport/static/css/auth.css
new file mode 100644
index 0000000..ab14080
--- /dev/null
+++ b/server/www/teleport/static/css/auth.css
@@ -0,0 +1 @@
+@charset "utf-8";body{padding-top:70px;padding-bottom:24px;background-color:#ececed}#head nav.navbar{height:70px;line-height:70px;background-color:#333;color:#fff}#head .logo .desc{display:block;float:right;color:#ccc;margin-top:10px;font-size:18px}#foot nav.navbar{min-height:24px;height:24px;line-height:24px;background-color:#ddd;color:#fff;font-size:12px;border-top:1px solid #ccc}#foot nav.navbar .container{height:24px}#foot nav.navbar p{margin:0 auto;text-align:center;color:#333}#content{margin:10px 0 50px 0}.auth-box{margin-top:30px;min-height:120px;border:1px solid #ccc;border-radius:8px;background-color:rgba(255,255,255,0.6)}.auth-box .header{min-height:50px;height:50px;border:none;box-shadow:none;border-bottom:1px solid #ccc}.auth-box .header .title{display:inline-block;float:left;margin-left:60px;height:24px;margin-top:25px;line-height:16px;font-size:20px;color:#999}.auth-box .header .selected{border-bottom:1px solid #69c;color:#555}.auth-box .header .title:hover{border-bottom:1px solid #999}.auth-box .inputarea{margin:30px}.auth-box .inputarea .input-group-addon{padding:0 5px 0 5px}.auth-box .inputarea p.input-addon-desc{text-align:right;padding:0 5px 0 5px;color:#999}#leftside{width:560px;height:560px;padding-top:60px;background:url(../img/login/side-001.jpg) 0 0 no-repeat}#leftside h1{font-size:24px;color:#888}#leftside p{font-size:18px;color:#888;padding-left:24px}.auth-box .inputbox{margin-bottom:10px}.auth-box-lg .inputbox{margin-bottom:20px}.auth-box .op_box{display:block;padding:5px;border-radius:3px;text-align:center;margin:5px 20px 10px 20px}.auth-box .op_error{background:#fbb}.auth-box .op_wait{background:#ccc}.auth-box .quick-area{padding:80px 0 80px 0}.auth-box .quick-area .quick-disc{text-align:center;margin-bottom:20px}.auth-box .quick-area .quick-no{padding-top:80px;padding-bottom:100px}.auth-box .quick-area .quick-yes{text-align:center}.auth-box .quick-area
.quick-yes .quick-account{display:inline-block;margin:auto;margin-bottom:20px}.auth-box .quick-area .quick-yes .quick-account:hover .quick-image{box-shadow:0 0 8px #00c2f6}.auth-box .quick-area .quick-yes .quick-image{display:block;width:82px;height:82px;line-height:80px;font-size:64px;margin:auto;border:1px solid #a4cdf6;box-shadow:0 0 6px #a7d1fb}.auth-box .quick-area .quick-yes .quick-name{display:block;margin-top:5px} \ No newline at end of file diff --git a/server/www/teleport/static/css/doc.css b/server/www/teleport/static/css/doc.css new file mode 100644 index 0000000..69d39d3 --- /dev/null +++ b/server/www/teleport/static/css/doc.css @@ -0,0 +1 @@ +@charset "utf-8";body{font-family:"Open Sans","Helvetica Neue","Microsoft YaHei","寰蒋闆呴粦",Helvetica,Arial,sans-serif;font-size:13px;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-group-sm>.btn,.btn-sm{padding:2px 5px}.btn.btn-sm{padding:3px 8px}.btn.btn-icon{padding:3px 6px}.btn.btn-icon.btn-sm{padding:0;font-size:14px;height:24px;width:24px;line-height:24px;border-radius:0}.form-group-sm .input-group .input-group-btn>.btn{height:30px;padding:0 8px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{display:inline-block;min-width:8px;padding:5px 10px;border-radius:10px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.badge.badge-plain{text-shadow:none}.badge.badge-sm{font-size:11px;padding:3px 6px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-6px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.label{display:inline-block;min-width:8px;padding:5px 10px;border-radius:5px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.label.label-plain{text-shadow:none}.label.label-sm{font-size:11px;padding:3px 8px;margin-top:0;border-radius:5px;text-shadow:none}.label.label-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.alert-sm{padding:5px;margin-bottom:10px}.modal-dialog-sm .modal-header{padding:10px}.modal-dialog-sm .modal-body{padding:10px}.modal-dialog-sm .modal-footer{padding:10px}.modal-dialog-sm .form-horizontal .form-group{margin-right:-5px;margin-left:-5px}.modal-dialog-sm 
.col-sm-1,.modal-dialog-sm .col-sm-2,.modal-dialog-sm .col-sm-3,.modal-dialog-sm .col-sm-4,.modal-dialog-sm .col-sm-5,.modal-dialog-sm .col-sm-6,.modal-dialog-sm .col-sm-7,.modal-dialog-sm .col-sm-8,.modal-dialog-sm .col-sm-9,.modal-dialog-sm .col-sm-10,.modal-dialog-sm .col-sm-11{padding-right:5px;padding-left:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px;min-width:390px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li 
.form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.form-group-sm .form-control-static{padding:6px 0}.page-header-fixed{padding-top:48px}.header{border:none;min-height:48px;height:48px;top:0;width:100%;position:fixed;z-index:999}.header .top-navbar{min-height:48px;height:48px;line-height:48px;background-color:#3a3a3a;color:#ccc}.header .top-navbar a{color:#d5d5d5}.header .top-navbar a:hover{color:#5a8fee}.header .top-navbar .brand{float:left;display:inline-block;padding:12px 0;margin:0}.header .top-navbar .brand .site-logo{display:block;width:86px;height:24px;background:url(../img/site-logo-small.png) no-repeat}.header .top-navbar .title-container{float:left;display:inline-block;margin:0;padding:0}.header .top-navbar .title-container .title{font-size:18px}.header .top-navbar .status-container{float:right}.footer{width:100%;height:36px;line-height:36px;background-color:#d5d5d5;border-top:1px solid #a2a2a2;border-bottom:1px solid #efefef;z-index:998;text-align:center}.footer.footer-fixed-bottom{bottom:0;position:fixed}.row-sm .col-sm-1,.row-sm .col-sm-2,.row-sm .col-sm-3,.row-sm .col-sm-4,.row-sm .col-sm-5,.row-sm .col-sm-6,.row-sm .col-sm-7,.row-sm .col-sm-8,.row-sm .col-sm-9,.row-sm .col-sm-10,.row-sm .col-sm-11{padding-right:5px;padding-left:5px}.sidebar{background-color:#fff;width:285px;position:fixed}.search-box{padding:10px;border-bottom:1px solid #eee;margin-bottom:10px}.tree-view{overflow-x:auto;overflow-y:auto;padding:0 10px}.content{margin-top:15px;margin-bottom:56px;min-height:360px;background-color:#fff;border-radius:5px;padding:10px;margin-left:300px}.jstree-ocl{cursor:default}.jstree-default .fa-folder:before{color:#f59c1a}.jstree-default .jstree-wholerow{cursor:default}.jstree-default>ul>li{padding:4px 0}.jstree-default .jstree-node{margin-left:12px}.jstree-default .jstree-open>.jstree-anchor>.fa-folder:before{content:'\f07c';color:#a26307}.jstree-default>.jstree-no-dots .jstree-closed>.jstree-ocl,.jstree-default>.jstree-no-dots .jstree-open>.jstree-ocl{background:none}.jstree-default>.jstree-no-dots .jstree-closed>.jstree-ocl:before,.jstree-default>.jstree-no-dots .jstree-open>.jstree-ocl:before{font-style:normal;font-family:"FontAwesome"}.jstree-default>.jstree-no-dots .jstree-closed>.jstree-ocl:before{content:'\f054';color:#ccc}.jstree-default>.jstree-no-dots .jstree-open>.jstree-ocl:before{content:'\f078';color:#666}.jstree-default>.jstree-no-dots .jstree-loading>.jstree-ocl{background:url("img/loading_01.gif") center center no-repeat}.jstree-default>.jstree-no-dots 
.jstree-loading>.jstree-ocl:before{content:''}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important} \ No newline at end of file diff --git a/server/www/teleport/static/css/img/desktop/icons-tree-24x24.png b/server/www/teleport/static/css/img/desktop/icons-tree-24x24.png new file mode 100644 index 0000000..25cea67 Binary files /dev/null and b/server/www/teleport/static/css/img/desktop/icons-tree-24x24.png differ diff --git a/server/www/teleport/static/css/img/desktop/mimetype-16.png b/server/www/teleport/static/css/img/desktop/mimetype-16.png new file mode 100644 index 0000000..74e4469 Binary files /dev/null and b/server/www/teleport/static/css/img/desktop/mimetype-16.png differ diff --git a/server/www/teleport/static/css/img/loading_01.gif b/server/www/teleport/static/css/img/loading_01.gif new file mode 100644 index 0000000..e8c2892 Binary files /dev/null and b/server/www/teleport/static/css/img/loading_01.gif differ diff --git a/server/www/teleport/static/css/main.css b/server/www/teleport/static/css/main.css new file mode 100644 index 0000000..0aed86f --- /dev/null +++ b/server/www/teleport/static/css/main.css @@ -0,0 +1 @@ +@charset "utf-8";body{font-family:"Open Sans","Helvetica Neue","Microsoft YaHei","寰蒋闆呴粦",Helvetica,Arial,sans-serif;font-size:13px;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier 
New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-group-sm>.btn,.btn-sm{padding:2px 5px}.btn.btn-sm{padding:3px 8px}.btn.btn-icon{padding:3px 6px}.btn.btn-icon.btn-sm{padding:0;font-size:14px;height:24px;width:24px;line-height:24px;border-radius:0}.form-group-sm .input-group .input-group-btn>.btn{height:30px;padding:0 8px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{display:inline-block;min-width:8px;padding:5px 10px;border-radius:10px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.badge.badge-plain{text-shadow:none}.badge.badge-sm{font-size:11px;padding:3px 6px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-6px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.label{display:inline-block;min-width:8px;padding:5px 10px;border-radius:5px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.label.label-plain{text-shadow:none}.label.label-sm{font-size:11px;padding:3px 8px;margin-top:0;border-radius:5px;text-shadow:none}.label.label-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.alert-sm{padding:5px;margin-bottom:10px}.modal-dialog-sm .modal-header{padding:10px}.modal-dialog-sm .modal-body{padding:10px}.modal-dialog-sm .modal-footer{padding:10px}.modal-dialog-sm .form-horizontal .form-group{margin-right:-5px;margin-left:-5px}.modal-dialog-sm .col-sm-1,.modal-dialog-sm .col-sm-2,.modal-dialog-sm .col-sm-3,.modal-dialog-sm .col-sm-4,.modal-dialog-sm .col-sm-5,.modal-dialog-sm .col-sm-6,.modal-dialog-sm .col-sm-7,.modal-dialog-sm .col-sm-8,.modal-dialog-sm .col-sm-9,.modal-dialog-sm .col-sm-10,.modal-dialog-sm .col-sm-11{padding-right:5px;padding-left:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px;min-width:390px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-username,.remote-action-group ul 
li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.form-group-sm .form-control-static{padding:6px 0}.table{margin-bottom:10px}.table>thead>tr>th{padding:5px 
5px;outline:none;white-space:nowrap;font-weight:normal;text-align:center;background-color:#ededed}.table>tbody>tr>td{padding:5px;text-align:center;vertical-align:middle}.table>tbody>tr>td .nowrap{white-space:nowrap}.table.table-data thead .sorting,.table.table-data thead .sorting_asc,.table.table-data thead .sorting_desc{cursor:pointer;position:relative}.table.table-data thead .sorting>span:after,.table.table-data thead .sorting_asc>span:after,.table.table-data thead .sorting_desc>span:after{bottom:4px;padding-left:5px;display:inline-block;font-family:'FontAwesome';opacity:.8}.table.table-data thead .sorting>span:after{opacity:.2;content:"\f0dc"}.table.table-data thead .sorting_asc>span:after{content:"\f0de"}.table.table-data thead .sorting_desc>span:after{content:"\f0dd"}.host-id{display:block;font-size:16px;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;color:#333}.host-id.not-active{font-size:14px;font-weight:400;color:#999}.host-desc{font-size:12px;color:#999;display:inline-block;white-space:nowrap;width:160px;overflow:hidden;text-overflow:ellipsis}a.host-desc:hover:before{display:inline-block;padding-right:3px;line-height:12px;content:"\f040";font-family:'FontAwesome'}.td-ip-list{padding-right:20px;padding-left:5px}.td-ip-show-more{font-size:14px;width:12px;float:right;display:block}.td-ip-item{min-width:12em;width:12em;height:18px;padding:2px 4px;margin:1px 0;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.td-ip-item span{display:inline-block;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.td-ip-item a{display:inline-block;width:14px;float:right;font-size:14px}.admin{background-color:#930;color:#fff;padding:5px 15px;border-radius:5px}.page-header-fixed{padding-top:48px}.header{border:none;box-shadow:0 0 3px rgba(0,0,0,0.5)}.header .container-fluid{padding-left:0}.header .breadcrumb-container{display:inline-block;padding-top:6px}.header .breadcrumb{padding-left:20px;font-size:16px}.header.navbar{min-height:48px;height:48px;margin:0}.header.navbar .brand{display:inline-block;float:left;width:180px;height:48px;padding:12px 0 0;text-align:center;margin:0 auto;background-color:#3a3a3a}.header.navbar .brand .navbar-logo{display:inline-block;width:93px;height:30px;background:url(../img/site-logo-small.png) no-repeat}.header.navbar .breadcrumb>li+li:before{font-size:18px;padding:0 5px;color:#ccc;content:"\f105";font-family:'FontAwesome'}.page-sidebar-fixed .sidebar{position:fixed}.sidebar{top:0;bottom:0;left:0;width:180px;padding-top:48px;z-index:1010;background-color:#3a3a3a}.sidebar .nav-menu>li>a{padding:8px 0 8px 20px;line-height:24px;font-size:13px;color:#c2c2c2;border-left:5px solid #3a3a3a}.sidebar .nav-menu>li>a:focus{background-color:#3a3a3a;border-left:5px solid #3a3a3a}.sidebar .nav-menu>li>a:hover{background-color:#2d2d2d;border-left:5px solid #005c74}.sidebar .nav-menu>li>a.active{color:#fff;background-color:#0084a7;border-left:5px solid #0084a7}.sidebar .nav-menu>li>a.active:hover{border-left:5px solid #00acda}.sidebar .nav-menu>li>a.active:after{content:"\e251";font-family:'Glyphicons Halflings';position:relative;top:1px;display:inline-block;font-style:normal;font-weight:400;float:right;color:#e9e9e9;font-size:20px;line-height:24px;margin-right:-6px}.sidebar .nav-menu li 
.menu-caret:after{display:inline-block;width:12px;height:12px;margin-left:5px;top:1px;position:relative;border:none;font-family:'FontAwesome';font-style:normal}.sidebar .nav-menu li .menu-caret:after{content:'\f0da'}.sidebar .nav-menu li.expand .menu-caret:after{content:'\f0d7'}.sidebar .nav-menu>li.super-admin>a:hover{background-color:#620;border-left:5px solid #4d1a00}.sidebar .nav-menu>li.super-admin>a.active{background-color:#930;border-left:5px solid #930}.sidebar .nav-menu>li.super-admin>a.active:hover{border-left:5px solid #c40}.sidebar .nav-menu>li>a>i.icon{float:left;margin-top:1px;margin-right:15px;text-align:center;line-height:24px;font-size:14px}.sidebar .sub-menu{padding:0;margin:0;background-color:#292929;position:relative;list-style-type:none;border-top:1px solid #202020;border-bottom:1px solid #464646}.sidebar .sub-menu>li>a{padding:8px 0 8px 40px;line-height:20px;font-size:13px;display:block;position:relative;color:#889097;border-left:5px solid #292929}.sidebar .sub-menu>li>a:before{display:inline-block;padding-right:8px;line-height:20px;content:"\f105";font-family:'FontAwesome'}.sidebar .sub-menu>li>a:hover{color:#fff;border-left:5px solid #005c74}.sidebar .sub-menu>li>a.active{color:#fff;background-color:#0084a7;border-left:5px solid #0084a7}.sidebar .sub-menu>li>a.active:hover{border-left:5px solid #00acda}.sidebar .sub-menu>li>a.active:after{content:"\e251";font-family:'Glyphicons Halflings';position:relative;top:-2px;display:inline-block;font-style:normal;font-weight:400;float:right;color:#e9e9e9;font-size:20px;line-height:24px;margin-right:-6px}.sidebar .nav-profile{padding:15px 10px;color:#ccc;background-color:#333;border-bottom:1px solid #464646}.sidebar .nav-profile a.title{color:#ccc}.sidebar .nav-profile a.title:hover{color:#fff;background-color:transparent}.sidebar .nav-profile a.title:focus{background-color:transparent}.sidebar .nav-profile .image{float:left;margin-top:3px;font-size:24px;color:#69f;width:36px;height:36px;border-radius:5px;background-color:#eee;text-align:center;margin-right:10px;overflow:hidden}.sidebar .nav-profile .image img{margin-top:-3px}.sidebar .nav-profile .name{display:block;font-size:16px}.sidebar .nav-profile .role{display:block;font-size:12px;color:#999}.sidebar .nav-profile .dropdown-menu{font-size:13px}.sidebar .nav-profile .dropdown-menu>li>a{padding:5px 20px}.sidebar .nav-profile .dropdown-menu>li>a:hover{background-color:#ccc}.sidebar .nav-profile .dropdown-menu .divider{margin:5px 0}.sidebar .badge{margin-top:-10px;margin-left:5px}.content{margin-left:180px}.page-content{padding:15px}.page-content-dashboard{padding:20px 25px}.widget{overflow:hidden;border-radius:3px;padding:15px;margin-bottom:20px;color:#fff}.widget.widget-stats{position:relative}.widget .stats-icon{font-size:52px;top:12px;right:21px;width:56px;height:56px;text-align:center;line-height:56px;margin-left:15px;color:#fff;position:absolute;opacity:.2}.widget .stats-title{color:#fff;color:rgba(255,255,255,0.6)}.widget .stats-split{height:2px;margin:0 -15px 10px;background:rgba(0,0,0,0.2)}.widget .stats-content{font-size:24px;font-weight:300;margin-bottom:10px}.widget .stats-desc{display:inline-block;color:#fff;color:rgba(255,255,255,0.6)}.widget .stats-action{display:inline-block;float:right}.widget a{color:#eee;color:rgba(255,255,255,0.7)}.widget 
a:hover{color:#fff}.widget.widget-info{background-color:#33b7d0}.widget.widget-primary{background-color:#348fe2}.widget.widget-success{background-color:#368142}.widget.widget-warning{background-color:#f57523}.widget.widget-danger{background-color:#d34242}.panel{border:none;box-shadow:none;border-radius:3px}.panel .panel-heading{padding:6px 15px;color:#fff}.panel .panel-heading .panel-title{font-size:14px}.panel .panel-heading .panel-heading-btn{float:right}.panel .panel-heading .panel-heading-btn .btn{display:inline-block;padding:0;border:none;text-align:center}.panel .panel-heading .panel-heading-btn .btn.btn-xs{width:18px;height:18px;line-height:18px;font-size:12px}.panel .panel-heading .panel-heading-btn .btn.btn-circle{border-radius:50%}.place-holder-h200{width:100%;height:300px;background-color:#eee;border:1px solid #ccc;line-height:200px;text-align:center}.dashboard-panel2-holder{width:100%;height:1150px;background-color:#eee;border:1px solid #ccc;line-height:200px;text-align:center}.dashboard-panel-time{color:#cecece}.box{border:none;box-shadow:none;border-radius:3px;background-color:#fff;padding:15px;margin-bottom:15px}.box-fluid{border:none;box-shadow:none;border-radius:3px;margin-bottom:15px}.box.box-sm,.box-fluid.box-sm{padding:5px 15px}.box .box-title,.box-fluid .box-title{margin-bottom:10px}.box .box-title .title,.box-fluid .box-title .title{display:inline-block;font-size:18px;color:#333;height:30px;line-height:30px}.box .box-title .btn-sm,.box-fluid .box-title .btn-sm{padding:3px 8px;margin-top:-5px}.box .nav-tabs,.box-fluid .nav-tabs{font-size:14px;font-weight:bold}.box .nav-tabs>li:first-child,.box-fluid .nav-tabs>li:first-child{margin-left:50px}.box .tab-content>.tab-pane,.box-fluid .tab-content>.tab-pane{background-color:#fff;padding:20px;border:1px solid #ddd;border-top:none;border-bottom-left-radius:3px;border-bottom-right-radius:3px}.box-license{line-height:30px}.box-license .breadcrumb{padding:0;margin:0;border-radius:0;background-color:transparent}.box-btn-bar{line-height:30px}.box-btn-bar a.btn{margin-right:20px}.page-nav{height:30px;line-height:30px}.page-nav .breadcrumb{padding:0;margin:0;border-radius:0;background-color:transparent}.page-nav .pagination{margin:0 0}.page-nav .btn{margin-top:-3px}.page-filter{height:36px;line-height:36px;margin-bottom:10px}.page-filter .form-control{margin-top:5px;margin-right:4px}.btn.btn-sm .dropdown-menu li a{font-size:11px}.invite{text-align:center;padding-bottom:20px}.invite .code{color:#2f3991;font-size:36px;font-weight:700;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}.invite .link{padding:5px;color:#2f3991;font-size:13px;font-weight:700;background-color:#eee;border-radius:5px}.invite-send-box{width:300px;margin:0 auto}.form-group .input-group{margin-bottom:5px}.op_box{display:block;padding:5px;border-radius:3px;text-align:center;margin-top:5px}.op_error{background:#fbb}.op_wait{background:#ccc}.table-data td.loading{text-align:left;padding:20px}.table-data .btn-group.open .dropdown-toggle{-webkit-box-shadow:none;box-shadow:none}.more-action{position:absolute !important}.more-action .dropdown-menu{background-color:rgba(60,60,60,0.9);color:#fff;font-size:13px}.more-action .dropdown-menu.dropdown-menu-left{margin-left:-120px}.more-action .dropdown-menu>li>a{padding:5px 20px;color:#fff}.more-action .dropdown-menu>li>a:hover,.more-action .dropdown-menu>li>a:active,.more-action .dropdown-menu>li>a:visited{background-color:#0084a7}.more-action .dropdown-menu .divider{margin:5px 
0;background-color:#666}.popover-inline-edit input,.popover-inline-edit .btn{height:30px}.popover-inline-edit .popover-title{background-color:#ddd}.popover-inline-edit .popover-content{padding:20px 10px}.popover-inline-edit .popover{padding:0}.popover-inline-edit .popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#ddd}.user-info-table{font-size:14px}.user-info-table tbody>tr>td{padding:8px}.user-info-table .user-field{min-width:100px;width:100px;color:#999;text-align:right}.user-info-table .user-value{color:#333;font-weight:bold}.user-info-table .user-value a{font-weight:normal}.breadcrumb.breadcrumb-trans{background-color:transparent}.biz-box{display:inline-block;width:20%;max-width:20%}.biz-box .bb-inner{background-color:#368142;margin:3px;border-radius:4px}.biz-box .bb-name{color:#fff;padding:9px;margin-bottom:3px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;text-align:center;padding-top:1px;padding-bottom:1px}.biz-box .bb-ver{font-size:11px;height:16px;text-align:center;padding:0 5px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;border-bottom-left-radius:4px;border-bottom-right-radius:4px;color:rgba(255,255,255,0.85);background-color:rgba(0,0,0,0.3)}.biz-box.mp-disabled .mp-inner{background-color:#e5e5e5}.biz-box.mp-disabled .mp-name{color:#999}.biz-box.mp-success .mp-inner{background-color:#368142}.biz-box.mp-success .mp-name{color:#fff}.biz-box.mp-danger .mp-inner{background-color:#d34242}.biz-box.mp-danger .mp-name{color:#fff}.biz-box.mp-warning .mp-inner{background-color:#f57523}.biz-box.mp-warning .mp-name{color:#fff}textarea.textarea-resize-y{resize:vertical}textarea.textarea-resize-none{resize:none}textarea.textarea-code{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}textarea.cert_pub{width:100%;height:64px;border:1px solid #e2e2e2;background-color:#e4ffe5}.icon{display:inline-block}.icon16{width:16px;height:16px;line-height:16px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/mimetype-16.png") !important}.icon16.icon-disk{background-position:0 0 !important}.icon16.icon-folder{background-position:-16px 0 !important}.icon16.icon-file{background-position:0 -16px !important}.icon16.icon-txt{background-position:-16px -16px !important}.icon16.icon-help{background-position:-32px -16px !important}.icon16.icon-sys{background-position:-48px -16px !important}.icon16.icon-exe{background-position:-64px -16px !important}.icon16.icon-office{background-position:0 -32px !important}.icon16.icon-word{background-position:-16px -32px !important}.icon16.icon-excel{background-position:-32px -32px !important}.icon16.icon-ppt{background-position:-48px -32px !important}.icon16.icon-access{background-position:-64px -32px !important}.icon16.icon-visio{background-position:-80px -32px !important}.icon16.icon-audio{background-position:0 -48px !important}.icon16.icon-video{background-position:-16px -48px !important}.icon16.icon-pic{background-position:-32px -48px !important}.icon16.icon-pdf{background-position:-48px -48px !important}.icon16.icon-font{background-position:-64px -48px !important}.icon16.icon-script{background-position:0 -64px !important}.icon16.icon-html{background-position:-16px -64px !important}.icon16.icon-py{background-position:-32px -64px !important}.icon16.icon-h{background-position:-48px -64px !important}.icon16.icon-c{background-position:-64px -64px !important}.icon16.icon-cpp{background-position:-80px 
-64px !important}.icon16.icon-cs{background-position:-96px -64px !important}.icon16.icon-php{background-position:-112px -64px !important}.icon16.icon-ruby{background-position:-128px -64px !important}.icon16.icon-java{background-position:-144px -64px !important}.icon16.icon-vs{background-position:-160px -64px !important}.icon16.icon-js{background-position:-176px -64px !important}.icon16.icon-archive{background-position:0 -80px !important}.icon16.icon-rar{background-position:-16px -80px !important}.icon16.icon-zip{background-position:-32px -80px !important}.icon16.icon-7z{background-position:-48px -80px !important}.icon16.icon-tar{background-position:-64px -80px !important}.icon16.icon-gz{background-position:-80px -80px !important}.icon16.icon-jar{background-position:-96px -80px !important}.icon16.icon-bz2{background-position:-112px -80px !important}.icon24{width:24px;height:24px;line-height:24px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/icons-tree-24x24.png") !important}.icon24.icon-disk{background-position:0 0 !important}.icon24.icon-folder{background-position:-24px 0 !important}.icon24.icon-folder-open{background-position:-48px 0 !important}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error 
.gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important} \ No newline at end of file diff --git a/server/www/teleport/static/css/sub.css b/server/www/teleport/static/css/sub.css new file mode 100644 index 0000000..08a9bdf --- /dev/null +++ b/server/www/teleport/static/css/sub.css @@ -0,0 +1 @@ +@charset "utf-8";body{font-family:"Open Sans","Helvetica Neue","Microsoft YaHei","寰蒋闆呴粦",Helvetica,Arial,sans-serif;font-size:13px;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-group-sm>.btn,.btn-sm{padding:2px 5px}.btn.btn-sm{padding:3px 8px}.btn.btn-icon{padding:3px 6px}.btn.btn-icon.btn-sm{padding:0;font-size:14px;height:24px;width:24px;line-height:24px;border-radius:0}.form-group-sm .input-group .input-group-btn>.btn{height:30px;padding:0 8px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{display:inline-block;min-width:8px;padding:5px 10px;border-radius:10px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.badge.badge-plain{text-shadow:none}.badge.badge-sm{font-size:11px;padding:3px 6px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-6px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.label{display:inline-block;min-width:8px;padding:5px 10px;border-radius:5px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.label.label-plain{text-shadow:none}.label.label-sm{font-size:11px;padding:3px 8px;margin-top:0;border-radius:5px;text-shadow:none}.label.label-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.alert-sm{padding:5px;margin-bottom:10px}.modal-dialog-sm .modal-header{padding:10px}.modal-dialog-sm 
.modal-body{padding:10px}.modal-dialog-sm .modal-footer{padding:10px}.modal-dialog-sm .form-horizontal .form-group{margin-right:-5px;margin-left:-5px}.modal-dialog-sm .col-sm-1,.modal-dialog-sm .col-sm-2,.modal-dialog-sm .col-sm-3,.modal-dialog-sm .col-sm-4,.modal-dialog-sm .col-sm-5,.modal-dialog-sm .col-sm-6,.modal-dialog-sm .col-sm-7,.modal-dialog-sm .col-sm-8,.modal-dialog-sm .col-sm-9,.modal-dialog-sm .col-sm-10,.modal-dialog-sm .col-sm-11{padding-right:5px;padding-left:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px;min-width:390px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul 
li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.form-group-sm .form-control-static{padding:6px 0}.table{margin-bottom:10px}.table>thead>tr>th{padding:5px 5px;outline:none;white-space:nowrap;font-weight:normal;text-align:center;background-color:#ededed}.table>tbody>tr>td{padding:5px;text-align:center;vertical-align:middle}.table>tbody>tr>td .nowrap{white-space:nowrap}.table.table-data thead .sorting,.table.table-data thead .sorting_asc,.table.table-data thead .sorting_desc{cursor:pointer;position:relative}.table.table-data thead .sorting>span:after,.table.table-data thead .sorting_asc>span:after,.table.table-data thead .sorting_desc>span:after{bottom:4px;padding-left:5px;display:inline-block;font-family:'FontAwesome';opacity:.8}.table.table-data thead .sorting>span:after{opacity:.2;content:"\f0dc"}.table.table-data thead .sorting_asc>span:after{content:"\f0de"}.table.table-data thead .sorting_desc>span:after{content:"\f0dd"}.host-id{display:block;font-size:16px;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;color:#333}.host-id.not-active{font-size:14px;font-weight:400;color:#999}.host-desc{font-size:12px;color:#999;display:inline-block;white-space:nowrap;width:160px;overflow:hidden;text-overflow:ellipsis}a.host-desc:hover:before{display:inline-block;padding-right:3px;line-height:12px;content:"\f040";font-family:'FontAwesome'}.td-ip-list{padding-right:20px;padding-left:5px}.td-ip-show-more{font-size:14px;width:12px;float:right;display:block}.td-ip-item{min-width:12em;width:12em;height:18px;padding:2px 4px;margin:1px 0;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.td-ip-item span{display:inline-block;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.td-ip-item a{display:inline-block;width:14px;float:right;font-size:14px}.page-header-fixed{padding-top:48px}.header{border:none;box-shadow:0 0 3px rgba(0,0,0,0.5);min-height:48px;height:48px;top:0;width:100%;position:fixed;z-index:999}.header .top-navbar{min-height:48px;height:48px;line-height:48px;background-color:#3a3a3a;color:#ccc}.header .top-navbar a{color:#d5d5d5}.header .top-navbar a:hover{color:#5a8fee}.header .top-navbar .brand{float:left;display:inline-block;padding:12px 0;margin:0}.header .top-navbar .brand 
.site-logo{display:block;width:86px;height:24px;background:url(../img/site-logo-small.png) no-repeat}.header .top-navbar .title-container{float:left;display:inline-block;margin:0;padding:0;margin-left:20px}.header .top-navbar .title-container .title{font-size:16px}.header .top-navbar .breadcrumb-container{float:left;display:inline-block;margin:0;padding:0}.header .top-navbar .breadcrumb-container .breadcrumb{background-color:#3a3a3a;height:48px;margin:0;border-radius:0;border:none;padding:0 0 0 20px;font-size:16px;color:#ccc}.header .top-navbar .breadcrumb-container .breadcrumb>li+li:before{font-size:18px;padding:0 5px;color:#555;content:'|'}.header .top-navbar .breadcrumb-container .breadcrumb .title{font-size:18px}.header .top-navbar .breadcrumb-container .breadcrumb .sub-title{font-size:14px;color:#b3b3b3}.header .top-navbar .status-container{float:right}.page-content{margin-top:10px;margin-bottom:44px}.footer{width:100%;height:24px;line-height:24px;background-color:#d5d5d5;border-top:1px solid #a2a2a2;border-bottom:1px solid #efefef;z-index:998;text-align:center;font-size:12px}.footer.footer-fixed-bottom{bottom:0;position:fixed}.row-sm .col-sm-1,.row-sm .col-sm-2,.row-sm .col-sm-3,.row-sm .col-sm-4,.row-sm .col-sm-5,.row-sm .col-sm-6,.row-sm .col-sm-7,.row-sm .col-sm-8,.row-sm .col-sm-9,.row-sm .col-sm-10,.row-sm .col-sm-11{padding-right:5px;padding-left:5px}.content{margin-top:15px;margin-bottom:20px;background-color:#fff;border-radius:5px;padding:10px}.content:last-child{margin-bottom:54px}.table-host{width:100%;border-top:10px solid #b3cfe7;border-bottom:1px solid #b3cfe7}.table-host .cell-host-id{border-left:1px solid #e7e7e7;padding:5px;text-align:center;width:168px;vertical-align:middle}.table-host .cell-host-id .host-id{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:13px;color:#999;display:inline-block}.table-host .cell-host-id .host-name{display:block;width:168px;text-align:center;overflow:hidden;white-space:nowrap;text-overflow:ellipsis;font-size:16px;margin:auto;margin-bottom:10px}.table-host .cell-host-id .td-ip-item{width:10em;height:18px;padding:2px 4px;margin:1px auto;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.table-host .cell-host-id .td-ip-item span{display:inline-block;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.table-host .cell-host-id .actions{margin-top:20px}.table-host .cell-host-id .actions a{margin-left:5px;margin-right:5px}.table-host .cell-host-id .actions a:first-child{margin-left:0}.table-host .cell-host-id .actions a:last-child{margin-right:0}.table-host .cell-detail{border-left:1px solid #e7e7e7;border-right:1px solid #e7e7e7;vertical-align:top}.table-host .cell-detail tr{border-top:1px solid #e7e7e7}.table-host .cell-detail tr:last-child{border-bottom:1px solid #e7e7e7}.table-host .cell-detail .row-host-info{background-color:#ececed}.table-host .cell-detail.host-offline{background-color:#ffcecc;text-align:center;vertical-align:middle}.table-host .cell-detail.host-offline .host-offline-msg{color:#802506;font-size:24px}.table-host .cell-log td{border:1px solid #e7e7e7}.table-host .cell-log td .host-log{font-size:12px;outline:none;width:100%;height:120px;overflow-y:auto;resize:none;border:none;padding:5px}.table-host .cell-log td .host-log div{margin-bottom:3px}.table-host .cell-log td .host-log div .datetime{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier 
New',monospace}.log-box{margin-top:15px}.log-box .log-list{margin-top:5px;border:1px solid #e7e7e7;font-size:12px;outline:none;width:100%;max-height:480px;overflow-y:auto;resize:none;padding:5px}.log-box .log-list div{margin-bottom:3px}.log-box .log-list div:hover{background-color:#f3f3f3}.log-box .log-list div .log-dt{padding:0 3px;padding-top:2px;padding-bottom:1px;margin-right:3px;background-color:#f57523;color:#fff;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}.log-box .log-list div .log-hid{padding:0 3px;margin-right:3px;background-color:#348fe2;color:#fff}.log-box .log-list div .log-hname{padding:0 3px;margin-right:3px;background-color:#348fe2;color:#fff}.page-nav{height:30px;line-height:30px}.page-nav .breadcrumb{padding:0;margin:0;border-radius:0;background-color:transparent}.page-nav .pagination{margin:0 0}.page-nav .btn{margin-top:-3px}.mp{display:inline-block;width:20%;max-width:20%}.mp .mp-inner{background-color:#e5e5e5;margin:3px;border-radius:4px}.mp .mp-name{color:#999;padding:9px;margin-bottom:3px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;text-align:center}.mp .mp-name.with-target{padding-top:17px;padding-bottom:1px}.mp .mp-target{display:inline-block;float:left;position:absolute;font-size:11px;padding:0 5px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;border-top-left-radius:4px;border-bottom-right-radius:4px;color:rgba(255,255,255,0.85);background-color:rgba(0,0,0,0.1)}.mp.mp-disabled .mp-inner{background-color:#e5e5e5}.mp.mp-disabled .mp-name{color:#999}.mp.mp-success .mp-inner{background-color:#368142}.mp.mp-success .mp-name{color:#fff}.mp.mp-danger .mp-inner{background-color:#d34242}.mp.mp-danger .mp-name{color:#fff}.mp.mp-warning .mp-inner{background-color:#f57523}.mp.mp-warning .mp-name{color:#fff}.host-offline{background-color:#ffcecc;height:36px;line-height:36px;padding:0 10px;color:#802506;font-size:20px;cursor:pointer}.host-offline .tips{display:none;font-size:12px}.host-offline:hover .tips{display:inline-block}.host-no-strategy{color:#999;font-size:16px}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px 
!important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important}.icon{display:inline-block}.icon16{width:16px;height:16px;line-height:16px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/mimetype-16.png") !important}.icon16.icon-disk{background-position:0 0 !important}.icon16.icon-folder{background-position:-16px 0 !important}.icon16.icon-file{background-position:0 -16px !important}.icon16.icon-txt{background-position:-16px -16px !important}.icon16.icon-help{background-position:-32px -16px !important}.icon16.icon-sys{background-position:-48px -16px !important}.icon16.icon-exe{background-position:-64px -16px !important}.icon16.icon-office{background-position:0 -32px !important}.icon16.icon-word{background-position:-16px -32px !important}.icon16.icon-excel{background-position:-32px -32px !important}.icon16.icon-ppt{background-position:-48px -32px !important}.icon16.icon-access{background-position:-64px -32px !important}.icon16.icon-visio{background-position:-80px -32px !important}.icon16.icon-audio{background-position:0 -48px !important}.icon16.icon-video{background-position:-16px -48px !important}.icon16.icon-pic{background-position:-32px -48px !important}.icon16.icon-pdf{background-position:-48px -48px !important}.icon16.icon-font{background-position:-64px -48px !important}.icon16.icon-script{background-position:0 -64px !important}.icon16.icon-html{background-position:-16px -64px !important}.icon16.icon-py{background-position:-32px -64px !important}.icon16.icon-h{background-position:-48px -64px !important}.icon16.icon-c{background-position:-64px -64px !important}.icon16.icon-cpp{background-position:-80px -64px !important}.icon16.icon-cs{background-position:-96px -64px !important}.icon16.icon-php{background-position:-112px -64px !important}.icon16.icon-ruby{background-position:-128px -64px !important}.icon16.icon-java{background-position:-144px -64px !important}.icon16.icon-vs{background-position:-160px -64px !important}.icon16.icon-js{background-position:-176px -64px !important}.icon16.icon-archive{background-position:0 -80px !important}.icon16.icon-rar{background-position:-16px -80px !important}.icon16.icon-zip{background-position:-32px -80px !important}.icon16.icon-7z{background-position:-48px -80px !important}.icon16.icon-tar{background-position:-64px -80px !important}.icon16.icon-gz{background-position:-80px -80px !important}.icon16.icon-jar{background-position:-96px -80px !important}.icon16.icon-bz2{background-position:-112px -80px !important}.icon24{width:24px;height:24px;line-height:24px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/icons-tree-24x24.png") !important}.icon24.icon-disk{background-position:0 0 !important}.icon24.icon-folder{background-position:-24px 0 !important}.icon24.icon-folder-open{background-position:-48px 0 
!important}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat} \ No newline at end of file diff --git a/server/www/teleport/static/download/example.csv b/server/www/teleport/static/download/example.csv new file mode 100644 index 0000000..927099f --- /dev/null +++ b/server/www/teleport/static/download/example.csv @@ -0,0 +1,4 @@ +Group ID, OS, IP Address, Port, Protocol, Status, Description, System User, System Password, Encrypted, Extra Params, Key ID, Auth Type +0,1,115.28.12.207,3389,1,0,115.28.12.207,administrator,123456,0,,0,1 +0,2,120.26.109.25,22,2,0,120.26.109.25,root,123456,0,,0,1 +0,2,120.26.109.25,22,2,0,120.26.109.25,root,,0,,1,2 diff --git a/server/www/teleport/static/favicon.ico b/server/www/teleport/static/favicon.ico new file mode 100644 index 0000000..2b25ba4 Binary files /dev/null and b/server/www/teleport/static/favicon.ico differ diff --git a/server/www/teleport/static/favicon.png b/server/www/teleport/static/favicon.png new file mode 100644 index 0000000..515eec1 Binary files /dev/null and b/server/www/teleport/static/favicon.png differ diff --git a/server/www/teleport/static/img/avatar/001.png b/server/www/teleport/static/img/avatar/001.png new file mode 100644 index 0000000..95d0f9a Binary files /dev/null and b/server/www/teleport/static/img/avatar/001.png differ diff --git a/server/www/teleport/static/img/login/input_right_clean.png b/server/www/teleport/static/img/login/input_right_clean.png new file mode 100644 index 0000000..2d48a32 Binary files /dev/null and b/server/www/teleport/static/img/login/input_right_clean.png differ diff --git a/server/www/teleport/static/img/login/side-001.jpg b/server/www/teleport/static/img/login/side-001.jpg new file mode 100644 index 0000000..91a438e Binary files /dev/null and b/server/www/teleport/static/img/login/side-001.jpg differ diff --git a/server/www/teleport/static/img/os-icon/centos-24x24.png b/server/www/teleport/static/img/os-icon/centos-24x24.png new file mode 100644 index 0000000..e2be0a3 Binary files /dev/null and b/server/www/teleport/static/img/os-icon/centos-24x24.png differ diff --git a/server/www/teleport/static/img/os-icon/debian-24x24.png b/server/www/teleport/static/img/os-icon/debian-24x24.png new file mode 100644 index 0000000..3b6a93f Binary files /dev/null and b/server/www/teleport/static/img/os-icon/debian-24x24.png differ diff --git a/server/www/teleport/static/img/os-icon/redhat-24x24.png b/server/www/teleport/static/img/os-icon/redhat-24x24.png new file mode 100644 index 0000000..cb3ad9e Binary files /dev/null and b/server/www/teleport/static/img/os-icon/redhat-24x24.png differ diff --git a/server/www/teleport/static/img/os-icon/ubuntu-24x24.png 
b/server/www/teleport/static/img/os-icon/ubuntu-24x24.png new file mode 100644 index 0000000..0f99190 Binary files /dev/null and b/server/www/teleport/static/img/os-icon/ubuntu-24x24.png differ diff --git a/server/www/teleport/static/img/site-logo-small.png b/server/www/teleport/static/img/site-logo-small.png new file mode 100644 index 0000000..a718510 Binary files /dev/null and b/server/www/teleport/static/img/site-logo-small.png differ diff --git a/server/www/teleport/static/img/site-logo.png b/server/www/teleport/static/img/site-logo.png new file mode 100644 index 0000000..be71722 Binary files /dev/null and b/server/www/teleport/static/img/site-logo.png differ diff --git a/server/www/teleport/static/js/common/term.js b/server/www/teleport/static/js/common/term.js new file mode 100644 index 0000000..b086fda --- /dev/null +++ b/server/www/teleport/static/js/common/term.js @@ -0,0 +1,6275 @@ +/** + * term.js - an xterm emulator + * Copyright (c) 2012-2013, Christopher Jeffrey (MIT License) + * https://github.com/chjj/term.js + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + * + * Originally forked from (with the author's permission): + * Fabrice Bellard's javascript vt100 for jslinux: + * http://bellard.org/jslinux/ + * Copyright (c) 2011 Fabrice Bellard + * The original design remains. The terminal itself + * has been extended to include xterm CSI codes, among + * other features. 
+ */ + +;(function() { + +/** + * Terminal Emulation References: + * http://vt100.net/ + * http://invisible-island.net/xterm/ctlseqs/ctlseqs.txt + * http://invisible-island.net/xterm/ctlseqs/ctlseqs.html + * http://invisible-island.net/vttest/ + * http://www.inwap.com/pdp10/ansicode.txt + * http://linux.die.net/man/4/console_codes + * http://linux.die.net/man/7/urxvt + */ + +'use strict'; + +/** + * Shared + */ + +var window = this + , document = this.document; + +/** + * EventEmitter + */ + +function EventEmitter() { + this._events = this._events || {}; +} + +EventEmitter.prototype.addListener = function(type, listener) { + this._events[type] = this._events[type] || []; + this._events[type].push(listener); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.removeListener = function(type, listener) { + if (!this._events[type]) return; + + var obj = this._events[type] + , i = obj.length; + + while (i--) { + if (obj[i] === listener || obj[i].listener === listener) { + obj.splice(i, 1); + return; + } + } +}; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +EventEmitter.prototype.removeAllListeners = function(type) { + if (this._events[type]) delete this._events[type]; +}; + +EventEmitter.prototype.once = function(type, listener) { + function on() { + var args = Array.prototype.slice.call(arguments); + this.removeListener(type, on); + return listener.apply(this, args); + } + on.listener = listener; + return this.on(type, on); +}; + +EventEmitter.prototype.emit = function(type) { + if (!this._events[type]) return; + + var args = Array.prototype.slice.call(arguments, 1) + , obj = this._events[type] + , l = obj.length + , i = 0; + + for (; i < l; i++) { + obj[i].apply(this, args); + } +}; + +EventEmitter.prototype.listeners = function(type) { + return this._events[type] = this._events[type] || []; +}; + +/** + * Stream + */ + +function Stream() { + EventEmitter.call(this); +} + +inherits(Stream, EventEmitter); + +Stream.prototype.pipe = function(dest, options) { + var src = this + , ondata + , onerror + , onend; + + function unbind() { + src.removeListener('data', ondata); + src.removeListener('error', onerror); + src.removeListener('end', onend); + dest.removeListener('error', onerror); + dest.removeListener('close', unbind); + } + + src.on('data', ondata = function(data) { + dest.write(data); + }); + + src.on('error', onerror = function(err) { + unbind(); + if (!this.listeners('error').length) { + throw err; + } + }); + + src.on('end', onend = function() { + dest.end(); + unbind(); + }); + + dest.on('error', onerror); + dest.on('close', unbind); + + dest.emit('pipe', src); + + return dest; +}; + +/** + * States + */ + +var normal = 0 + , escaped = 1 + , csi = 2 + , osc = 3 + , charset = 4 + , dcs = 5 + , ignore = 6 + , UDK = { type: 'udk' }; + +/** + * Terminal + */ + +function Terminal(options) { + var self = this; + + if (!(this instanceof Terminal)) { + return new Terminal(arguments[0], arguments[1], arguments[2]); + } + + Stream.call(this); + + if (typeof options === 'number') { + options = { + cols: arguments[0], + rows: arguments[1], + handler: arguments[2] + }; + } + + options = options || {}; + + each(keys(Terminal.defaults), function(key) { + if (options[key] == null) { + options[key] = Terminal.options[key]; + // Legacy: + if (Terminal[key] !== Terminal.defaults[key]) { + options[key] = Terminal[key]; + } + } + self[key] = options[key]; + }); + + if (options.colors.length === 8) { + options.colors = 
options.colors.concat(Terminal._colors.slice(8)); + } else if (options.colors.length === 16) { + options.colors = options.colors.concat(Terminal._colors.slice(16)); + } else if (options.colors.length === 10) { + options.colors = options.colors.slice(0, -2).concat( + Terminal._colors.slice(8, -2), options.colors.slice(-2)); + } else if (options.colors.length === 18) { + options.colors = options.colors.slice(0, -2).concat( + Terminal._colors.slice(16, -2), options.colors.slice(-2)); + } + this.colors = options.colors; + + this.options = options; + + // this.context = options.context || window; + // this.document = options.document || document; + this.parent = options.body || options.parent + || (document ? document.getElementsByTagName('body')[0] : null); + + this.cols = options.cols || options.geometry[0]; + this.rows = options.rows || options.geometry[1]; + + // Act as though we are a node TTY stream: + this.setRawMode; + this.isTTY = true; + this.isRaw = true; + this.columns = this.cols; + this.rows = this.rows; + + if (options.handler) { + this.on('data', options.handler); + } + + this.ybase = 0; + this.ydisp = 0; + this.x = 0; + this.y = 0; + this.cursorState = 0; + this.cursorHidden = false; + this.convertEol; + this.state = 0; + this.queue = ''; + this.scrollTop = 0; + this.scrollBottom = this.rows - 1; + + // modes + this.applicationKeypad = false; + this.applicationCursor = false; + this.originMode = false; + this.insertMode = false; + this.wraparoundMode = false; + this.normal = null; + + // select modes + this.prefixMode = false; + this.selectMode = false; + this.visualMode = false; + this.searchMode = false; + this.searchDown; + this.entry = ''; + this.entryPrefix = 'Search: '; + this._real; + this._selected; + this._textarea; + + // charset + this.charset = null; + this.gcharset = null; + this.glevel = 0; + this.charsets = [null]; + + // mouse properties + this.decLocator; + this.x10Mouse; + this.vt200Mouse; + this.vt300Mouse; + this.normalMouse; + this.mouseEvents; + this.sendFocus; + this.utfMouse; + this.sgrMouse; + this.urxvtMouse; + + // misc + this.element; + this.children; + this.refreshStart; + this.refreshEnd; + this.savedX; + this.savedY; + this.savedCols; + + // stream + this.readable = true; + this.writable = true; + + this.defAttr = (0 << 18) | (257 << 9) | (256 << 0); + this.curAttr = this.defAttr; + + this.params = []; + this.currentParam = 0; + this.prefix = ''; + this.postfix = ''; + + this.lines = []; + var i = this.rows; + while (i--) { + this.lines.push(this.blankLine()); + } + + this.tabs; + this.setupStops(); +} + +inherits(Terminal, Stream); + +/** + * Colors + */ + +// Colors 0-15 +Terminal.tangoColors = [ + // dark: + '#2e3436', + '#cc0000', + '#4e9a06', + '#c4a000', + '#3465a4', + '#75507b', + '#06989a', + '#d3d7cf', + // bright: + '#555753', + '#ef2929', + '#8ae234', + '#fce94f', + '#729fcf', + '#ad7fa8', + '#34e2e2', + '#eeeeec' +]; + +Terminal.xtermColors = [ + // dark: + '#000000', // black + '#cd0000', // red3 + '#00cd00', // green3 + '#cdcd00', // yellow3 + '#0000ee', // blue2 + '#cd00cd', // magenta3 + '#00cdcd', // cyan3 + '#e5e5e5', // gray90 + // bright: + '#7f7f7f', // gray50 + '#ff0000', // red + '#00ff00', // green + '#ffff00', // yellow + '#5c5cff', // rgb:5c/5c/ff + '#ff00ff', // magenta + '#00ffff', // cyan + '#ffffff' // white +]; + +// Colors 0-15 + 16-255 +// Much thanks to TooTallNate for writing this. 
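+// (Illustrative note, not in the original term.js source:) the IIFE
+// below builds the standard xterm 256-color palette on top of the 16
+// base colors: indices 16-231 form a 6x6x6 RGB cube over the levels
+// [0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff], and indices 232-255 are a
+// 24-step grey ramp (8 + n*10). A worked example of the cube indexing:
+//   palette index 196 -> cube offset i = 180
+//   r[(180/36)%6|0] = r[5] = 0xff; r[(180/6)%6|0] = r[0] = 0x00; r[180%6] = r[0] = 0x00
+//   -> '#ff0000'   (and palette index 232 -> grey 8 -> '#080808')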
+Terminal.colors = (function() { + var colors = Terminal.tangoColors.slice() + , r = [0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff] + , i; + + // 16-231 + i = 0; + for (; i < 216; i++) { + out(r[(i / 36) % 6 | 0], r[(i / 6) % 6 | 0], r[i % 6]); + } + + // 232-255 (grey) + i = 0; + for (; i < 24; i++) { + r = 8 + i * 10; + out(r, r, r); + } + + function out(r, g, b) { + colors.push('#' + hex(r) + hex(g) + hex(b)); + } + + function hex(c) { + c = c.toString(16); + return c.length < 2 ? '0' + c : c; + } + + return colors; +})(); + +// Default BG/FG +//Terminal.colors[256] = '#000000'; +//Terminal.colors[257] = '#f0f0f0'; +Terminal.colors[256] = '#333333'; +Terminal.colors[257] = '#f0f0f0'; + +Terminal._colors = Terminal.colors.slice(); + +Terminal.vcolors = (function() { + var out = [] + , colors = Terminal.colors + , i = 0 + , color; + + for (; i < 256; i++) { + color = parseInt(colors[i].substring(1), 16); + out.push([ + (color >> 16) & 0xff, + (color >> 8) & 0xff, + color & 0xff + ]); + } + + return out; +})(); + +/** + * Options + */ + +Terminal.defaults = { + colors: Terminal.colors, + convertEol: false, + termName: 'xterm', + geometry: [80, 24], + cursorBlink: true, + visualBell: false, + popOnBell: false, + scrollback: 1000, + screenKeys: false, + debug: false, + useStyle: false + // programFeatures: false, + // focusKeys: false, +}; + +Terminal.options = {}; + +each(keys(Terminal.defaults), function(key) { + Terminal[key] = Terminal.defaults[key]; + Terminal.options[key] = Terminal.defaults[key]; +}); + +/** + * Focused Terminal + */ + +Terminal.focus = null; + +Terminal.prototype.focus = function() { + if (this._textarea) { + this._textarea.focus(); + } + + if (Terminal.focus === this) return; + + if (Terminal.focus) { + Terminal.focus.blur(); + } + + if (this.sendFocus) this.send('\x1b[I'); + this.showCursor(); + + // try { + // this.element.focus(); + // } catch (e) { + // ; + // } + + // this.emit('focus'); + + Terminal.focus = this; +}; + +Terminal.prototype.blur = function() { + if (Terminal.focus !== this) return; + + this.cursorState = 0; + this.refresh(this.y, this.y); + if (this.sendFocus) this.send('\x1b[O'); + + // try { + // this.element.blur(); + // } catch (e) { + // ; + // } + + // this.emit('blur'); + + Terminal.focus = null; +}; + +/** + * Initialize global behavior + */ + +Terminal.prototype.initGlobal = function() { + var document = this.document; + + Terminal._boundDocs = Terminal._boundDocs || []; + if (~indexOf(Terminal._boundDocs, document)) { + return; + } + Terminal._boundDocs.push(document); + + Terminal.bindPaste(document); + + Terminal.bindKeys(document); + + Terminal.bindCopy(document); + + if (this.useStyle) { + Terminal.insertStyle(document, this.colors[256], this.colors[257]); + } +}; + +/** + * Bind to paste event + */ + +Terminal.bindPaste = function(document) { + // This seems to work well for ctrl-V and middle-click, + // even without the contentEditable workaround. + var window = document.defaultView; + on(window, 'paste', function(ev) { + var term = Terminal.focus; + if (!term) return; + if (term._textarea) return; + if (ev.clipboardData) { + term.send(ev.clipboardData.getData('text/plain')); + } else if (term.context.clipboardData) { + term.send(term.context.clipboardData.getData('Text')); + } + // Not necessary. Do it anyway for good measure. 
+ term.element.contentEditable = 'inherit'; + return cancel(ev); + }); +}; + +/** + * Global Events for key handling + */ + +Terminal.bindKeys = function(document) { + // We should only need to check `target === body` below, + // but we can check everything for good measure. + on(document, 'keydown', function(ev) { + if (!Terminal.focus) return; + var target = ev.target || ev.srcElement; + if (!target) return; + if (target === Terminal.focus.element + || target === Terminal.focus.context + || target === Terminal.focus.document + || target === Terminal.focus.body + || target === Terminal.focus._textarea + || target === Terminal.focus.parent) { + return Terminal.focus.keyDown(ev); + } + }, true); + + on(document, 'keypress', function(ev) { + if (!Terminal.focus) return; + var target = ev.target || ev.srcElement; + if (!target) return; + if (target === Terminal.focus.element + || target === Terminal.focus.context + || target === Terminal.focus.document + || target === Terminal.focus.body + || target === Terminal.focus._textarea + || target === Terminal.focus.parent) { + return Terminal.focus.keyPress(ev); + } + }, true); + + // If we click somewhere other than a + // terminal, unfocus the terminal. + on(document, 'mousedown', function(ev) { + if (!Terminal.focus) return; + + var el = ev.target || ev.srcElement; + if (!el) return; + if (!el.parentNode) return; + if (!el.parentNode.parentNode) return; + + do { + if (el === Terminal.focus.element) return; + } while (el = el.parentNode); + + Terminal.focus.blur(); + }); +}; + +/** + * Copy Selection w/ Ctrl-C (Select Mode) + */ + +Terminal.bindCopy = function(document) { + var window = document.defaultView; + + // if (!('onbeforecopy' in document)) { + // // Copies to *only* the clipboard. + // on(window, 'copy', function fn(ev) { + // var term = Terminal.focus; + // if (!term) return; + // if (!term._selected) return; + // var text = term.grabText( + // term._selected.x1, term._selected.x2, + // term._selected.y1, term._selected.y2); + // term.emit('copy', text); + // ev.clipboardData.setData('text/plain', text); + // }); + // return; + // } + + // Copies to primary selection *and* clipboard. + // NOTE: This may work better on capture phase, + // or using the `beforecopy` event. 
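+  // (Illustrative note, not in the original term.js source:) the handler
+  // below is the usual hidden-textarea copy trick: on 'copy' it grabs the
+  // selected region as plain text, mirrors it into the off-screen copy
+  // textarea, and selects its contents so the browser's native copy picks
+  // that text up, then hands focus back to the terminal on a short timeout.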
+ on(window, 'copy', function(ev) { + var term = Terminal.focus; + if (!term) return; + if (!term._selected) return; + var textarea = term.getCopyTextarea(); + var text = term.grabText( + term._selected.x1, term._selected.x2, + term._selected.y1, term._selected.y2); + term.emit('copy', text); + textarea.focus(); + textarea.textContent = text; + textarea.value = text; + textarea.setSelectionRange(0, text.length); + setTimeout(function() { + term.element.focus(); + term.focus(); + }, 1); + }); +}; + +/** + * Fix Mobile + */ + +Terminal.prototype.getTextarea = function(document) { + var self = this; + + var textarea = document.createElement('textarea'); + textarea.style.position = 'absolute'; + textarea.style.left = '-32000px'; + textarea.style.top = '-32000px'; + textarea.style.width = '100em'; + textarea.style.height = '2em'; + textarea.style.padding = '0'; + textarea.style.opacity = '0'; + textarea.style.color = 'inherit'; + textarea.style.font = 'inherit'; + textarea.style.textIndent = '-1em'; /* Hide text cursor on IE */ + textarea.style.backgroundColor = 'transparent'; + textarea.style.borderStyle = 'none'; + textarea.style.outlineStyle = 'none'; + textarea.autocapitalize = 'none'; + textarea.autocorrect = 'off'; + + var onInputTimestamp; + + var onInput = function(ev){ + if(ev.timeStamp && ev.timeStamp === onInputTimestamp){ + return; + } + onInputTimestamp = ev.timeStamp; + + var value = textarea.textContent || textarea.value; + if (typeof self.select.startPos !== 'undefined'){ + self.select = {}; + self.clearSelectedText(); + self.refresh(0, self.rows - 1); + } + if (!self.compositionStatus) { + textarea.value = ''; + textarea.textContent = ''; + self.send(value); + } + }; + + on(textarea, 'compositionstart', function() { + textarea.style.opacity = "1.0"; + textarea.style.textIndent = "0"; + self.compositionStatus = true; + }); + on(textarea, 'compositionend', function(ev) { + textarea.style.opacity = "0.0"; + textarea.style.textIndent = "-1em"; + self.compositionStatus = false; + setTimeout(function(){ + onInput(ev); // for IE that does not trigger 'input' after the IME composition. 
+ }, 1); + }); + + on(textarea, 'keydown', function(){ + var value = textarea.textContent || textarea.value; + }); + + on(textarea, 'input', onInput); + + if (Terminal.isAndroid) { + on(textarea, 'change', function() { + var value = textarea.textContent || textarea.value; + textarea.value = ''; + textarea.textContent = ''; + self.send(value + '\r'); + }); + } + return textarea; +}; + +/** + * Insert a default style + */ + +Terminal.insertStyle = function(document, bg, fg) { + var style = document.getElementById('term-style'); + if (style) return; + + var head = document.getElementsByTagName('head')[0]; + if (!head) return; + + var style = document.createElement('style'); + style.id = 'term-style'; + + // textContent doesn't work well with IE for ",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/server/www/teleport/static/plugins/jquery/_jquery.min.js b/server/www/teleport/static/plugins/jquery/_jquery.min.js new file mode 100644 index 0000000..0f60b7b --- /dev/null +++ b/server/www/teleport/static/plugins/jquery/_jquery.min.js @@ -0,0 +1,5 @@ +/*! 
jQuery v1.11.3 | (c) 2005, 2015 jQuery Foundation, Inc. | jquery.org/license */ +!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l="1.11.3",m=function(a,b){return new m.fn.init(a,b)},n=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,o=/^-ms-/,p=/-([\da-z])/gi,q=function(a,b){return b.toUpperCase()};m.fn=m.prototype={jquery:l,constructor:m,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=m.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return m.each(this,a,b)},map:function(a){return this.pushStack(m.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},m.extend=m.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||m.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(m.isPlainObject(c)||(b=m.isArray(c)))?(b?(b=!1,f=a&&m.isArray(a)?a:[]):f=a&&m.isPlainObject(a)?a:{},g[d]=m.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},m.extend({expando:"jQuery"+(l+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===m.type(a)},isArray:Array.isArray||function(a){return"array"===m.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){return!m.isArray(a)&&a-parseFloat(a)+1>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==m.type(a)||a.nodeType||m.isWindow(a))return!1;try{if(a.constructor&&!j.call(a,"constructor")&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(k.ownLast)for(b in a)return j.call(a,b);for(b in a);return void 0===b||j.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(b){b&&m.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(o,"ms-").replace(p,q)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=r(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(n,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(r(Object(a))?m.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(g)return g.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var 
c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=r(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(f=a[b],b=a,a=f),m.isFunction(a)?(c=d.call(arguments,2),e=function(){return a.apply(b||this,c.concat(d.call(arguments)))},e.guid=a.guid=a.guid||m.guid++,e):void 0},now:function(){return+new Date},support:k}),m.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function r(a){var b="length"in a&&a.length,c=m.type(a);return"function"===c||m.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var s=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ha(),z=ha(),A=ha(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N=M.replace("w","w#"),O="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+N+"))|)"+L+"*\\]",P=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+O+")*)|.*)\\)|)",Q=new RegExp(L+"+","g"),R=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),S=new RegExp("^"+L+"*,"+L+"*"),T=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),U=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),V=new RegExp(P),W=new RegExp("^"+N+"$"),X={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+O),PSEUDO:new RegExp("^"+P),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,aa=/[+~]/,ba=/'|\\/g,ca=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),da=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},ea=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(fa){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function ga(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],k=b.nodeType,"string"!=typeof a||!a||1!==k&&9!==k&&11!==k)return d;if(!e&&p){if(11!==k&&(f=_.exec(a)))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return H.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName)return 
H.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=1!==k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(ba,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+ra(o[l]);w=aa.test(a)&&pa(b.parentNode)||b,x=o.join(",")}if(x)try{return H.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function ha(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ia(a){return a[u]=!0,a}function ja(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ka(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function la(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function na(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function oa(a){return ia(function(b){return b=+b,ia(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function pa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=ga.support={},f=ga.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=ga.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=g.documentElement,e=g.defaultView,e&&e!==e.top&&(e.addEventListener?e.addEventListener("unload",ea,!1):e.attachEvent&&e.attachEvent("onunload",ea)),p=!f(g),c.attributes=ja(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ja(function(a){return a.appendChild(g.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(g.getElementsByClassName),c.getById=ja(function(a){return o.appendChild(a).id=u,!g.getElementsByName||!g.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ca,da);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ca,da);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(g.querySelectorAll))&&(ja(function(a){o.appendChild(a).innerHTML="",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ja(function(a){var 
b=g.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ja(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",P)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===g||a.ownerDocument===v&&t(v,a)?-1:b===g||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,h=[a],i=[b];if(!e||!f)return a===g?-1:b===g?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return la(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?la(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},g):n},ga.matches=function(a,b){return ga(a,null,null,b)},ga.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return ga(b,n,null,[a]).length>0},ga.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},ga.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},ga.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},ga.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=ga.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=ga.selectors={cacheLength:50,createPseudo:ia,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ca,da),a[3]=(a[3]||a[4]||a[5]||"").replace(ca,da),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||ga.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&ga.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return 
X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ca,da).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=ga.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(Q," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||ga.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ia(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ia(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?ia(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ia(function(a){return function(b){return ga(a,b).length>0}}),contains:ia(function(a){return a=a.replace(ca,da),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ia(function(a){return W.test(a||"")||ga.error("unsupported lang: "+a),a=a.replace(ca,da).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var 
b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:oa(function(){return[0]}),last:oa(function(a,b){return[b-1]}),eq:oa(function(a,b,c){return[0>c?c+b:c]}),even:oa(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:oa(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:oa(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:oa(function(a,b,c){for(var d=0>c?c+b:c;++db;b++)d+=a[b].value;return d}function sa(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function ta(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ua(a,b,c){for(var d=0,e=b.length;e>d;d++)ga(a,b[d],c);return c}function va(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function wa(a,b,c,d,e,f){return d&&!d[u]&&(d=wa(d)),e&&!e[u]&&(e=wa(e,f)),ia(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ua(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:va(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=va(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=va(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function xa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=sa(function(a){return a===b},h,!0),l=sa(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[sa(ta(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return wa(i>1&&ta(m),i>1&&ra(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&xa(a.slice(i,e)),f>e&&xa(a=a.slice(e)),f>e&&ra(a))}m.push(c)}return ta(m)}function ya(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=F.call(i));s=va(s)}H.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&ga.uniqueSort(i)}return k&&(w=v,j=t),r};return c?ia(f):f}return h=ga.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=xa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,ya(e,d)),f.selector=a}return f},i=ga.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ca,da),b)||[])[0],!b)return 
[minified jQuery 1.11.x (with bundled Sizzle) — vendored third-party library source added by this commit: selector engine, Callbacks/Deferred, data/queue, event, and DOM-manipulation modules]