mirror of https://github.com/tp4a/teleport
Add Python run-time and management web site; for now it works on Windows only.
parent 394b5405c6
commit 37b409f325
@ -0,0 +1,48 @@
# for Visual Studio
.vs
*.opensdf
*.sdf
*.user
*.ncb
*.vc.db
*.vc.opendb
*.aps
**/ipch

# for Python
__pycache__
*.pyc


# for pyCharm
**/.idea/workspace.xml
**/.idea/misc.xml
**/.idea/modules.xml
**/.idea/dictionaries
**/.idea/watcherTasks.xml
**/.idea/codeStyleSettings.xml
**/.idea/inspectionProfiles

# for tmp folder or files.
/out
/external/_download_
/external/jsoncpp
/external/mongoose
/external/openssl
/external/python

# for dist folder
/dist/*.zip
/dist/*.tar.gz
/dist/installer/linux/server/_tmp_
**/_tmp_

/server/share/data/ts_db.db
/server/share/data/replay
/server/www/teleport/.idea/vcs.xml
/server/www/teleport/static/js/var.js
/server/www/packages/packages-windows/x64

# for not finished code
/common/libex/test
/server/tp_core
@ -5,7 +5,7 @@
#include <ex.h>

#include "ts_network.h"
#include "ts_log.h"
#include "ts_ini.h"
//#include "ts_log.h"
//#include "ts_ini.h"
#include "ts_env.h"
#include "ts_cfg.h"

@ -55,11 +55,16 @@ int APIENTRY wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpCmd
WSACleanup();
return 0;
}

g_env.init();
// ex_astr temp;
// ex_wstr2astr(g_env.m_log_path, temp);
// TSLOG_INIT(TS_LOG_LEVEL_DEBUG, "tp_assist.log", temp.c_str());
TSLOG_INIT(TS_LOG_LEVEL_DEBUG, L"tp_assist.log", g_env.m_log_path.c_str());

#ifdef EX_DEBUG
EXLOG_LEVEL(EX_LOG_LEVEL_DEBUG);
#else
EXLOG_LEVEL(EX_LOG_LEVEL_INFO);
#endif

EXLOG_FILE(L"tp_assist.log", g_env.m_log_path.c_str(), 1024, 2);

g_cfgSSH.init();
g_cfgScp.init();
@ -90,9 +90,12 @@
|
|||
<ItemGroup>
|
||||
<ClInclude Include="..\..\common\libex\include\ex.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_const.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_ini.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_log.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_path.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_platform.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_str.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_thread.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_types.h" />
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_util.h" />
|
||||
<ClInclude Include="..\..\external\mongoose\mongoose.h" />
|
||||
|
@ -106,13 +109,18 @@
|
|||
<ClInclude Include="ts_const.h" />
|
||||
<ClInclude Include="ts_env.h" />
|
||||
<ClInclude Include="ts_http_rpc.h" />
|
||||
<ClInclude Include="ts_ini.h" />
|
||||
<ClInclude Include="ts_log.h" />
|
||||
<ClInclude Include="ts_network.h" />
|
||||
<ClInclude Include="ts_thread.h" />
|
||||
<ClInclude Include="ts_ver.h" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_ini.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_log.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_path.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
|
||||
|
@ -121,6 +129,10 @@
|
|||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_thread.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_util.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
|
||||
|
@ -151,12 +163,7 @@
|
|||
<ClCompile Include="ts_cfg.cpp" />
|
||||
<ClCompile Include="ts_env.cpp" />
|
||||
<ClCompile Include="ts_http_rpc.cpp" />
|
||||
<ClCompile Include="ts_ini.cpp" />
|
||||
<ClCompile Include="ts_log.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ts_network.cpp" />
|
||||
<ClCompile Include="ts_thread.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ResourceCompile Include="tp_assist.rc" />
|
||||
|
|
|
@ -13,21 +13,12 @@
|
|||
<ClCompile Include="ts_http_rpc.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ts_log.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ts_thread.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ts_network.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="msocketx.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ts_ini.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ts_env.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
|
@ -55,6 +46,15 @@
|
|||
<ClCompile Include="..\..\common\libex\src\ex_path.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_ini.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_log.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\common\libex\src\ex_thread.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="Resource.h">
|
||||
|
@ -75,18 +75,9 @@
|
|||
<ClInclude Include="ts_http_rpc.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ts_log.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ts_thread.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ts_network.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ts_ini.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ts_env.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
|
@ -126,6 +117,15 @@
|
|||
<ClInclude Include="ts_ver.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_ini.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_log.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\common\libex\include\ex\ex_thread.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Image Include="res\tp.ico">
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
#include "stdafx.h"
|
||||
#include "ts_ini.h"
|
||||
//#include "ts_ini.h"
|
||||
#include "ts_cfg.h"
|
||||
#include "ts_env.h"
|
||||
|
||||
|
@ -50,11 +50,11 @@ bool TsClientCfgBase::_init(void)
|
|||
{
|
||||
client_set temp;
|
||||
|
||||
TsIniSection* cfg = NULL;
|
||||
ExIniSection* cfg = NULL;
|
||||
cfg = m_ini.GetSection(_T("common"));
|
||||
if (NULL == cfg)
|
||||
{
|
||||
TSLOGE("[ERROR] Invalid configuration, [common] section not found.\n");
|
||||
EXLOGE("[ERROR] Invalid configuration, [common] section not found.\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -79,7 +79,7 @@ bool TsClientCfgBase::_init(void)
|
|||
cfg = m_ini.GetSection(sec_name);
|
||||
if (NULL == cfg)
|
||||
{
|
||||
TSLOGE("[ERROR] Invalid configuration, [common] section not found.\n");
|
||||
EXLOGE("[ERROR] Invalid configuration, [common] section not found.\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -108,7 +108,7 @@ bool TsClientCfgBase::_init(void)
|
|||
}
|
||||
temp.desc = _wstr;
|
||||
|
||||
temp.default = 0;
|
||||
temp.is_default = false;
|
||||
|
||||
m_clientsetmap[temp.name] = temp;
|
||||
m_client_list.push_back(temp.name);
|
||||
|
@ -126,11 +126,11 @@ void TsClientCfgBase::set(ex_wstr sec_name, ex_wstr key, ex_wstr value)
|
|||
return;
|
||||
}
|
||||
|
||||
TsIniSection* cfg = NULL;
|
||||
ExIniSection* cfg = NULL;
|
||||
cfg = m_ini.GetSection(sec_name);
|
||||
if (NULL == cfg)
|
||||
{
|
||||
TSLOGE("[ERROR] Invalid configuration, [common] section not found.\n");
|
||||
EXLOGE("[ERROR] Invalid configuration, [common] section not found.\n");
|
||||
return;
|
||||
}
|
||||
cfg->SetValue(key, value);
|
||||
|
@ -162,14 +162,14 @@ bool TsCfgSSH::init(void)
|
|||
temp.path += _T("\\putty\\putty.exe");
|
||||
temp.commandline = _T("-ssh -pw **** -P {host_port} -l {user_name} {host_ip}");
|
||||
temp.desc = _T("PuTTY为开放源代码软件,主要由Simon Tatham维护,使用MIT licence授权。");
|
||||
temp.default = 1;
|
||||
temp.is_default = true;
|
||||
|
||||
m_clientsetmap[temp.name] = temp;
|
||||
m_client_list.push_back(temp.name);
|
||||
|
||||
if (!m_ini.LoadFromFile(g_env.m_ssh_client_conf_file))
|
||||
{
|
||||
TSLOGE("can not load ssh config file.\n");
|
||||
EXLOGE("can not load ssh config file.\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -196,13 +196,13 @@ bool TsCfgScp::init(void)
|
|||
temp.path += _T("\\winscp\\winscp.exe");
|
||||
temp.commandline = _T("/sessionname=\"TP#{real_ip}\" {user_name}:****@{host_ip}:{host_port}");
|
||||
temp.desc = _T("WinSCP是一个Windows环境下使用SSH的开源图形化SFTP客户端。同时支持SCP协议。它的主要功能就是在本地与远程计算机间安全的复制文件。");
|
||||
temp.default = 1;
|
||||
temp.is_default = true;
|
||||
m_clientsetmap[temp.name] = temp;
|
||||
m_client_list.push_back(temp.name);
|
||||
|
||||
if (!m_ini.LoadFromFile(g_env.m_scp_client_conf_file))
|
||||
{
|
||||
TSLOGE("can not load scp config file.\n");
|
||||
EXLOGE("can not load scp config file.\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -230,13 +230,13 @@ bool TsCfgTelnet::init(void)
|
|||
temp.commandline = _T("telnet://{user_name}@{host_ip}:{host_port}");
|
||||
temp.desc = _T("PuTTY为开放源代码软件,主要由Simon Tatham维护,使用MIT licence授权。");
|
||||
|
||||
temp.default = 1;
|
||||
temp.is_default = true;
|
||||
m_clientsetmap[temp.name] = temp;
|
||||
m_client_list.push_back(temp.name);
|
||||
|
||||
if (!m_ini.LoadFromFile(g_env.m_telnet_client_conf_file))
|
||||
{
|
||||
TSLOGE("can not load telnet config file.\n");
|
||||
EXLOGE("can not load telnet config file.\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ struct client_set
|
|||
ex_wstr path;
|
||||
ex_wstr commandline;
|
||||
ex_wstr desc;
|
||||
int default;
|
||||
bool is_default;
|
||||
};
|
||||
|
||||
typedef std::map<ex_wstr, client_set> clientsetmap;
|
||||
|
@ -35,7 +35,7 @@ protected:
|
|||
bool _init(void);
|
||||
|
||||
protected:
|
||||
TsIniFile m_ini;
|
||||
ExIniFile m_ini;
|
||||
};
|
||||
|
||||
class TsCfgSSH : public TsClientCfgBase
|
||||
|
|
|
@ -96,16 +96,16 @@ void http_rpc_main_loop(void)
|
|||
{
|
||||
if (!g_http_interface.init(TS_HTTP_RPC_HOST, TS_HTTP_RPC_PORT))
|
||||
{
|
||||
TSLOGE("[ERROR] can not start HTTP-RPC listener, maybe port %d is already in use.\n", TS_HTTP_RPC_PORT);
|
||||
EXLOGE("[ERROR] can not start HTTP-RPC listener, maybe port %d is already in use.\n", TS_HTTP_RPC_PORT);
|
||||
return;
|
||||
}
|
||||
|
||||
TSLOGV("======================================================\n");
|
||||
TSLOGV("[rpc] TeleportAssist-HTTP-RPC ready on %s:%d\n", TS_HTTP_RPC_HOST, TS_HTTP_RPC_PORT);
|
||||
EXLOGV("======================================================\n");
|
||||
EXLOGV("[rpc] TeleportAssist-HTTP-RPC ready on %s:%d\n", TS_HTTP_RPC_HOST, TS_HTTP_RPC_PORT);
|
||||
|
||||
g_http_interface.run();
|
||||
|
||||
TSLOGV("[prc] main loop end.\n");
|
||||
EXLOGV("[prc] main loop end.\n");
|
||||
}
|
||||
|
||||
#define HEXTOI(x) (isdigit(x) ? x - '0' : x - 'W')
|
||||
|
@ -185,7 +185,7 @@ bool TsHttpRpc::init(const char* ip, int port)
|
|||
nc = mg_bind(&m_mg_mgr, addr, _mg_event_handler);
|
||||
if (nc == NULL)
|
||||
{
|
||||
TSLOGE("[rpc] TsHttpRpc::init %s:%d\n", ip, port);
|
||||
EXLOGE("[rpc] TsHttpRpc::init %s:%d\n", ip, port);
|
||||
return false;
|
||||
}
|
||||
nc->user_data = this;
|
||||
|
@ -225,7 +225,7 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat
|
|||
TsHttpRpc* _this = (TsHttpRpc*)nc->user_data;
|
||||
if (NULL == _this)
|
||||
{
|
||||
TSLOGE("[ERROR] invalid http request.\n");
|
||||
EXLOGE("[ERROR] invalid http request.\n");
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -249,7 +249,7 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat
|
|||
else
|
||||
dbg_method = "UNSUPPORTED-HTTP-METHOD";
|
||||
|
||||
TSLOGV("[rpc] got %s request: %s\n", dbg_method, uri.c_str());
|
||||
EXLOGV("[rpc] got %s request: %s\n", dbg_method, uri.c_str());
|
||||
#endif
|
||||
ex_astr ret_buf;
|
||||
bool b_is_index = false;
|
||||
|
@ -328,7 +328,7 @@ void TsHttpRpc::_mg_event_handler(struct mg_connection *nc, int ev, void *ev_dat
|
|||
unsigned int rv = _this->_parse_request(hm, method, json_param);
|
||||
if (0 != rv)
|
||||
{
|
||||
TSLOGE("[ERROR] http-rpc got invalid request.\n");
|
||||
EXLOGE("[ERROR] http-rpc got invalid request.\n");
|
||||
_this->_create_json_ret(ret_buf, rv);
|
||||
}
|
||||
else
|
||||
|
@ -432,7 +432,7 @@ unsigned int TsHttpRpc::_parse_request(struct http_message* req, ex_astr& func_c
|
|||
func_args = &sztmp[0];
|
||||
}
|
||||
|
||||
TSLOGV("[rpc] method=%s, json_param=%s\n", func_cmd.c_str(), func_args.c_str());
|
||||
EXLOGV("[rpc] method=%s, json_param=%s\n", func_cmd.c_str(), func_args.c_str());
|
||||
|
||||
return TSR_OK;
|
||||
}
|
||||
|
@ -469,7 +469,7 @@ void TsHttpRpc::_process_js_request(const ex_astr& func_cmd, const ex_astr& func
|
|||
}
|
||||
else
|
||||
{
|
||||
TSLOGE("[rpc] got unknown command: %s\n", func_cmd.c_str());
|
||||
EXLOGE("[rpc] got unknown command: %s\n", func_cmd.c_str());
|
||||
_create_json_ret(buf, TSR_NO_SUCH_METHOD);
|
||||
}
|
||||
}
|
||||
|
@ -851,7 +851,7 @@ void TsHttpRpc::_rpc_func_create_ts_client(const ex_astr& func_args, ex_astr& bu
|
|||
|
||||
if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi))
|
||||
{
|
||||
TSLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str());
|
||||
EXLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str());
|
||||
root_ret["code"] = TSR_CREATE_PROCESS_ERROR;
|
||||
_create_json_ret(buf, root_ret);
|
||||
return;
|
||||
|
@ -1109,7 +1109,7 @@ void TsHttpRpc::_rpc_func_ts_rdp_play(const ex_astr& func_args, ex_astr& buf)
|
|||
ZeroMemory(&pi, sizeof(pi));
|
||||
if (!CreateProcess(NULL, (wchar_t *)w_exe_path.c_str(), NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi))
|
||||
{
|
||||
TSLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str());
|
||||
EXLOGE(_T("CreateProcess() failed. Error=0x%08X.\n %s\n"), GetLastError(), w_exe_path.c_str());
|
||||
root_ret["code"] = TSR_CREATE_PROCESS_ERROR;
|
||||
_create_json_ret(buf, root_ret);
|
||||
return;
|
||||
|
@ -1165,7 +1165,7 @@ void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf)
|
|||
ex_wstr2astr(it->second.desc, temp, EX_CODEPAGE_UTF8);
|
||||
config["desc"] = temp;
|
||||
|
||||
config["build_in"] = it->second.default;
|
||||
config["build_in"] = it->second.is_default ? 1 : 0;
|
||||
if (it->first == g_cfgSSH.m_current_client)
|
||||
{
|
||||
config["current"] = 1;
|
||||
|
@ -1211,7 +1211,7 @@ void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf)
|
|||
ex_wstr2astr(it->second.alias_name, temp, EX_CODEPAGE_UTF8);
|
||||
config["alias_name"] = temp;
|
||||
|
||||
config["build_in"] = it->second.default;
|
||||
config["build_in"] = it->second.is_default ? 1 : 0;
|
||||
|
||||
if (it->first == g_cfgScp.m_current_client)
|
||||
config["current"] = 1;
|
||||
|
@ -1252,7 +1252,7 @@ void TsHttpRpc::_rpc_func_get_config(const ex_astr& func_args, ex_astr& buf)
|
|||
ex_wstr2astr(it->second.alias_name, temp, EX_CODEPAGE_UTF8);
|
||||
config["alias_name"] = temp;
|
||||
|
||||
config["build_in"] = it->second.default;
|
||||
config["build_in"] = it->second.is_default ? 1 : 0;
|
||||
|
||||
if (it->first == g_cfgTelnet.m_current_client)
|
||||
config["current"] = 1;
|
||||
|
@ -1318,7 +1318,7 @@ void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf)
|
|||
_create_json_ret(buf, TSR_INVALID_JSON_PARAM);
|
||||
return;
|
||||
}
|
||||
if (it->second.default == 1)
|
||||
if (it->second.is_default)
|
||||
{
|
||||
g_cfgSSH.set(_T("common"), _T("current_client"), w_name);
|
||||
g_cfgSSH.save();
|
||||
|
@ -1343,7 +1343,7 @@ void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf)
|
|||
_create_json_ret(buf, TSR_INVALID_JSON_PARAM);
|
||||
return;
|
||||
}
|
||||
if (it->second.default == 1)
|
||||
if (it->second.is_default)
|
||||
{
|
||||
g_cfgScp.set(_T("common"), _T("current_client"), w_name);
|
||||
g_cfgScp.save();
|
||||
|
@ -1367,7 +1367,7 @@ void TsHttpRpc::_rpc_func_set_config(const ex_astr& func_args, ex_astr& buf)
|
|||
_create_json_ret(buf, TSR_INVALID_JSON_PARAM);
|
||||
return;
|
||||
}
|
||||
if (it->second.default == 1)
|
||||
if (it->second.is_default)
|
||||
{
|
||||
g_cfgTelnet.set(_T("common"), _T("current_client"), w_name);
|
||||
g_cfgTelnet.save();
|
||||
|
|
|
@ -1,626 +0,0 @@
|
|||
#include "stdafx.h"
|
||||
#include "ts_log.h"
|
||||
#include "ts_thread.h"
|
||||
|
||||
#include <vector>
|
||||
#include <deque>
|
||||
#include <algorithm>
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
#include <io.h>
|
||||
#include <stdio.h>
|
||||
#include <direct.h>
|
||||
#else
|
||||
#include <dirent.h>
|
||||
#include <sys/time.h>
|
||||
#endif
|
||||
|
||||
#define LOG_PATH_MAX_LEN 1024
|
||||
#define LOG_CONTENT_MAX_LEN 2048
|
||||
|
||||
#define LOG_FILE_MAX_SIZE 1024*1024*10
|
||||
#define LOG_FILE_MAX_COUNT 10
|
||||
typedef enum TS_COLORS
|
||||
{
|
||||
TS_COLOR_BLACK = 0,
|
||||
TS_COLOR_BLUE = 1,
|
||||
TS_COLOR_GREEN = 2,
|
||||
TS_COLOR_CYAN = 3,
|
||||
TS_COLOR_RED = 4,
|
||||
TS_COLOR_MAGENTA = 5,
|
||||
TS_COLOR_YELLOW = 6,
|
||||
TS_COLOR_LIGHT_GRAY = 7,
|
||||
TS_COLOR_GRAY = 8,
|
||||
TS_COLOR_LIGHT_BLUE = 9,
|
||||
TS_COLOR_LIGHT_GREEN = 10,
|
||||
TS_COLOR_LIGHT_CYAN = 11,
|
||||
TS_COLOR_LIGHT_RED = 12,
|
||||
TS_COLOR_LIGHT_MAGENTA = 13,
|
||||
TS_COLOR_LIGHT_YELLOW = 14,
|
||||
TS_COLOR_WHITE = 15,
|
||||
|
||||
TS_COLOR_NORMAL = 0xFF,
|
||||
}TS_COLORS;
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
static HANDLE g_hConsole = NULL;
|
||||
#endif
|
||||
|
||||
int g_log_min_level = TS_LOG_LEVEL_INFO;
|
||||
ex_wstr g_log_path;
|
||||
ex_wstr g_log_name;
|
||||
TsThreadLock g_log_lock;
|
||||
|
||||
class TSLogFile
|
||||
{
|
||||
public:
|
||||
TSLogFile() {
|
||||
m_hFile = NULL;
|
||||
m_nMaxFileLength = LOG_FILE_MAX_SIZE;
|
||||
m_nMaxFileCount = LOG_FILE_MAX_COUNT;
|
||||
}
|
||||
~TSLogFile() {
|
||||
}
|
||||
bool WriteData(int level, char* buf, int len);
|
||||
bool Init(const ex_astr& log_path, const ex_astr& log_name)
|
||||
{
|
||||
m_Log_Path = log_path;
|
||||
#ifdef EX_OS_WIN32
|
||||
m_Log_Path += "\\";
|
||||
#else
|
||||
m_Log_Path += "//";
|
||||
#endif
|
||||
m_Log_Path += log_name;
|
||||
|
||||
m_log_name = log_name;
|
||||
|
||||
m_log_file_dir = log_path;
|
||||
|
||||
load_file_list();
|
||||
return true;
|
||||
}
|
||||
protected:
|
||||
bool open_file();
|
||||
bool backup_file();
|
||||
bool load_file_list();
|
||||
|
||||
protected:
|
||||
typedef std::deque<unsigned long long> log_file_deque;
|
||||
FILE* m_hFile;
|
||||
|
||||
unsigned int m_nMaxFileLength;
|
||||
unsigned int m_nMaxFileCount;
|
||||
std::string m_Log_Path;
|
||||
std::string m_log_name;
|
||||
std::string m_log_file_dir;
|
||||
log_file_deque m_log_file_list;
|
||||
private:
|
||||
|
||||
};
|
||||
TSLogFile g_log_file;
|
||||
|
||||
void TSLOG_INIT(int min_level, const wchar_t*log_file_name, const wchar_t* log_path)
|
||||
{
|
||||
g_log_min_level = min_level;
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
if (NULL == g_hConsole)
|
||||
g_hConsole = GetStdHandle(STD_OUTPUT_HANDLE);
|
||||
#endif
|
||||
|
||||
if (log_file_name)
|
||||
{
|
||||
g_log_name = log_file_name;
|
||||
}
|
||||
else
|
||||
{
|
||||
g_log_name = L"main.log";
|
||||
}
|
||||
|
||||
if (log_path)
|
||||
{
|
||||
g_log_path = log_path;
|
||||
}
|
||||
else
|
||||
{
|
||||
ex_exec_file(g_log_path);
|
||||
ex_dirname(g_log_path);
|
||||
ex_path_join(g_log_path, false, L"log");
|
||||
}
|
||||
|
||||
ex_mkdirs(g_log_path);
|
||||
|
||||
ex_astr _path, _file;
|
||||
ex_wstr2astr(g_log_path, _path);
|
||||
ex_wstr2astr(g_log_name, _file);
|
||||
|
||||
g_log_file.Init(_path, _file);
|
||||
}
|
||||
|
||||
static void _ts_printf_a(int level,TS_COLORS clrBackGround, const char* fmt, va_list valist)
|
||||
{
|
||||
if (NULL == fmt || 0 == strlen(fmt))
|
||||
return;
|
||||
if (g_log_min_level > level)
|
||||
return;
|
||||
TS_COLORS clrForeGround = TS_COLOR_NORMAL;
|
||||
switch (level)
|
||||
{
|
||||
case TS_LOG_LEVEL_DEBUG:
|
||||
{
|
||||
clrForeGround = TS_COLOR_GRAY;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_VERBOSE:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_GRAY;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_INFO:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_MAGENTA;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_WARN:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_RED;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_ERROR:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_RED;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (TS_COLOR_NORMAL == clrForeGround)
|
||||
clrForeGround = TS_COLOR_LIGHT_GRAY;
|
||||
if (TS_COLOR_NORMAL == clrBackGround)
|
||||
clrBackGround = TS_COLOR_BLACK;
|
||||
|
||||
char szTmp[4096] = { 0 };
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
vsnprintf_s(szTmp, 4096, 4095, fmt, valist);
|
||||
if (NULL != g_hConsole)
|
||||
{
|
||||
SetConsoleTextAttribute(g_hConsole, (WORD)((clrBackGround << 4) | clrForeGround));
|
||||
printf_s("%s", szTmp);
|
||||
fflush(stdout);
|
||||
SetConsoleTextAttribute(g_hConsole, TS_COLOR_GRAY);
|
||||
}
|
||||
else {
|
||||
OutputDebugStringA(szTmp);
|
||||
}
|
||||
#else
|
||||
vsnprintf(szTmp, 4095, fmt, valist);
|
||||
printf("%s", szTmp);
|
||||
fflush(stdout);
|
||||
#endif
|
||||
g_log_file.WriteData(level, szTmp, strlen(szTmp));
|
||||
}
|
||||
|
||||
static void _ts_printf_w(int level, TS_COLORS clrBackGround, const wchar_t* fmt, va_list valist)
|
||||
{
|
||||
if (NULL == fmt || 0 == wcslen(fmt))
|
||||
return;
|
||||
if (g_log_min_level > level)
|
||||
return;
|
||||
|
||||
TS_COLORS clrForeGround = TS_COLOR_NORMAL;
|
||||
switch (level)
|
||||
{
|
||||
case TS_LOG_LEVEL_DEBUG:
|
||||
{
|
||||
clrForeGround = TS_COLOR_GRAY;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_VERBOSE:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_GRAY;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_INFO:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_MAGENTA;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_WARN:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_RED;
|
||||
}
|
||||
break;
|
||||
case TS_LOG_LEVEL_ERROR:
|
||||
{
|
||||
clrForeGround = TS_COLOR_LIGHT_RED;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (TS_COLOR_NORMAL == clrForeGround)
|
||||
clrForeGround = TS_COLOR_LIGHT_GRAY;
|
||||
if (TS_COLOR_NORMAL == clrBackGround)
|
||||
clrBackGround = TS_COLOR_BLACK;
|
||||
|
||||
wchar_t szTmp[4096] = { 0 };
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
_vsnwprintf_s(szTmp, 4096, 4095, fmt, valist);
|
||||
if (NULL != g_hConsole)
|
||||
{
|
||||
SetConsoleTextAttribute(g_hConsole, (WORD)((clrBackGround << 4) | clrForeGround));
|
||||
wprintf_s(_T("%s"), szTmp);
|
||||
fflush(stdout);
|
||||
SetConsoleTextAttribute(g_hConsole, TS_COLOR_GRAY);
|
||||
}
|
||||
else {
|
||||
OutputDebugStringW(szTmp);
|
||||
}
|
||||
#else
|
||||
vswprintf(szTmp, 4095, fmt, valist);
|
||||
wprintf(L"%s", szTmp);
|
||||
fflush(stdout);
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
#define TS_PRINTF_X(fn, level) \
|
||||
void fn(const char* fmt, ...) \
|
||||
{ \
|
||||
TsThreadSmartLock locker(g_log_lock); \
|
||||
va_list valist; \
|
||||
va_start(valist, fmt); \
|
||||
_ts_printf_a(level, TS_COLOR_BLACK, fmt, valist); \
|
||||
va_end(valist); \
|
||||
} \
|
||||
void fn(const wchar_t* fmt, ...) \
|
||||
{ \
|
||||
TsThreadSmartLock locker(g_log_lock); \
|
||||
va_list valist; \
|
||||
va_start(valist, fmt); \
|
||||
_ts_printf_w(level, TS_COLOR_BLACK, fmt, valist); \
|
||||
va_end(valist); \
|
||||
}
|
||||
|
||||
TS_PRINTF_X(ts_printf_d, TS_LOG_LEVEL_DEBUG)
|
||||
TS_PRINTF_X(ts_printf_v, TS_LOG_LEVEL_VERBOSE)
|
||||
TS_PRINTF_X(ts_printf_i, TS_LOG_LEVEL_INFO)
|
||||
TS_PRINTF_X(ts_printf_w, TS_LOG_LEVEL_WARN)
|
||||
TS_PRINTF_X(ts_printf_e, TS_LOG_LEVEL_ERROR)
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
void ts_printf_e_lasterror(const char* fmt, ...)
|
||||
{
|
||||
TsThreadSmartLock locker(g_log_lock);
|
||||
|
||||
va_list valist;
|
||||
va_start(valist, fmt);
|
||||
_ts_printf_a(TS_COLOR_LIGHT_RED, TS_COLOR_BLACK, fmt, valist);
|
||||
va_end(valist);
|
||||
|
||||
//=========================================
|
||||
|
||||
LPVOID lpMsgBuf;
|
||||
DWORD dw = GetLastError();
|
||||
|
||||
FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
|
||||
NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
|
||||
(LPSTR)&lpMsgBuf, 0, NULL);
|
||||
|
||||
ts_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf);
|
||||
LocalFree(lpMsgBuf);
|
||||
}
|
||||
#endif
|
||||
|
||||
void ts_printf_bin(ex_u8* bin_data, size_t bin_size, const char* fmt, ...)
|
||||
{
|
||||
TsThreadSmartLock locker(g_log_lock);
|
||||
|
||||
va_list valist;
|
||||
va_start(valist, fmt);
|
||||
_ts_printf_a(TS_COLOR_GRAY, TS_COLOR_BLACK, fmt, valist);
|
||||
va_end(valist);
|
||||
|
||||
ts_printf_d(" (%d/0x%02x Bytes)\n", bin_size, bin_size);
|
||||
|
||||
const ex_u8* line = bin_data;
|
||||
size_t thisline = 0;
|
||||
size_t offset = 0;
|
||||
unsigned int i = 0;
|
||||
|
||||
char szTmp[128] = { 0 };
|
||||
int _offset = 0;
|
||||
|
||||
while (offset < bin_size)
|
||||
{
|
||||
memset(szTmp, 0, 128);
|
||||
_offset = 0;
|
||||
|
||||
snprintf(szTmp + _offset, 128 - _offset, "%06x ", (int)offset);
|
||||
_offset += 8;
|
||||
|
||||
thisline = bin_size - offset;
|
||||
if (thisline > 16)
|
||||
thisline = 16;
|
||||
|
||||
for (i = 0; i < thisline; i++)
|
||||
{
|
||||
snprintf(szTmp + _offset, 128 - _offset, "%02x ", line[i]);
|
||||
_offset += 3;
|
||||
}
|
||||
|
||||
snprintf(szTmp + _offset, 128 - _offset, " ");
|
||||
_offset += 2;
|
||||
|
||||
for (; i < 16; i++)
|
||||
{
|
||||
snprintf(szTmp + _offset, 128 - _offset, " ");
|
||||
_offset += 3;
|
||||
}
|
||||
|
||||
for (i = 0; i < thisline; i++)
|
||||
{
|
||||
snprintf(szTmp + _offset, 128 - _offset, "%c", (line[i] >= 0x20 && line[i] < 0x7f) ? line[i] : '.');
|
||||
_offset += 1;
|
||||
}
|
||||
|
||||
snprintf(szTmp + _offset, 128 - _offset, "\n");
|
||||
_offset += 1;
|
||||
|
||||
ts_printf_d("%s", szTmp);
|
||||
|
||||
offset += thisline;
|
||||
line += thisline;
|
||||
}
|
||||
|
||||
fflush(stdout);
|
||||
}
|
||||
|
||||
bool TSLogFile::open_file()
|
||||
{
|
||||
if (m_hFile)
|
||||
{
|
||||
fclose(m_hFile);
|
||||
m_hFile = 0;
|
||||
}
|
||||
|
||||
// Note: the log file must be opened with _fsopen in shared-read mode; otherwise its contents cannot be viewed until the process exits.
|
||||
m_hFile = _fsopen(m_Log_Path.c_str(), "a", _SH_DENYWR);
|
||||
if (NULL == m_hFile)
|
||||
return false;
|
||||
|
||||
fseek(m_hFile, 0, SEEK_END);
|
||||
unsigned long file_size = ftell(m_hFile);
|
||||
if (file_size > (unsigned long)m_nMaxFileLength)
|
||||
{
|
||||
// back up the current log file
|
||||
if (backup_file())
|
||||
{
|
||||
// re-open the log file
|
||||
return open_file();
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool TSLogFile::backup_file()
|
||||
{
|
||||
char szNewFileLogName[LOG_PATH_MAX_LEN] = {0};
|
||||
char szBaseNewFileLogName[LOG_PATH_MAX_LEN] = { 0 };
|
||||
#ifdef EX_OS_WIN32
|
||||
SYSTEMTIME st;
|
||||
GetLocalTime(&st);
|
||||
sprintf_s(szNewFileLogName, LOG_PATH_MAX_LEN, "%s\\%04d%02d%02d%02d%02d%02d.log",
|
||||
m_log_file_dir.c_str(),st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
|
||||
sprintf_s(szBaseNewFileLogName, LOG_PATH_MAX_LEN, "%04d%02d%02d%02d%02d%02d",
|
||||
st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
#else
|
||||
time_t timep;
|
||||
struct tm *p;
|
||||
time(&timep);
|
||||
p = localtime(&timep); //get server's time
|
||||
if (p == NULL)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
sprintf(szNewFileLogName, "%s//%04d%02d%02d%02d%02d%02d.log",
|
||||
m_log_file_dir.c_str(),p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
|
||||
sprintf(szBaseNewFileLogName, "%04d%02d%02d%02d%02d%02d",
|
||||
p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
|
||||
#endif
|
||||
if (m_hFile)
|
||||
{
|
||||
fclose(m_hFile);
|
||||
m_hFile = 0;
|
||||
}
|
||||
#ifdef EX_OS_WIN32
|
||||
if (!MoveFileA(m_Log_Path.c_str(), szNewFileLogName))
|
||||
{
|
||||
DWORD dwError = GetLastError();
|
||||
|
||||
DeleteFileA(szNewFileLogName);
|
||||
|
||||
MoveFileA(m_Log_Path.c_str(), szNewFileLogName);
|
||||
}
|
||||
#else
|
||||
if (rename(m_Log_Path.c_str(), szNewFileLogName) != 0)
|
||||
{
|
||||
remove(szNewFileLogName);
|
||||
|
||||
rename(m_Log_Path.c_str(), szNewFileLogName);
|
||||
}
|
||||
#endif
|
||||
unsigned long long value = atoll(szBaseNewFileLogName);
|
||||
if (value !=0 )
|
||||
{
|
||||
m_log_file_list.push_back(value);
|
||||
}
|
||||
int try_count = 0;
|
||||
while ((m_log_file_list.size() > m_nMaxFileCount))
|
||||
{
|
||||
unsigned long long value = m_log_file_list.front();
|
||||
char szDeleteFile[256] = { 0 };
|
||||
#ifdef EX_OS_WIN32
|
||||
sprintf_s(szDeleteFile, 256, "%s\\%llu.log", m_log_file_dir.c_str(), value);
|
||||
if (DeleteFileA(szDeleteFile))
|
||||
{
|
||||
m_log_file_list.pop_front();
|
||||
}
|
||||
#else
|
||||
sprintf(szDeleteFile, "%s//%llu.log", m_log_file_dir.c_str(), value);
|
||||
if (remove(szDeleteFile) == 0)
|
||||
{
|
||||
m_log_file_list.pop_front();
|
||||
}
|
||||
#endif
|
||||
else
|
||||
{
|
||||
if (try_count > 5)
|
||||
{
|
||||
break;
|
||||
}
|
||||
try_count++;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool TSLogFile::WriteData(int level, char* buf, int len)
|
||||
{
|
||||
if (len > LOG_CONTENT_MAX_LEN)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO: every write currently checks the file size to decide whether to start a new log file, which is inefficient. The size should instead be cached, updated after each write, and a new log file started once it exceeds the threshold.
|
||||
if (!open_file())
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
unsigned long _tid = GetCurrentThreadId();
|
||||
#else
|
||||
unsigned long _tid = pthread_self();
|
||||
#endif
|
||||
#ifdef EX_OS_WIN32
|
||||
unsigned long now = GetTickCount();
|
||||
#else
|
||||
// unsigned long now = 0;
|
||||
struct timeval tv;
|
||||
if (gettimeofday(&tv, NULL /* tz */) != 0) return false;
|
||||
unsigned long now = (double)tv.tv_sec + (((double)tv.tv_usec) / 1000.0);
|
||||
#endif
|
||||
|
||||
char szLog[LOG_CONTENT_MAX_LEN + 100] = {0};
|
||||
#ifdef EX_OS_WIN32
|
||||
SYSTEMTIME st;
|
||||
GetLocalTime(&st);
|
||||
sprintf_s(szLog, LOG_CONTENT_MAX_LEN + 100, "[%04d-%02d-%02d %02d:%02d:%02d] %s",
|
||||
st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond, buf);
|
||||
#else
|
||||
time_t timep;
|
||||
struct tm *p;
|
||||
time(&timep);
|
||||
p = localtime(&timep); //get server's time
|
||||
if (p == NULL)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
sprintf(szLog, "[%04d-%02d-%02d %02d:%02d:%02d] %s",
|
||||
p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec, buf);
|
||||
#endif
|
||||
// TODO: track the accumulated file size here
|
||||
fwrite(szLog, strlen(szLog), 1, m_hFile);
|
||||
fflush(m_hFile);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool TSLogFile::load_file_list()
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
struct _finddata_t data;
|
||||
std::string log_match = m_log_file_dir;
|
||||
log_match += "\\";
|
||||
log_match += "*.log";
|
||||
long hnd = _findfirst(log_match.c_str(), &data);
|
||||
if (hnd < 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
int nRet = (hnd <0) ? -1 : 1;
|
||||
while (nRet > 0)
|
||||
{
|
||||
if (data.attrib == _A_SUBDIR)
|
||||
printf(" [%s]*\n", data.name);
|
||||
else {
|
||||
|
||||
if (m_log_name.compare(data.name) == 0)
|
||||
{
|
||||
}
|
||||
else {
|
||||
char* match = strrchr(data.name, '.');
|
||||
if (match != NULL)
|
||||
{
|
||||
*match = '\0';
|
||||
}
|
||||
unsigned long long value = atoll(data.name);
|
||||
if (value == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
m_log_file_list.push_back(value);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
nRet = _findnext(hnd, &data);
|
||||
}
|
||||
_findclose(hnd);
|
||||
#else
|
||||
DIR *dir;
|
||||
|
||||
struct dirent *ptr;
|
||||
|
||||
dir = opendir(m_log_file_dir.c_str());
|
||||
|
||||
while ((ptr = readdir(dir)) != NULL)
|
||||
{
|
||||
if(ptr->d_type == 8)
|
||||
{
|
||||
char temp_file_name[PATH_MAX] = {0};
|
||||
strcpy(temp_file_name,ptr->d_name);
|
||||
if (m_log_name.compare(temp_file_name) == 0)
|
||||
{
|
||||
|
||||
}else{
|
||||
char* match = strrchr(temp_file_name, '.');
|
||||
if (match != NULL)
|
||||
{
|
||||
*match = '\0';
|
||||
}
|
||||
unsigned long long value = atoll(temp_file_name);
|
||||
if (value == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
m_log_file_list.push_back(value);
|
||||
}
|
||||
}
|
||||
// printf("d_name: %s d_type: %d\n", ptr->d_name, ptr->d_type);
|
||||
}
|
||||
|
||||
|
||||
closedir(dir);
|
||||
#endif // EX_OS_WIN32
|
||||
|
||||
std::sort(m_log_file_list.begin(), m_log_file_list.end(), std::less<unsigned long long>());
|
||||
return true;
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
#ifndef __TS_LOG_H__
|
||||
#define __TS_LOG_H__
|
||||
|
||||
#include <ex.h>
|
||||
|
||||
#define TS_LOG_LEVEL_DEBUG 0
|
||||
#define TS_LOG_LEVEL_VERBOSE 1
|
||||
#define TS_LOG_LEVEL_INFO 2
|
||||
#define TS_LOG_LEVEL_WARN 3
|
||||
#define TS_LOG_LEVEL_ERROR 4
|
||||
|
||||
void TSLOG_INIT(int min_level, const wchar_t* log_file_name, const wchar_t* log_path = NULL);
|
||||
|
||||
#define TSLOGV ts_printf_v
|
||||
#define TSLOGI ts_printf_i
|
||||
#define TSLOGW ts_printf_w
|
||||
#define TSLOGE ts_printf_e
|
||||
|
||||
#ifdef TS_DEBUG
|
||||
# define TSLOGD ts_printf_d
|
||||
# define TSLOG_BIN ts_printf_bin
|
||||
#else
|
||||
# define TSLOGD
|
||||
# define TSLOG_BIN
|
||||
#endif
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
#define TSLOGE_WIN ts_printf_e_lasterror
|
||||
void ts_printf_e_lasterror(const char* fmt, ...);
|
||||
void ts_printf_e_lasterror(const wchar_t* fmt, ...);
|
||||
#endif
|
||||
|
||||
|
||||
void ts_printf_d(const char* fmt, ...);
|
||||
void ts_printf_v(const char* fmt, ...);
|
||||
void ts_printf_i(const char* fmt, ...);
|
||||
void ts_printf_w(const char* fmt, ...);
|
||||
void ts_printf_e(const char* fmt, ...);
|
||||
|
||||
void ts_printf_d(const wchar_t* fmt, ...);
|
||||
void ts_printf_v(const wchar_t* fmt, ...);
|
||||
void ts_printf_i(const wchar_t* fmt, ...);
|
||||
void ts_printf_w(const wchar_t* fmt, ...);
|
||||
void ts_printf_e(const wchar_t* fmt, ...);
|
||||
|
||||
void ts_printf_bin(ex_u8* bin_data, size_t bin_size, const char* fmt, ...);
|
||||
void ts_printf_bin(ex_u8* bin_data, size_t bin_size, const wchar_t* fmt, ...);
|
||||
|
||||
|
||||
#endif // __TS_LOG_H__
|
|
@ -11,6 +11,12 @@
|
|||
#include "ex/ex_util.h"
|
||||
#include "ex/ex_str.h"
|
||||
#include "ex/ex_path.h"
|
||||
#include "ex/ex_thread.h"
|
||||
#include "ex/ex_log.h"
|
||||
#include "ex/ex_ini.h"
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
# include "ex/ex_winsrv.h"
|
||||
#endif
|
||||
|
||||
#endif // __LIB_EX_H__
|
||||
|
|
|
@ -37,4 +37,21 @@
|
|||
#define EX_CURRENT_DIR_STR L"."
|
||||
#define EX_NULL_END L'\0'
|
||||
|
||||
|
||||
|
||||
//====================================================
|
||||
// error code.
|
||||
//====================================================
|
||||
#define EXRV_OK 0
|
||||
#define EXRV_SYS_ERR 1 // system error; use GetLastError() or errno to get the specific error code
|
||||
#define EXRV_FAILED 2 // operation failed
|
||||
|
||||
//#define EXRV_CANNOT_FOUND 9
|
||||
#define EXRV_CANNOT_CREATE 10
|
||||
#define EXRV_CANNOT_OPEN 11
|
||||
#define EXRV_CANNOT_SET 12
|
||||
#define EXRV_CANNOT_REMOVE 13
|
||||
#define EXRV_NOT_START 14
|
||||
#define EXRV_NOT_EXISTS 14
|
||||
|
||||
#endif // __LIB_EX_CONST_H__
|
||||
|
|
|
@ -1,16 +1,27 @@
|
|||
#ifndef __TS_INI_H__
|
||||
#define __TS_INI_H__
|
||||
#ifndef __EX_INI_H__
|
||||
#define __EX_INI_H__
|
||||
|
||||
#include <ex.h>
|
||||
/*
|
||||
Important notes:
|
||||
|
||||
typedef std::map<ex_wstr, ex_wstr> ts_ini_kvs;
|
||||
1. A line whose first character is a semicolon ';' or a hash '#' is a comment line.
|
||||
2. Inline comments are not supported.
|
||||
3. A key/value pair is split at the first '='. Spaces immediately before and after the '=' are ignored; spaces after that are kept, including trailing spaces.
|
||||
4. Key/value pairs that do not belong to any section can be retrieved with GetDumySection().
|
||||
The dumy section mainly exists so that simple Python files can also be used as configuration files.
|
||||
*/
|
||||
|
||||
class TsIniSection
|
||||
#include "ex_str.h"
|
||||
#include <map>
|
||||
|
||||
typedef std::map<ex_wstr, ex_wstr> ex_ini_kvs;
|
||||
|
||||
class ExIniSection
|
||||
{
|
||||
public:
|
||||
TsIniSection();
|
||||
TsIniSection(const ex_wstr& strSectionName);
|
||||
~TsIniSection();
|
||||
ExIniSection();
|
||||
ExIniSection(const ex_wstr& strSectionName);
|
||||
~ExIniSection();
|
||||
|
||||
void ClearUp(void);
|
||||
|
||||
|
@ -27,14 +38,14 @@ public:
|
|||
|
||||
bool SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool bAddIfNotExists = false);
|
||||
|
||||
ts_ini_kvs& GetKeyValues(void) { return m_kvs; }
|
||||
ex_ini_kvs& GetKeyValues(void) { return m_kvs; }
|
||||
|
||||
int Count(void) const
|
||||
{
|
||||
return m_kvs.size();
|
||||
}
|
||||
void Save(FILE* file, int codepage);
|
||||
#ifdef _DEBUG
|
||||
#ifdef EX_DEBUG
|
||||
void Dump(void);
|
||||
#endif
|
||||
|
||||
|
@ -43,14 +54,14 @@ protected:
|
|||
|
||||
private:
|
||||
ex_wstr m_strName;
|
||||
ts_ini_kvs m_kvs;
|
||||
ex_ini_kvs m_kvs;
|
||||
};
|
||||
|
||||
|
||||
typedef std::map<ex_wstr, TsIniSection*> ts_ini_sections;
|
||||
typedef std::map<ex_wstr, ExIniSection*> ex_ini_sections;
|
||||
|
||||
// Ini file
|
||||
class TsIniFile
|
||||
class ExIniFile
|
||||
{
|
||||
public:
|
||||
enum PARSE_RV
|
||||
|
@ -63,8 +74,8 @@ public:
|
|||
};
|
||||
|
||||
public:
|
||||
TsIniFile();
|
||||
~TsIniFile();
|
||||
ExIniFile();
|
||||
~ExIniFile();
|
||||
|
||||
void ClearUp(void);
|
||||
|
||||
|
@ -72,24 +83,26 @@ public:
|
|||
bool LoadFromFile(const ex_wstr& strFileName, bool bClearOld = true);
|
||||
bool LoadFromMemory(const ex_wstr& strData, bool bClearOld = true);
|
||||
|
||||
TsIniSection* GetSection(const ex_wstr& strName, bool bCreateIfNotExists = false);
|
||||
ExIniSection* GetSection(const ex_wstr& strName, bool bCreateIfNotExists = false);
|
||||
ExIniSection* GetDumySection(void) { return &m_dumy_sec; }
|
||||
|
||||
int Count(void) const
|
||||
{
|
||||
return m_secs.size();
|
||||
}
|
||||
void Save(int codepage = EX_CODEPAGE_UTF8);
|
||||
#ifdef _DEBUG
|
||||
#ifdef EX_DEBUG
|
||||
void Dump(void);
|
||||
#endif
|
||||
|
||||
protected:
|
||||
static PARSE_RV _ParseLine(const ex_wstr& strLine, ex_wstr& strKey, ex_wstr& strValue);
|
||||
bool _ProcessLine(const ex_wstr strLine, TsIniSection** pCurSection);
|
||||
bool _ProcessLine(const ex_wstr strLine, ExIniSection** pCurSection);
|
||||
|
||||
private:
|
||||
ts_ini_sections m_secs;
|
||||
ex_ini_sections m_secs;
|
||||
ExIniSection m_dumy_sec;
|
||||
ex_wstr m_file_path;
|
||||
};
|
||||
|
||||
#endif // __TS_INI_H__
|
||||
#endif // __EX_INI_H__
|
|
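The parsing rules translated above (comment prefixes, first-'=' splitting, the dumy section) describe how the renamed ExIniFile class is meant to be used. Below is a minimal C++ sketch, not part of the diff, that reads one value through the methods declared in this commit; the file name is a placeholder, while the "common" section and "current_client" key are borrowed from ts_cfg.cpp in this same commit.

// Hypothetical usage sketch (not part of the diff).
#include <ex.h>

bool read_current_client(ex_wstr& client_name)
{
    ExIniFile ini;
    if (!ini.LoadFromFile(L"ssh.ini"))               // placeholder file name
        return false;

    ExIniSection* sec = ini.GetSection(L"common");   // section name used elsewhere in this commit
    if (NULL == sec)
        return false;

    return sec->GetStr(L"current_client", client_name);  // key name taken from ts_cfg.cpp
}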
@ -0,0 +1,53 @@
|
|||
#ifndef __EX_LOG_H__
|
||||
#define __EX_LOG_H__
|
||||
|
||||
#include "ex_types.h"
|
||||
|
||||
#define EX_LOG_LEVEL_DEBUG 0
|
||||
#define EX_LOG_LEVEL_VERBOSE 1
|
||||
#define EX_LOG_LEVEL_INFO 2
|
||||
#define EX_LOG_LEVEL_WARN 3
|
||||
#define EX_LOG_LEVEL_ERROR 4
|
||||
|
||||
#define EX_LOG_FILE_MAX_SIZE 1024*1024*10
|
||||
#define EX_LOG_FILE_MAX_COUNT 10
|
||||
|
||||
|
||||
void EXLOG_LEVEL(int min_level);
|
||||
|
||||
// Set the log file name and path; if no path is given, the "log" directory under the executable's directory is used.
|
||||
void EXLOG_FILE(const wchar_t* log_file, const wchar_t* log_path = NULL, ex_u32 max_filesize = EX_LOG_FILE_MAX_SIZE, ex_u8 max_filecount = EX_LOG_FILE_MAX_COUNT);
|
||||
|
||||
void EXLOG_CONSOLE(bool output_to_console);
|
||||
|
||||
#define EXLOGV ex_printf_v
|
||||
#define EXLOGI ex_printf_i
|
||||
#define EXLOGW ex_printf_w
|
||||
#define EXLOGE ex_printf_e
|
||||
#define EXLOGD ex_printf_d
|
||||
#define EXLOG_BIN ex_printf_bin
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
#define EXLOGE_WIN ex_printf_e_lasterror
|
||||
void ex_printf_e_lasterror(const char* fmt, ...);
|
||||
void ex_printf_e_lasterror(const wchar_t* fmt, ...);
|
||||
#endif
|
||||
|
||||
|
||||
void ex_printf_d(const char* fmt, ...);
|
||||
void ex_printf_v(const char* fmt, ...);
|
||||
void ex_printf_i(const char* fmt, ...);
|
||||
void ex_printf_w(const char* fmt, ...);
|
||||
void ex_printf_e(const char* fmt, ...);
|
||||
|
||||
void ex_printf_d(const wchar_t* fmt, ...);
|
||||
void ex_printf_v(const wchar_t* fmt, ...);
|
||||
void ex_printf_i(const wchar_t* fmt, ...);
|
||||
void ex_printf_w(const wchar_t* fmt, ...);
|
||||
void ex_printf_e(const wchar_t* fmt, ...);
|
||||
|
||||
void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const char* fmt, ...);
|
||||
void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const wchar_t* fmt, ...);
|
||||
|
||||
|
||||
#endif // __EX_LOG_H__
|
|
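This new header is what replaces the deleted ts_log facility earlier in the commit. The following is a minimal initialization sketch, not part of the diff, mirroring the calls the commit adds to wWinMain in tp_assist; the log directory literal is a made-up placeholder.

// Hypothetical initialization sketch (not part of the diff).
#include <ex.h>

void init_logging()
{
#ifdef EX_DEBUG
    EXLOG_LEVEL(EX_LOG_LEVEL_DEBUG);
#else
    EXLOG_LEVEL(EX_LOG_LEVEL_INFO);
#endif
    // same max-size / max-count arguments the commit passes in wWinMain; the path is a placeholder
    EXLOG_FILE(L"tp_assist.log", L"C:\\teleport\\log", 1024, 2);

    EXLOGV("logger ready\n");
    EXLOGE("[ERROR] something went wrong\n");
}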
@ -36,6 +36,10 @@ bool ex_dirname(ex_wstr& inout_filename);
|
|||
bool ex_path_join(ex_wstr& inout_path, bool auto_abspath, ...);
|
||||
bool ex_abspath_to(const ex_wstr& base_abs_path, const ex_wstr& relate_path, ex_wstr& out_path);
|
||||
bool ex_mkdirs(const ex_wstr& in_path);
|
||||
|
||||
// Get the extension part of a file name (without the dot; e.g. abc.py returns py)
|
||||
bool ex_path_ext_name(const ex_wstr& in_filename, ex_wstr& out_ext);
|
||||
|
||||
#endif
|
||||
|
||||
#endif // __LIB_EX_PATH_H__
|
||||
|
|
|
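The comment above documents the new ex_path_ext_name() helper. A tiny sketch of that contract in use, not part of the diff; the file name is only an illustration.

// Hypothetical sketch (not part of the diff).
#include <ex.h>

void ext_demo()
{
    ex_wstr ext;
    if (ex_path_ext_name(L"abc.py", ext))
    {
        // per the comment above, ext now holds L"py" (no leading dot)
    }
}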
@ -3,20 +3,12 @@
|
|||
|
||||
#if defined(_WIN32) || defined(WIN32)
|
||||
# define EX_OS_WIN32
|
||||
// # define EX_OS_NAME L"windows"
|
||||
// # ifdef _CONSOLE
|
||||
// # define EX_CONSOLE
|
||||
// # endif
|
||||
#elif defined(__linux__)
|
||||
# define EX_OS_LINUX
|
||||
# define EX_OS_UNIX
|
||||
// # define EX_OS_NAME L"linux"
|
||||
// # define EX_CONSOLE
|
||||
#elif defined(__APPLE__)
|
||||
# define EX_OS_MACOS
|
||||
# define EX_OS_UNIX
|
||||
// # define EX_OS_NAME L"macos"
|
||||
// # define PYS_CONSOLE
|
||||
#else
|
||||
# error unsupported platform.
|
||||
#endif
|
||||
|
@ -110,8 +102,6 @@
|
|||
# endif
|
||||
#endif
|
||||
|
||||
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
# pragma comment(lib, "shlwapi.lib")
|
||||
#endif
|
||||
|
|
|
@ -4,7 +4,12 @@
|
|||
#include "ex_types.h"
|
||||
|
||||
#define EX_CODEPAGE_ACP 0
|
||||
#define EX_CODEPAGE_UTF8 1
|
||||
#define EX_CODEPAGE_UTF8 1
|
||||
#ifdef EX_OS_WIN32
|
||||
# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_ACP
|
||||
#else
|
||||
# define EX_CODEPAGE_DEFAULT EX_CODEPAGE_UTF8
|
||||
#endif
|
||||
|
||||
#define EX_RSC_BEGIN 0x01
|
||||
#define EX_RSC_END 0x02
|
||||
|
@ -53,10 +58,10 @@ typedef std::wstring ex_wstr;
|
|||
typedef std::vector<ex_astr> ex_astrs;
|
||||
typedef std::vector<ex_wstr> ex_wstrs;
|
||||
|
||||
bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_ACP);
|
||||
bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_ACP);
|
||||
bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_ACP);
|
||||
bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_ACP);
|
||||
bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT);
|
||||
bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page = EX_CODEPAGE_DEFAULT);
|
||||
bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT);
|
||||
bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page = EX_CODEPAGE_DEFAULT);
|
||||
|
||||
bool ex_only_white_space(const ex_astr& str_check);
|
||||
bool ex_only_white_space(const ex_wstr& str_check);
|
||||
|
|
|
@ -1,24 +1,26 @@
|
|||
#ifndef __TS_THREAD_H__
|
||||
#define __TS_THREAD_H__
|
||||
#ifndef __EX_THREAD_H__
|
||||
#define __EX_THREAD_H__
|
||||
|
||||
//#include "ts_common.h"
|
||||
#include <ex/ex_str.h>
|
||||
|
||||
#include <ex.h>
|
||||
#include <list>
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
# include <process.h>
|
||||
typedef HANDLE TS_THREAD_HANDLE;
|
||||
typedef HANDLE EX_THREAD_HANDLE;
|
||||
#else
|
||||
# include <pthread.h>
|
||||
typedef pthread_t TS_THREAD_HANDLE;
|
||||
typedef pthread_t EX_THREAD_HANDLE;
|
||||
#endif
|
||||
|
||||
class TsThreadManager;
|
||||
class ExThreadManager;
|
||||
|
||||
class TsThreadBase
|
||||
class ExThreadBase
|
||||
{
|
||||
public:
|
||||
TsThreadBase(TsThreadManager* tm, const char* thread_name);
|
||||
virtual ~TsThreadBase();
|
||||
ExThreadBase(ExThreadManager* tm, const char* thread_name);
|
||||
virtual ~ExThreadBase();
|
||||
|
||||
bool is_running(void) { return m_is_running; }
|
||||
|
||||
|
@ -41,21 +43,24 @@ protected:
|
|||
static void* _thread_func(void * pParam);
|
||||
#endif
|
||||
|
||||
// suspend the thread for ms milliseconds
|
||||
// void _sleep_ms(int ms);
|
||||
|
||||
protected:
|
||||
TsThreadManager* m_thread_manager;
|
||||
ExThreadManager* m_thread_manager;
|
||||
ex_astr m_thread_name;
|
||||
TS_THREAD_HANDLE m_handle;
|
||||
EX_THREAD_HANDLE m_handle;
|
||||
bool m_is_running;
|
||||
bool m_stop_by_request;
|
||||
};
|
||||
|
||||
|
||||
// Thread lock (for use within a single process)
|
||||
class TsThreadLock
|
||||
class ExThreadLock
|
||||
{
|
||||
public:
|
||||
TsThreadLock();
|
||||
virtual ~TsThreadLock();
|
||||
ExThreadLock();
|
||||
virtual ~ExThreadLock();
|
||||
|
||||
void lock(void);
|
||||
void unlock(void);
|
||||
|
@ -69,47 +74,47 @@ private:
|
|||
};
|
||||
|
||||
// Thread-lock helper class (scoped lock)
|
||||
class TsThreadSmartLock
|
||||
class ExThreadSmartLock
|
||||
{
|
||||
public:
|
||||
TsThreadSmartLock(TsThreadLock& lock) : m_lock(lock)
|
||||
ExThreadSmartLock(ExThreadLock& lock) : m_lock(lock)
|
||||
{
|
||||
m_lock.lock();
|
||||
}
|
||||
~TsThreadSmartLock()
|
||||
~ExThreadSmartLock()
|
||||
{
|
||||
m_lock.unlock();
|
||||
}
|
||||
|
||||
private:
|
||||
TsThreadLock& m_lock;
|
||||
ExThreadLock& m_lock;
|
||||
};
|
||||
|
||||
typedef std::list<TsThreadBase*> ts_threads;
|
||||
typedef std::list<ExThreadBase*> ex_threads;
|
||||
|
||||
class TsThreadManager
|
||||
class ExThreadManager
|
||||
{
|
||||
friend class TsThreadBase;
|
||||
friend class ExThreadBase;
|
||||
|
||||
public:
|
||||
TsThreadManager();
|
||||
virtual ~TsThreadManager();
|
||||
ExThreadManager();
|
||||
virtual ~ExThreadManager();
|
||||
|
||||
void stop_all(void);
|
||||
|
||||
private:
|
||||
void _add_thread(TsThreadBase* tb);
|
||||
void _remove_thread(TsThreadBase* tb);
|
||||
void _add_thread(ExThreadBase* tb);
|
||||
void _remove_thread(ExThreadBase* tb);
|
||||
|
||||
private:
|
||||
TsThreadLock m_lock;
|
||||
ts_threads m_threads;
|
||||
ExThreadLock m_lock;
|
||||
ex_threads m_threads;
|
||||
};
|
||||
|
||||
|
||||
// Atomic operations
|
||||
int ts_atomic_add(volatile int* pt, int t);
|
||||
int ts_atomic_inc(volatile int* pt);
|
||||
int ts_atomic_dec(volatile int* pt);
|
||||
int ex_atomic_add(volatile int* pt, int t);
|
||||
int ex_atomic_inc(volatile int* pt);
|
||||
int ex_atomic_dec(volatile int* pt);
|
||||
|
||||
#endif // __TS_THREAD_H__
|
||||
#endif // __EX_THREAD_H__
|
|
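The renamed thread classes above include ExThreadSmartLock, the scoped-lock helper that the deleted ts_log.cpp used under the old name TsThreadSmartLock. A minimal sketch of the same pattern with the new names, not part of the diff; the guarded counter is only an illustration.

// Hypothetical sketch (not part of the diff): scoped locking with the renamed classes.
#include <ex.h>

static ExThreadLock g_lock;
static int g_counter = 0;

void bump_counter()
{
    ExThreadSmartLock locker(g_lock);   // locks in the constructor, unlocks in the destructor
    ++g_counter;
}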
@ -31,4 +31,6 @@ typedef int EX_BOOL;
|
|||
typedef std::vector<ex_u8> ex_bin;
|
||||
typedef std::vector<char> ex_chars;
|
||||
|
||||
typedef ex_u32 ex_rv;
|
||||
|
||||
#endif // __LIB_EX_TYPE_H__
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
#ifndef __EX_WINSRV_H__
|
||||
#define __EX_WINSRV_H__
|
||||
|
||||
#include "ex_str.h"
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
|
||||
ex_rv ex_winsrv_install(const ex_wstr& srv_name, const ex_wstr& disp_name, const ex_wstr& exec_path);
|
||||
ex_rv ex_winsrv_uninstall(const ex_wstr& srv_name);
|
||||
bool ex_winsrv_is_exists(const ex_wstr& srv_name);
|
||||
ex_rv ex_winsrv_start(const ex_wstr& srv_name);
|
||||
ex_rv ex_winsrv_stop(const ex_wstr& srv_name);
|
||||
ex_rv ex_winsrv_status(const ex_wstr& srv_name, ex_ulong& status);
|
||||
ex_rv ex_winsrv_pause(const ex_wstr& srv_name);
|
||||
ex_rv ex_winsrv_resume(const ex_wstr& srv_name);
|
||||
ex_rv ex_winsrv_config(const ex_wstr& srv_name, QUERY_SERVICE_CONFIG& cfg);
|
||||
ex_rv ex_winsrv_pid(const ex_wstr& srv_name, ex_ulong& pid);
|
||||
|
||||
#endif
|
||||
|
||||
#endif // __EX_WINSRV_H__
|
|
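ex_winsrv.h is added by this commit, but only its declarations appear in the diff. The sketch below, not part of the diff, shows how these helpers could be called on Windows; the service name, display name, and executable path are placeholders.

// Hypothetical sketch (not part of the diff).
#include <ex.h>

#ifdef EX_OS_WIN32
bool install_and_start_service()
{
    ex_wstr name = L"ExampleSvc";             // placeholder
    ex_wstr disp = L"Example Service";        // placeholder
    ex_wstr exe  = L"C:\\example\\svc.exe";   // placeholder

    if (!ex_winsrv_is_exists(name))
    {
        if (EXRV_OK != ex_winsrv_install(name, disp, exe))
            return false;
    }
    return EXRV_OK == ex_winsrv_start(name);
}
#endif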
@ -1,40 +1,40 @@
|
|||
#include "stdafx.h"
|
||||
#include "ts_ini.h"
|
||||
#include <ex/ex_ini.h>
|
||||
#include <ex/ex_log.h>
|
||||
|
||||
TsIniSection::TsIniSection(const ex_wstr& strSectionName)
|
||||
ExIniSection::ExIniSection(const ex_wstr& strSectionName)
|
||||
{
|
||||
m_kvs.clear();
|
||||
m_strName = strSectionName;
|
||||
}
|
||||
|
||||
TsIniSection::TsIniSection()
|
||||
ExIniSection::ExIniSection()
|
||||
{
|
||||
m_kvs.clear();
|
||||
m_strName = _T("N/A");
|
||||
}
|
||||
|
||||
TsIniSection::~TsIniSection()
|
||||
ExIniSection::~ExIniSection()
|
||||
{
|
||||
m_kvs.clear();
|
||||
}
|
||||
|
||||
bool TsIniSection::_IsKeyExists(const ex_wstr& strKey)
|
||||
bool ExIniSection::_IsKeyExists(const ex_wstr& strKey)
|
||||
{
|
||||
return (m_kvs.end() != m_kvs.find(strKey));
|
||||
}
|
||||
|
||||
void TsIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue, const ex_wstr& strDefault)
|
||||
void ExIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue, const ex_wstr& strDefault)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
ex_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
if (m_kvs.end() == it)
|
||||
strValue = strDefault;
|
||||
else
|
||||
strValue = (*it).second;
|
||||
}
|
||||
|
||||
bool TsIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue)
|
||||
bool ExIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
ex_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
if (m_kvs.end() == it)
|
||||
return false;
|
||||
|
||||
|
@ -42,9 +42,9 @@ bool TsIniSection::GetStr(const ex_wstr& strKey, ex_wstr& strValue)
|
|||
return true;
|
||||
}
|
||||
|
||||
void TsIniSection::GetInt(const ex_wstr& strKey, int& iValue, int iDefault)
|
||||
void ExIniSection::GetInt(const ex_wstr& strKey, int& iValue, int iDefault)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
ex_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
if (m_kvs.end() == it)
|
||||
{
|
||||
iValue = iDefault;
|
||||
|
@ -58,9 +58,9 @@ void TsIniSection::GetInt(const ex_wstr& strKey, int& iValue, int iDefault)
|
|||
#endif
|
||||
}
|
||||
|
||||
bool TsIniSection::GetInt(const ex_wstr& strKey, int& iValue)
|
||||
bool ExIniSection::GetInt(const ex_wstr& strKey, int& iValue)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
ex_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
if (m_kvs.end() == it)
|
||||
return false;
|
||||
|
||||
|
@ -73,9 +73,9 @@ bool TsIniSection::GetInt(const ex_wstr& strKey, int& iValue)
|
|||
return true;
|
||||
}
|
||||
|
||||
void TsIniSection::GetBool(const ex_wstr& strKey, bool& bValue, bool bDefault)
|
||||
void ExIniSection::GetBool(const ex_wstr& strKey, bool& bValue, bool bDefault)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
ex_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
if (m_kvs.end() == it)
|
||||
{
|
||||
bValue = bDefault;
|
||||
|
@ -95,9 +95,9 @@ void TsIniSection::GetBool(const ex_wstr& strKey, bool& bValue, bool bDefault)
|
|||
bValue = false;
|
||||
}
|
||||
|
||||
bool TsIniSection::GetBool(const ex_wstr& strKey, bool& bValue)
|
||||
bool ExIniSection::GetBool(const ex_wstr& strKey, bool& bValue)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
ex_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
if (m_kvs.end() == it)
|
||||
return false;
|
||||
|
||||
|
@ -117,9 +117,9 @@ bool TsIniSection::GetBool(const ex_wstr& strKey, bool& bValue)
|
|||
}
|
||||
|
||||
|
||||
bool TsIniSection::SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool bAddIfNotExists)
|
||||
bool ExIniSection::SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool bAddIfNotExists)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
ex_ini_kvs::iterator it = m_kvs.find(strKey);
|
||||
if (it != m_kvs.end())
|
||||
{
|
||||
it->second = strValue;
|
||||
|
@ -135,14 +135,14 @@ bool TsIniSection::SetValue(const ex_wstr& strKey, const ex_wstr& strValue, bool
|
|||
return false;
|
||||
}
|
||||
|
||||
void TsIniSection::ClearUp(void)
|
||||
void ExIniSection::ClearUp(void)
|
||||
{
|
||||
m_kvs.clear();
|
||||
}
|
||||
|
||||
void TsIniSection::Save(FILE* file, int codepage)
|
||||
void ExIniSection::Save(FILE* file, int codepage)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.begin();
|
||||
ex_ini_kvs::iterator it = m_kvs.begin();
|
||||
for (; it != m_kvs.end(); ++it)
|
||||
{
|
||||
ex_wstr temp;
|
||||
|
@ -157,27 +157,27 @@ void TsIniSection::Save(FILE* file, int codepage)
|
|||
return;
|
||||
}
|
||||
|
||||
#ifdef _DEBUG
|
||||
void TsIniSection::Dump(void)
|
||||
#ifdef EX_DEBUG
|
||||
void ExIniSection::Dump(void)
|
||||
{
|
||||
ts_ini_kvs::iterator it = m_kvs.begin();
|
||||
ex_ini_kvs::iterator it = m_kvs.begin();
|
||||
for (; it != m_kvs.end(); ++it)
|
||||
{
|
||||
TSLOGD(_T(" [%s]=[%s]\n"), it->first.c_str(), it->second.c_str());
|
||||
EXLOGD(_T(" [%s]=[%s]\n"), it->first.c_str(), it->second.c_str());
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
TsIniFile::TsIniFile()
|
||||
ExIniFile::ExIniFile()
|
||||
{
|
||||
}
|
||||
|
||||
TsIniFile::~TsIniFile()
|
||||
ExIniFile::~ExIniFile()
|
||||
{
|
||||
ClearUp();
|
||||
}
|
||||
|
||||
bool TsIniFile::LoadFromFile(const ex_wstr& strFileName, bool bClearOld)
|
||||
bool ExIniFile::LoadFromFile(const ex_wstr& strFileName, bool bClearOld)
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
HANDLE hFile = ::CreateFileW(strFileName.c_str(), GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
|
||||
|
@ -233,26 +233,21 @@ bool TsIniFile::LoadFromFile(const ex_wstr& strFileName, bool bClearOld)
|
|||
pOffset += 3;
|
||||
}
|
||||
// Configuration files always use UTF-8 encoding
|
||||
// #ifdef EX_OS_WIN32
|
||||
ex_wstr fileData;
|
||||
if (!ex_astr2wstr(pOffset, fileData, EX_CODEPAGE_UTF8))
|
||||
return false;
|
||||
// #else
|
||||
// ex_wstr fileData = pOffset;
|
||||
// #endif
|
||||
|
||||
return LoadFromMemory(fileData, bClearOld);
|
||||
|
||||
}
|
||||
|
||||
bool TsIniFile::LoadFromMemory(const ex_wstr& strData, bool bClearOld)
|
||||
bool ExIniFile::LoadFromMemory(const ex_wstr& strData, bool bClearOld)
|
||||
{
|
||||
if (strData.empty())
|
||||
return false;
|
||||
|
||||
ex_wstr strAll(strData);
|
||||
bool bRet = true;
|
||||
TsIniSection* pCurSection = NULL;
|
||||
ExIniSection* pCurSection = NULL;
|
||||
|
||||
do {
|
||||
// Clear old data.
|
||||
|
@ -305,7 +300,7 @@ bool TsIniFile::LoadFromMemory(const ex_wstr& strData, bool bClearOld)
|
|||
return bRet;
|
||||
}
|
||||
|
||||
void TsIniFile::Save(int codepage/* = EX_CODEPAGE_UTF8*/)
|
||||
void ExIniFile::Save(int codepage/* = EX_CODEPAGE_UTF8*/)
|
||||
{
|
||||
ex_astr temp;
|
||||
ex_wstr2astr(m_file_path, temp);
|
||||
|
@ -318,10 +313,15 @@ void TsIniFile::Save(int codepage/* = EX_CODEPAGE_UTF8*/)
|
|||
{
|
||||
return;
|
||||
}
|
||||
ts_ini_sections::iterator it = m_secs.begin();
|
||||
|
||||
// if there are key-value pairs that belong to no section, save them first
|
||||
if (m_dumy_sec.Count() > 0)
|
||||
m_dumy_sec.Save(file, codepage);
|
||||
|
||||
ex_ini_sections::iterator it = m_secs.begin();
|
||||
for (; it != m_secs.end(); ++it)
|
||||
{
|
||||
TSLOGD(_T("{%s}\n"), it->first.c_str());
|
||||
EXLOGD(_T("{%s}\n"), it->first.c_str());
|
||||
ex_wstr temp;
|
||||
temp += _T("[");
|
||||
temp += it->first.c_str();
|
||||
|
@ -335,21 +335,21 @@ void TsIniFile::Save(int codepage/* = EX_CODEPAGE_UTF8*/)
|
|||
fclose(file);
|
||||
}
|
||||
|
||||
#ifdef _DEBUG
|
||||
void TsIniFile::Dump(void)
|
||||
#ifdef EX_DEBUG
|
||||
void ExIniFile::Dump(void)
|
||||
{
|
||||
ts_ini_sections::iterator it = m_secs.begin();
|
||||
ex_ini_sections::iterator it = m_secs.begin();
|
||||
for (; it != m_secs.end(); ++it)
|
||||
{
|
||||
TSLOGD(_T("{%s}\n"), it->first.c_str());
|
||||
EXLOGD(_T("{%s}\n"), it->first.c_str());
|
||||
it->second->Dump();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
void TsIniFile::ClearUp(void)
|
||||
void ExIniFile::ClearUp(void)
|
||||
{
|
||||
ts_ini_sections::iterator it = m_secs.begin();
|
||||
ex_ini_sections::iterator it = m_secs.begin();
|
||||
for (; it != m_secs.end(); ++it)
|
||||
{
|
||||
delete it->second;
|
||||
|
@ -357,9 +357,9 @@ void TsIniFile::ClearUp(void)
|
|||
m_secs.clear();
|
||||
}
|
||||
|
||||
TsIniSection* TsIniFile::GetSection(const ex_wstr& strName, bool bCreateIfNotExists)
|
||||
ExIniSection* ExIniFile::GetSection(const ex_wstr& strName, bool bCreateIfNotExists)
|
||||
{
|
||||
ts_ini_sections::iterator it = m_secs.find(strName);
|
||||
ex_ini_sections::iterator it = m_secs.find(strName);
|
||||
if (it != m_secs.end())
|
||||
return it->second;
|
||||
|
||||
|
@ -368,7 +368,7 @@ TsIniSection* TsIniFile::GetSection(const ex_wstr& strName, bool bCreateIfNotExi
|
|||
|
||||
|
||||
|
||||
TsIniSection* pSec = new TsIniSection(strName);
|
||||
ExIniSection* pSec = new ExIniSection(strName);
|
||||
m_secs.insert(std::make_pair(strName, pSec));
|
||||
return pSec;
|
||||
}
|
||||
|
@ -377,7 +377,7 @@ TsIniSection* TsIniFile::GetSection(const ex_wstr& strName, bool bCreateIfNotExi
|
|||
// parse one line; the result is one of [section name / key-value pair / comment / nothing / error]
// section name   => strKey = [section_name]
// key-value pair => strKey = strValue
|
||||
TsIniFile::PARSE_RV TsIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& strKey, ex_wstr& strValue)
|
||||
ExIniFile::PARSE_RV ExIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& strKey, ex_wstr& strValue)
|
||||
{
|
||||
// first strip leading spaces and TAB characters
|
||||
ex_wstr strLine(strOrigLine);
|
||||
|
@ -427,7 +427,7 @@ TsIniFile::PARSE_RV TsIniFile::_ParseLine(const ex_wstr& strOrigLine, ex_wstr& s
|
|||
return PARSE_OTHER;
|
||||
}
|
||||
|
||||
bool TsIniFile::_ProcessLine(const ex_wstr strLine, TsIniSection** pCurSection)
|
||||
bool ExIniFile::_ProcessLine(const ex_wstr strLine, ExIniSection** pCurSection)
|
||||
{
|
||||
if (strLine.empty())
|
||||
return true;
|
||||
|
@ -449,7 +449,7 @@ bool TsIniFile::_ProcessLine(const ex_wstr strLine, TsIniSection** pCurSection)
|
|||
case PARSE_SECTION:
|
||||
{
|
||||
// create a section
|
||||
TsIniSection* pSection = GetSection(strKey, true);
|
||||
ExIniSection* pSection = GetSection(strKey, true);
|
||||
if (NULL == pSection)
|
||||
{
|
||||
bError = true;
|
||||
|
@ -460,17 +460,20 @@ bool TsIniFile::_ProcessLine(const ex_wstr strLine, TsIniSection** pCurSection)
|
|||
}
|
||||
break;
|
||||
case PARSE_KEYVALUE:
|
||||
// create a key-value pair
|
||||
if (NULL == pCurSection || NULL == *pCurSection)
|
||||
{
|
||||
bError = true;
|
||||
break;
|
||||
//bError = true;
|
||||
//break;
|
||||
*pCurSection = &m_dumy_sec;
|
||||
}
|
||||
|
||||
// create a key-value pair
|
||||
if (!(*pCurSection)->SetValue(strKey, strValue, true))
|
||||
{
|
||||
bError = true;
|
||||
break;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case PARSE_COMMENT:
|
|
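A minimal sketch of how the renamed ExIniFile/ExIniSection API above is meant to be used; the file name, section name and key are made up, error handling is trimmed, and ex.h is assumed to pull in the ex_ini header:

#include <ex.h>

static bool demo_ini(void)
{
    ExIniFile ini;
    if (!ini.LoadFromFile(L"tp_assist.ini", true))        // hypothetical file; true = drop any previously loaded data
        return false;

    ExIniSection* sec = ini.GetSection(L"common", true);  // made-up section name; true = create it if missing
    bool debug = false;
    sec->GetBool(L"debug", debug);                        // leaves `debug` untouched when the key is absent
    sec->SetValue(L"debug", L"true", true);               // true = add the key if it does not exist yet
    ini.Save(EX_CODEPAGE_UTF8);                           // the code above stores config files as UTF-8
    return true;
}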
@ -0,0 +1,701 @@
|
|||
#include <ex/ex_log.h>
|
||||
#include <ex/ex_path.h>
|
||||
#include <ex/ex_thread.h>
|
||||
#include <vector>
|
||||
#include <deque>
|
||||
#include <algorithm>
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
#include <io.h>
|
||||
#include <stdio.h>
|
||||
#include <direct.h>
|
||||
#else
|
||||
#include <dirent.h>
|
||||
#include <sys/time.h>
|
||||
#endif
|
||||
|
||||
#define EX_LOG_CONTENT_MAX_LEN 2048
|
||||
|
||||
typedef enum EX_COLORS
|
||||
{
|
||||
EX_COLOR_BLACK = 0,
|
||||
EX_COLOR_BLUE = 1,
|
||||
EX_COLOR_GREEN = 2,
|
||||
EX_COLOR_CYAN = 3,
|
||||
EX_COLOR_RED = 4,
|
||||
EX_COLOR_MAGENTA = 5,
|
||||
EX_COLOR_YELLOW = 6,
|
||||
EX_COLOR_LIGHT_GRAY = 7,
|
||||
EX_COLOR_GRAY = 8,
|
||||
EX_COLOR_LIGHT_BLUE = 9,
|
||||
EX_COLOR_LIGHT_GREEN = 10,
|
||||
EX_COLOR_LIGHT_CYAN = 11,
|
||||
EX_COLOR_LIGHT_RED = 12,
|
||||
EX_COLOR_LIGHT_MAGENTA = 13,
|
||||
EX_COLOR_LIGHT_YELLOW = 14,
|
||||
EX_COLOR_WHITE = 15,
|
||||
|
||||
EX_COLOR_NORMAL = 0xFF,
|
||||
}EX_COLORS;
|
||||
|
||||
ExThreadLock g_log_lock;
|
||||
|
||||
typedef std::deque<unsigned long long> log_file_deque;
|
||||
|
||||
class ExLogFile
|
||||
{
|
||||
public:
|
||||
ExLogFile() {
|
||||
m_hFile = NULL;
|
||||
m_filesize = 0;
|
||||
}
|
||||
~ExLogFile() {}
|
||||
|
||||
bool init(const ex_wstr& log_path, const ex_wstr& log_name, ex_u32 max_filesize, ex_u8 max_count);
|
||||
|
||||
//bool write(int level, char* buf, int len);
|
||||
bool write(int level, const char* buf);
|
||||
bool write(int level, const wchar_t* buf);
|
||||
|
||||
protected:
|
||||
bool _open_file();
|
||||
//bool _backup_file();
|
||||
bool _rotate_file(void); // rename the current log file as a backup, then start a new one
|
||||
//bool _load_file_list();
|
||||
|
||||
protected:
|
||||
FILE* m_hFile;
|
||||
ex_u32 m_filesize;
|
||||
|
||||
ex_u32 m_max_filesize;
|
||||
ex_u8 m_max_count;
|
||||
ex_wstr m_path;
|
||||
ex_wstr m_filename;
|
||||
ex_wstr m_fullname;
|
||||
log_file_deque m_log_file_list;
|
||||
private:
|
||||
|
||||
};
|
||||
|
||||
|
||||
typedef struct EX_LOG_CFG
|
||||
{
|
||||
EX_LOG_CFG()
|
||||
{
|
||||
min_level = EX_LOG_LEVEL_INFO;
|
||||
debug_mode = false;
|
||||
to_console = true;
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
console_handle = GetStdHandle(STD_OUTPUT_HANDLE);
|
||||
#endif
|
||||
}
|
||||
|
||||
int min_level;
|
||||
bool debug_mode;
|
||||
bool to_console;
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
HANDLE console_handle;
|
||||
#endif
|
||||
|
||||
ExLogFile logfile;
|
||||
}EX_LOG_CFG;
|
||||
|
||||
static EX_LOG_CFG g_log_cfg;
|
||||
|
||||
void EXLOG_LEVEL(int min_level)
|
||||
{
|
||||
g_log_cfg.min_level = min_level;
|
||||
}
|
||||
|
||||
void EXLOG_CONSOLE(bool output_to_console)
|
||||
{
|
||||
g_log_cfg.to_console = output_to_console;
|
||||
}
|
||||
|
||||
void EXLOG_FILE(const wchar_t* log_file, const wchar_t* log_path /*= NULL*/, ex_u32 max_filesize /*= EX_LOG_FILE_MAX_SIZE*/, ex_u8 max_filecount /*= EX_LOG_FILE_MAX_COUNT*/)
|
||||
{
|
||||
ex_wstr _path;
|
||||
if (NULL == log_path)
|
||||
{
|
||||
ex_exec_file(_path);
|
||||
ex_dirname(_path);
|
||||
ex_path_join(_path, false, L"log", NULL);
|
||||
}
|
||||
else
|
||||
{
|
||||
_path = log_path;
|
||||
}
|
||||
|
||||
g_log_cfg.logfile.init(_path, log_file, max_filesize, max_filecount);
|
||||
}
|
||||
|
||||
static void _ts_printf_a(int level, EX_COLORS clrBackGround, const char* fmt, va_list valist)
|
||||
{
|
||||
if (NULL == fmt)
|
||||
return;
|
||||
|
||||
if (g_log_cfg.min_level > level)
|
||||
return;
|
||||
|
||||
EX_COLORS clrForeGround = EX_COLOR_NORMAL;
|
||||
switch (level)
|
||||
{
|
||||
case EX_LOG_LEVEL_DEBUG:
|
||||
if (!g_log_cfg.debug_mode)
|
||||
return;
|
||||
clrForeGround = EX_COLOR_GRAY;
|
||||
break;
|
||||
case EX_LOG_LEVEL_VERBOSE:
|
||||
clrForeGround = EX_COLOR_LIGHT_GRAY;
|
||||
break;
|
||||
case EX_LOG_LEVEL_INFO:
|
||||
clrForeGround = EX_COLOR_LIGHT_MAGENTA;
|
||||
break;
|
||||
case EX_LOG_LEVEL_WARN:
|
||||
clrForeGround = EX_COLOR_LIGHT_RED;
|
||||
break;
|
||||
case EX_LOG_LEVEL_ERROR:
|
||||
clrForeGround = EX_COLOR_LIGHT_RED;
|
||||
break;
|
||||
}
|
||||
|
||||
if (EX_COLOR_NORMAL == clrForeGround)
|
||||
clrForeGround = EX_COLOR_LIGHT_GRAY;
|
||||
if (EX_COLOR_NORMAL == clrBackGround)
|
||||
clrBackGround = EX_COLOR_BLACK;
|
||||
|
||||
if (0 == strlen(fmt))
|
||||
return;
|
||||
|
||||
char szTmp[4096] = { 0 };
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
vsnprintf_s(szTmp, 4096, 4095, fmt, valist);
|
||||
if (NULL != g_log_cfg.console_handle)
|
||||
{
|
||||
SetConsoleTextAttribute(g_log_cfg.console_handle, (WORD)((clrBackGround << 4) | clrForeGround));
|
||||
printf_s("%s", szTmp);
|
||||
fflush(stdout);
|
||||
SetConsoleTextAttribute(g_log_cfg.console_handle, EX_COLOR_GRAY);
|
||||
}
|
||||
else
|
||||
{
|
||||
OutputDebugStringA(szTmp);
|
||||
}
|
||||
#else
|
||||
vsnprintf(szTmp, 4095, fmt, valist);
|
||||
printf("%s", szTmp);
|
||||
fflush(stdout);
|
||||
#endif
|
||||
|
||||
// #ifdef LOG_TO_FILE
|
||||
// g_log_file.WriteData(level, szTmp, strlen(szTmp));
|
||||
// #endif
|
||||
g_log_cfg.logfile.write(level, szTmp);
|
||||
}
|
||||
|
||||
static void _ts_printf_w(int level, EX_COLORS clrBackGround, const wchar_t* fmt, va_list valist)
|
||||
{
|
||||
if (NULL == fmt || 0 == wcslen(fmt))
|
||||
return;
|
||||
if (g_log_cfg.min_level > level)
|
||||
return;
|
||||
|
||||
EX_COLORS clrForeGround = EX_COLOR_NORMAL;
|
||||
switch (level)
|
||||
{
|
||||
case EX_LOG_LEVEL_DEBUG:
|
||||
if (!g_log_cfg.debug_mode)
|
||||
return;
|
||||
clrForeGround = EX_COLOR_GRAY;
|
||||
break;
|
||||
case EX_LOG_LEVEL_VERBOSE:
|
||||
clrForeGround = EX_COLOR_LIGHT_GRAY;
|
||||
break;
|
||||
case EX_LOG_LEVEL_INFO:
|
||||
clrForeGround = EX_COLOR_LIGHT_MAGENTA;
|
||||
break;
|
||||
case EX_LOG_LEVEL_WARN:
|
||||
clrForeGround = EX_COLOR_LIGHT_RED;
|
||||
break;
|
||||
case EX_LOG_LEVEL_ERROR:
|
||||
clrForeGround = EX_COLOR_LIGHT_RED;
|
||||
break;
|
||||
}
|
||||
|
||||
if (EX_COLOR_NORMAL == clrForeGround)
|
||||
clrForeGround = EX_COLOR_LIGHT_GRAY;
|
||||
if (EX_COLOR_NORMAL == clrBackGround)
|
||||
clrBackGround = EX_COLOR_BLACK;
|
||||
|
||||
wchar_t szTmp[4096] = { 0 };
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
_vsnwprintf_s(szTmp, 4096, 4095, fmt, valist);
|
||||
if (NULL != g_log_cfg.console_handle)
|
||||
{
|
||||
SetConsoleTextAttribute(g_log_cfg.console_handle, (WORD)((clrBackGround << 4) | clrForeGround));
|
||||
wprintf_s(_T("%s"), szTmp);
|
||||
fflush(stdout);
|
||||
SetConsoleTextAttribute(g_log_cfg.console_handle, EX_COLOR_GRAY);
|
||||
}
|
||||
else
|
||||
{
|
||||
OutputDebugStringW(szTmp);
|
||||
}
|
||||
#else
|
||||
vswprintf(szTmp, 4095, fmt, valist);
|
||||
wprintf(L"%s", szTmp);
|
||||
fflush(stdout);
|
||||
#endif
|
||||
|
||||
g_log_cfg.logfile.write(level, szTmp);
|
||||
}
|
||||
|
||||
#define EX_PRINTF_X(fn, level) \
|
||||
void fn(const char* fmt, ...) \
|
||||
{ \
|
||||
ExThreadSmartLock locker(g_log_lock); \
|
||||
va_list valist; \
|
||||
va_start(valist, fmt); \
|
||||
_ts_printf_a(level, EX_COLOR_BLACK, fmt, valist); \
|
||||
va_end(valist); \
|
||||
} \
|
||||
void fn(const wchar_t* fmt, ...) \
|
||||
{ \
|
||||
ExThreadSmartLock locker(g_log_lock); \
|
||||
va_list valist; \
|
||||
va_start(valist, fmt); \
|
||||
_ts_printf_w(level, EX_COLOR_BLACK, fmt, valist); \
|
||||
va_end(valist); \
|
||||
}
|
||||
|
||||
EX_PRINTF_X(ex_printf_d, EX_LOG_LEVEL_DEBUG)
|
||||
EX_PRINTF_X(ex_printf_v, EX_LOG_LEVEL_VERBOSE)
|
||||
EX_PRINTF_X(ex_printf_i, EX_LOG_LEVEL_INFO)
|
||||
EX_PRINTF_X(ex_printf_w, EX_LOG_LEVEL_WARN)
|
||||
EX_PRINTF_X(ex_printf_e, EX_LOG_LEVEL_ERROR)
|
||||
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
void ex_printf_e_lasterror(const char* fmt, ...)
|
||||
{
|
||||
ExThreadSmartLock locker(g_log_lock);
|
||||
|
||||
va_list valist;
|
||||
va_start(valist, fmt);
|
||||
_ts_printf_a(EX_LOG_LEVEL_ERROR, EX_COLOR_BLACK, fmt, valist);
|
||||
va_end(valist);
|
||||
|
||||
//=========================================
|
||||
|
||||
LPVOID lpMsgBuf;
|
||||
DWORD dw = GetLastError();
|
||||
|
||||
FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
|
||||
NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
|
||||
(LPSTR)&lpMsgBuf, 0, NULL);
|
||||
|
||||
ex_printf_e(" - WinErr(%d): %s\n", dw, (LPSTR)lpMsgBuf);
|
||||
LocalFree(lpMsgBuf);
|
||||
}
|
||||
#endif
|
||||
|
||||
void ex_printf_bin(const ex_u8* bin_data, size_t bin_size, const char* fmt, ...)
|
||||
{
|
||||
if (!g_log_cfg.debug_mode)
|
||||
return;
|
||||
|
||||
ExThreadSmartLock locker(g_log_lock);
|
||||
|
||||
va_list valist;
|
||||
va_start(valist, fmt);
|
||||
_ts_printf_a(EX_LOG_LEVEL_DEBUG, EX_COLOR_BLACK, fmt, valist);
|
||||
va_end(valist);
|
||||
|
||||
ex_printf_d(" (%d/0x%02x Bytes)\n", bin_size, bin_size);
|
||||
|
||||
const ex_u8* line = bin_data;
|
||||
size_t thisline = 0;
|
||||
size_t offset = 0;
|
||||
unsigned int i = 0;
|
||||
|
||||
char szTmp[128] = { 0 };
|
||||
int _offset = 0;
|
||||
|
||||
while (offset < bin_size)
|
||||
{
|
||||
memset(szTmp, 0, 128);
|
||||
_offset = 0;
|
||||
|
||||
snprintf(szTmp + _offset, 128 - _offset, "%06x ", (int)offset);
|
||||
_offset += 8;
|
||||
|
||||
thisline = bin_size - offset;
|
||||
if (thisline > 16)
|
||||
thisline = 16;
|
||||
|
||||
for (i = 0; i < thisline; i++)
|
||||
{
|
||||
snprintf(szTmp + _offset, 128 - _offset, "%02x ", line[i]);
|
||||
_offset += 3;
|
||||
}
|
||||
|
||||
snprintf(szTmp + _offset, 128 - _offset, " ");
|
||||
_offset += 2;
|
||||
|
||||
for (; i < 16; i++)
|
||||
{
|
||||
snprintf(szTmp + _offset, 128 - _offset, " ");
|
||||
_offset += 3;
|
||||
}
|
||||
|
||||
for (i = 0; i < thisline; i++)
|
||||
{
|
||||
snprintf(szTmp + _offset, 128 - _offset, "%c", (line[i] >= 0x20 && line[i] < 0x7f) ? line[i] : '.');
|
||||
_offset += 1;
|
||||
}
|
||||
|
||||
snprintf(szTmp + _offset, 128 - _offset, "\n");
|
||||
_offset += 1;
|
||||
|
||||
ex_printf_d("%s", szTmp);
|
||||
|
||||
offset += thisline;
|
||||
line += thisline;
|
||||
}
|
||||
|
||||
fflush(stdout);
|
||||
}
|
||||
|
||||
bool ExLogFile::init(const ex_wstr& log_path, const ex_wstr& log_name, ex_u32 max_filesize, ex_u8 max_count)
|
||||
{
|
||||
m_max_filesize = max_filesize;
|
||||
m_max_count = max_count;
|
||||
|
||||
m_filename = log_name;
|
||||
|
||||
m_path = log_path;
|
||||
ex_abspath(m_path);
|
||||
|
||||
m_fullname = m_path;
|
||||
ex_path_join(m_fullname, false, log_name.c_str(), NULL);
|
||||
|
||||
return _open_file();
|
||||
}
|
||||
|
||||
|
||||
bool ExLogFile::_open_file()
|
||||
{
|
||||
if (m_hFile)
|
||||
{
|
||||
fclose(m_hFile);
|
||||
m_hFile = NULL;
|
||||
}
|
||||
|
||||
ex_astr _fullname;
|
||||
ex_wstr2astr(m_fullname, _fullname);
|
||||
#ifdef EX_OS_WIN32
|
||||
// NOTE: the log file must be opened with _fsopen in shared-read mode, otherwise other processes cannot read it until this process exits.
|
||||
m_hFile = _fsopen(_fullname.c_str(), "a", _SH_DENYWR);
|
||||
#else
|
||||
m_hFile = fopen(_fullname.c_str(), "a");
|
||||
#endif
|
||||
|
||||
if (NULL == m_hFile)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
fseek(m_hFile, 0, SEEK_END);
|
||||
m_filesize = ftell(m_hFile);
|
||||
|
||||
return _rotate_file();
|
||||
}
|
||||
|
||||
bool ExLogFile::_rotate_file(void)
|
||||
{
|
||||
if (m_filesize < m_max_filesize)
|
||||
return true;
|
||||
|
||||
if (m_hFile)
|
||||
{
|
||||
fclose(m_hFile);
|
||||
m_hFile = NULL;
|
||||
}
|
||||
|
||||
//if (!_backup_file())
|
||||
// return false;
|
||||
|
||||
// make a name for backup file.
|
||||
wchar_t _tmpname[64] = { 0 };
|
||||
#ifdef EX_OS_WIN32
|
||||
SYSTEMTIME st;
|
||||
GetLocalTime(&st);
|
||||
//StringCbPrintf(_tmpname, 64, L"%s.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
swprintf_s(_tmpname, 64, L"%s.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
// sprintf_s(szBaseNewFileLogName, EX_LOG_PATH_MAX_LEN, "%04d%02d%02d%02d%02d%02d",
|
||||
// st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
#else
|
||||
time_t timep;
|
||||
time(&timep);
|
||||
struct tm *p = localtime(&timep);
|
||||
if (p == NULL)
|
||||
return false;
|
||||
|
||||
swprintf(_tmpname, L"%s.%04d%02d%02d%02d%02d%02d.bak", m_filename.c_str(), p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
|
||||
// sprintf(szBaseNewFileLogName, "%04d%02d%02d%02d%02d%02d",
|
||||
// p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
|
||||
#endif
|
||||
|
||||
ex_wstr _new_fullname(m_path);
|
||||
ex_path_join(_new_fullname, false, _tmpname, NULL);
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str()))
|
||||
{
|
||||
EXLOGE_WIN("can not rename log file, remove old one and try again.");
|
||||
DeleteFileW(_new_fullname.c_str());
|
||||
if (!MoveFileW(m_fullname.c_str(), _new_fullname.c_str()))
|
||||
return false;
|
||||
}
|
||||
#else
|
||||
ex_astr _a_fullname;
|
||||
ex_astr _a_new_fullname;
|
||||
ex_wstr2astr(m_fullname, _a_fullname);
|
||||
ex_wstr2astr(_new_fullname, _a_new_fullname);
|
||||
|
||||
if (rename(_a_fullname.c_str(), _a_new_fullname.c_str()) != 0)
|
||||
{
|
||||
remove(_a_new_fullname.c_str());
|
||||
if (0 != (rename(_a_fullname.c_str(), _a_new_fullname.c_str())))
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
return _open_file();
|
||||
}
|
||||
|
||||
#if 0
|
||||
bool ExLogFile::_backup_file()
|
||||
{
|
||||
char szNewFileLogName[EX_LOG_PATH_MAX_LEN] = { 0 };
|
||||
char szBaseNewFileLogName[EX_LOG_PATH_MAX_LEN] = { 0 };
|
||||
#ifdef EX_OS_WIN32
|
||||
SYSTEMTIME st;
|
||||
GetLocalTime(&st);
|
||||
sprintf_s(szNewFileLogName, EX_LOG_PATH_MAX_LEN, "%s\\%04d%02d%02d%02d%02d%02d.log",
|
||||
m_log_file_dir.c_str(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
|
||||
sprintf_s(szBaseNewFileLogName, EX_LOG_PATH_MAX_LEN, "%04d%02d%02d%02d%02d%02d",
|
||||
st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
#else
|
||||
time_t timep;
|
||||
struct tm *p;
|
||||
time(&timep);
|
||||
p = localtime(&timep); //get server's time
|
||||
if (p == NULL)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
sprintf(szNewFileLogName, "%s/%04d%02d%02d%02d%02d%02d.log",
|
||||
m_log_file_dir.c_str(), p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
|
||||
sprintf(szBaseNewFileLogName, "%04d%02d%02d%02d%02d%02d",
|
||||
p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
|
||||
#endif
|
||||
if (m_hFile)
|
||||
{
|
||||
fclose(m_hFile);
|
||||
m_hFile = 0;
|
||||
}
|
||||
#ifdef EX_OS_WIN32
|
||||
if (!MoveFileA(m_path.c_str(), szNewFileLogName))
|
||||
{
|
||||
DWORD dwError = GetLastError();
|
||||
|
||||
DeleteFileA(szNewFileLogName);
|
||||
|
||||
MoveFileA(m_path.c_str(), szNewFileLogName);
|
||||
}
|
||||
#else
|
||||
if (rename(m_path.c_str(), szNewFileLogName) != 0)
|
||||
{
|
||||
remove(szNewFileLogName);
|
||||
|
||||
rename(m_path.c_str(), szNewFileLogName);
|
||||
}
|
||||
#endif
|
||||
unsigned long long value = atoll(szBaseNewFileLogName);
|
||||
if (value != 0)
|
||||
{
|
||||
m_log_file_list.push_back(value);
|
||||
}
|
||||
int try_count = 0;
|
||||
while ((m_log_file_list.size() > m_max_count))
|
||||
{
|
||||
unsigned long long value = m_log_file_list.front();
|
||||
char szDeleteFile[256] = { 0 };
|
||||
#ifdef EX_OS_WIN32
|
||||
sprintf_s(szDeleteFile, 256, "%s\\%llu.log", m_log_file_dir.c_str(), value);
|
||||
if (DeleteFileA(szDeleteFile))
|
||||
{
|
||||
m_log_file_list.pop_front();
|
||||
}
|
||||
#else
|
||||
sprintf(szDeleteFile, "%s/%llu.log", m_log_file_dir.c_str(), value);
|
||||
if (remove(szDeleteFile) == 0)
|
||||
{
|
||||
m_log_file_list.pop_front();
|
||||
}
|
||||
#endif
|
||||
else
|
||||
{
|
||||
if (try_count > 5)
|
||||
{
|
||||
break;
|
||||
}
|
||||
try_count++;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
#endif // if 0
|
||||
|
||||
bool ExLogFile::write(int level, const char* buf)
|
||||
{
|
||||
if (NULL == m_hFile)
|
||||
return false;
|
||||
|
||||
size_t len = strlen(buf);
|
||||
|
||||
if (len > EX_LOG_CONTENT_MAX_LEN)
|
||||
return false;
|
||||
|
||||
char szTime[100] = { 0 };
|
||||
#ifdef EX_OS_WIN32
|
||||
SYSTEMTIME st;
|
||||
GetLocalTime(&st);
|
||||
sprintf_s(szTime, 100, "[%04d-%02d-%02d %02d:%02d:%02d] ", st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
|
||||
#else
|
||||
time_t timep;
|
||||
struct tm *p;
|
||||
time(&timep);
|
||||
p = localtime(&timep);
|
||||
if (p == NULL)
|
||||
return false;
|
||||
sprintf(szTime, "[%04d-%02d-%02d %02d:%02d:%02d] , p->tm_year + 1900, p->tm_mon + 1, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec);
|
||||
#endif
|
||||
|
||||
int lenTime = strlen(szTime);
|
||||
fwrite(szTime, lenTime, 1, m_hFile);
|
||||
m_filesize += lenTime;
|
||||
fwrite(buf, len, 1, m_hFile);
|
||||
m_filesize += len;
|
||||
|
||||
fflush(m_hFile);
|
||||
|
||||
return _rotate_file();
|
||||
}
|
||||
|
||||
bool ExLogFile::write(int level, const wchar_t* buf)
|
||||
{
|
||||
ex_astr _buf;
|
||||
ex_wstr2astr(buf, _buf, EX_CODEPAGE_UTF8);
|
||||
return write(level, _buf.c_str());
|
||||
}
|
||||
|
||||
|
||||
#if 0
|
||||
bool ExLogFile::_load_file_list()
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
struct _finddata_t data;
|
||||
std::string log_match = m_log_file_dir;
|
||||
log_match += "\\*.log";
|
||||
//log_match += "*.log";
|
||||
long hnd = _findfirst(log_match.c_str(), &data); // find the first file match `*.log`
|
||||
if (hnd < 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
int nRet = (hnd < 0) ? -1 : 1;
|
||||
int count = 0;
|
||||
while (nRet > 0)
|
||||
{
|
||||
if (data.attrib == _A_SUBDIR)
|
||||
{
|
||||
// do nothing to a folder.
|
||||
}
|
||||
else
|
||||
{
|
||||
if (m_filename.compare(data.name) == 0)
|
||||
{
|
||||
}
|
||||
else
|
||||
{
|
||||
char* match = strrchr(data.name, '.');
|
||||
if (match != NULL)
|
||||
{
|
||||
*match = '\0';
|
||||
}
|
||||
unsigned long long value = atoll(data.name);
|
||||
if (value == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
m_log_file_list.push_back(value);
|
||||
}
|
||||
}
|
||||
|
||||
nRet = _findnext(hnd, &data);
|
||||
count++;
|
||||
if (count > 100)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
_findclose(hnd);
|
||||
#else
|
||||
DIR *dir;
|
||||
|
||||
struct dirent *ptr;
|
||||
|
||||
dir = opendir(m_log_file_dir.c_str());
|
||||
|
||||
while ((ptr = readdir(dir)) != NULL)
|
||||
{
|
||||
if (ptr->d_type == 8)
|
||||
{
|
||||
char temp_file_name[PATH_MAX] = { 0 };
|
||||
strcpy(temp_file_name, ptr->d_name);
|
||||
if (m_filename.compare(temp_file_name) == 0)
|
||||
{
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
char* match = strrchr(temp_file_name, '.');
|
||||
if (match != NULL)
|
||||
{
|
||||
*match = '\0';
|
||||
}
|
||||
unsigned long long value = atoll(temp_file_name);
|
||||
if (value == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
m_log_file_list.push_back(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
closedir(dir);
|
||||
#endif // EX_OS_WIN32
|
||||
|
||||
std::sort(m_log_file_list.begin(), m_log_file_list.end(), std::less<unsigned long long>());
|
||||
return true;
|
||||
}
|
||||
#endif // if 0
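A small usage sketch for the logging interface defined above; the log file name, port number and packet bytes are made up, and the log_path/size defaults come from the EXLOG_FILE() signature shown here:

#include <ex/ex_log.h>

int main(void)
{
    EXLOG_LEVEL(EX_LOG_LEVEL_DEBUG);                      // lower the minimum level so verbose output is kept
    EXLOG_FILE(L"demo.log");                              // log_path defaults to <exe dir>/log per EXLOG_FILE() above

    ex_printf_i("listener started on port %d\n", 7190);   // port number invented for the example

    ex_u8 pkt[4] = { 0x01, 0x02, 0x03, 0x04 };
    ex_printf_bin(pkt, sizeof(pkt), "handshake packet");  // hex dump; only emitted when debug_mode is enabled
    return 0;
}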
|
|
@ -106,6 +106,7 @@ bool ex_dirname(ex_wstr& inout_filename)
|
|||
{
|
||||
*match = EX_NULL_END;
|
||||
inout_filename = ret;
|
||||
ex_free(ret);
|
||||
return true;
|
||||
}
|
||||
else
|
||||
|
@ -113,7 +114,6 @@ bool ex_dirname(ex_wstr& inout_filename)
|
|||
ex_free(ret);
|
||||
inout_filename = EX_CURRENT_DIR_STR;
|
||||
return true;
|
||||
//return ex_wcsdup(EX_CURRENT_DIR_STR);
|
||||
}
|
||||
|
||||
ex_free(ret);
|
||||
|
@ -341,7 +341,6 @@ bool ex_path_join(ex_wstr& inout_path, bool auto_abspath, ...)
|
|||
if (!ex_abspath(_path))
|
||||
return false;
|
||||
|
||||
//return ex_wcsdup(_path.c_str());
|
||||
inout_path = _path;
|
||||
return true;
|
||||
}
|
||||
|
@ -403,11 +402,11 @@ bool ex_mkdirs(const ex_wstr& in_path)
|
|||
|
||||
ex_astr _path;
|
||||
#ifdef EX_OS_WIN32
|
||||
ex_wstr2astr(in_path, _path, EX_CODEPAGE_ACP);
|
||||
ex_wstr2astr(in_path, _path);
|
||||
if (0 == _mkdir(_path.c_str()))
|
||||
return true;
|
||||
#else
|
||||
ex_wstr2astr(in_path, _path, EX_CODEPAGE_UTF8);
|
||||
ex_wstr2astr(in_path, _path);
|
||||
int status = mkdir(_path.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
|
||||
if (0 != status)
|
||||
return false;
|
||||
|
@ -416,3 +415,14 @@ bool ex_mkdirs(const ex_wstr& in_path)
|
|||
return true;
|
||||
}
|
||||
|
||||
bool ex_path_ext_name(const ex_wstr& in_filename, ex_wstr& out_ext)
|
||||
{
|
||||
ex_wstr::size_type pos_dot = in_filename.rfind(L'.');
|
||||
ex_wstr::size_type pos_sep = in_filename.rfind(EX_SEP);
|
||||
|
||||
if (pos_dot == ex_wstr::npos || pos_dot <= pos_sep)
|
||||
return false;
|
||||
|
||||
out_ext.assign(in_filename, pos_dot + 1, in_filename.length() - pos_dot - 1);
|
||||
return true;
|
||||
}
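A sketch of the path helpers around ex_path_ext_name() and ex_mkdirs(); the directory and file names are invented, and ex.h is assumed as the umbrella include:

#include <ex.h>

static void demo_paths(void)
{
    ex_wstr path;
    ex_exec_file(path);                         // absolute path of the current executable
    ex_dirname(path);                           // strip the file name, keep the directory
    ex_path_join(path, false, L"data", NULL);   // append "data"; false = skip the extra abspath pass
    ex_mkdirs(path);                            // create the directory chain if it does not exist

    ex_wstr ext;
    if (ex_path_ext_name(L"session-0001.tpr", ext))   // made-up file name
        ex_printf_d("ext: %ls\n", ext.c_str());       // ext == L"tpr"
}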
|
||||
|
|
|
@ -184,7 +184,7 @@ wchar_t** ex_make_wargv(int argc, char** argv)
|
|||
|
||||
for (i = 0; i < argc; ++i)
|
||||
{
|
||||
ret[i] = ex_str2wcs_alloc(argv[i], EX_CODEPAGE_ACP);
|
||||
ret[i] = ex_str2wcs_alloc(argv[i], EX_CODEPAGE_DEFAULT);
|
||||
if (NULL == ret[i])
|
||||
goto err;
|
||||
}
|
||||
|
@ -223,12 +223,12 @@ EX_BOOL ex_wcs_only_white_space(const char* src)
|
|||
|
||||
|
||||
#ifdef __cplusplus
|
||||
bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_ACP*/)
|
||||
bool ex_wstr2astr(const ex_wstr& in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/)
|
||||
{
|
||||
return ex_wstr2astr(in_str.c_str(), out_str, code_page);
|
||||
}
|
||||
|
||||
bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_ACP*/)
|
||||
bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/)
|
||||
{
|
||||
char* astr = ex_wcs2str_alloc(in_str, code_page);
|
||||
if (NULL == astr)
|
||||
|
@ -239,12 +239,12 @@ bool ex_wstr2astr(const wchar_t* in_str, ex_astr& out_str, int code_page/* = EX_
|
|||
return true;
|
||||
}
|
||||
|
||||
bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_ACP*/)
|
||||
bool ex_astr2wstr(const ex_astr& in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/)
|
||||
{
|
||||
return ex_astr2wstr(in_str.c_str(), out_str, code_page);
|
||||
}
|
||||
|
||||
bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_ACP*/)
|
||||
bool ex_astr2wstr(const char* in_str, ex_wstr& out_str, int code_page/* = EX_CODEPAGE_DEFAULT*/)
|
||||
{
|
||||
wchar_t* wstr = ex_str2wcs_alloc(in_str, code_page);
|
||||
if (NULL == wstr)
|
||||
|
|
|
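A short sketch of the string conversion helpers now that the codepage parameter defaults to EX_CODEPAGE_DEFAULT:

#include <ex/ex_str.h>

static void demo_str(void)
{
    ex_wstr wide(L"teleport");
    ex_astr utf8;
    ex_wstr2astr(wide, utf8, EX_CODEPAGE_UTF8);   // explicit UTF-8, e.g. before writing to a config file
    ex_wstr back;
    ex_astr2wstr(utf8, back);                     // codepage falls back to EX_CODEPAGE_DEFAULT per this change
}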
@ -1,5 +1,5 @@
|
|||
#include "stdafx.h"
|
||||
#include "ts_thread.h"
|
||||
#include <ex/ex_thread.h>
|
||||
#include <ex/ex_log.h>
|
||||
|
||||
//=========================================================
|
||||
//
|
||||
|
@ -7,9 +7,9 @@
|
|||
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
unsigned int WINAPI TsThreadBase::_thread_func(LPVOID lpParam)
|
||||
unsigned int WINAPI ExThreadBase::_thread_func(LPVOID lpParam)
|
||||
{
|
||||
TsThreadBase* p = (TsThreadBase*)lpParam;
|
||||
ExThreadBase* p = (ExThreadBase*)lpParam;
|
||||
p->m_is_running = true;
|
||||
p->_thread_loop();
|
||||
p->m_is_running = false;
|
||||
|
@ -19,9 +19,9 @@ unsigned int WINAPI TsThreadBase::_thread_func(LPVOID lpParam)
|
|||
return 0;
|
||||
}
|
||||
#else
|
||||
void* TsThreadBase::_thread_func(void* pParam)
|
||||
void* ExThreadBase::_thread_func(void* pParam)
|
||||
{
|
||||
TsThreadBase* p = (TsThreadBase*)pParam;
|
||||
ExThreadBase* p = (ExThreadBase*)pParam;
|
||||
p->m_is_running = true;
|
||||
p->_thread_loop();
|
||||
p->m_is_running = false;
|
||||
|
@ -31,7 +31,7 @@ void* TsThreadBase::_thread_func(void* pParam)
|
|||
}
|
||||
#endif
|
||||
|
||||
TsThreadBase::TsThreadBase(TsThreadManager* tm, const char* thread_name) :
|
||||
ExThreadBase::ExThreadBase(ExThreadManager* tm, const char* thread_name) :
|
||||
m_thread_manager(tm),
|
||||
m_handle(0),
|
||||
m_is_running(false),
|
||||
|
@ -41,13 +41,13 @@ TsThreadBase::TsThreadBase(TsThreadManager* tm, const char* thread_name) :
|
|||
m_thread_manager->_add_thread(this);
|
||||
}
|
||||
|
||||
TsThreadBase::~TsThreadBase()
|
||||
ExThreadBase::~ExThreadBase()
|
||||
{
|
||||
}
|
||||
|
||||
bool TsThreadBase::start(void)
|
||||
bool ExThreadBase::start(void)
|
||||
{
|
||||
TSLOGV(" -- thread [%s] starting.\n", m_thread_name.c_str());
|
||||
EXLOGV(" -- thread [%s] starting.\n", m_thread_name.c_str());
|
||||
#ifdef WIN32
|
||||
HANDLE h = (HANDLE)_beginthreadex(NULL, 0, _thread_func, (void*)this, 0, NULL);
|
||||
|
||||
|
@ -70,13 +70,13 @@ bool TsThreadBase::start(void)
|
|||
return true;
|
||||
}
|
||||
|
||||
bool TsThreadBase::stop(void)
|
||||
bool ExThreadBase::stop(void)
|
||||
{
|
||||
TSLOGV(" . try to stop thread [%s].\n", m_thread_name.c_str());
|
||||
EXLOGV(" . try to stop thread [%s].\n", m_thread_name.c_str());
|
||||
m_stop_by_request = true;
|
||||
_set_stop_flag();
|
||||
|
||||
TSLOGV(" . wait thread [%s] end.\n", m_thread_name.c_str());
|
||||
EXLOGV(" . wait thread [%s] end.\n", m_thread_name.c_str());
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
if (WaitForSingleObject(m_handle, INFINITE) != WAIT_OBJECT_0)
|
||||
|
@ -89,12 +89,12 @@ bool TsThreadBase::stop(void)
|
|||
return false;
|
||||
}
|
||||
#endif
|
||||
TSLOGV(" ## thread [%s] end.\n", m_thread_name.c_str());
|
||||
EXLOGV(" ## thread [%s] end.\n", m_thread_name.c_str());
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool TsThreadBase::terminate(void)
|
||||
bool ExThreadBase::terminate(void)
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
return TerminateThread(m_handle, 1) ? true : false;
|
||||
|
@ -103,28 +103,42 @@ bool TsThreadBase::terminate(void)
|
|||
#endif
|
||||
}
|
||||
|
||||
// void ExThreadBase::_thread_loop(void)
|
||||
// {
|
||||
// EXLOGE("--------thread-loop-not-impl-------\n");
|
||||
// }
|
||||
|
||||
// void ExThreadBase::_sleep_ms(int ms)
|
||||
// {
|
||||
// #ifdef EX_OS_WIN32
|
||||
// Sleep(ms);
|
||||
// #else
|
||||
// usleep(ms * 1000);
|
||||
// #endif
|
||||
// }
|
||||
|
||||
|
||||
//=========================================================
|
||||
//
|
||||
//=========================================================
|
||||
|
||||
TsThreadManager::TsThreadManager()
|
||||
ExThreadManager::ExThreadManager()
|
||||
{}
|
||||
|
||||
TsThreadManager::~TsThreadManager()
|
||||
ExThreadManager::~ExThreadManager()
|
||||
{
|
||||
if (m_threads.size() > 0)
|
||||
{
|
||||
TSLOGE("[ERROR] when destroy thread manager, there are %d thread not exit.\n", m_threads.size());
|
||||
EXLOGE("[ERROR] when destroy thread manager, there are %d thread not exit.\n", m_threads.size());
|
||||
stop_all();
|
||||
}
|
||||
}
|
||||
|
||||
void TsThreadManager::stop_all(void)
|
||||
void ExThreadManager::stop_all(void)
|
||||
{
|
||||
TsThreadSmartLock locker(m_lock);
|
||||
ExThreadSmartLock locker(m_lock);
|
||||
|
||||
ts_threads::iterator it = m_threads.begin();
|
||||
ex_threads::iterator it = m_threads.begin();
|
||||
for (; it != m_threads.end(); ++it)
|
||||
{
|
||||
(*it)->stop();
|
||||
|
@ -133,16 +147,16 @@ void TsThreadManager::stop_all(void)
|
|||
m_threads.clear();
|
||||
}
|
||||
|
||||
void TsThreadManager::_add_thread(TsThreadBase* tb)
|
||||
void ExThreadManager::_add_thread(ExThreadBase* tb)
|
||||
{
|
||||
TsThreadSmartLock locker(m_lock);
|
||||
ExThreadSmartLock locker(m_lock);
|
||||
|
||||
ts_threads::iterator it = m_threads.begin();
|
||||
ex_threads::iterator it = m_threads.begin();
|
||||
for (; it != m_threads.end(); ++it)
|
||||
{
|
||||
if ((*it) == tb)
|
||||
{
|
||||
TSLOGE("[ERROR] when add thread to manager, it already exist.\n");
|
||||
EXLOGE("[ERROR] when add thread to manager, it already exist.\n");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -150,11 +164,11 @@ void TsThreadManager::_add_thread(TsThreadBase* tb)
|
|||
m_threads.push_back(tb);
|
||||
}
|
||||
|
||||
void TsThreadManager::_remove_thread(TsThreadBase* tb)
|
||||
void ExThreadManager::_remove_thread(ExThreadBase* tb)
|
||||
{
|
||||
TsThreadSmartLock locker(m_lock);
|
||||
ExThreadSmartLock locker(m_lock);
|
||||
|
||||
ts_threads::iterator it = m_threads.begin();
|
||||
ex_threads::iterator it = m_threads.begin();
|
||||
for (; it != m_threads.end(); ++it)
|
||||
{
|
||||
if ((*it) == tb)
|
||||
|
@ -164,14 +178,14 @@ void TsThreadManager::_remove_thread(TsThreadBase* tb)
|
|||
return;
|
||||
}
|
||||
}
|
||||
TSLOGE("[ERROR] when remove thread from manager, it not exist.\n");
|
||||
EXLOGE("[ERROR] when remove thread from manager, it not exist.\n");
|
||||
}
|
||||
|
||||
//=========================================================
|
||||
//
|
||||
//=========================================================
|
||||
|
||||
TsThreadLock::TsThreadLock()
|
||||
ExThreadLock::ExThreadLock()
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
InitializeCriticalSection(&m_locker);
|
||||
|
@ -184,7 +198,7 @@ TsThreadLock::TsThreadLock()
|
|||
#endif
|
||||
}
|
||||
|
||||
TsThreadLock::~TsThreadLock()
|
||||
ExThreadLock::~ExThreadLock()
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
DeleteCriticalSection(&m_locker);
|
||||
|
@ -193,7 +207,7 @@ TsThreadLock::~TsThreadLock()
|
|||
#endif
|
||||
}
|
||||
|
||||
void TsThreadLock::lock(void)
|
||||
void ExThreadLock::lock(void)
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
EnterCriticalSection(&m_locker);
|
||||
|
@ -202,7 +216,7 @@ void TsThreadLock::lock(void)
|
|||
#endif
|
||||
}
|
||||
|
||||
void TsThreadLock::unlock(void)
|
||||
void ExThreadLock::unlock(void)
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
LeaveCriticalSection(&m_locker);
|
||||
|
@ -215,7 +229,7 @@ void TsThreadLock::unlock(void)
|
|||
//
|
||||
//=========================================================
|
||||
|
||||
int ts_atomic_add(volatile int* pt, int t)
|
||||
int ex_atomic_add(volatile int* pt, int t)
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
return (int)InterlockedExchangeAdd((long*)pt, (long)t);
|
||||
|
@ -224,7 +238,7 @@ int ts_atomic_add(volatile int* pt, int t)
|
|||
#endif
|
||||
}
|
||||
|
||||
int ts_atomic_inc(volatile int* pt)
|
||||
int ex_atomic_inc(volatile int* pt)
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
return (int)InterlockedIncrement((long*)pt);
|
||||
|
@ -233,7 +247,7 @@ int ts_atomic_inc(volatile int* pt)
|
|||
#endif
|
||||
}
|
||||
|
||||
int ts_atomic_dec(volatile int* pt)
|
||||
int ex_atomic_dec(volatile int* pt)
|
||||
{
|
||||
#ifdef EX_OS_WIN32
|
||||
return (int)InterlockedDecrement((long*)pt);
|
|
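A hedged sketch of a worker built on the renamed ExThreadBase/ExThreadManager; _thread_loop() and _set_stop_flag() are assumed to be protected virtuals (only their call sites appear in this diff), m_stop_by_request is assumed visible to subclasses, and ex_sleep_ms() is an assumed helper:

#include <ex/ex_thread.h>

class TickThread : public ExThreadBase
{
public:
    TickThread(ExThreadManager* tm) : ExThreadBase(tm, "tick") {}
protected:
    void _thread_loop(void)                // runs on the worker thread until stop() is requested
    {
        while (!m_stop_by_request)         // m_stop_by_request is set by ExThreadBase::stop() above
            ex_sleep_ms(1000);             // assumed sleep helper; any short wait would do
    }
    void _set_stop_flag(void) {}           // nothing extra to wake up in this sketch
};

// usage: the manager stops every registered thread in one call
// ExThreadManager mgr;
// TickThread tick(&mgr);
// tick.start();
// ...
// mgr.stop_all();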
@ -2,6 +2,8 @@
|
|||
#include <ex/ex_util.h>
|
||||
#include <ex/ex_str.h>
|
||||
|
||||
// #include <vld.h>
|
||||
|
||||
EX_BOOL ex_initialize(const char* lc_ctype)
|
||||
{
|
||||
#ifdef EX_OS_UNIX
|
||||
|
|
|
@ -0,0 +1,391 @@
|
|||
#include <ex/ex_winsrv.h>
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
|
||||
#include <ex/ex_const.h>
|
||||
|
||||
class winsrv_helper
|
||||
{
|
||||
public:
|
||||
winsrv_helper(SC_HANDLE scm, SC_HANDLE sc) : m_scm(scm), m_sc(sc)
|
||||
{
|
||||
}
|
||||
~winsrv_helper()
|
||||
{
|
||||
if(NULL != m_sc)
|
||||
CloseServiceHandle(m_sc);
|
||||
if(NULL != m_scm)
|
||||
CloseServiceHandle(m_scm);
|
||||
}
|
||||
|
||||
protected:
|
||||
SC_HANDLE m_sc;
|
||||
SC_HANDLE m_scm;
|
||||
};
|
||||
|
||||
|
||||
ex_rv ex_winsrv_install(const ex_wstr& srv_name, const ex_wstr& disp_name, const ex_wstr& exec_path)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
if (NULL == (sc = CreateServiceW(scm, srv_name.c_str(), disp_name.c_str(),
|
||||
SERVICE_ALL_ACCESS,
|
||||
SERVICE_WIN32_OWN_PROCESS,
|
||||
SERVICE_AUTO_START, SERVICE_ERROR_NORMAL, exec_path.c_str(), NULL, NULL, NULL, NULL, NULL))
|
||||
)
|
||||
{
|
||||
return EXRV_CANNOT_CREATE;
|
||||
}
|
||||
|
||||
SERVICE_FAILURE_ACTIONS failure_action;
|
||||
failure_action.dwResetPeriod = 0; // time (in seconds) after which the failure count is reset to zero
|
||||
failure_action.lpRebootMsg = NULL; // Message to broadcast to server users before rebooting
|
||||
failure_action.lpCommand = NULL; // Command line of the process for the CreateProcess function to execute in response
|
||||
failure_action.cActions = 3; // number of entries in the actions array
|
||||
|
||||
SC_ACTION actionarray[3];
|
||||
actionarray[0].Type = SC_ACTION_RESTART; // restart the service
|
||||
actionarray[0].Delay = 60000; // delay in milliseconds
|
||||
actionarray[1].Type = SC_ACTION_RESTART;
|
||||
actionarray[1].Delay = 60000;
|
||||
actionarray[2].Type = SC_ACTION_RESTART;
|
||||
actionarray[2].Delay = 60000;
|
||||
failure_action.lpsaActions = actionarray;
|
||||
|
||||
ChangeServiceConfig2(sc, SERVICE_CONFIG_FAILURE_ACTIONS, &failure_action);
|
||||
|
||||
return EXRV_OK;
|
||||
}
|
||||
|
||||
bool ex_winsrv_is_exists(const ex_wstr& srv_name)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return false;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_ALL_ACCESS);
|
||||
if (NULL == sc)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_uninstall(const ex_wstr& srv_name)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_ALL_ACCESS);
|
||||
if (NULL == sc)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
if (!DeleteService(sc))
|
||||
return EXRV_CANNOT_REMOVE;
|
||||
else
|
||||
return EXRV_OK;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_start(const ex_wstr& srv_name)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_START | SERVICE_QUERY_STATUS);
|
||||
if (NULL == sc)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
SERVICE_STATUS ss;
|
||||
if (!QueryServiceStatus(sc, &ss))
|
||||
return EXRV_FAILED;
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING)
|
||||
return EXRV_OK;
|
||||
|
||||
int i = 0;
|
||||
if (ss.dwCurrentState == SERVICE_START_PENDING)
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState != SERVICE_START_PENDING)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_STOPPED)
|
||||
{
|
||||
if (StartService(sc, 0, NULL))
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING)
|
||||
return EXRV_OK;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING)
|
||||
return EXRV_OK;
|
||||
else
|
||||
return EXRV_FAILED;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_config(const ex_wstr& srv_name, QUERY_SERVICE_CONFIG& cfg)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_QUERY_CONFIG);
|
||||
if (sc == NULL)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
DWORD dwBytesNeeded;
|
||||
if (!QueryServiceConfig(sc, &cfg, 4096, &dwBytesNeeded))
|
||||
return EXRV_FAILED;
|
||||
else
|
||||
return EXRV_OK;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_status(const ex_wstr& srv_name, ex_ulong& status)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_QUERY_STATUS);
|
||||
if (NULL == sc)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
SERVICE_STATUS ss;
|
||||
if (!QueryServiceStatus(sc, &ss))
|
||||
return EXRV_FAILED;
|
||||
|
||||
status = ss.dwCurrentState;
|
||||
return EXRV_OK;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_stop(const ex_wstr& srv_name)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_STOP | SERVICE_QUERY_STATUS);
|
||||
if (NULL == sc)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
SERVICE_STATUS ss;
|
||||
if (!QueryServiceStatus(sc, &ss))
|
||||
return EXRV_FAILED;
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_STOPPED)
|
||||
return EXRV_OK;
|
||||
|
||||
int i = 0;
|
||||
|
||||
DWORD dwStatus = ss.dwCurrentState;
|
||||
if (ss.dwCurrentState == SERVICE_START_PENDING || ss.dwCurrentState == SERVICE_PAUSE_PENDING || ss.dwCurrentState == SERVICE_CONTINUE_PENDING || ss.dwCurrentState == SERVICE_STOP_PENDING)
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState != dwStatus)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING || ss.dwCurrentState == SERVICE_PAUSED)
|
||||
{
|
||||
if (ControlService(sc, SERVICE_CONTROL_STOP, &ss))
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState == SERVICE_STOPPED)
|
||||
return EXRV_OK;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_STOPPED)
|
||||
return EXRV_OK;
|
||||
else
|
||||
return EXRV_FAILED;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_pause(const ex_wstr& srv_name)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_PAUSE_CONTINUE | SERVICE_QUERY_STATUS);
|
||||
if (NULL == sc)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
SERVICE_STATUS ss;
|
||||
if(!QueryServiceStatus(sc, &ss))
|
||||
return EXRV_FAILED;
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_PAUSED)
|
||||
return EXRV_OK;
|
||||
|
||||
int i = 0;
|
||||
|
||||
DWORD dwStatus = ss.dwCurrentState;
|
||||
if (ss.dwCurrentState == SERVICE_START_PENDING || ss.dwCurrentState == SERVICE_PAUSE_PENDING || ss.dwCurrentState == SERVICE_CONTINUE_PENDING)
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState != dwStatus)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING)
|
||||
{
|
||||
if (ControlService(sc, SERVICE_CONTROL_PAUSE, &ss))
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState == SERVICE_PAUSED)
|
||||
return EXRV_OK;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_PAUSED)
|
||||
return EXRV_OK;
|
||||
else
|
||||
return EXRV_FAILED;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_resume(const ex_wstr& srv_name)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_PAUSE_CONTINUE | SERVICE_QUERY_STATUS);
|
||||
if (NULL == sc)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
SERVICE_STATUS ss;
|
||||
if (!QueryServiceStatus(sc, &ss))
|
||||
return EXRV_FAILED;
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING)
|
||||
return EXRV_OK;
|
||||
|
||||
int i = 0;
|
||||
|
||||
DWORD dwStatus = ss.dwCurrentState;
|
||||
if (ss.dwCurrentState == SERVICE_START_PENDING || ss.dwCurrentState == SERVICE_PAUSE_PENDING || ss.dwCurrentState == SERVICE_CONTINUE_PENDING)
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState != dwStatus)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_PAUSED)
|
||||
{
|
||||
if (ControlService(sc, SERVICE_CONTROL_CONTINUE, &ss))
|
||||
{
|
||||
for (i = 0; i < 100; ++i)
|
||||
{
|
||||
Sleep(100);
|
||||
QueryServiceStatus(sc, &ss);
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING)
|
||||
return EXRV_OK;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ss.dwCurrentState == SERVICE_RUNNING)
|
||||
return EXRV_OK;
|
||||
else
|
||||
return EXRV_FAILED;
|
||||
}
|
||||
|
||||
ex_rv ex_winsrv_pid(const ex_wstr& srv_name, ex_ulong& pid)
|
||||
{
|
||||
SC_HANDLE sc = NULL;
|
||||
SC_HANDLE scm = NULL;
|
||||
winsrv_helper srv(scm, sc);
|
||||
|
||||
scm = OpenSCManager(NULL, NULL, SC_MANAGER_ALL_ACCESS);
|
||||
if (scm == NULL)
|
||||
return EXRV_CANNOT_OPEN;
|
||||
|
||||
sc = OpenServiceW(scm, srv_name.c_str(), SERVICE_QUERY_STATUS);
|
||||
if (NULL == sc)
|
||||
return EXRV_NOT_EXISTS;
|
||||
|
||||
DWORD byteneeded = 0;
|
||||
ex_u8 buf[1024] = { 0 };
|
||||
QueryServiceStatusEx(sc, SC_STATUS_PROCESS_INFO, buf, 1024, &byteneeded);
|
||||
|
||||
LPSERVICE_STATUS_PROCESS lp = (LPSERVICE_STATUS_PROCESS)buf;
|
||||
if (lp->dwCurrentState != SERVICE_RUNNING)
|
||||
return EXRV_NOT_START;
|
||||
|
||||
pid = lp->dwProcessId;
|
||||
|
||||
return EXRV_OK;
|
||||
}
|
||||
#endif
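A usage sketch of the service helpers above; the service name, display name and executable path are invented, and the calls require administrative rights:

#include <ex/ex_winsrv.h>

static bool demo_install_and_start(void)
{
    ex_wstr name(L"tpdemo");
    ex_wstr disp(L"Teleport Demo Service");
    ex_wstr exec(L"C:\\teleport\\bin\\tpdemo.exe");

    if (!ex_winsrv_is_exists(name))
    {
        if (EXRV_OK != ex_winsrv_install(name, disp, exec))
            return false;
    }

    if (EXRV_OK != ex_winsrv_start(name))
        return false;

    ex_ulong state = 0;
    return EXRV_OK == ex_winsrv_status(name, state) && SERVICE_RUNNING == state;
}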
|
|
@ -0,0 +1,287 @@
|
|||
#ifndef __PYS_H__
|
||||
#define __PYS_H__
|
||||
|
||||
#include <ex.h>
|
||||
|
||||
//=========================================================================
|
||||
// Type define
|
||||
//=========================================================================
|
||||
#if defined(EX_OS_WIN32)
|
||||
# define DYLIB_HANDLE HINSTANCE
|
||||
#else
|
||||
# define DYLIB_HANDLE void*
|
||||
#endif
|
||||
|
||||
//=========================================================================
|
||||
// Python API
|
||||
//=========================================================================
|
||||
#define MS_NO_COREDLL 1
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#if defined(EX_OS_WIN32)
|
||||
# define PYS_USE_PYLIB_SHARED
|
||||
# include <Python.h>
|
||||
#elif defined(EX_OS_LINUX)
|
||||
# define PYS_USE_PYLIB_STATIC
|
||||
# include <Python.h>
|
||||
#else
|
||||
# error This platform is not supported yet.
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef PYS_USE_PYLIB_SHARED
|
||||
//========================================================
|
||||
// WIN32
|
||||
//========================================================
|
||||
#define EXTDECLPROC(result, name, args) \
|
||||
typedef result (__cdecl* __PROC__ ## name) args; \
|
||||
extern __PROC__ ## name pylib_ ## name;
|
||||
|
||||
#define EXTDECLVAR(vartyp, name) \
|
||||
typedef vartyp __VAR__ ## name; \
|
||||
extern __VAR__ ## name* pylib_ ## name;
|
||||
|
||||
|
||||
EXTDECLVAR(int, Py_FrozenFlag);
|
||||
EXTDECLVAR(int, Py_NoSiteFlag);
|
||||
EXTDECLVAR(int, Py_OptimizeFlag);
|
||||
EXTDECLVAR(const char*, Py_FileSystemDefaultEncoding);
|
||||
EXTDECLVAR(int, Py_VerboseFlag);
|
||||
EXTDECLVAR(int, Py_IgnoreEnvironmentFlag);
|
||||
EXTDECLVAR(int, Py_DontWriteBytecodeFlag);
|
||||
EXTDECLVAR(int, Py_NoUserSiteDirectory);
|
||||
|
||||
EXTDECLPROC(void, Py_Initialize, (void));
|
||||
EXTDECLPROC(void, Py_Finalize, (void));
|
||||
EXTDECLPROC(void, Py_IncRef, (PyObject *));
|
||||
EXTDECLPROC(void, Py_DecRef, (PyObject *));
|
||||
EXTDECLPROC(void, Py_SetProgramName, (wchar_t *));
|
||||
EXTDECLPROC(void, Py_SetPythonHome, (wchar_t *));
|
||||
EXTDECLPROC(void, Py_SetPath, (wchar_t *)); /* new in Python 3 */
|
||||
EXTDECLPROC(int, PySys_SetArgvEx, (int, wchar_t **, int));
|
||||
EXTDECLPROC(PyObject *, PyImport_ImportModule, (const char *));
|
||||
EXTDECLPROC(PyObject *, PyObject_GetAttrString, (PyObject *, const char *));
|
||||
|
||||
// in python3.0~3.4, it is _Py_char2wchar, but renamed to Py_DecodeLocale in python3.5. WTF.
|
||||
//EXTDECLPROC(wchar_t *, _Py_char2wchar, (char *, size_t *));
|
||||
|
||||
//EXTDECLPROC(PyObject*, PyUnicode_FromWideChar, (const wchar_t*, size_t size ));
|
||||
|
||||
EXTDECLPROC(PyObject *, Py_BuildValue, (char *, ...));
|
||||
|
||||
EXTDECLPROC(void, PyErr_Clear, (void));
|
||||
EXTDECLPROC(PyObject *, PyErr_Occurred, (void));
|
||||
EXTDECLPROC(void, PyErr_Print, (void));
|
||||
|
||||
EXTDECLPROC(PyObject *, PyObject_Call, (PyObject *callable_object, PyObject *args, PyObject *kw));
|
||||
EXTDECLPROC(int, PyArg_Parse, (PyObject *, const char *, ...));
|
||||
|
||||
EXTDECLPROC(PyObject *, PyObject_CallFunction, (PyObject *, char *, ...));
|
||||
EXTDECLPROC(PyObject *, PyModule_GetDict, (PyObject *));
|
||||
EXTDECLPROC(PyObject *, PyDict_GetItemString, (PyObject *, char *));
|
||||
EXTDECLPROC(int, PyDict_SetItemString, (PyObject *dp, const char *key, PyObject *item));
|
||||
EXTDECLPROC(long, PyLong_AsLong, (PyObject *));
|
||||
EXTDECLPROC(PyObject *, PyLong_FromLong, (long));
|
||||
EXTDECLPROC(PyObject *, PyLong_FromUnsignedLong, (unsigned long));
|
||||
EXTDECLPROC(PyObject *, PyLong_FromUnsignedLongLong, (unsigned PY_LONG_LONG));
|
||||
EXTDECLPROC(PyObject *, PyBytes_FromString, (const char *));
|
||||
EXTDECLPROC(PyObject *, PyBytes_FromStringAndSize, (const char *, Py_ssize_t));
|
||||
EXTDECLPROC(PyObject *, PyUnicode_FromString, (const char *));
|
||||
EXTDECLPROC(PyObject *, PyBool_FromLong, (long));
|
||||
|
||||
|
||||
EXTDECLPROC(int, PyImport_ExtendInittab, (struct _inittab *newtab));
|
||||
EXTDECLPROC(PyObject *, PyModule_Create2, (struct PyModuleDef*, int apiver));
|
||||
EXTDECLPROC(int, PyArg_ParseTuple, (PyObject *, const char *, ...));
|
||||
EXTDECLPROC(PyObject *, PyTuple_Pack, (Py_ssize_t, ...));
|
||||
|
||||
|
||||
#else // for linux, link to static python lib.
|
||||
|
||||
#define pylib_Py_FrozenFlag Py_FrozenFlag
|
||||
#define pylib_Py_NoSiteFlag Py_NoSiteFlag
|
||||
#define pylib_Py_OptimizeFlag Py_OptimizeFlag
|
||||
#define pylib_Py_FileSystemDefaultEncoding Py_FileSystemDefaultEncoding
|
||||
#define pylib_Py_VerboseFlag Py_VerboseFlag
|
||||
#define pylib_Py_IgnoreEnvironmentFlag Py_IgnoreEnvironmentFlag
|
||||
#define pylib_Py_DontWriteBytecodeFlag Py_DontWriteBytecodeFlag
|
||||
#define pylib_Py_NoUserSiteDirectory Py_NoUserSiteDirectory
|
||||
#define pylib_Py_Initialize Py_Initialize
|
||||
#define pylib_Py_Finalize Py_Finalize
|
||||
#define pylib_Py_IncRef Py_IncRef
|
||||
#define pylib_Py_DecRef Py_DecRef
|
||||
#define pylib_Py_SetProgramName Py_SetProgramName
|
||||
#define pylib_Py_SetPythonHome Py_SetPythonHome
|
||||
#define pylib_Py_SetPath Py_SetPath
|
||||
#define pylib_PySys_SetArgvEx PySys_SetArgvEx
|
||||
#define pylib_PyImport_ImportModule PyImport_ImportModule
|
||||
#define pylib_PyObject_GetAttrString PyObject_GetAttrString
|
||||
#define pylib_Py_BuildValue Py_BuildValue
|
||||
#define pylib_PyErr_Clear PyErr_Clear
|
||||
#define pylib_PyErr_Occurred PyErr_Occurred
|
||||
#define pylib_PyErr_Print PyErr_Print
|
||||
#define pylib_PyObject_Call PyObject_Call
|
||||
#define pylib_PyArg_Parse PyArg_Parse
|
||||
#define pylib_PyObject_CallFunction PyObject_CallFunction
|
||||
#define pylib_PyModule_GetDict PyModule_GetDict
|
||||
#define pylib_PyDict_GetItemString PyDict_GetItemString
|
||||
#define pylib_PyDict_SetItemString PyDict_SetItemString
|
||||
#define pylib_PyLong_AsLong PyLong_AsLong
|
||||
#define pylib_PyLong_FromLong PyLong_FromLong
|
||||
#define pylib_PyLong_FromUnsignedLong PyLong_FromUnsignedLong
|
||||
#define pylib_PyLong_FromUnsignedLongLong PyLong_FromUnsignedLongLong
|
||||
#define pylib_PyBytes_FromString PyBytes_FromString
|
||||
#define pylib_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
|
||||
#define pylib_PyUnicode_FromString PyUnicode_FromString
|
||||
#define pylib_PyBool_FromLong PyBool_FromLong
|
||||
#define pylib_PyImport_ExtendInittab PyImport_ExtendInittab
|
||||
#define pylib_PyModule_Create2 PyModule_Create2
|
||||
#define pylib_PyArg_ParseTuple PyArg_ParseTuple
|
||||
#define pylib_PyTuple_Pack PyTuple_Pack
|
||||
|
||||
#define pylib_Py_IncRef Py_IncRef
|
||||
#define pylib_Py_DecRef Py_DecRef
|
||||
#define pylib_PyBool_FromLong PyBool_FromLong
|
||||
#define pylib_PyBool_FromLong PyBool_FromLong
|
||||
|
||||
#endif
|
||||
|
||||
#define PYLIB_XINCREF(o) pylib_Py_IncRef(o)
|
||||
#define PYLIB_XDECREF(o) pylib_Py_DecRef(o)
|
||||
#define PYLIB_DECREF(o) PYLIB_XDECREF(o)
|
||||
#define PYLIB_INCREF(o) PYLIB_XINCREF(o)
|
||||
|
||||
#define PYLIB_RETURN_TRUE return pylib_PyBool_FromLong(1)
|
||||
#define PYLIB_RETURN_FALSE return pylib_PyBool_FromLong(0)
|
||||
|
||||
|
||||
typedef int PYS_BOOL;
|
||||
#define PYS_TRUE 1
|
||||
#define PYS_FALSE 0
|
||||
|
||||
|
||||
//=========================================================================
|
||||
// PyShell API
|
||||
//=========================================================================
|
||||
typedef unsigned long PYS_RET;
|
||||
#define PYSR_OK 0x00000000
|
||||
#define PYSR_FAILED 0x00000005
|
||||
|
||||
#if 0
|
||||
#ifdef EX_OS_WIN32
|
||||
# ifdef EX_DEBUG
|
||||
# if defined(_M_X64)
|
||||
# pragma comment(lib, "pys_64d.lib")
|
||||
# elif defined(_M_IX86)
|
||||
# pragma comment(lib, "pys_32d.lib")
|
||||
# else
|
||||
# error unsupport platform.
|
||||
# endif
|
||||
# else
|
||||
# if defined(_M_X64)
|
||||
# pragma comment(lib, "pys_64.lib")
|
||||
# elif defined(_M_IX86)
|
||||
# pragma comment(lib, "pys_32.lib")
|
||||
# else
|
||||
# error unsupport platform.
|
||||
# endif
|
||||
# endif
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
typedef void* PYS_HANDLE;
|
||||
|
||||
// create a PyShell handle; all operations are performed against this handle (only one handle per process)
|
||||
PYS_HANDLE pys_create(void);
|
||||
// destroy a PyShell handle
|
||||
void pys_destroy(PYS_HANDLE* pysh);
|
||||
|
||||
// initialize with the given runtime path (which contains pythonXX.dll, python.zip, modules, etc.)
|
||||
PYS_BOOL pys_init_runtime(PYS_HANDLE pysh, const wchar_t* exec_file, const wchar_t* runtime_path);
|
||||
|
||||
// set the python package search path; may be called multiple times to append entries (optional)
|
||||
PYS_BOOL pys_add_search_path(PYS_HANDLE pysh, const wchar_t* path);
|
||||
|
||||
// set the command-line arguments for the python runtime (optional)
|
||||
void pys_set_argv(PYS_HANDLE pysh, int argc, wchar_t** argv);
|
||||
// append one command-line argument for the python runtime (optional)
|
||||
void pys_add_arg(PYS_HANDLE pysh, const wchar_t* arg);
|
||||
// set the python interpreter name (optional; defaults to the absolute path of the current executable)
|
||||
void pys_set_program(PYS_HANDLE pysh, const wchar_t* program_name);
|
||||
|
||||
// set the startup script file name; it may be a .py file or a .zip file
|
||||
void pys_set_startup_file(PYS_HANDLE pysh, const wchar_t* filename);
|
||||
|
||||
// set the bootstrap module name and entry function name; if func_name is NULL, the module's main function is executed
// this call may be omitted; by default:
//   if startup_file is a .py file, the default module_name is the .py file's base name,
//   if startup_file is a .zip file, the default module_name is `pysmain`.
|
||||
void pys_set_bootstrap_module(PYS_HANDLE pysh, const char* module_name, const char* func_name);
|
||||
|
||||
// prototype of a builtin-module init function
|
||||
typedef PyObject* (*pys_init_module_func)(void);
|
||||
|
||||
typedef struct PYS_BUILTIN_FUNC
|
||||
{
|
||||
const char* py_func_name;     // function name used when calling from Python
PyCFunction c_func_addr;      // the corresponding C function
PYS_BOOL have_args;           // whether this function takes arguments
const char* py_func_desc;     // docstring for this function; may be NULL
|
||||
}PYS_BUILTIN_FUNC;
|
||||
|
||||
typedef enum PYS_CONST_TYPE
|
||||
{
|
||||
PYS_CONST_BOOL,     // seen in Python as True/False
PYS_CONST_LONG,     // seen in Python as an integer
PYS_CONST_STRING,   // seen in Python as a string
PYS_CONST_BYTES     // seen in Python as a bytes object
|
||||
}PYS_CONST_TYPE;
|
||||
|
||||
typedef struct PYS_BUILTIN_CONST
|
||||
{
|
||||
char* py_const_name;    // variable name used from Python
PYS_CONST_TYPE type;    // constant type
size_t size;            // length of the constant data
void* buffer;           // content of the constant data
|
||||
}PYS_BUILTIN_CONST;
|
||||
|
||||
// add a builtin module; if it has no functions or constants, the corresponding funcs/consts may be NULL.
// this may be called multiple times to create several builtin modules; calling it again with the same module name appends the functions and constants to that module.
// within one module, function names and constant names must not repeat (they are case-sensitive).
|
||||
PYS_BOOL pys_add_builtin_module(PYS_HANDLE pysh, const char* module_name, pys_init_module_func init_func);
|
||||
|
||||
PyObject* pys_create_module(const char* module_name, PYS_BUILTIN_FUNC* funcs);
|
||||
void pys_builtin_const_bool(PyObject* mod, const char* name, PYS_BOOL val);
|
||||
void pys_builtin_const_long(PyObject* mod, const char* name, long val);
|
||||
void pys_builtin_const_utf8(PyObject* mod, const char* name, const char* val); // val must be a UTF-8 encoded string
|
||||
void pys_builtin_const_wcs(PyObject* mod, const char* name, const wchar_t* val);
|
||||
void pys_builtin_const_bin(PyObject* mod, const char* name, const ex_u8* val, size_t size);
|
||||
|
||||
// Run the Python interpreter.
|
||||
int pys_run(PYS_HANDLE pysh);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
class PysHandleHolder
|
||||
{
|
||||
public:
|
||||
PysHandleHolder(PYS_HANDLE h) :m_handle(h) { }
|
||||
~PysHandleHolder() { pys_destroy(&m_handle); }
|
||||
private:
|
||||
PYS_HANDLE m_handle;
|
||||
};
|
||||
#endif
|
||||
|
||||
#endif // __PYS_H__
|
|
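For reference, a minimal caller of this pys API could look like the sketch below. The handle type, the helpers and PysHandleHolder come straight from the header above; the function name run_script_example and the C:\demo paths are only illustrative (the real caller in this commit is tp_web's _main_loop()).

#include <pys.h>

int run_script_example(void)
{
    PYS_HANDLE pysh = pys_create();
    if (NULL == pysh)
        return 1;
    PysHandleHolder holder(pysh);          // pys_destroy() runs automatically on scope exit

    // the runtime path holds pythonXX.dll, python.zip and the modules directory
    if (!pys_init_runtime(pysh, L"C:\\demo\\demo.exe", L"C:\\demo\\pysrt"))
        return 1;

    pys_set_startup_file(pysh, L"C:\\demo\\hello.py"); // bootstrap module defaults to "hello", entry to main()
    pys_add_arg(pysh, L"--verbose");                   // ends up in the interpreter's argv (PySys_SetArgvEx)

    return pys_run(pysh);                              // returns the integer returned by hello.main()
}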
@@ -0,0 +1,205 @@
|
|||
#include <pys.h>
|
||||
#include "pys_core.h"
|
||||
#include "pys_util.h"
|
||||
|
||||
#include <ex/ex_log.h>
|
||||
|
||||
PYS_HANDLE pys_create(void)
|
||||
{
|
||||
pys::Core* core = new pys::Core;
|
||||
return core;
|
||||
}
|
||||
|
||||
void pys_destroy(PYS_HANDLE* pysh)
|
||||
{
|
||||
if (NULL == pysh)
|
||||
return;
|
||||
if (NULL == *pysh)
|
||||
return;
|
||||
pys::Core* core = (pys::Core*)*pysh;
|
||||
delete core;
|
||||
*pysh = NULL;
|
||||
}
|
||||
|
||||
PYS_BOOL pys_init_runtime(PYS_HANDLE pysh, const wchar_t* exec_file, const wchar_t* runtime_path)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
if (!core->init(exec_file, runtime_path))
|
||||
return PYS_FALSE;
|
||||
|
||||
return PYS_TRUE;
|
||||
}
|
||||
|
||||
int pys_run(PYS_HANDLE pysh)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
return core->run();
|
||||
}
|
||||
|
||||
PYS_BOOL pys_add_search_path(PYS_HANDLE pysh, const wchar_t* path)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
core->add_search_path(path);
|
||||
return PYS_TRUE;
|
||||
}
|
||||
|
||||
|
||||
void pys_set_program(PYS_HANDLE pysh, const wchar_t* program_name)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
core->m_prog_name = program_name;
|
||||
}
|
||||
|
||||
void pys_set_startup_file(PYS_HANDLE pysh, const wchar_t* filename)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
core->set_startup_file(filename);
|
||||
}
|
||||
|
||||
void pys_set_bootstrap_module(PYS_HANDLE pysh, const char* module_name, const char* func_name)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
if(NULL != module_name)
|
||||
core->m_bootstrap_module = module_name;
|
||||
if (NULL != func_name)
|
||||
core->m_bootstrap_func = func_name;
|
||||
}
|
||||
|
||||
void pys_set_argv(PYS_HANDLE pysh, int argc, wchar_t** argv)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
core->m_py_args.clear();
|
||||
|
||||
int i = 0;
|
||||
for (i = 0; i < argc; ++i)
|
||||
{
|
||||
core->m_py_args.push_back(argv[i]);
|
||||
}
|
||||
}
|
||||
|
||||
void pys_add_arg(PYS_HANDLE pysh, const wchar_t* arg)
|
||||
{
|
||||
if (NULL == arg)
|
||||
return;
|
||||
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
core->m_py_args.push_back(arg);
|
||||
}
|
||||
|
||||
PYS_BOOL pys_add_builtin_module(PYS_HANDLE pysh, const char* module_name, pys_init_module_func init_func)
|
||||
{
|
||||
pys::Core* core = (pys::Core*)pysh;
|
||||
if (!core->add_builtin_module(module_name, init_func))
|
||||
return PYS_FALSE;
|
||||
return PYS_TRUE;
|
||||
}
|
||||
|
||||
PyObject* pys_create_module(const char* module_name, PYS_BUILTIN_FUNC* funcs)
|
||||
{
|
||||
PyMethodDef* _method_def = NULL;
|
||||
PyModuleDef* _module_def = NULL;
|
||||
|
||||
int i = 0;
|
||||
int func_count = 0;
|
||||
|
||||
if (funcs != NULL)
|
||||
{
|
||||
for (i = 0; ; ++i)
|
||||
{
|
||||
if (funcs[i].py_func_name == NULL)
|
||||
break;
|
||||
func_count++;
|
||||
}
|
||||
}
|
||||
|
||||
_method_def = new PyMethodDef[func_count + 1];
|
||||
memset(_method_def, 0, sizeof(PyMethodDef)*(func_count + 1));
|
||||
for (i = 0; i < func_count; ++i)
|
||||
{
|
||||
_method_def[i].ml_name = funcs[i].py_func_name;
|
||||
_method_def[i].ml_meth = funcs[i].c_func_addr;
|
||||
_method_def[i].ml_doc = funcs[i].py_func_desc;
|
||||
if(funcs[i].have_args)
|
||||
_method_def[i].ml_flags = METH_VARARGS;
|
||||
else
|
||||
_method_def[i].ml_flags = METH_NOARGS;
|
||||
}
|
||||
|
||||
_module_def = new PyModuleDef;
|
||||
memset(_module_def, 0, sizeof(PyModuleDef));
|
||||
_module_def->m_name = module_name;
|
||||
_module_def->m_size = -1;
|
||||
_module_def->m_methods = _method_def;
|
||||
|
||||
// hand ownership of these two dynamically allocated objects to g_builtin_module_info
|
||||
pys::g_builtin_module_info.add(_method_def, _module_def);
|
||||
|
||||
PyObject* module = pylib_PyModule_Create2(_module_def, PYTHON_API_VERSION);
|
||||
|
||||
if (NULL == module)
|
||||
{
|
||||
EXLOGE("[pys]: can not create builtin module `%s`.\n", module_name);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return module;
|
||||
}
|
||||
|
||||
void pys_builtin_const_bool(PyObject* mod, const char* name, PYS_BOOL val)
|
||||
{
|
||||
PyObject* dict = NULL;
|
||||
PyObject* tmp_obj = NULL;
|
||||
if (NULL == (dict = pylib_PyModule_GetDict(mod)))
|
||||
return;
|
||||
tmp_obj = pylib_PyBool_FromLong(val);
|
||||
pylib_PyDict_SetItemString(dict, name, tmp_obj);
|
||||
PYLIB_DECREF(tmp_obj);
|
||||
}
|
||||
|
||||
void pys_builtin_const_long(PyObject* mod, const char* name, long val)
|
||||
{
|
||||
PyObject* dict = NULL;
|
||||
PyObject* tmp_obj = NULL;
|
||||
if (NULL == (dict = pylib_PyModule_GetDict(mod)))
|
||||
return;
|
||||
tmp_obj = pylib_PyLong_FromLong(val);
|
||||
pylib_PyDict_SetItemString(dict, name, tmp_obj);
|
||||
PYLIB_DECREF(tmp_obj);
|
||||
}
|
||||
|
||||
void pys_builtin_const_utf8(PyObject* mod, const char* name, const char* val) // val must be a UTF-8 encoded string
|
||||
{
|
||||
PyObject* dict = NULL;
|
||||
PyObject* tmp_obj = NULL;
|
||||
if (NULL == (dict = pylib_PyModule_GetDict(mod)))
|
||||
return;
|
||||
tmp_obj = pylib_PyUnicode_FromString(val);
|
||||
pylib_PyDict_SetItemString(dict, name, tmp_obj);
|
||||
PYLIB_DECREF(tmp_obj);
|
||||
}
|
||||
|
||||
void pys_builtin_const_wcs(PyObject* mod, const char* name, const wchar_t* val)
|
||||
{
|
||||
ex_astr strval;
|
||||
if (!ex_wstr2astr(val, strval, EX_CODEPAGE_UTF8))
|
||||
return;
|
||||
|
||||
PyObject* dict = NULL;
|
||||
PyObject* tmp_obj = NULL;
|
||||
if (NULL == (dict = pylib_PyModule_GetDict(mod)))
|
||||
return;
|
||||
tmp_obj = pylib_PyUnicode_FromString(strval.c_str());
|
||||
pylib_PyDict_SetItemString(dict, name, tmp_obj);
|
||||
PYLIB_DECREF(tmp_obj);
|
||||
}
|
||||
|
||||
void pys_builtin_const_bin(PyObject* mod, const char* name, const ex_u8* val, size_t size)
|
||||
{
|
||||
PyObject* dict = NULL;
|
||||
PyObject* tmp_obj = NULL;
|
||||
if (NULL == (dict = pylib_PyModule_GetDict(mod)))
|
||||
return;
|
||||
tmp_obj = pylib_PyBytes_FromStringAndSize((char*)val, size);
|
||||
pylib_PyDict_SetItemString(dict, name, tmp_obj);
|
||||
PYLIB_DECREF(tmp_obj);
|
||||
}
|
|
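A builtin module would be registered roughly as in the sketch below. The PYS_BUILTIN_FUNC table, pys_create_module() and the pys_builtin_const_* helpers are the ones implemented above; the module name `demo`, the function names, and the assumption that the pylib_* wrappers are reachable through pys.h are illustrative only.

#include <pys.h>

// C implementation behind demo.add(a, b)
static PyObject* _py_demo_add(PyObject* self, PyObject* args)
{
    long a = 0, b = 0;
    if (!pylib_PyArg_ParseTuple(args, "ll", &a, &b))
        return NULL;
    return pylib_PyLong_FromLong(a + b);
}

static PYS_BUILTIN_FUNC demo_funcs[] = {
    { "add", _py_demo_add, PYS_TRUE, "add two integers" },
    { NULL, NULL, PYS_FALSE, NULL }    // a NULL name terminates the table
};

// pys_init_module_func, run by the interpreter when `import demo` executes
static PyObject* _init_demo_module(void)
{
    PyObject* mod = pys_create_module("demo", demo_funcs);
    if (NULL == mod)
        return NULL;
    pys_builtin_const_long(mod, "VERSION", 1);           // demo.VERSION
    pys_builtin_const_utf8(mod, "NAME", "demo module");  // demo.NAME
    return mod;
}

// registration, before pys_run():  pys_add_builtin_module(pysh, "demo", _init_demo_module);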
@@ -0,0 +1,604 @@
|
|||
#include <pys.h>
|
||||
#include "pys_core.h"
|
||||
#include "pys_util.h"
|
||||
|
||||
#ifdef PYS_USE_PYLIB_SHARED
|
||||
//========================================================
|
||||
// WIN32
|
||||
//========================================================
|
||||
#define DECLPROC(name) \
|
||||
__PROC__ ## name pylib_ ## name = NULL;
|
||||
|
||||
#define GETPROCOPT(lib, name, sym) \
|
||||
pylib_ ## name = (__PROC__ ## name)GetProcAddress(lib, #sym)
|
||||
|
||||
#define GETPROC(lib, name) \
|
||||
GETPROCOPT(lib, name, name); \
|
||||
if(!pylib_ ## name) { \
|
||||
EXLOGE("[pys] can not GetProcAddress for " #name "\n"); \
|
||||
return -1;\
|
||||
}
|
||||
|
||||
#pragma warning(disable:4054)
|
||||
|
||||
#define DECLVAR(name) \
|
||||
__VAR__ ## name* pylib_ ## name = NULL;
|
||||
#define GETVAR(lib, name) \
|
||||
pylib_ ## name = (__VAR__ ## name*)GetProcAddress(lib, #name); \
|
||||
if (!pylib_ ## name) { \
|
||||
EXLOGE("[pys] can not GetProcAddress for " #name "\n"); \
|
||||
return -1; \
|
||||
}
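// For reference, GETPROC(lib, Py_Initialize) expands to roughly:
//
//     pylib_Py_Initialize = (__PROC__Py_Initialize)GetProcAddress(lib, "Py_Initialize");
//     if (!pylib_Py_Initialize) {
//         EXLOGE("[pys] can not GetProcAddress for Py_Initialize\n");
//         return -1;
//     }
//
// DECLPROC/DECLVAR below define the matching pylib_* pointer variables that the rest
// of the code calls in place of the real Python C-API symbols.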
|
||||
|
||||
|
||||
static int _pys_map_python_lib(DYLIB_HANDLE handle);
|
||||
static DYLIB_HANDLE _pys_dlopen(const wchar_t* dylib_path);
|
||||
|
||||
static int pys_pylib_load(const wchar_t* lib_path)
|
||||
{
|
||||
DYLIB_HANDLE lib = NULL;
|
||||
|
||||
EXLOGD(L"[pys] py-lib: %ls\n", lib_path);
|
||||
|
||||
lib = _pys_dlopen(lib_path);
|
||||
if (NULL == lib)
|
||||
return -1;
|
||||
|
||||
if (0 != _pys_map_python_lib(lib))
|
||||
return -1;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
DYLIB_HANDLE _pys_dlopen(const wchar_t* dylib_path)
|
||||
{
|
||||
DYLIB_HANDLE handle = NULL;
|
||||
#ifdef EX_OS_WIN32
|
||||
// PYSLOGW(L"[pys] py-lib: %ls\n", dylib_path);
|
||||
handle = LoadLibraryExW(dylib_path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
|
||||
if (NULL == handle)
|
||||
{
|
||||
EXLOGE(L"[pys] can not load python lib: %ls.\n", dylib_path);
|
||||
return NULL;
|
||||
}
|
||||
#else
|
||||
ex_astr path;
|
||||
if (!ex_wstr2astr(dylib_path, path, EX_CODEPAGE_UTF8))
|
||||
{
|
||||
EXLOGE("[pys] convert dylib_path failed.\n");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
EXLOGD("[pys] py-lib-a: %s\n", path);
|
||||
|
||||
handle = dlopen(path.c_str(), RTLD_NOW | RTLD_GLOBAL);
|
||||
|
||||
if (NULL == handle)
|
||||
{
|
||||
EXLOGE("[pys] dlopen() failed: %s.\n", dlerror());
|
||||
return NULL;
|
||||
}
|
||||
#endif
|
||||
|
||||
return handle;
|
||||
}
|
||||
|
||||
|
||||
int _pys_map_python_lib(DYLIB_HANDLE handle)
|
||||
{
|
||||
GETVAR(handle, Py_DontWriteBytecodeFlag);
|
||||
GETVAR(handle, Py_FileSystemDefaultEncoding);
|
||||
GETVAR(handle, Py_FrozenFlag);
|
||||
GETVAR(handle, Py_IgnoreEnvironmentFlag);
|
||||
GETVAR(handle, Py_NoSiteFlag);
|
||||
GETVAR(handle, Py_NoUserSiteDirectory);
|
||||
GETVAR(handle, Py_OptimizeFlag);
|
||||
GETVAR(handle, Py_VerboseFlag);
|
||||
|
||||
|
||||
GETPROC(handle, Py_BuildValue);
|
||||
GETPROC(handle, Py_DecRef);
|
||||
GETPROC(handle, Py_Finalize);
|
||||
GETPROC(handle, Py_IncRef);
|
||||
GETPROC(handle, Py_Initialize);
|
||||
GETPROC(handle, Py_SetPath);
|
||||
GETPROC(handle, Py_SetProgramName);
|
||||
GETPROC(handle, Py_SetPythonHome);
|
||||
GETPROC(handle, PySys_SetArgvEx);
|
||||
|
||||
GETPROC(handle, PyImport_ImportModule);
|
||||
GETPROC(handle, PyObject_GetAttrString);
|
||||
|
||||
//GETPROC(handle, _Py_char2wchar);
|
||||
//GETPROC(handle, PyUnicode_FromWideChar);
|
||||
|
||||
|
||||
GETPROC(handle, PyErr_Clear);
|
||||
GETPROC(handle, PyErr_Occurred);
|
||||
GETPROC(handle, PyErr_Print);
|
||||
|
||||
//GETPROC(handle, PyMem_RawFree);
|
||||
GETPROC(handle, PyObject_Call);
|
||||
GETPROC(handle, PyArg_Parse);
|
||||
|
||||
GETPROC(handle, PyObject_CallFunction);
|
||||
GETPROC(handle, PyModule_GetDict);
|
||||
GETPROC(handle, PyDict_GetItemString);
|
||||
GETPROC(handle, PyDict_SetItemString);
|
||||
GETPROC(handle, PyLong_AsLong);
|
||||
GETPROC(handle, PyLong_FromLong);
|
||||
GETPROC(handle, PyLong_FromUnsignedLong);
|
||||
GETPROC(handle, PyLong_FromUnsignedLongLong);
|
||||
GETPROC(handle, PyBytes_FromString);
|
||||
GETPROC(handle, PyBytes_FromStringAndSize);
|
||||
GETPROC(handle, PyUnicode_FromString);
|
||||
GETPROC(handle, PyBool_FromLong);
|
||||
|
||||
GETPROC(handle, PyImport_ExtendInittab);
|
||||
GETPROC(handle, PyModule_Create2);
|
||||
GETPROC(handle, PyArg_ParseTuple);
|
||||
GETPROC(handle, PyTuple_Pack);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
DECLVAR(Py_DontWriteBytecodeFlag);
|
||||
DECLVAR(Py_FileSystemDefaultEncoding);
|
||||
DECLVAR(Py_FrozenFlag);
|
||||
DECLVAR(Py_IgnoreEnvironmentFlag);
|
||||
DECLVAR(Py_NoSiteFlag);
|
||||
DECLVAR(Py_NoUserSiteDirectory);
|
||||
DECLVAR(Py_OptimizeFlag);
|
||||
DECLVAR(Py_VerboseFlag);
|
||||
|
||||
|
||||
DECLPROC(Py_BuildValue);
|
||||
DECLPROC(Py_DecRef);
|
||||
DECLPROC(Py_Finalize);
|
||||
DECLPROC(Py_IncRef);
|
||||
DECLPROC(Py_Initialize);
|
||||
DECLPROC(Py_SetPath);
|
||||
DECLPROC(Py_SetProgramName);
|
||||
DECLPROC(Py_SetPythonHome);
|
||||
DECLPROC(PySys_SetArgvEx);
|
||||
|
||||
DECLPROC(PyImport_ImportModule);
|
||||
DECLPROC(PyObject_GetAttrString);
|
||||
|
||||
//DECLPROC(_Py_char2wchar);
|
||||
//DECLPROC(PyUnicode_FromWideChar);
|
||||
|
||||
DECLPROC(PyErr_Clear);
|
||||
DECLPROC(PyErr_Occurred);
|
||||
DECLPROC(PyErr_Print);
|
||||
|
||||
//DECLPROC(PyMem_RawFree);
|
||||
DECLPROC(PyObject_Call);
|
||||
DECLPROC(PyArg_Parse);
|
||||
|
||||
DECLPROC(PyObject_CallFunction);
|
||||
DECLPROC(PyModule_GetDict);
|
||||
DECLPROC(PyDict_GetItemString);
|
||||
DECLPROC(PyDict_SetItemString);
|
||||
DECLPROC(PyLong_AsLong);
|
||||
DECLPROC(PyLong_FromLong);
|
||||
DECLPROC(PyLong_FromUnsignedLong);
|
||||
DECLPROC(PyLong_FromUnsignedLongLong);
|
||||
DECLPROC(PyBytes_FromString);
|
||||
DECLPROC(PyBytes_FromStringAndSize);
|
||||
DECLPROC(PyUnicode_FromString);
|
||||
DECLPROC(PyBool_FromLong);
|
||||
|
||||
DECLPROC(PyImport_ExtendInittab);
|
||||
DECLPROC(PyModule_Create2);
|
||||
DECLPROC(PyArg_ParseTuple);
|
||||
DECLPROC(PyTuple_Pack);
|
||||
|
||||
|
||||
#else
|
||||
int pys_pylib_load(const wchar_t* lib_path)
|
||||
{
|
||||
EXLOGD("[pys] link to python static lib.\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
//================================================================
|
||||
//
|
||||
//================================================================
|
||||
|
||||
namespace pys
|
||||
{
|
||||
BuiltinModuleInfo g_builtin_module_info;
|
||||
|
||||
BuiltinModuleInfo::BuiltinModuleInfo()
|
||||
{}
|
||||
|
||||
BuiltinModuleInfo::~BuiltinModuleInfo()
|
||||
{
|
||||
builtin_module_infos::iterator it = m_infos.begin();
|
||||
for (; it != m_infos.end(); ++it)
|
||||
{
|
||||
delete[] (*it)->method_def;
|
||||
delete (*it)->module_def;
|
||||
delete (*it);
|
||||
}
|
||||
m_infos.clear();
|
||||
}
|
||||
|
||||
void BuiltinModuleInfo::add(PyMethodDef* method_def, PyModuleDef* module_def)
|
||||
{
|
||||
BUILTIN_MODULE_INFO* info = new BUILTIN_MODULE_INFO;
|
||||
info->method_def = method_def;
|
||||
info->module_def = module_def;
|
||||
m_infos.push_back(info);
|
||||
}
|
||||
|
||||
//================================================================
|
||||
//
|
||||
//================================================================
|
||||
|
||||
Core::Core()
|
||||
{
|
||||
m_init_tab = NULL;
|
||||
}
|
||||
|
||||
Core::~Core()
|
||||
{
|
||||
if (NULL != m_init_tab)
|
||||
delete[] m_init_tab;
|
||||
}
|
||||
|
||||
bool Core::init(const wchar_t* exec_file, const wchar_t* runtime_path)
|
||||
{
|
||||
// if (!ex_exec_file(m_exec_file))
|
||||
// return false;
|
||||
|
||||
m_exec_file = exec_file;
|
||||
|
||||
m_exec_path = m_exec_file;
|
||||
if (!ex_dirname(m_exec_path))
|
||||
return false;
|
||||
|
||||
m_runtime_path = runtime_path;
|
||||
return _load_dylib();
|
||||
}
|
||||
|
||||
bool Core::set_startup_file(const wchar_t* filename)
|
||||
{
|
||||
if (NULL == filename)
|
||||
return false;
|
||||
ex_wstr fname = filename;
|
||||
if (!ex_is_abspath(fname.c_str()))
|
||||
ex_abspath(fname);
|
||||
if (!ex_is_file_exists(fname.c_str()))
|
||||
return false;
|
||||
|
||||
ex_wstr ext;
|
||||
if (!ex_path_ext_name(fname, ext))
|
||||
return false;
|
||||
|
||||
m_start_file = fname;
|
||||
|
||||
if (ext == L"zip")
|
||||
{
|
||||
m_is_zipped_app = true;
|
||||
// add the .zip file itself to the search path
|
||||
m_search_path.push_back(m_start_file);
|
||||
}
|
||||
else
|
||||
{
|
||||
m_is_zipped_app = false;
|
||||
|
||||
// add the directory containing the .py file to the search path
|
||||
ex_wstr tmp_path(m_start_file);
|
||||
ex_dirname(tmp_path);
|
||||
m_search_path.push_back(tmp_path);
|
||||
|
||||
// if no bootstrap module name has been set yet, use the .py file's base name as the bootstrap module
|
||||
if (m_bootstrap_module.empty())
|
||||
{
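// derive the module name by stripping the directory prefix (tmp_path plus one
// separator) and the trailing ".py" (3 chars); e.g. a start file of
// "C:\web\app\eom_main.py" with tmp_path "C:\web\app" yields "eom_main"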
|
||||
ex_wstr wmod(m_start_file);
|
||||
wmod.assign(m_start_file, tmp_path.length() + 1, m_start_file.length() - tmp_path.length() - 1 - 3);
|
||||
ex_wstr2astr(wmod, m_bootstrap_module);
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Core::add_builtin_module(const char* module_name, pys_init_module_func init_func)
|
||||
{
|
||||
builtin_modules::iterator it = m_builtin_modules.find(module_name);
|
||||
if (it != m_builtin_modules.end())
|
||||
return false;
|
||||
|
||||
m_builtin_modules.insert(std::make_pair(module_name, init_func));
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Core::get_builtin_module_by_init_func(pys_init_module_func init_func, ex_astr& module_name)
|
||||
{
|
||||
builtin_modules::iterator it = m_builtin_modules.begin();
|
||||
for (; it != m_builtin_modules.end(); ++it)
|
||||
{
|
||||
if (init_func == it->second)
|
||||
{
|
||||
module_name = it->first;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
bool Core::_load_dylib(void)
|
||||
{
|
||||
#ifdef PYS_USE_PYLIB_SHARED
|
||||
ex_wstr ver_file = m_runtime_path;
|
||||
if (!ex_path_join(ver_file, true, L"python.ver", NULL))
|
||||
return false;
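// python.ver is a small file in the runtime directory whose leading bytes name the
// Python DLL to load (e.g. "python35.dll"; the exact name is whatever the packaged
// runtime ships), padded so that a full 64 bytes can be read below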
|
||||
FILE* f = pys_open_file(ver_file.c_str(), L"rb");
|
||||
if (NULL == f)
|
||||
{
|
||||
EXLOGE(L"[pys] can not open file: %ls\n", ver_file.c_str());
|
||||
return false;
|
||||
}
|
||||
fseek(f, 0L, SEEK_SET);
|
||||
char dll_name[64] = { 0 };
|
||||
size_t read_size = fread(dll_name, 1, 64, f);
|
||||
fclose(f);
|
||||
if (64 != read_size)
|
||||
{
|
||||
EXLOGE(L"[pys] read file failed, need 64B, read %dB\n", read_size);
|
||||
return false;
|
||||
}
|
||||
|
||||
ex_wstr wstr_dll;
|
||||
if (!ex_astr2wstr(dll_name, wstr_dll))
|
||||
return false;
|
||||
|
||||
ex_wstr dll_file = m_runtime_path;
|
||||
if (!ex_path_join(dll_file, true, wstr_dll.c_str(), NULL))
|
||||
return false;
|
||||
|
||||
if (0 != pys_pylib_load(dll_file.c_str()))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Core::add_search_path(const wchar_t* wpath)
|
||||
{
|
||||
ex_wstr wstr_path = wpath;
|
||||
if (!ex_abspath(wstr_path))
|
||||
{
|
||||
EXLOGE(L"can not get abspath of `%ls`.\n", wpath);
|
||||
return false;
|
||||
}
|
||||
|
||||
pys_wstr_list::iterator it = m_search_path.begin();
|
||||
for (; it != m_search_path.end(); ++it)
|
||||
{
|
||||
// TODO: compare case-insensitively on Windows
|
||||
if (wstr_path == (*it))
|
||||
return false;
|
||||
}
|
||||
|
||||
m_search_path.push_back(wstr_path);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Core::add_search_path(const char* apath, int code_page)
|
||||
{
|
||||
ex_wstr wstr_path;
|
||||
if (!ex_astr2wstr(apath, wstr_path, code_page))
|
||||
return false;
|
||||
return add_search_path(wstr_path.c_str());
|
||||
}
|
||||
|
||||
|
||||
bool Core::_run_prepare(void)
|
||||
{
|
||||
if(m_bootstrap_module.empty())
|
||||
m_bootstrap_module = "pysmain";
|
||||
if(m_bootstrap_func.empty())
|
||||
m_bootstrap_func = "main";
|
||||
|
||||
#ifdef PYS_USE_PYLIB_SHARED
|
||||
*pylib_Py_NoSiteFlag = 1;
|
||||
*pylib_Py_OptimizeFlag = 2; // optimize bytecode (compile to bytecode, strip asserts and doc-strings)
|
||||
*pylib_Py_FrozenFlag = 1;
|
||||
*pylib_Py_DontWriteBytecodeFlag = 1; // compile loaded .py scripts to bytecode in memory, but do not write .pyo cache files
|
||||
*pylib_Py_NoUserSiteDirectory = 1;
|
||||
*pylib_Py_IgnoreEnvironmentFlag = 1;
|
||||
*pylib_Py_VerboseFlag = 0;
|
||||
#else
|
||||
pylib_Py_NoSiteFlag = 1;
|
||||
pylib_Py_OptimizeFlag = 2;
|
||||
pylib_Py_FrozenFlag = 1;
|
||||
pylib_Py_DontWriteBytecodeFlag = 1;
|
||||
pylib_Py_NoUserSiteDirectory = 1;
|
||||
pylib_Py_IgnoreEnvironmentFlag = 1;
|
||||
pylib_Py_VerboseFlag = 0;
|
||||
#endif
|
||||
|
||||
ex_wstr tmp_path = m_runtime_path;
|
||||
ex_path_join(tmp_path, true, L"modules", NULL);
|
||||
add_search_path(tmp_path.c_str());
|
||||
|
||||
tmp_path = m_runtime_path;
|
||||
ex_path_join(tmp_path, true, L"python.zip", NULL);
|
||||
add_search_path(tmp_path.c_str());
|
||||
|
||||
if (m_search_path.size() > 0)
|
||||
{
|
||||
pys_wstr_list::iterator it = m_search_path.begin();
|
||||
for (; it != m_search_path.end(); ++it)
|
||||
{
|
||||
add_search_path(it->c_str());
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void Core::_run_set_program(void)
|
||||
{
|
||||
if(m_prog_name.empty())
|
||||
pylib_Py_SetProgramName((wchar_t*)m_exec_file.c_str());
|
||||
else
|
||||
pylib_Py_SetProgramName((wchar_t*)m_prog_name.c_str());
|
||||
}
|
||||
|
||||
void Core::_run_set_path(void)
|
||||
{
|
||||
pys_wstr_list::iterator it = m_search_path.begin();
|
||||
for (; it != m_search_path.end(); ++it)
|
||||
{
|
||||
if (!m_search_path_tmp.empty())
|
||||
m_search_path_tmp += EX_PATH_SEP_STR;
|
||||
m_search_path_tmp += (*it);
|
||||
}
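// m_search_path_tmp now holds all entries joined with EX_PATH_SEP_STR into the single
// string form Py_SetPath() expects, e.g. something like
// "C:\tp\pysrt\modules;C:\tp\pysrt\python.zip" on Windows (paths here are illustrative)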
|
||||
|
||||
EXLOGD(L"[pys] search path: %ls\n", m_search_path_tmp.c_str());
|
||||
pylib_Py_SetPath((wchar_t*)m_search_path_tmp.c_str());
|
||||
}
|
||||
|
||||
void Core::_run_set_argv(void)
|
||||
{
|
||||
int tmp_argc = m_py_args.size();
|
||||
wchar_t** tmp_wargv = (wchar_t**)calloc(tmp_argc + 1, sizeof(wchar_t*));
|
||||
if (!tmp_wargv)
|
||||
return;
|
||||
|
||||
int i = 0;
|
||||
pys_wstr_list::iterator it = m_py_args.begin();
|
||||
for (; it != m_py_args.end(); ++it)
|
||||
{
|
||||
tmp_wargv[i] = ex_wcsdup(it->c_str());
|
||||
i++;
|
||||
}
|
||||
|
||||
pylib_PySys_SetArgvEx(tmp_argc, tmp_wargv, 0);
|
||||
|
||||
ex_free_wargv(tmp_argc, tmp_wargv);
|
||||
}
|
||||
|
||||
bool Core::_run_init_builtin_modules(void)
|
||||
{
|
||||
m_init_tab = NULL;
|
||||
int cnt = m_builtin_modules.size();
|
||||
if (0 == cnt)
|
||||
return true;
|
||||
|
||||
m_init_tab = new struct _inittab[cnt + 1];
|
||||
memset(m_init_tab, 0, sizeof(struct _inittab)*(cnt + 1));
|
||||
int i = 0;
|
||||
builtin_modules::iterator it = m_builtin_modules.begin();
|
||||
for (; it != m_builtin_modules.end(); ++it, ++i)
|
||||
{
|
||||
m_init_tab[i].name = it->first.c_str();
|
||||
m_init_tab[i].initfunc = it->second;
|
||||
}
|
||||
|
||||
if (-1 == pylib_PyImport_ExtendInittab(m_init_tab))
|
||||
{
|
||||
EXLOGE("[pys] can not init builtin module.\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
int Core::run(void)
|
||||
{
|
||||
int ret = 0;
|
||||
|
||||
PyObject* pModule = NULL;
|
||||
PyObject* pDict = NULL;
|
||||
PyObject* pFunc = NULL;
|
||||
PyObject* pModuleName = NULL;
|
||||
PyObject* pRunArgs = NULL;
|
||||
PyObject* pyRet = NULL;
|
||||
PYS_BOOL has_error = PYS_TRUE;
|
||||
|
||||
if (!_run_init_builtin_modules())
|
||||
return PYSR_FAILED;
|
||||
|
||||
if (!_run_prepare())
|
||||
return PYSR_FAILED;
|
||||
_run_set_program();
|
||||
_run_set_path();
|
||||
|
||||
// Py_Initialize() must be called after the builtin modules have been registered.
|
||||
pylib_Py_Initialize();
|
||||
|
||||
_run_set_argv();
|
||||
|
||||
for (;;)
|
||||
{
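// single-pass loop: break out at the first error so the shared cleanup below always runs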
|
||||
pModule = pylib_PyImport_ImportModule(m_bootstrap_module.c_str());
|
||||
if (pModule == NULL)
|
||||
{
|
||||
EXLOGE("[pys] can not import module: %s\n", m_bootstrap_module.c_str());
|
||||
|
||||
ret = -1;
|
||||
break;
|
||||
}
|
||||
|
||||
pDict = pylib_PyModule_GetDict(pModule); /* NO ref added */
|
||||
if (pDict == NULL)
|
||||
{
|
||||
EXLOGE("[pys] can not get module dict: %s\n", m_bootstrap_module.c_str());
|
||||
ret = -1;
|
||||
break;
|
||||
}
|
||||
|
||||
pFunc = pylib_PyDict_GetItemString(pDict, (char*)m_bootstrap_func.c_str());
|
||||
if (pFunc == NULL)
|
||||
{
|
||||
EXLOGE("[pys] module [%s] have no function named `%s`.\n", m_bootstrap_module.c_str(), m_bootstrap_func.c_str());
|
||||
ret = -1;
|
||||
break;
|
||||
}
|
||||
|
||||
pyRet = pylib_PyObject_CallFunction(pFunc, "");
|
||||
if (pyRet == NULL)
|
||||
{
|
||||
EXLOGE("[pys] %s.%s() return nothing.\n", m_bootstrap_module.c_str(), m_bootstrap_func.c_str());
|
||||
ret = -1;
|
||||
break;
|
||||
}
|
||||
|
||||
pylib_PyErr_Clear();
|
||||
ret = pylib_PyLong_AsLong(pyRet);
|
||||
|
||||
has_error = PYS_FALSE;
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (pylib_PyErr_Occurred())
|
||||
pylib_PyErr_Print();
|
||||
pylib_PyErr_Clear();
|
||||
|
||||
if (pFunc) { PYLIB_DECREF(pFunc); }
|
||||
if (pModule) { PYLIB_DECREF(pModule); }
|
||||
if (pModuleName) { PYLIB_DECREF(pModuleName); }
|
||||
if (pRunArgs) { PYLIB_DECREF(pRunArgs); }
|
||||
if (pyRet) { PYLIB_DECREF(pyRet); }
|
||||
|
||||
pylib_Py_Finalize();
|
||||
EXLOGD("[pys] python finalized. ExitCode=%d\n", ret);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,90 @@
|
|||
#ifndef __PYS_CORE_H__
|
||||
#define __PYS_CORE_H__
|
||||
|
||||
//#include "pys_str.h"
|
||||
|
||||
#include "pys_util.h"
|
||||
#include <map>
|
||||
|
||||
namespace pys
|
||||
{
|
||||
typedef std::map<ex_astr, pys_init_module_func> builtin_modules;
|
||||
typedef std::list<ex_astr> pys_astr_list;
|
||||
typedef std::list<ex_wstr> pys_wstr_list;
|
||||
|
||||
class Core
|
||||
{
|
||||
public:
|
||||
Core();
|
||||
~Core();
|
||||
|
||||
// call run() only after the required information has been set via the member functions below
|
||||
int run(void);
|
||||
|
||||
// initialize with default settings
|
||||
bool init(const wchar_t* exec_file, const wchar_t* runtime_path);
|
||||
|
||||
bool add_search_path(const wchar_t* wpath);
|
||||
bool add_search_path(const char* apath, int code_page = EX_CODEPAGE_DEFAULT);
|
||||
|
||||
bool set_startup_file(const wchar_t* filename);
|
||||
bool add_builtin_module(const char* module_name, pys_init_module_func init_func);
|
||||
bool get_builtin_module_by_init_func(pys_init_module_func init_func, ex_astr& module_name);
|
||||
|
||||
private:
|
||||
bool _load_dylib(void);
|
||||
|
||||
bool _run_init_builtin_modules(void);
|
||||
bool _run_prepare(void);
|
||||
void _run_set_program(void);
|
||||
void _run_set_path(void);
|
||||
void _run_set_argv(void);
|
||||
|
||||
|
||||
public:
|
||||
ex_wstr m_prog_name; // passed to the Python interpreter; if not set, m_exec_file is used by default.
|
||||
ex_astr m_bootstrap_module;
|
||||
ex_astr m_bootstrap_func;
|
||||
pys_wstr_list m_py_args; // arguments passed to the Python script
|
||||
|
||||
private:
|
||||
bool m_is_zipped_app;
|
||||
|
||||
ex_wstr m_exec_file; // file name of the current executable (absolute path)
|
||||
ex_wstr m_exec_path; // directory of the current executable (absolute path)
|
||||
ex_wstr m_runtime_path; // Python runtime path; defaults to the `pysrt` directory under the executable's directory.
|
||||
ex_wstr m_start_file;
|
||||
ex_wstr m_search_path_tmp;
|
||||
|
||||
pys_wstr_list m_search_path;
|
||||
|
||||
builtin_modules m_builtin_modules;
|
||||
struct _inittab* m_init_tab;
|
||||
};
|
||||
|
||||
|
||||
typedef struct BUILTIN_MODULE_INFO
|
||||
{
|
||||
PyMethodDef* method_def;
|
||||
PyModuleDef* module_def;
|
||||
}BUILTIN_MODULE_INFO;
|
||||
|
||||
typedef std::list<BUILTIN_MODULE_INFO*> builtin_module_infos;
|
||||
|
||||
class BuiltinModuleInfo
|
||||
{
|
||||
public:
|
||||
BuiltinModuleInfo();
|
||||
~BuiltinModuleInfo();
|
||||
|
||||
void add(PyMethodDef* method_def, PyModuleDef* module_def);
|
||||
|
||||
private:
|
||||
builtin_module_infos m_infos;
|
||||
};
|
||||
|
||||
extern BuiltinModuleInfo g_builtin_module_info;
|
||||
|
||||
} // namespace pys
|
||||
|
||||
#endif // __PYS_CORE_H__
|
|
@@ -0,0 +1,20 @@
|
|||
#include "pys_util.h"
|
||||
|
||||
FILE* pys_open_file(const ex_wstr& file_name, const wchar_t* mode)
|
||||
{
|
||||
FILE* f = NULL;
|
||||
#ifdef EX_OS_WIN32
|
||||
errno_t err = 0;
|
||||
err = _wfopen_s(&f, file_name.c_str(), mode);
|
||||
if (0 == err)
|
||||
return f;
|
||||
else
|
||||
return NULL;
|
||||
#else
|
||||
ex_astr _file_name, _mode;
|
||||
ex_wstr2astr(file_name, _file_name, EX_CODEPAGE_UTF8);
|
||||
ex_wstr2astr(mode, _mode, EX_CODEPAGE_UTF8);
|
||||
f = fopen(_file_name.c_str(), _mode.c_str());
|
||||
return f;
|
||||
#endif
|
||||
}
|
|
@@ -0,0 +1,9 @@
|
|||
#ifndef __PYS_UTIL_H__
|
||||
#define __PYS_UTIL_H__
|
||||
|
||||
#include <ex/ex_str.h>
|
||||
#include <list>
|
||||
|
||||
FILE* pys_open_file(const ex_wstr& file_name, const wchar_t* mode);
|
||||
|
||||
#endif // __PYS_UTIL_H__
|
|
@@ -0,0 +1,88 @@
|
|||
|
||||
CREATE TABLE `ts_account` (
|
||||
`account_id` integer PRIMARY KEY AUTOINCREMENT,
|
||||
`account_type` int(11) DEFAULT 0,
|
||||
`account_name` varchar(32) DEFAULT NULL,
|
||||
`account_pwd` varchar(32) DEFAULT NULL,
|
||||
`account_status` int(11) DEFAULT 0,
|
||||
`account_lock` int(11) DEFAULT 0,
|
||||
`account_desc` varchar(255)
|
||||
);
|
||||
|
||||
INSERT INTO "main"."ts_account" VALUES (1, 100, 'admin', '8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918', 0, 0, '超级管理员');
|
||||
|
||||
CREATE TABLE "ts_auth"(
|
||||
"auth_id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
"account_name" varchar(256),
|
||||
"host_id" INTEGER,
|
||||
"host_auth_id" int(11) NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE "ts_cert" (
|
||||
"cert_id" integer PRIMARY KEY AUTOINCREMENT,
|
||||
"cert_name" varchar(256),
|
||||
"cert_pub" varchar(2048) DEFAULT '',
|
||||
"cert_pri" varchar(4096) DEFAULT '',
|
||||
"cert_desc" varchar(256)
|
||||
);
|
||||
|
||||
|
||||
CREATE TABLE "ts_config" (
|
||||
"name" varchar(256) NOT NULL,
|
||||
"value" varchar(256),
|
||||
PRIMARY KEY ("name" ASC)
|
||||
);
|
||||
|
||||
|
||||
INSERT INTO "main"."ts_config" VALUES ('ts_server_ip', '127.0.0.1');
|
||||
INSERT INTO "main"."ts_config" VALUES ('ts_server_rpc_port', 52080);
|
||||
INSERT INTO "main"."ts_config" VALUES ('ts_server_rdp_port', 52089);
|
||||
INSERT INTO "main"."ts_config" VALUES ('ts_server_ssh_port', 52189);
|
||||
INSERT INTO "main"."ts_config" VALUES ('ts_server_telnet_port', 52389);
|
||||
INSERT INTO "main"."ts_config" VALUES ('ts_server_rpc_ip', '127.0.0.1');
|
||||
|
||||
CREATE TABLE `ts_group` (
|
||||
`group_id` integer PRIMARY KEY AUTOINCREMENT,
|
||||
`group_name` varchar(255) DEFAULT ''
|
||||
);
|
||||
|
||||
|
||||
CREATE TABLE "ts_host_info"(
|
||||
"host_id" integer PRIMARY KEY AUTOINCREMENT,
|
||||
"group_id" int(11) DEFAULT 0,
|
||||
"host_sys_type" int(11) DEFAULT 1,
|
||||
"host_ip" varchar(32) DEFAULT '',
|
||||
"host_port" int(11) DEFAULT 0,
|
||||
"protocol" int(11) DEFAULT 0,
|
||||
"host_lock" int(11) DEFAULT 0,
|
||||
"host_desc" DEFAULT ''
|
||||
);
|
||||
|
||||
CREATE TABLE "ts_auth_info"(
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
"host_id" INTEGER,
|
||||
"auth_mode" INTEGER,
|
||||
"user_name" varchar(256),
|
||||
"user_pswd" varchar(256),
|
||||
"user_param" varchar(256),
|
||||
"cert_id" INTEGER,
|
||||
"encrypt" INTEGER,
|
||||
"log_time" varchar(60)
|
||||
);
|
||||
|
||||
|
||||
CREATE TABLE "ts_log" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
"session_id" varchar(32),
|
||||
"account_name" varchar(64),
|
||||
"host_ip" varchar(32),
|
||||
"host_port" INTEGER,
|
||||
"sys_type" INTEGER DEFAULT 0,
|
||||
"auth_type" INTEGER,
|
||||
"protocol" INTEGER,
|
||||
"user_name" varchar(64),
|
||||
"ret_code" INTEGER,
|
||||
"begin_time" INTEGER,
|
||||
"end_time" INTEGER,
|
||||
"log_time" varchar(64)
|
||||
);
|
|
@@ -0,0 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
server_port = 7190
|
||||
|
||||
log_file = 'E:\work\eomsoft\teleport-github\server\share\log\web.log'
|
||||
|
||||
# log_level can be 0 ~ 4
|
||||
# LOG_LEVEL_DEBUG 0 log everything.
|
||||
# LOG_LEVEL_VERBOSE 1 log everything except debug messages.
|
||||
# LOG_LEVEL_INFO 2 log information/warning/error messages.
|
||||
# LOG_LEVEL_WARN 3 log warning and error messages.
|
||||
# LOG_LEVEL_ERROR 4 log error messages only.
|
||||
log_level = 0
|
|
@@ -0,0 +1,499 @@
|
|||
#include "ts_env.h"
|
||||
#include "ts_ver.h"
|
||||
|
||||
#include <ex.h>
|
||||
#include <pys.h>
|
||||
|
||||
// Command-line arguments (running without arguments starts the program in service mode):
|
||||
// tp_web [-i|-u|--version] [ [-d] start] [...]
|
||||
// -d           start the program and print debug output (do not run as a daemon/service)
|
||||
// -i           install the service and exit (Windows only)
|
||||
// -u           uninstall the service and exit (Windows only)
|
||||
// --version    print the version number and exit
|
||||
// start        run as a service
|
||||
// ...          all remaining arguments are passed to the Python script
|
||||
//
|
||||
//
|
||||
// Run a specific Python script:
|
||||
// tp_web --py [-f FuncName] script_file.py ...
|
||||
// --py             must be the first argument; this run executes the given script
|
||||
// -f FuncName      entry function, defaults to main
|
||||
// script_file.py   the script file to execute
|
||||
// ...              all remaining arguments are passed to the Python script
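//
// Example invocations (illustrative):
//   tp_web -i                          install the Windows service
//   tp_web -d start                    run the web service in the foreground with debug output
//   tp_web --py -f main C:\test\t.py   run t.py, calling its main() entry function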
|
||||
|
||||
|
||||
bool g_is_debug = false;
|
||||
static ex_wstrs g_py_args;
|
||||
|
||||
// used when running a specified script (--py mode)
|
||||
static ex_wstr g_py_script_file;
|
||||
static ex_wstr g_py_main_func;
|
||||
|
||||
#define RUN_UNKNOWN 0
|
||||
#define RUN_WEB 1
|
||||
#define RUN_PY_SCRIPT 2
|
||||
#define RUN_INSTALL_SRV 3
|
||||
#define RUN_UNINST_SRV 4
|
||||
static ex_u8 g_run_type = RUN_UNKNOWN;
|
||||
|
||||
#define EOM_WEB_SERVICE_NAME L"EOM Teleport Web Service"
|
||||
|
||||
static bool _run_daemon(void);
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
static int service_install()
|
||||
{
|
||||
ex_wstr exec_file(g_env.m_exec_file);
|
||||
exec_file += L" start";
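// the registered service command line is "<tp_web.exe> start", so when the SCM
// launches the service, _process_cmd_line() takes the RUN_WEB path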
|
||||
|
||||
if (EXRV_OK == ex_winsrv_install(EOM_WEB_SERVICE_NAME, EOM_WEB_SERVICE_NAME, exec_file))
|
||||
return 0;
|
||||
else
|
||||
return 1;
|
||||
}
|
||||
|
||||
static int service_uninstall()
|
||||
{
|
||||
if (EXRV_OK != ex_winsrv_stop(EOM_WEB_SERVICE_NAME))
|
||||
return 1;
|
||||
|
||||
if (EXRV_OK != ex_winsrv_uninstall(EOM_WEB_SERVICE_NAME))
|
||||
return 2;
|
||||
|
||||
return 0;
|
||||
}
|
||||
#endif
|
||||
|
||||
static bool _process_cmd_line(int argc, wchar_t** argv)
|
||||
{
|
||||
if (argc <= 1)
|
||||
{
|
||||
EXLOGE("[tpweb] nothing to do.\n\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
g_run_type = RUN_UNKNOWN;
|
||||
bool is_py_arg = false;
|
||||
|
||||
if (0 == wcscmp(argv[1], L"--version"))
|
||||
{
|
||||
EXLOGV("\nTeleport Web Server, version %ls.\n\n", TP_SERVER_VER);
|
||||
return false;
|
||||
}
|
||||
else if (0 == wcscmp(argv[1], L"--py"))
|
||||
{
|
||||
g_run_type = RUN_PY_SCRIPT;
|
||||
|
||||
for (int i = 2; i < argc; ++i)
|
||||
{
|
||||
if (is_py_arg)
|
||||
{
|
||||
g_py_args.push_back(argv[i]);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (0 == wcscmp(argv[i], L"-f"))
|
||||
{
|
||||
g_py_main_func = argv[i];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (g_py_script_file.length() == 0)
|
||||
{
|
||||
g_py_script_file = argv[i];
|
||||
is_py_arg = true;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (0 == wcscmp(argv[1], L"-i"))
|
||||
{
|
||||
g_run_type = RUN_INSTALL_SRV;
|
||||
}
|
||||
else if (0 == wcscmp(argv[1], L"-u"))
|
||||
{
|
||||
g_run_type = RUN_UNINST_SRV;
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int i = 1; i < argc; ++i)
|
||||
{
|
||||
if (is_py_arg)
|
||||
{
|
||||
g_py_args.push_back(argv[i]);
|
||||
continue;
|
||||
}
|
||||
if (0 == wcscmp(argv[i], L"start"))
|
||||
{
|
||||
g_run_type = RUN_WEB;
|
||||
is_py_arg = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (0 == wcscmp(argv[i], L"-d"))
|
||||
{
|
||||
g_is_debug = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
EXLOGE(L"[tpweb] Unknown option: %ls\n", argv[i]);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (g_run_type == RUN_UNKNOWN)
|
||||
{
|
||||
EXLOGE("[tpweb] nothing to do.\n\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
static int _main_loop(void)
|
||||
{
|
||||
PYS_HANDLE pysh = pys_create();
|
||||
if (NULL == pysh)
|
||||
{
|
||||
EXLOGE("pys_create() failed.\n");
|
||||
return 1;
|
||||
}
|
||||
PysHandleHolder hh(pysh);
|
||||
|
||||
ex_wstr pysrt_path(g_env.m_exec_path);
|
||||
if(!ex_path_join(pysrt_path, false, L"pysrt", NULL))
|
||||
{
|
||||
EXLOGE("pysrt not exists.\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!pys_init_runtime(pysh, g_env.m_exec_file.c_str(), pysrt_path.c_str()))
|
||||
{
|
||||
EXLOGE("pys_init_runtime() failed.\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
// set the path of the web application
|
||||
ex_wstr sf_path;
|
||||
if (g_run_type == RUN_WEB)
|
||||
{
|
||||
sf_path = g_env.m_www_path;
|
||||
|
||||
if (!ex_path_join(sf_path, false, L"teleport", L"app", L"eom_main.py", NULL))
|
||||
{
|
||||
EXLOGE(L"[tpweb] invalid path [%ls].\n", sf_path.c_str());
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (ex_is_file_exists(sf_path.c_str()))
|
||||
{
|
||||
pys_set_startup_file(pysh, sf_path.c_str());
|
||||
}
|
||||
else
|
||||
{
|
||||
EXLOGE(L"[tpweb] teleport web app not found at [%ls].\n", sf_path.c_str());
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
else if (g_run_type == RUN_PY_SCRIPT)
|
||||
{
|
||||
sf_path = g_env.m_exec_path;
|
||||
|
||||
if (!ex_is_file_exists(g_py_script_file.c_str()))
|
||||
{
|
||||
EXLOGE("[tpweb] file not found: [%s].\n", g_py_script_file.c_str());
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (g_py_main_func.length() == 0)
|
||||
{
|
||||
pys_set_startup_file(pysh, g_py_script_file.c_str());
|
||||
}
|
||||
else
|
||||
{
|
||||
ex_astr file_name;
|
||||
ex_astr func_name;
|
||||
ex_wstr2astr(g_py_script_file, file_name);
|
||||
ex_wstr2astr(g_py_main_func, func_name);
|
||||
|
||||
pys_set_bootstrap_module(pysh, file_name.c_str(), func_name.c_str());
|
||||
}
|
||||
}
|
||||
|
||||
ex_wstrs::const_iterator it = g_py_args.begin();
|
||||
for (; it != g_py_args.end(); ++it)
|
||||
{
|
||||
pys_add_arg(pysh, it->c_str());
|
||||
}
|
||||
|
||||
return pys_run(pysh);
|
||||
}
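// Expected layout, following the code above: the Python runtime lives in
// <exec_path>\pysrt\ (python.ver, the Python DLL, python.zip, modules\), and the
// web application entry script is <www_path>\teleport\app\eom_main.py.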
|
||||
|
||||
int _app_main(int argc, wchar_t** argv)
|
||||
{
|
||||
if (!_process_cmd_line(argc, argv))
|
||||
return 1;
|
||||
|
||||
if (!g_env.init())
|
||||
{
|
||||
EXLOGE("[tpweb] env init failed.\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
#ifdef EX_DEBUG
|
||||
EXLOG_LEVEL(EX_LOG_LEVEL_DEBUG);
|
||||
#endif
|
||||
|
||||
if (g_run_type == RUN_PY_SCRIPT)
|
||||
{
|
||||
return _main_loop();
|
||||
}
|
||||
#ifdef EX_OS_WIN32
|
||||
else if (g_run_type == RUN_INSTALL_SRV)
|
||||
{
|
||||
return service_install();
|
||||
}
|
||||
else if(g_run_type == RUN_UNINST_SRV)
|
||||
{
|
||||
return service_uninstall();
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!g_is_debug)
|
||||
{
|
||||
if (!_run_daemon())
|
||||
{
|
||||
EXLOGE("[tpweb] can not run in daemon mode.\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
return 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
return _main_loop();
|
||||
}
|
||||
|
||||
|
||||
|
||||
#ifdef EX_OS_WIN32
|
||||
|
||||
// #ifdef EX_DEBUG
|
||||
// #include <vld.h>
|
||||
// #endif
|
||||
|
||||
static SERVICE_STATUS g_ServiceStatus = { 0 };
|
||||
static SERVICE_STATUS_HANDLE g_hServiceStatusHandle = NULL;
|
||||
HANDLE g_hWorkerThread = NULL;
|
||||
|
||||
VOID WINAPI service_main(DWORD argc, wchar_t** argv);
|
||||
void WINAPI service_handler(DWORD fdwControl);
|
||||
|
||||
static DWORD WINAPI service_thread_func(LPVOID lpParam);
|
||||
|
||||
int main()
|
||||
{
|
||||
int ret = 0;
|
||||
LPWSTR szCmdLine = (LPWSTR)::GetCommandLineW(); // get the full command line
|
||||
|
||||
int _argc = 0;
|
||||
wchar_t** _argv = ::CommandLineToArgvW(szCmdLine, &_argc); // split the command line into an argv array
|
||||
|
||||
ret = _app_main(_argc, _argv);
|
||||
|
||||
LocalFree(_argv);
|
||||
_argv = NULL;
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static bool _run_daemon(void)
|
||||
{
|
||||
SERVICE_TABLE_ENTRY DispatchTable[2];
|
||||
DispatchTable[0].lpServiceName = EOM_WEB_SERVICE_NAME;
|
||||
DispatchTable[0].lpServiceProc = service_main;
|
||||
DispatchTable[1].lpServiceName = NULL;
|
||||
DispatchTable[1].lpServiceProc = NULL;
|
||||
|
||||
if (!StartServiceCtrlDispatcher(DispatchTable))
|
||||
{
|
||||
EXLOGE_WIN("StartServiceCtrlDispatcher()");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
static DWORD WINAPI service_thread_func(LPVOID lpParam)
|
||||
{
|
||||
int ret = _main_loop();
|
||||
|
||||
// update the service status (if the service is still marked as running, set it to stopped)
|
||||
g_ServiceStatus.dwWin32ExitCode = 0;
|
||||
g_ServiceStatus.dwCurrentState = SERVICE_STOPPED;
|
||||
g_ServiceStatus.dwCheckPoint = 0;
|
||||
g_ServiceStatus.dwWaitHint = 0;
|
||||
if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
|
||||
EXLOGE_WIN("SetServiceStatus()");
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static void WINAPI service_handler(DWORD fdwControl)
|
||||
{
|
||||
switch (fdwControl)
|
||||
{
|
||||
case SERVICE_CONTROL_STOP:
|
||||
case SERVICE_CONTROL_SHUTDOWN:
|
||||
{
|
||||
if (g_hWorkerThread)
|
||||
{
|
||||
TerminateThread(g_hWorkerThread, 1);
|
||||
g_hWorkerThread = NULL;
|
||||
}
|
||||
|
||||
g_ServiceStatus.dwWin32ExitCode = 0;
|
||||
g_ServiceStatus.dwCurrentState = SERVICE_STOPPED;
|
||||
g_ServiceStatus.dwCheckPoint = 0;
|
||||
g_ServiceStatus.dwWaitHint = 0;
|
||||
|
||||
}break;
|
||||
|
||||
default:
|
||||
return;
|
||||
};
|
||||
|
||||
if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
|
||||
{
|
||||
EXLOGE_WIN("SetServiceStatus(STOP)");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
VOID WINAPI service_main(DWORD argc, wchar_t** argv)
|
||||
{
|
||||
g_ServiceStatus.dwServiceType = SERVICE_WIN32;
|
||||
g_ServiceStatus.dwCurrentState = SERVICE_START_PENDING;
|
||||
g_ServiceStatus.dwControlsAccepted = SERVICE_ACCEPT_STOP | SERVICE_ACCEPT_SHUTDOWN;
|
||||
g_ServiceStatus.dwWin32ExitCode = 0;
|
||||
g_ServiceStatus.dwServiceSpecificExitCode = 0;
|
||||
g_ServiceStatus.dwCheckPoint = 0;
|
||||
g_ServiceStatus.dwWaitHint = 0;
|
||||
g_hServiceStatusHandle = RegisterServiceCtrlHandler(EOM_WEB_SERVICE_NAME, service_handler);
|
||||
if (g_hServiceStatusHandle == 0)
|
||||
{
|
||||
EXLOGE_WIN("RegisterServiceCtrlHandler()");
|
||||
return;
|
||||
}
|
||||
|
||||
DWORD tid = 0;
|
||||
g_hWorkerThread = CreateThread(NULL, 0, service_thread_func, NULL, 0, &tid);
|
||||
if (NULL == g_hWorkerThread)
|
||||
{
|
||||
EXLOGE_WIN("CreateThread(python)");
|
||||
|
||||
g_ServiceStatus.dwWin32ExitCode = 0;
|
||||
g_ServiceStatus.dwCurrentState = SERVICE_STOPPED;
|
||||
g_ServiceStatus.dwCheckPoint = 0;
|
||||
g_ServiceStatus.dwWaitHint = 0;
|
||||
if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
|
||||
EXLOGE_WIN("SetServiceStatus()");
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
g_ServiceStatus.dwCurrentState = SERVICE_RUNNING;
|
||||
g_ServiceStatus.dwCheckPoint = 0;
|
||||
g_ServiceStatus.dwWaitHint = 9000;
|
||||
if (!SetServiceStatus(g_hServiceStatusHandle, &g_ServiceStatus))
|
||||
{
|
||||
EXLOGE_WIN("SetServiceStatus()");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
#else
|
||||
// not EX_OS_WIN32
|
||||
#include "ts_util.h"
|
||||
#include <fcntl.h>
|
||||
#include <signal.h>
|
||||
|
||||
static void _sig_handler(int signum, siginfo_t* info, void* ptr);
|
||||
//static int _daemon(int nochdir, int noclose);
|
||||
|
||||
int main(int argc, char** argv)
|
||||
{
|
||||
struct sigaction act;
|
||||
memset(&act, 0, sizeof(act));
|
||||
act.sa_sigaction = _sig_handler;
|
||||
act.sa_flags = SA_SIGINFO;
|
||||
sigaction(SIGINT, &act, NULL);
|
||||
|
||||
wchar_t** wargv = ex_make_wargv(argc, argv);
|
||||
int ret = _app_main(argc, wargv);
|
||||
|
||||
ex_free_wargv(argc, wargv);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
void _sig_handler(int signum, siginfo_t* info, void* ptr)
|
||||
{
|
||||
if (signum == SIGINT || signum == SIGTERM)
|
||||
{
|
||||
printf("[ts] received signal SIGINT, exit now.\n");
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
static bool _run_daemon(void)
|
||||
{
|
||||
pid_t pid = fork();
|
||||
if (pid < 0)
|
||||
{
|
||||
printf("[ERROR] can not fork daemon.\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
else if (pid > 0)
|
||||
{
|
||||
exit(EXIT_SUCCESS); // parent exit.
|
||||
}
|
||||
|
||||
// now in the first child.
|
||||
if (setsid() == -1)
|
||||
{
|
||||
printf("setsid() failed.\n");
|
||||
assert(0);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
umask(0);
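// fork a second time so the daemon is no longer a session leader and can never
// reacquire a controlling terminal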
|
||||
|
||||
pid = fork();
|
||||
if (pid < 0)
|
||||
{
|
||||
printf("[ERROR] can not fork daemon.\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
else if (pid > 0)
|
||||
{
|
||||
exit(0); // first child exits.
|
||||
}
|
||||
|
||||
// now in the second child.
|
||||
int ret = chdir("/");
|
||||
close(STDIN_FILENO);
|
||||
|
||||
int stdfd = open("/dev/null", O_RDWR);
|
||||
close(STDOUT_FILENO);
|
||||
close(STDERR_FILENO);
|
||||
dup2(stdfd, STDOUT_FILENO);
|
||||
dup2(stdfd, STDERR_FILENO);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
#endif
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@@ -0,0 +1,22 @@
|
|||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio 14
|
||||
VisualStudioVersion = 14.0.23107.0
|
||||
MinimumVisualStudioVersion = 10.0.40219.1
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tp_web", "tp_web.vs2015.vcxproj", "{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|x86 = Debug|x86
|
||||
Release|x86 = Release|x86
|
||||
EndGlobalSection
|
||||
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||
{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.ActiveCfg = Debug|Win32
|
||||
{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Debug|x86.Build.0 = Debug|Win32
|
||||
{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.ActiveCfg = Release|Win32
|
||||
{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}.Release|x86.Build.0 = Release|Win32
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
EndGlobalSection
|
||||
EndGlobal
|
|
@@ -0,0 +1,210 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|Win32">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|Win32">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{6548CB1D-A7BA-4A68-9B3F-A5129F77868B}</ProjectGuid>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>tp_web</RootNamespace>
|
||||
<WindowsTargetPlatformVersion>8.1</WindowsTargetPlatformVersion>
|
||||
<ProjectName>tp_web</ProjectName>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140_xp</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140_xp</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="Shared">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
<OutDir>..\..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</OutDir>
|
||||
<IntDir>..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</IntDir>
|
||||
<IncludePath>D:\apps\vld\include;$(IncludePath)</IncludePath>
|
||||
<LibraryPath>D:\apps\vld\lib\Win32;$(LibraryPath)</LibraryPath>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
<OutDir>..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</OutDir>
|
||||
<IntDir>..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
<OutDir>..\..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</OutDir>
|
||||
<IntDir>..\..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
<OutDir>..\..\out\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</OutDir>
|
||||
<IntDir>..\..\out\_tmp_\$(ProjectName)\$(PlatformTarget)\$(Configuration)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;LIBSSH_STATIC;_CRT_SECURE_NO_WARNINGS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<AdditionalIncludeDirectories>../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include</AdditionalIncludeDirectories>
|
||||
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<AdditionalLibraryDirectories>../../../common/pyshell/pys/lib</AdditionalLibraryDirectories>
|
||||
<IgnoreSpecificDefaultLibraries>libcmt.lib</IgnoreSpecificDefaultLibraries>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>_DEBUG;_WINDOWS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<AdditionalIncludeDirectories>../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Windows</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<AdditionalLibraryDirectories>../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib</AdditionalLibraryDirectories>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;LIBSSH_STATIC;_CRT_SECURE_NO_WARNINGS;_WINSOCK_DEPRECATED_NO_WARNINGS;MG_ENABLE_THREADS;MG_DISABLE_HTTP_DIGEST_AUTH;MG_DISABLE_MQTT;MG_DISABLE_SSI;MG_DISABLE_FILESYSTEM;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<AdditionalIncludeDirectories>../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include</AdditionalIncludeDirectories>
|
||||
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<AdditionalLibraryDirectories>../../../common/pyshell/pys/lib</AdditionalLibraryDirectories>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>NDEBUG;_WINDOWS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<AdditionalIncludeDirectories>../../../common/libex/include;../../../common/pyshell/include;../../../external/python/include</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Windows</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<AdditionalLibraryDirectories>../../external/windows/openssl/lib;../../external/windows/zlib/lib;../../external/windows/libssh/lib</AdditionalLibraryDirectories>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_ini.cpp" />
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_log.cpp" />
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_path.cpp" />
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_str.cpp" />
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_thread.cpp" />
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_util.cpp" />
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_winsrv.cpp" />
|
||||
<ClCompile Include="..\..\..\common\pyshell\src\pys_api.cpp" />
|
||||
<ClCompile Include="..\..\..\common\pyshell\src\pys_core.cpp" />
|
||||
<ClCompile Include="..\..\..\common\pyshell\src\pys_util.cpp" />
|
||||
<ClCompile Include="main.cpp" />
|
||||
<ClCompile Include="ts_env.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_const.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_ini.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_log.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_path.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_platform.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_str.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_thread.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_types.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_util.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_winsrv.h" />
|
||||
<ClInclude Include="..\..\..\common\pyshell\include\pys.h" />
|
||||
<ClInclude Include="..\..\..\common\pyshell\src\pys_core.h" />
|
||||
<ClInclude Include="..\..\..\common\pyshell\src\pys_util.h" />
|
||||
<ClInclude Include="resource.h" />
|
||||
<ClInclude Include="ts_env.h" />
|
||||
<ClInclude Include="ts_ver.h" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ResourceCompile Include="tp_web.rc" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Image Include="res\tp_web.ico" />
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets">
|
||||
</ImportGroup>
|
||||
</Project>
|
|
@@ -0,0 +1,129 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup>
|
||||
<Filter Include="Resource Files">
|
||||
<UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
|
||||
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
|
||||
</Filter>
|
||||
<Filter Include="main app">
|
||||
<UniqueIdentifier>{0155895f-d6be-4e0f-970d-9b6b5c759502}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="libex">
|
||||
<UniqueIdentifier>{0da131e6-c187-4632-a82b-c9b84238b97a}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="libex\header">
|
||||
<UniqueIdentifier>{ffe9fc8a-0268-4a71-8681-ab835e44fd83}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="libex\src">
|
||||
<UniqueIdentifier>{f9606240-3c34-4d3d-8623-7913fe36b8b4}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="pyshell">
|
||||
<UniqueIdentifier>{465c4847-7106-4020-ae5f-bcc649ae7ca9}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="pyshell\src">
|
||||
<UniqueIdentifier>{4a9f6402-c1c7-4c13-a390-794b6ac77697}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="pyshell\header">
|
||||
<UniqueIdentifier>{5696c8d5-f56a-429d-b058-cbe79a1a17ca}</UniqueIdentifier>
|
||||
</Filter>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="ts_env.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="main.cpp">
|
||||
<Filter>main app</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_path.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_str.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_util.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_winsrv.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_thread.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_log.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\libex\src\ex_ini.cpp">
|
||||
<Filter>libex\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\pyshell\src\pys_api.cpp">
|
||||
<Filter>pyshell\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\pyshell\src\pys_core.cpp">
|
||||
<Filter>pyshell\src</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\..\common\pyshell\src\pys_util.cpp">
|
||||
<Filter>pyshell\src</Filter>
|
||||
</ClCompile>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="ts_env.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="resource.h" />
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_const.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_path.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_platform.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_str.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_types.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_util.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ts_ver.h">
|
||||
<Filter>main app</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_winsrv.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_thread.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_log.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\libex\include\ex\ex_ini.h">
|
||||
<Filter>libex\header</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\pyshell\src\pys_core.h">
|
||||
<Filter>pyshell\src</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\pyshell\src\pys_util.h">
|
||||
<Filter>pyshell\src</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\..\common\pyshell\include\pys.h">
|
||||
<Filter>pyshell\header</Filter>
|
||||
</ClInclude>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ResourceCompile Include="tp_web.rc">
|
||||
<Filter>Resource Files</Filter>
|
||||
</ResourceCompile>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Image Include="res\tp_web.ico">
|
||||
<Filter>Resource Files</Filter>
|
||||
</Image>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@@ -0,0 +1,94 @@
#include "ts_env.h"

TsEnv g_env;

TsEnv::TsEnv()
{}

TsEnv::~TsEnv()
{}

bool TsEnv::init(void)
{
    EXLOG_LEVEL(EX_LOG_LEVEL_INFO);

    ex_exec_file(m_exec_file);

    m_exec_path = m_exec_file;
    ex_dirname(m_exec_path);

    // By default the directories we need sit at ../ relative to this executable.
    // If they are not there, this is probably a development/debug run, so fall
    // back to the share directory at the root of the source repository.
    ex_wstr base_path = m_exec_path;
    ex_path_join(base_path, true, L"..", NULL);

    ex_wstr conf_file = base_path;
    ex_path_join(conf_file, false, L"etc", L"web.conf", NULL);

    if (ex_is_file_exists(conf_file.c_str()))
    {
        // web.conf found next to the executable: the www directory lives beside it.
        m_www_path = base_path;
        ex_path_join(m_www_path, false, L"www", NULL);
    }
    else
    {
        EXLOGW("===== DEVELOPMENT MODE =====\n");
        base_path = m_exec_path;
        ex_path_join(base_path, true, L"..", L"..", L"..", L"..", L"server", L"share", NULL);

        conf_file = base_path;
        ex_path_join(conf_file, false, L"etc", L"web.conf", NULL);

        m_www_path = m_exec_path;
        ex_path_join(m_www_path, true, L"..", L"..", L"..", L"..", L"server", L"www", NULL);
    }

    if (!ex_is_file_exists(conf_file.c_str()))
    {
        EXLOGE("[tpweb] web.conf not found.\n");
        return false;
    }

    ExIniFile cfg;
    if (!cfg.LoadFromFile(conf_file))
    {
        EXLOGE("[tpweb] can not load web.conf.\n");
        return false;
    }

    ex_wstr log_file;
    ExIniSection* ps = cfg.GetDumySection();
    if (!ps->GetStr(L"log_file", log_file))
    {
        // No log_file configured: log to <base>/log/tpweb.log.
        ex_wstr log_path = base_path;
        ex_path_join(log_path, false, _T("log"), NULL);
        EXLOG_FILE(L"tpweb.log", log_path.c_str());
    }
    else
    {
        // Strip surrounding quotes, then split the configured path into a
        // directory and a file name for EXLOG_FILE().
        ex_remove_white_space(log_file);
        if (log_file[0] == L'"' || log_file[0] == L'\'')
            log_file.erase(0, 1);
        if (log_file[log_file.length() - 1] == L'"' || log_file[log_file.length() - 1] == L'\'')
            log_file.erase(log_file.length() - 1, 1);

        ex_wstr log_path = log_file;
        ex_dirname(log_path);
        ex_wstr file_name;
        file_name.assign(log_file, log_path.length() + 1, log_file.length());

        EXLOG_FILE(file_name.c_str(), log_path.c_str());
    }

    int log_level = EX_LOG_LEVEL_INFO;
    if (ps->GetInt(L"log_level", log_level))
    {
        EXLOGV("[tpweb] log-level: %d\n", log_level);
        EXLOG_LEVEL(log_level);
    }

    EXLOGI("==============================\n");
    EXLOGI("[tpweb] start...\n");

    return true;
}
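TsEnv::init() above consumes only two optional keys from web.conf, read from the default (section-less) part of the file: log_file and log_level. A minimal sketch of generating such a file; the path and level values are illustrative assumptions, not defaults shipped by the project.

# Sketch only: writes a web.conf containing the two optional keys read by
# TsEnv::init(). The values below are illustrative assumptions.
from pathlib import Path

def write_sample_web_conf(etc_dir="."):
    conf = Path(etc_dir) / "web.conf"
    conf.write_text(
        "log_file=../log/tpweb.log\n"   # if omitted, <base>/log/tpweb.log is used
        "log_level=2\n"                 # numeric value handed to EXLOG_LEVEL()
    )
    return conf

if __name__ == "__main__":
    print(write_sample_web_conf())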
@@ -0,0 +1,23 @@
#ifndef __TS_ENV_H__
#define __TS_ENV_H__

//#include "ts_common.h"
#include <ex.h>

class TsEnv
{
public:
    TsEnv();
    ~TsEnv();

    bool init(void);

public:
    ex_wstr m_exec_file;
    ex_wstr m_exec_path;
    ex_wstr m_www_path;
};

extern TsEnv g_env;

#endif // __TS_ENV_H__
@@ -0,0 +1,6 @@
#ifndef __TS_SERVER_VER_H__
#define __TS_SERVER_VER_H__

#define TP_SERVER_VER L"1.6.225.1"

#endif // __TS_SERVER_VER_H__
@@ -0,0 +1,202 @@
|
|||
"""
|
||||
Patch recently added ABCs into the standard lib module
|
||||
``collections.abc`` (Py3) or ``collections`` (Py2).
|
||||
|
||||
Usage::
|
||||
|
||||
import backports_abc
|
||||
backports_abc.patch()
|
||||
|
||||
or::
|
||||
|
||||
try:
|
||||
from collections.abc import Generator
|
||||
except ImportError:
|
||||
from backports_abc import Generator
|
||||
"""
|
||||
|
||||
try:
|
||||
import collections.abc as _collections_abc
|
||||
except ImportError:
|
||||
import collections as _collections_abc
|
||||
|
||||
|
||||
def mk_gen():
|
||||
from abc import abstractmethod
|
||||
|
||||
required_methods = (
|
||||
'__iter__', '__next__' if hasattr(iter(()), '__next__') else 'next',
|
||||
'send', 'throw', 'close')
|
||||
|
||||
class Generator(_collections_abc.Iterator):
|
||||
__slots__ = ()
|
||||
|
||||
if '__next__' in required_methods:
|
||||
def __next__(self):
|
||||
return self.send(None)
|
||||
else:
|
||||
def next(self):
|
||||
return self.send(None)
|
||||
|
||||
@abstractmethod
|
||||
def send(self, value):
|
||||
raise StopIteration
|
||||
|
||||
@abstractmethod
|
||||
def throw(self, typ, val=None, tb=None):
|
||||
if val is None:
|
||||
if tb is None:
|
||||
raise typ
|
||||
val = typ()
|
||||
if tb is not None:
|
||||
val = val.with_traceback(tb)
|
||||
raise val
|
||||
|
||||
def close(self):
|
||||
try:
|
||||
self.throw(GeneratorExit)
|
||||
except (GeneratorExit, StopIteration):
|
||||
pass
|
||||
else:
|
||||
raise RuntimeError('generator ignored GeneratorExit')
|
||||
|
||||
@classmethod
|
||||
def __subclasshook__(cls, C):
|
||||
if cls is Generator:
|
||||
mro = C.__mro__
|
||||
for method in required_methods:
|
||||
for base in mro:
|
||||
if method in base.__dict__:
|
||||
break
|
||||
else:
|
||||
return NotImplemented
|
||||
return True
|
||||
return NotImplemented
|
||||
|
||||
generator = type((lambda: (yield))())
|
||||
Generator.register(generator)
|
||||
return Generator
|
||||
|
||||
|
||||
def mk_awaitable():
|
||||
from abc import abstractmethod, ABCMeta
|
||||
|
||||
@abstractmethod
|
||||
def __await__(self):
|
||||
yield
|
||||
|
||||
@classmethod
|
||||
def __subclasshook__(cls, C):
|
||||
if cls is Awaitable:
|
||||
for B in C.__mro__:
|
||||
if '__await__' in B.__dict__:
|
||||
if B.__dict__['__await__']:
|
||||
return True
|
||||
break
|
||||
return NotImplemented
|
||||
|
||||
# calling metaclass directly as syntax differs in Py2/Py3
|
||||
Awaitable = ABCMeta('Awaitable', (), {
|
||||
'__slots__': (),
|
||||
'__await__': __await__,
|
||||
'__subclasshook__': __subclasshook__,
|
||||
})
|
||||
|
||||
return Awaitable
|
||||
|
||||
|
||||
def mk_coroutine():
|
||||
from abc import abstractmethod
|
||||
|
||||
class Coroutine(Awaitable):
|
||||
__slots__ = ()
|
||||
|
||||
@abstractmethod
|
||||
def send(self, value):
|
||||
"""Send a value into the coroutine.
|
||||
Return next yielded value or raise StopIteration.
|
||||
"""
|
||||
raise StopIteration
|
||||
|
||||
@abstractmethod
|
||||
def throw(self, typ, val=None, tb=None):
|
||||
"""Raise an exception in the coroutine.
|
||||
Return next yielded value or raise StopIteration.
|
||||
"""
|
||||
if val is None:
|
||||
if tb is None:
|
||||
raise typ
|
||||
val = typ()
|
||||
if tb is not None:
|
||||
val = val.with_traceback(tb)
|
||||
raise val
|
||||
|
||||
def close(self):
|
||||
"""Raise GeneratorExit inside coroutine.
|
||||
"""
|
||||
try:
|
||||
self.throw(GeneratorExit)
|
||||
except (GeneratorExit, StopIteration):
|
||||
pass
|
||||
else:
|
||||
raise RuntimeError('coroutine ignored GeneratorExit')
|
||||
|
||||
@classmethod
|
||||
def __subclasshook__(cls, C):
|
||||
if cls is Coroutine:
|
||||
mro = C.__mro__
|
||||
for method in ('__await__', 'send', 'throw', 'close'):
|
||||
for base in mro:
|
||||
if method in base.__dict__:
|
||||
break
|
||||
else:
|
||||
return NotImplemented
|
||||
return True
|
||||
return NotImplemented
|
||||
|
||||
return Coroutine
|
||||
|
||||
|
||||
###
|
||||
# make all ABCs available in this module
|
||||
|
||||
try:
|
||||
Generator = _collections_abc.Generator
|
||||
except AttributeError:
|
||||
Generator = mk_gen()
|
||||
|
||||
try:
|
||||
Awaitable = _collections_abc.Awaitable
|
||||
except AttributeError:
|
||||
Awaitable = mk_awaitable()
|
||||
|
||||
try:
|
||||
Coroutine = _collections_abc.Coroutine
|
||||
except AttributeError:
|
||||
Coroutine = mk_coroutine()
|
||||
|
||||
try:
|
||||
from inspect import isawaitable
|
||||
except ImportError:
|
||||
def isawaitable(obj):
|
||||
return isinstance(obj, Awaitable)
|
||||
|
||||
|
||||
###
|
||||
# allow patching the stdlib
|
||||
|
||||
PATCHED = {}
|
||||
|
||||
|
||||
def patch(patch_inspect=True):
|
||||
"""
|
||||
Main entry point for patching the ``collections.abc`` and ``inspect``
|
||||
standard library modules.
|
||||
"""
|
||||
PATCHED['collections.abc.Generator'] = _collections_abc.Generator = Generator
|
||||
PATCHED['collections.abc.Coroutine'] = _collections_abc.Coroutine = Coroutine
|
||||
PATCHED['collections.abc.Awaitable'] = _collections_abc.Awaitable = Awaitable
|
||||
|
||||
if patch_inspect:
|
||||
import inspect
|
||||
PATCHED['inspect.isawaitable'] = inspect.isawaitable = isawaitable
|
|
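The docstring at the top of backports_abc.py describes two usage patterns: calling patch(), or importing the ABCs with a fallback. A small sketch exercising both, assuming the module is importable under the name shown in its docstring:

# Usage sketch based on the backports_abc docstring above.
import backports_abc
backports_abc.patch()  # injects Generator/Awaitable/Coroutine into collections(.abc)

try:
    from collections.abc import Generator   # modern interpreters already have it
except ImportError:
    from backports_abc import Generator     # older interpreters fall back

def counter(n):
    for i in range(n):
        yield i

assert isinstance(counter(3), Generator)     # generator objects satisfy the ABC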
@@ -0,0 +1,8 @@
# mako/__init__.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


__version__ = '1.0.3'
@@ -0,0 +1,851 @@
|
|||
# mako/_ast_util.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
ast
|
||||
~~~
|
||||
|
||||
The `ast` module helps Python applications to process trees of the Python
|
||||
abstract syntax grammar. The abstract syntax itself might change with
|
||||
each Python release; this module helps to find out programmatically what
|
||||
the current grammar looks like and allows modifications of it.
|
||||
|
||||
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
|
||||
a flag to the `compile()` builtin function or by using the `parse()`
|
||||
function from this module. The result will be a tree of objects whose
|
||||
classes all inherit from `ast.AST`.
|
||||
|
||||
A modified abstract syntax tree can be compiled into a Python code object
|
||||
using the built-in `compile()` function.
|
||||
|
||||
Additionally various helper functions are provided that make working with
|
||||
the trees simpler. The main intention of the helper functions and this
|
||||
module in general is to provide an easy to use interface for libraries
|
||||
that work tightly with the python syntax (template engines for example).
|
||||
|
||||
|
||||
:copyright: Copyright 2008 by Armin Ronacher.
|
||||
:license: Python License.
|
||||
"""
|
||||
from _ast import * # noqa
|
||||
from mako.compat import arg_stringname
|
||||
|
||||
BOOLOP_SYMBOLS = {
|
||||
And: 'and',
|
||||
Or: 'or'
|
||||
}
|
||||
|
||||
BINOP_SYMBOLS = {
|
||||
Add: '+',
|
||||
Sub: '-',
|
||||
Mult: '*',
|
||||
Div: '/',
|
||||
FloorDiv: '//',
|
||||
Mod: '%',
|
||||
LShift: '<<',
|
||||
RShift: '>>',
|
||||
BitOr: '|',
|
||||
BitAnd: '&',
|
||||
BitXor: '^'
|
||||
}
|
||||
|
||||
CMPOP_SYMBOLS = {
|
||||
Eq: '==',
|
||||
Gt: '>',
|
||||
GtE: '>=',
|
||||
In: 'in',
|
||||
Is: 'is',
|
||||
IsNot: 'is not',
|
||||
Lt: '<',
|
||||
LtE: '<=',
|
||||
NotEq: '!=',
|
||||
NotIn: 'not in'
|
||||
}
|
||||
|
||||
UNARYOP_SYMBOLS = {
|
||||
Invert: '~',
|
||||
Not: 'not',
|
||||
UAdd: '+',
|
||||
USub: '-'
|
||||
}
|
||||
|
||||
ALL_SYMBOLS = {}
|
||||
ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
|
||||
ALL_SYMBOLS.update(BINOP_SYMBOLS)
|
||||
ALL_SYMBOLS.update(CMPOP_SYMBOLS)
|
||||
ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
|
||||
|
||||
|
||||
def parse(expr, filename='<unknown>', mode='exec'):
|
||||
"""Parse an expression into an AST node."""
|
||||
return compile(expr, filename, mode, PyCF_ONLY_AST)
|
||||
|
||||
|
||||
def to_source(node, indent_with=' ' * 4):
|
||||
"""
|
||||
This function can convert a node tree back into python sourcecode. This
|
||||
is useful for debugging purposes, especially if you're dealing with custom
|
||||
asts not generated by python itself.
|
||||
|
||||
It could be that the sourcecode is evaluable when the AST itself is not
|
||||
compilable / evaluable. The reason for this is that the AST contains some
|
||||
more data than regular sourcecode does, which is dropped during
|
||||
conversion.
|
||||
|
||||
Each level of indentation is replaced with `indent_with`. Per default this
|
||||
parameter is equal to four spaces as suggested by PEP 8, but it might be
|
||||
adjusted to match the application's styleguide.
|
||||
"""
|
||||
generator = SourceGenerator(indent_with)
|
||||
generator.visit(node)
|
||||
return ''.join(generator.result)
|
||||
|
||||
|
||||
def dump(node):
|
||||
"""
|
||||
A very verbose representation of the node passed. This is useful for
|
||||
debugging purposes.
|
||||
"""
|
||||
def _format(node):
|
||||
if isinstance(node, AST):
|
||||
return '%s(%s)' % (node.__class__.__name__,
|
||||
', '.join('%s=%s' % (a, _format(b))
|
||||
for a, b in iter_fields(node)))
|
||||
elif isinstance(node, list):
|
||||
return '[%s]' % ', '.join(_format(x) for x in node)
|
||||
return repr(node)
|
||||
if not isinstance(node, AST):
|
||||
raise TypeError('expected AST, got %r' % node.__class__.__name__)
|
||||
return _format(node)
|
||||
|
||||
|
||||
def copy_location(new_node, old_node):
|
||||
"""
|
||||
Copy the source location hint (`lineno` and `col_offset`) from the
|
||||
old to the new node if possible and return the new one.
|
||||
"""
|
||||
for attr in 'lineno', 'col_offset':
|
||||
if attr in old_node._attributes and attr in new_node._attributes \
|
||||
and hasattr(old_node, attr):
|
||||
setattr(new_node, attr, getattr(old_node, attr))
|
||||
return new_node
|
||||
|
||||
|
||||
def fix_missing_locations(node):
|
||||
"""
|
||||
Some nodes require a line number and the column offset. Without that
|
||||
information the compiler will abort the compilation. Because it can be
|
||||
a dull task to add appropriate line numbers and column offsets when
|
||||
adding new nodes this function can help. It copies the line number and
|
||||
column offset of the parent node to the child nodes without this
|
||||
information.
|
||||
|
||||
    Unlike `copy_location` this works recursively and won't touch nodes that
    already have location information.
|
||||
"""
|
||||
def _fix(node, lineno, col_offset):
|
||||
if 'lineno' in node._attributes:
|
||||
if not hasattr(node, 'lineno'):
|
||||
node.lineno = lineno
|
||||
else:
|
||||
lineno = node.lineno
|
||||
if 'col_offset' in node._attributes:
|
||||
if not hasattr(node, 'col_offset'):
|
||||
node.col_offset = col_offset
|
||||
else:
|
||||
col_offset = node.col_offset
|
||||
for child in iter_child_nodes(node):
|
||||
_fix(child, lineno, col_offset)
|
||||
_fix(node, 1, 0)
|
||||
return node
|
||||
|
||||
|
||||
def increment_lineno(node, n=1):
|
||||
"""
|
||||
Increment the line numbers of all nodes by `n` if they have line number
|
||||
attributes. This is useful to "move code" to a different location in a
|
||||
file.
|
||||
"""
|
||||
for node in zip((node,), walk(node)):
|
||||
if 'lineno' in node._attributes:
|
||||
node.lineno = getattr(node, 'lineno', 0) + n
|
||||
|
||||
|
||||
def iter_fields(node):
|
||||
"""Iterate over all fields of a node, only yielding existing fields."""
|
||||
# CPython 2.5 compat
|
||||
if not hasattr(node, '_fields') or not node._fields:
|
||||
return
|
||||
for field in node._fields:
|
||||
try:
|
||||
yield field, getattr(node, field)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
def get_fields(node):
|
||||
"""Like `iter_fiels` but returns a dict."""
|
||||
return dict(iter_fields(node))
|
||||
|
||||
|
||||
def iter_child_nodes(node):
|
||||
"""Iterate over all child nodes or a node."""
|
||||
for name, field in iter_fields(node):
|
||||
if isinstance(field, AST):
|
||||
yield field
|
||||
elif isinstance(field, list):
|
||||
for item in field:
|
||||
if isinstance(item, AST):
|
||||
yield item
|
||||
|
||||
|
||||
def get_child_nodes(node):
|
||||
"""Like `iter_child_nodes` but returns a list."""
|
||||
return list(iter_child_nodes(node))
|
||||
|
||||
|
||||
def get_compile_mode(node):
|
||||
"""
|
||||
Get the mode for `compile` of a given node. If the node is not a `mod`
|
||||
node (`Expression`, `Module` etc.) a `TypeError` is thrown.
|
||||
"""
|
||||
if not isinstance(node, mod):
|
||||
raise TypeError('expected mod node, got %r' % node.__class__.__name__)
|
||||
return {
|
||||
Expression: 'eval',
|
||||
Interactive: 'single'
|
||||
}.get(node.__class__, 'expr')
|
||||
|
||||
|
||||
def get_docstring(node):
|
||||
"""
|
||||
Return the docstring for the given node or `None` if no docstring can be
|
||||
found. If the node provided does not accept docstrings a `TypeError`
|
||||
will be raised.
|
||||
"""
|
||||
if not isinstance(node, (FunctionDef, ClassDef, Module)):
|
||||
raise TypeError("%r can't have docstrings" % node.__class__.__name__)
|
||||
if node.body and isinstance(node.body[0], Str):
|
||||
return node.body[0].s
|
||||
|
||||
|
||||
def walk(node):
|
||||
"""
|
||||
Iterate over all nodes. This is useful if you only want to modify nodes in
|
||||
place and don't care about the context or the order the nodes are returned.
|
||||
"""
|
||||
from collections import deque
|
||||
todo = deque([node])
|
||||
while todo:
|
||||
node = todo.popleft()
|
||||
todo.extend(iter_child_nodes(node))
|
||||
yield node
|
||||
|
||||
|
||||
class NodeVisitor(object):
|
||||
|
||||
"""
|
||||
Walks the abstract syntax tree and call visitor functions for every node
|
||||
found. The visitor functions may return values which will be forwarded
|
||||
by the `visit` method.
|
||||
|
||||
Per default the visitor functions for the nodes are ``'visit_'`` +
|
||||
class name of the node. So a `TryFinally` node visit function would
|
||||
be `visit_TryFinally`. This behavior can be changed by overriding
|
||||
the `get_visitor` function. If no visitor function exists for a node
|
||||
(return value `None`) the `generic_visit` visitor is used instead.
|
||||
|
||||
Don't use the `NodeVisitor` if you want to apply changes to nodes during
|
||||
traversing. For this a special visitor exists (`NodeTransformer`) that
|
||||
allows modifications.
|
||||
"""
|
||||
|
||||
def get_visitor(self, node):
|
||||
"""
|
||||
Return the visitor function for this node or `None` if no visitor
|
||||
exists for this node. In that case the generic visit function is
|
||||
used instead.
|
||||
"""
|
||||
method = 'visit_' + node.__class__.__name__
|
||||
return getattr(self, method, None)
|
||||
|
||||
def visit(self, node):
|
||||
"""Visit a node."""
|
||||
f = self.get_visitor(node)
|
||||
if f is not None:
|
||||
return f(node)
|
||||
return self.generic_visit(node)
|
||||
|
||||
def generic_visit(self, node):
|
||||
"""Called if no explicit visitor function exists for a node."""
|
||||
for field, value in iter_fields(node):
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
if isinstance(item, AST):
|
||||
self.visit(item)
|
||||
elif isinstance(value, AST):
|
||||
self.visit(value)
|
||||
|
||||
|
||||
class NodeTransformer(NodeVisitor):
|
||||
|
||||
"""
|
||||
Walks the abstract syntax tree and allows modifications of nodes.
|
||||
|
||||
The `NodeTransformer` will walk the AST and use the return value of the
|
||||
visitor functions to replace or remove the old node. If the return
|
||||
value of the visitor function is `None` the node will be removed
|
||||
from the previous location otherwise it's replaced with the return
|
||||
value. The return value may be the original node in which case no
|
||||
replacement takes place.
|
||||
|
||||
Here an example transformer that rewrites all `foo` to `data['foo']`::
|
||||
|
||||
class RewriteName(NodeTransformer):
|
||||
|
||||
def visit_Name(self, node):
|
||||
return copy_location(Subscript(
|
||||
value=Name(id='data', ctx=Load()),
|
||||
slice=Index(value=Str(s=node.id)),
|
||||
ctx=node.ctx
|
||||
), node)
|
||||
|
||||
Keep in mind that if the node you're operating on has child nodes
|
||||
you must either transform the child nodes yourself or call the generic
|
||||
visit function for the node first.
|
||||
|
||||
Nodes that were part of a collection of statements (that applies to
|
||||
all statement nodes) may also return a list of nodes rather than just
|
||||
a single node.
|
||||
|
||||
Usually you use the transformer like this::
|
||||
|
||||
node = YourTransformer().visit(node)
|
||||
"""
|
||||
|
||||
def generic_visit(self, node):
|
||||
for field, old_value in iter_fields(node):
|
||||
old_value = getattr(node, field, None)
|
||||
if isinstance(old_value, list):
|
||||
new_values = []
|
||||
for value in old_value:
|
||||
if isinstance(value, AST):
|
||||
value = self.visit(value)
|
||||
if value is None:
|
||||
continue
|
||||
elif not isinstance(value, AST):
|
||||
new_values.extend(value)
|
||||
continue
|
||||
new_values.append(value)
|
||||
old_value[:] = new_values
|
||||
elif isinstance(old_value, AST):
|
||||
new_node = self.visit(old_value)
|
||||
if new_node is None:
|
||||
delattr(node, field)
|
||||
else:
|
||||
setattr(node, field, new_node)
|
||||
return node
|
||||
|
||||
|
||||
class SourceGenerator(NodeVisitor):
|
||||
|
||||
"""
|
||||
This visitor is able to transform a well formed syntax tree into python
|
||||
sourcecode. For more details have a look at the docstring of the
|
||||
`node_to_source` function.
|
||||
"""
|
||||
|
||||
def __init__(self, indent_with):
|
||||
self.result = []
|
||||
self.indent_with = indent_with
|
||||
self.indentation = 0
|
||||
self.new_lines = 0
|
||||
|
||||
def write(self, x):
|
||||
if self.new_lines:
|
||||
if self.result:
|
||||
self.result.append('\n' * self.new_lines)
|
||||
self.result.append(self.indent_with * self.indentation)
|
||||
self.new_lines = 0
|
||||
self.result.append(x)
|
||||
|
||||
def newline(self, n=1):
|
||||
self.new_lines = max(self.new_lines, n)
|
||||
|
||||
def body(self, statements):
|
||||
self.new_line = True
|
||||
self.indentation += 1
|
||||
for stmt in statements:
|
||||
self.visit(stmt)
|
||||
self.indentation -= 1
|
||||
|
||||
def body_or_else(self, node):
|
||||
self.body(node.body)
|
||||
if node.orelse:
|
||||
self.newline()
|
||||
self.write('else:')
|
||||
self.body(node.orelse)
|
||||
|
||||
def signature(self, node):
|
||||
want_comma = []
|
||||
|
||||
def write_comma():
|
||||
if want_comma:
|
||||
self.write(', ')
|
||||
else:
|
||||
want_comma.append(True)
|
||||
|
||||
padding = [None] * (len(node.args) - len(node.defaults))
|
||||
for arg, default in zip(node.args, padding + node.defaults):
|
||||
write_comma()
|
||||
self.visit(arg)
|
||||
if default is not None:
|
||||
self.write('=')
|
||||
self.visit(default)
|
||||
if node.vararg is not None:
|
||||
write_comma()
|
||||
self.write('*' + arg_stringname(node.vararg))
|
||||
if node.kwarg is not None:
|
||||
write_comma()
|
||||
self.write('**' + arg_stringname(node.kwarg))
|
||||
|
||||
def decorators(self, node):
|
||||
for decorator in node.decorator_list:
|
||||
self.newline()
|
||||
self.write('@')
|
||||
self.visit(decorator)
|
||||
|
||||
# Statements
|
||||
|
||||
def visit_Assign(self, node):
|
||||
self.newline()
|
||||
for idx, target in enumerate(node.targets):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(target)
|
||||
self.write(' = ')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_AugAssign(self, node):
|
||||
self.newline()
|
||||
self.visit(node.target)
|
||||
self.write(BINOP_SYMBOLS[type(node.op)] + '=')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_ImportFrom(self, node):
|
||||
self.newline()
|
||||
self.write('from %s%s import ' % ('.' * node.level, node.module))
|
||||
for idx, item in enumerate(node.names):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.write(item)
|
||||
|
||||
def visit_Import(self, node):
|
||||
self.newline()
|
||||
for item in node.names:
|
||||
self.write('import ')
|
||||
self.visit(item)
|
||||
|
||||
def visit_Expr(self, node):
|
||||
self.newline()
|
||||
self.generic_visit(node)
|
||||
|
||||
def visit_FunctionDef(self, node):
|
||||
self.newline(n=2)
|
||||
self.decorators(node)
|
||||
self.newline()
|
||||
self.write('def %s(' % node.name)
|
||||
self.signature(node.args)
|
||||
self.write('):')
|
||||
self.body(node.body)
|
||||
|
||||
def visit_ClassDef(self, node):
|
||||
have_args = []
|
||||
|
||||
def paren_or_comma():
|
||||
if have_args:
|
||||
self.write(', ')
|
||||
else:
|
||||
have_args.append(True)
|
||||
self.write('(')
|
||||
|
||||
self.newline(n=3)
|
||||
self.decorators(node)
|
||||
self.newline()
|
||||
self.write('class %s' % node.name)
|
||||
for base in node.bases:
|
||||
paren_or_comma()
|
||||
self.visit(base)
|
||||
# XXX: the if here is used to keep this module compatible
|
||||
# with python 2.6.
|
||||
if hasattr(node, 'keywords'):
|
||||
for keyword in node.keywords:
|
||||
paren_or_comma()
|
||||
self.write(keyword.arg + '=')
|
||||
self.visit(keyword.value)
|
||||
if getattr(node, "starargs", None):
|
||||
paren_or_comma()
|
||||
self.write('*')
|
||||
self.visit(node.starargs)
|
||||
if getattr(node, "kwargs", None):
|
||||
paren_or_comma()
|
||||
self.write('**')
|
||||
self.visit(node.kwargs)
|
||||
self.write(have_args and '):' or ':')
|
||||
self.body(node.body)
|
||||
|
||||
def visit_If(self, node):
|
||||
self.newline()
|
||||
self.write('if ')
|
||||
self.visit(node.test)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
||||
while True:
|
||||
else_ = node.orelse
|
||||
if len(else_) == 1 and isinstance(else_[0], If):
|
||||
node = else_[0]
|
||||
self.newline()
|
||||
self.write('elif ')
|
||||
self.visit(node.test)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
||||
else:
|
||||
self.newline()
|
||||
self.write('else:')
|
||||
self.body(else_)
|
||||
break
|
||||
|
||||
def visit_For(self, node):
|
||||
self.newline()
|
||||
self.write('for ')
|
||||
self.visit(node.target)
|
||||
self.write(' in ')
|
||||
self.visit(node.iter)
|
||||
self.write(':')
|
||||
self.body_or_else(node)
|
||||
|
||||
def visit_While(self, node):
|
||||
self.newline()
|
||||
self.write('while ')
|
||||
self.visit(node.test)
|
||||
self.write(':')
|
||||
self.body_or_else(node)
|
||||
|
||||
def visit_With(self, node):
|
||||
self.newline()
|
||||
self.write('with ')
|
||||
self.visit(node.context_expr)
|
||||
if node.optional_vars is not None:
|
||||
self.write(' as ')
|
||||
self.visit(node.optional_vars)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
||||
|
||||
def visit_Pass(self, node):
|
||||
self.newline()
|
||||
self.write('pass')
|
||||
|
||||
def visit_Print(self, node):
|
||||
# XXX: python 2.6 only
|
||||
self.newline()
|
||||
self.write('print ')
|
||||
want_comma = False
|
||||
if node.dest is not None:
|
||||
self.write(' >> ')
|
||||
self.visit(node.dest)
|
||||
want_comma = True
|
||||
for value in node.values:
|
||||
if want_comma:
|
||||
self.write(', ')
|
||||
self.visit(value)
|
||||
want_comma = True
|
||||
if not node.nl:
|
||||
self.write(',')
|
||||
|
||||
def visit_Delete(self, node):
|
||||
self.newline()
|
||||
self.write('del ')
|
||||
for idx, target in enumerate(node):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(target)
|
||||
|
||||
def visit_TryExcept(self, node):
|
||||
self.newline()
|
||||
self.write('try:')
|
||||
self.body(node.body)
|
||||
for handler in node.handlers:
|
||||
self.visit(handler)
|
||||
|
||||
def visit_TryFinally(self, node):
|
||||
self.newline()
|
||||
self.write('try:')
|
||||
self.body(node.body)
|
||||
self.newline()
|
||||
self.write('finally:')
|
||||
self.body(node.finalbody)
|
||||
|
||||
def visit_Global(self, node):
|
||||
self.newline()
|
||||
self.write('global ' + ', '.join(node.names))
|
||||
|
||||
def visit_Nonlocal(self, node):
|
||||
self.newline()
|
||||
self.write('nonlocal ' + ', '.join(node.names))
|
||||
|
||||
def visit_Return(self, node):
|
||||
self.newline()
|
||||
self.write('return ')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_Break(self, node):
|
||||
self.newline()
|
||||
self.write('break')
|
||||
|
||||
def visit_Continue(self, node):
|
||||
self.newline()
|
||||
self.write('continue')
|
||||
|
||||
def visit_Raise(self, node):
|
||||
# XXX: Python 2.6 / 3.0 compatibility
|
||||
self.newline()
|
||||
self.write('raise')
|
||||
if hasattr(node, 'exc') and node.exc is not None:
|
||||
self.write(' ')
|
||||
self.visit(node.exc)
|
||||
if node.cause is not None:
|
||||
self.write(' from ')
|
||||
self.visit(node.cause)
|
||||
elif hasattr(node, 'type') and node.type is not None:
|
||||
self.visit(node.type)
|
||||
if node.inst is not None:
|
||||
self.write(', ')
|
||||
self.visit(node.inst)
|
||||
if node.tback is not None:
|
||||
self.write(', ')
|
||||
self.visit(node.tback)
|
||||
|
||||
# Expressions
|
||||
|
||||
def visit_Attribute(self, node):
|
||||
self.visit(node.value)
|
||||
self.write('.' + node.attr)
|
||||
|
||||
def visit_Call(self, node):
|
||||
want_comma = []
|
||||
|
||||
def write_comma():
|
||||
if want_comma:
|
||||
self.write(', ')
|
||||
else:
|
||||
want_comma.append(True)
|
||||
|
||||
self.visit(node.func)
|
||||
self.write('(')
|
||||
for arg in node.args:
|
||||
write_comma()
|
||||
self.visit(arg)
|
||||
for keyword in node.keywords:
|
||||
write_comma()
|
||||
self.write(keyword.arg + '=')
|
||||
self.visit(keyword.value)
|
||||
if getattr(node, "starargs", None):
|
||||
write_comma()
|
||||
self.write('*')
|
||||
self.visit(node.starargs)
|
||||
if getattr(node, "kwargs", None):
|
||||
write_comma()
|
||||
self.write('**')
|
||||
self.visit(node.kwargs)
|
||||
self.write(')')
|
||||
|
||||
def visit_Name(self, node):
|
||||
self.write(node.id)
|
||||
|
||||
def visit_NameConstant(self, node):
|
||||
self.write(str(node.value))
|
||||
|
||||
def visit_arg(self, node):
|
||||
self.write(node.arg)
|
||||
|
||||
def visit_Str(self, node):
|
||||
self.write(repr(node.s))
|
||||
|
||||
def visit_Bytes(self, node):
|
||||
self.write(repr(node.s))
|
||||
|
||||
def visit_Num(self, node):
|
||||
self.write(repr(node.n))
|
||||
|
||||
def visit_Tuple(self, node):
|
||||
self.write('(')
|
||||
idx = -1
|
||||
for idx, item in enumerate(node.elts):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(item)
|
||||
self.write(idx and ')' or ',)')
|
||||
|
||||
def sequence_visit(left, right):
|
||||
def visit(self, node):
|
||||
self.write(left)
|
||||
for idx, item in enumerate(node.elts):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(item)
|
||||
self.write(right)
|
||||
return visit
|
||||
|
||||
visit_List = sequence_visit('[', ']')
|
||||
visit_Set = sequence_visit('{', '}')
|
||||
del sequence_visit
|
||||
|
||||
def visit_Dict(self, node):
|
||||
self.write('{')
|
||||
for idx, (key, value) in enumerate(zip(node.keys, node.values)):
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(key)
|
||||
self.write(': ')
|
||||
self.visit(value)
|
||||
self.write('}')
|
||||
|
||||
def visit_BinOp(self, node):
|
||||
self.write('(')
|
||||
self.visit(node.left)
|
||||
self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
|
||||
self.visit(node.right)
|
||||
self.write(')')
|
||||
|
||||
def visit_BoolOp(self, node):
|
||||
self.write('(')
|
||||
for idx, value in enumerate(node.values):
|
||||
if idx:
|
||||
self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
|
||||
self.visit(value)
|
||||
self.write(')')
|
||||
|
||||
def visit_Compare(self, node):
|
||||
self.write('(')
|
||||
self.visit(node.left)
|
||||
for op, right in zip(node.ops, node.comparators):
|
||||
self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
|
||||
self.visit(right)
|
||||
self.write(')')
|
||||
|
||||
def visit_UnaryOp(self, node):
|
||||
self.write('(')
|
||||
op = UNARYOP_SYMBOLS[type(node.op)]
|
||||
self.write(op)
|
||||
if op == 'not':
|
||||
self.write(' ')
|
||||
self.visit(node.operand)
|
||||
self.write(')')
|
||||
|
||||
def visit_Subscript(self, node):
|
||||
self.visit(node.value)
|
||||
self.write('[')
|
||||
self.visit(node.slice)
|
||||
self.write(']')
|
||||
|
||||
def visit_Slice(self, node):
|
||||
if node.lower is not None:
|
||||
self.visit(node.lower)
|
||||
self.write(':')
|
||||
if node.upper is not None:
|
||||
self.visit(node.upper)
|
||||
if node.step is not None:
|
||||
self.write(':')
|
||||
if not (isinstance(node.step, Name) and node.step.id == 'None'):
|
||||
self.visit(node.step)
|
||||
|
||||
def visit_ExtSlice(self, node):
|
||||
for idx, item in node.dims:
|
||||
if idx:
|
||||
self.write(', ')
|
||||
self.visit(item)
|
||||
|
||||
def visit_Yield(self, node):
|
||||
self.write('yield ')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_Lambda(self, node):
|
||||
self.write('lambda ')
|
||||
self.signature(node.args)
|
||||
self.write(': ')
|
||||
self.visit(node.body)
|
||||
|
||||
def visit_Ellipsis(self, node):
|
||||
self.write('Ellipsis')
|
||||
|
||||
def generator_visit(left, right):
|
||||
def visit(self, node):
|
||||
self.write(left)
|
||||
self.visit(node.elt)
|
||||
for comprehension in node.generators:
|
||||
self.visit(comprehension)
|
||||
self.write(right)
|
||||
return visit
|
||||
|
||||
visit_ListComp = generator_visit('[', ']')
|
||||
visit_GeneratorExp = generator_visit('(', ')')
|
||||
visit_SetComp = generator_visit('{', '}')
|
||||
del generator_visit
|
||||
|
||||
def visit_DictComp(self, node):
|
||||
self.write('{')
|
||||
self.visit(node.key)
|
||||
self.write(': ')
|
||||
self.visit(node.value)
|
||||
for comprehension in node.generators:
|
||||
self.visit(comprehension)
|
||||
self.write('}')
|
||||
|
||||
def visit_IfExp(self, node):
|
||||
self.visit(node.body)
|
||||
self.write(' if ')
|
||||
self.visit(node.test)
|
||||
self.write(' else ')
|
||||
self.visit(node.orelse)
|
||||
|
||||
def visit_Starred(self, node):
|
||||
self.write('*')
|
||||
self.visit(node.value)
|
||||
|
||||
def visit_Repr(self, node):
|
||||
# XXX: python 2.6 only
|
||||
self.write('`')
|
||||
self.visit(node.value)
|
||||
self.write('`')
|
||||
|
||||
# Helper Nodes
|
||||
|
||||
def visit_alias(self, node):
|
||||
self.write(node.name)
|
||||
if node.asname is not None:
|
||||
self.write(' as ' + node.asname)
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
self.write(' for ')
|
||||
self.visit(node.target)
|
||||
self.write(' in ')
|
||||
self.visit(node.iter)
|
||||
if node.ifs:
|
||||
for if_ in node.ifs:
|
||||
self.write(' if ')
|
||||
self.visit(if_)
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
self.newline()
|
||||
self.write('except')
|
||||
if node.type is not None:
|
||||
self.write(' ')
|
||||
self.visit(node.type)
|
||||
if node.name is not None:
|
||||
self.write(' as ')
|
||||
self.visit(node.name)
|
||||
self.write(':')
|
||||
self.body(node.body)
|
|
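_ast_util.py exposes parse(), dump() and to_source() on top of SourceGenerator. A hedged round-trip sketch, assuming the bundled mako package is importable and limited to simple statements that this older-grammar generator handles:

# Round-trip sketch using the helpers defined in mako/_ast_util.py.
from mako._ast_util import parse, dump, to_source

tree = parse("x = a + b\nif x:\n    result = x\n")
print(dump(tree))                            # verbose structural dump of the AST
print(to_source(tree, indent_with="    "))   # regenerated source code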
@@ -0,0 +1,191 @@
|
|||
# mako/ast.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""utilities for analyzing expressions and blocks of Python
|
||||
code, as well as generating Python from AST nodes"""
|
||||
|
||||
from mako import exceptions, pyparser, compat
|
||||
import re
|
||||
|
||||
|
||||
class PythonCode(object):
|
||||
|
||||
"""represents information about a string containing Python code"""
|
||||
|
||||
def __init__(self, code, **exception_kwargs):
|
||||
self.code = code
|
||||
|
||||
# represents all identifiers which are assigned to at some point in
|
||||
# the code
|
||||
self.declared_identifiers = set()
|
||||
|
||||
# represents all identifiers which are referenced before their
|
||||
# assignment, if any
|
||||
self.undeclared_identifiers = set()
|
||||
|
||||
# note that an identifier can be in both the undeclared and declared
|
||||
# lists.
|
||||
|
||||
# using AST to parse instead of using code.co_varnames,
|
||||
# code.co_names has several advantages:
|
||||
# - we can locate an identifier as "undeclared" even if
|
||||
# its declared later in the same block of code
|
||||
# - AST is less likely to break with version changes
|
||||
# (for example, the behavior of co_names changed a little bit
|
||||
# in python version 2.5)
|
||||
if isinstance(code, compat.string_types):
|
||||
expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
|
||||
else:
|
||||
expr = code
|
||||
|
||||
f = pyparser.FindIdentifiers(self, **exception_kwargs)
|
||||
f.visit(expr)
|
||||
|
||||
|
||||
class ArgumentList(object):
|
||||
|
||||
"""parses a fragment of code as a comma-separated list of expressions"""
|
||||
|
||||
def __init__(self, code, **exception_kwargs):
|
||||
self.codeargs = []
|
||||
self.args = []
|
||||
self.declared_identifiers = set()
|
||||
self.undeclared_identifiers = set()
|
||||
if isinstance(code, compat.string_types):
|
||||
if re.match(r"\S", code) and not re.match(r",\s*$", code):
|
||||
                # if there's text and no trailing comma, ensure it's parsed
|
||||
# as a tuple by adding a trailing comma
|
||||
code += ","
|
||||
expr = pyparser.parse(code, "exec", **exception_kwargs)
|
||||
else:
|
||||
expr = code
|
||||
|
||||
f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
|
||||
f.visit(expr)
|
||||
|
||||
|
||||
class PythonFragment(PythonCode):
|
||||
|
||||
"""extends PythonCode to provide identifier lookups in partial control
|
||||
statements
|
||||
|
||||
e.g.
|
||||
for x in 5:
|
||||
elif y==9:
|
||||
except (MyException, e):
|
||||
etc.
|
||||
"""
|
||||
|
||||
def __init__(self, code, **exception_kwargs):
|
||||
m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
|
||||
if not m:
|
||||
raise exceptions.CompileException(
|
||||
"Fragment '%s' is not a partial control statement" %
|
||||
code, **exception_kwargs)
|
||||
if m.group(3):
|
||||
code = code[:m.start(3)]
|
||||
(keyword, expr) = m.group(1, 2)
|
||||
if keyword in ['for', 'if', 'while']:
|
||||
code = code + "pass"
|
||||
elif keyword == 'try':
|
||||
code = code + "pass\nexcept:pass"
|
||||
elif keyword == 'elif' or keyword == 'else':
|
||||
code = "if False:pass\n" + code + "pass"
|
||||
elif keyword == 'except':
|
||||
code = "try:pass\n" + code + "pass"
|
||||
elif keyword == 'with':
|
||||
code = code + "pass"
|
||||
else:
|
||||
raise exceptions.CompileException(
|
||||
"Unsupported control keyword: '%s'" %
|
||||
keyword, **exception_kwargs)
|
||||
super(PythonFragment, self).__init__(code, **exception_kwargs)
|
||||
|
||||
|
||||
class FunctionDecl(object):
|
||||
|
||||
"""function declaration"""
|
||||
|
||||
def __init__(self, code, allow_kwargs=True, **exception_kwargs):
|
||||
self.code = code
|
||||
expr = pyparser.parse(code, "exec", **exception_kwargs)
|
||||
|
||||
f = pyparser.ParseFunc(self, **exception_kwargs)
|
||||
f.visit(expr)
|
||||
if not hasattr(self, 'funcname'):
|
||||
raise exceptions.CompileException(
|
||||
"Code '%s' is not a function declaration" % code,
|
||||
**exception_kwargs)
|
||||
if not allow_kwargs and self.kwargs:
|
||||
raise exceptions.CompileException(
|
||||
"'**%s' keyword argument not allowed here" %
|
||||
self.kwargnames[-1], **exception_kwargs)
|
||||
|
||||
def get_argument_expressions(self, as_call=False):
|
||||
"""Return the argument declarations of this FunctionDecl as a printable
|
||||
list.
|
||||
|
||||
By default the return value is appropriate for writing in a ``def``;
|
||||
set `as_call` to true to build arguments to be passed to the function
|
||||
instead (assuming locals with the same names as the arguments exist).
|
||||
"""
|
||||
|
||||
namedecls = []
|
||||
|
||||
# Build in reverse order, since defaults and slurpy args come last
|
||||
argnames = self.argnames[::-1]
|
||||
kwargnames = self.kwargnames[::-1]
|
||||
defaults = self.defaults[::-1]
|
||||
kwdefaults = self.kwdefaults[::-1]
|
||||
|
||||
# Named arguments
|
||||
if self.kwargs:
|
||||
namedecls.append("**" + kwargnames.pop(0))
|
||||
|
||||
for name in kwargnames:
|
||||
# Keyword-only arguments must always be used by name, so even if
|
||||
# this is a call, print out `foo=foo`
|
||||
if as_call:
|
||||
namedecls.append("%s=%s" % (name, name))
|
||||
elif kwdefaults:
|
||||
default = kwdefaults.pop(0)
|
||||
if default is None:
|
||||
# The AST always gives kwargs a default, since you can do
|
||||
# `def foo(*, a=1, b, c=3)`
|
||||
namedecls.append(name)
|
||||
else:
|
||||
namedecls.append("%s=%s" % (
|
||||
name, pyparser.ExpressionGenerator(default).value()))
|
||||
else:
|
||||
namedecls.append(name)
|
||||
|
||||
# Positional arguments
|
||||
if self.varargs:
|
||||
namedecls.append("*" + argnames.pop(0))
|
||||
|
||||
for name in argnames:
|
||||
if as_call or not defaults:
|
||||
namedecls.append(name)
|
||||
else:
|
||||
default = defaults.pop(0)
|
||||
namedecls.append("%s=%s" % (
|
||||
name, pyparser.ExpressionGenerator(default).value()))
|
||||
|
||||
namedecls.reverse()
|
||||
return namedecls
|
||||
|
||||
@property
|
||||
def allargnames(self):
|
||||
return tuple(self.argnames) + tuple(self.kwargnames)
|
||||
|
||||
|
||||
class FunctionArgs(FunctionDecl):
|
||||
|
||||
"""the argument portion of a function declaration"""
|
||||
|
||||
def __init__(self, code, **kwargs):
|
||||
super(FunctionArgs, self).__init__("def ANON(%s):pass" % code,
|
||||
**kwargs)
|
|
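PythonCode walks a code string's AST to separate declared from undeclared identifiers, and FunctionDecl recovers a signature. A sketch of typical use; the keyword arguments that normally carry error-reporting context are omitted here, which is an assumption of this sketch, and the expected results are shown as comments:

# Sketch of the identifier analysis provided by mako/ast.py above.
from mako.ast import PythonCode, FunctionDecl

pc = PythonCode("total = price * qty")
print(pc.declared_identifiers)        # -> {'total'}
print(pc.undeclared_identifiers)      # -> {'price', 'qty'}

fd = FunctionDecl("def render(ctx, indent=4): pass")
print(fd.funcname)                    # -> 'render'
print(fd.get_argument_expressions())  # -> ['ctx', 'indent=4']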
@@ -0,0 +1,240 @@
|
|||
# mako/cache.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from mako import compat, util
|
||||
|
||||
_cache_plugins = util.PluginLoader("mako.cache")
|
||||
|
||||
register_plugin = _cache_plugins.register
|
||||
register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl")
|
||||
|
||||
|
||||
class Cache(object):
|
||||
|
||||
"""Represents a data content cache made available to the module
|
||||
space of a specific :class:`.Template` object.
|
||||
|
||||
.. versionadded:: 0.6
|
||||
:class:`.Cache` by itself is mostly a
|
||||
container for a :class:`.CacheImpl` object, which implements
|
||||
a fixed API to provide caching services; specific subclasses exist to
|
||||
implement different
|
||||
caching strategies. Mako includes a backend that works with
|
||||
the Beaker caching system. Beaker itself then supports
|
||||
a number of backends (i.e. file, memory, memcached, etc.)
|
||||
|
||||
The construction of a :class:`.Cache` is part of the mechanics
|
||||
of a :class:`.Template`, and programmatic access to this
|
||||
cache is typically via the :attr:`.Template.cache` attribute.
|
||||
|
||||
"""
|
||||
|
||||
impl = None
|
||||
"""Provide the :class:`.CacheImpl` in use by this :class:`.Cache`.
|
||||
|
||||
This accessor allows a :class:`.CacheImpl` with additional
|
||||
methods beyond that of :class:`.Cache` to be used programmatically.
|
||||
|
||||
"""
|
||||
|
||||
id = None
|
||||
"""Return the 'id' that identifies this cache.
|
||||
|
||||
This is a value that should be globally unique to the
|
||||
:class:`.Template` associated with this cache, and can
|
||||
be used by a caching system to name a local container
|
||||
for data specific to this template.
|
||||
|
||||
"""
|
||||
|
||||
starttime = None
|
||||
"""Epochal time value for when the owning :class:`.Template` was
|
||||
first compiled.
|
||||
|
||||
A cache implementation may wish to invalidate data earlier than
|
||||
this timestamp; this has the effect of the cache for a specific
|
||||
:class:`.Template` starting clean any time the :class:`.Template`
|
||||
is recompiled, such as when the original template file changed on
|
||||
the filesystem.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, template, *args):
|
||||
# check for a stale template calling the
|
||||
# constructor
|
||||
if isinstance(template, compat.string_types) and args:
|
||||
return
|
||||
self.template = template
|
||||
self.id = template.module.__name__
|
||||
self.starttime = template.module._modified_time
|
||||
self._def_regions = {}
|
||||
self.impl = self._load_impl(self.template.cache_impl)
|
||||
|
||||
def _load_impl(self, name):
|
||||
return _cache_plugins.load(name)(self)
|
||||
|
||||
def get_or_create(self, key, creation_function, **kw):
|
||||
"""Retrieve a value from the cache, using the given creation function
|
||||
to generate a new value."""
|
||||
|
||||
return self._ctx_get_or_create(key, creation_function, None, **kw)
|
||||
|
||||
def _ctx_get_or_create(self, key, creation_function, context, **kw):
|
||||
"""Retrieve a value from the cache, using the given creation function
|
||||
to generate a new value."""
|
||||
|
||||
if not self.template.cache_enabled:
|
||||
return creation_function()
|
||||
|
||||
return self.impl.get_or_create(
|
||||
key,
|
||||
creation_function,
|
||||
**self._get_cache_kw(kw, context))
|
||||
|
||||
def set(self, key, value, **kw):
|
||||
"""Place a value in the cache.
|
||||
|
||||
:param key: the value's key.
|
||||
:param value: the value.
|
||||
:param \**kw: cache configuration arguments.
|
||||
|
||||
"""
|
||||
|
||||
self.impl.set(key, value, **self._get_cache_kw(kw, None))
|
||||
|
||||
put = set
|
||||
"""A synonym for :meth:`.Cache.set`.
|
||||
|
||||
This is here for backwards compatibility.
|
||||
|
||||
"""
|
||||
|
||||
def get(self, key, **kw):
|
||||
"""Retrieve a value from the cache.
|
||||
|
||||
:param key: the value's key.
|
||||
:param \**kw: cache configuration arguments. The
|
||||
backend is configured using these arguments upon first request.
|
||||
Subsequent requests that use the same series of configuration
|
||||
values will use that same backend.
|
||||
|
||||
"""
|
||||
return self.impl.get(key, **self._get_cache_kw(kw, None))
|
||||
|
||||
def invalidate(self, key, **kw):
|
||||
"""Invalidate a value in the cache.
|
||||
|
||||
:param key: the value's key.
|
||||
:param \**kw: cache configuration arguments. The
|
||||
backend is configured using these arguments upon first request.
|
||||
Subsequent requests that use the same series of configuration
|
||||
values will use that same backend.
|
||||
|
||||
"""
|
||||
self.impl.invalidate(key, **self._get_cache_kw(kw, None))
|
||||
|
||||
def invalidate_body(self):
|
||||
"""Invalidate the cached content of the "body" method for this
|
||||
template.
|
||||
|
||||
"""
|
||||
self.invalidate('render_body', __M_defname='render_body')
|
||||
|
||||
def invalidate_def(self, name):
|
||||
"""Invalidate the cached content of a particular ``<%def>`` within this
|
||||
template.
|
||||
|
||||
"""
|
||||
|
||||
self.invalidate('render_%s' % name, __M_defname='render_%s' % name)
|
||||
|
||||
def invalidate_closure(self, name):
|
||||
"""Invalidate a nested ``<%def>`` within this template.
|
||||
|
||||
Caching of nested defs is a blunt tool as there is no
|
||||
management of scope -- nested defs that use cache tags
|
||||
need to have names unique of all other nested defs in the
|
||||
template, else their content will be overwritten by
|
||||
each other.
|
||||
|
||||
"""
|
||||
|
||||
self.invalidate(name, __M_defname=name)
|
||||
|
||||
def _get_cache_kw(self, kw, context):
|
||||
defname = kw.pop('__M_defname', None)
|
||||
if not defname:
|
||||
tmpl_kw = self.template.cache_args.copy()
|
||||
tmpl_kw.update(kw)
|
||||
elif defname in self._def_regions:
|
||||
tmpl_kw = self._def_regions[defname]
|
||||
else:
|
||||
tmpl_kw = self.template.cache_args.copy()
|
||||
tmpl_kw.update(kw)
|
||||
self._def_regions[defname] = tmpl_kw
|
||||
if context and self.impl.pass_context:
|
||||
tmpl_kw = tmpl_kw.copy()
|
||||
tmpl_kw.setdefault('context', context)
|
||||
return tmpl_kw
|
||||
|
||||
|
||||
class CacheImpl(object):
|
||||
|
||||
"""Provide a cache implementation for use by :class:`.Cache`."""
|
||||
|
||||
def __init__(self, cache):
|
||||
self.cache = cache
|
||||
|
||||
pass_context = False
|
||||
"""If ``True``, the :class:`.Context` will be passed to
|
||||
:meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``.
|
||||
"""
|
||||
|
||||
def get_or_create(self, key, creation_function, **kw):
|
||||
"""Retrieve a value from the cache, using the given creation function
|
||||
to generate a new value.
|
||||
|
||||
This function *must* return a value, either from
|
||||
the cache, or via the given creation function.
|
||||
If the creation function is called, the newly
|
||||
created value should be populated into the cache
|
||||
under the given key before being returned.
|
||||
|
||||
:param key: the value's key.
|
||||
:param creation_function: function that when called generates
|
||||
a new value.
|
||||
:param \**kw: cache configuration arguments.
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def set(self, key, value, **kw):
|
||||
"""Place a value in the cache.
|
||||
|
||||
:param key: the value's key.
|
||||
:param value: the value.
|
||||
:param \**kw: cache configuration arguments.
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def get(self, key, **kw):
|
||||
"""Retrieve a value from the cache.
|
||||
|
||||
:param key: the value's key.
|
||||
:param \**kw: cache configuration arguments.
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def invalidate(self, key, **kw):
|
||||
"""Invalidate a value in the cache.
|
||||
|
||||
:param key: the value's key.
|
||||
:param \**kw: cache configuration arguments.
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
|
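CacheImpl above is the interface a cache backend must satisfy. A minimal in-memory sketch of that interface; the class name is illustrative and not part of Mako:

# Minimal in-memory implementation of the CacheImpl interface declared above.
from mako.cache import CacheImpl

class DictCacheImpl(CacheImpl):
    def __init__(self, cache):
        super(DictCacheImpl, self).__init__(cache)
        self._data = {}

    def get_or_create(self, key, creation_function, **kw):
        if key not in self._data:
            self._data[key] = creation_function()
        return self._data[key]

    def set(self, key, value, **kw):
        self._data[key] = value

    def get(self, key, **kw):
        return self._data.get(key)

    def invalidate(self, key, **kw):
        self._data.pop(key, None)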
@@ -0,0 +1,67 @@
|
|||
# mako/cmd.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
from argparse import ArgumentParser
|
||||
from os.path import isfile, dirname
|
||||
import sys
|
||||
from mako.template import Template
|
||||
from mako.lookup import TemplateLookup
|
||||
from mako import exceptions
|
||||
|
||||
|
||||
def varsplit(var):
|
||||
if "=" not in var:
|
||||
return (var, "")
|
||||
return var.split("=", 1)
|
||||
|
||||
|
||||
def _exit():
|
||||
sys.stderr.write(exceptions.text_error_template().render())
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def cmdline(argv=None):
|
||||
|
||||
parser = ArgumentParser("usage: %prog [FILENAME]")
|
||||
parser.add_argument(
|
||||
"--var", default=[], action="append",
|
||||
help="variable (can be used multiple times, use name=value)")
|
||||
parser.add_argument(
|
||||
"--template-dir", default=[], action="append",
|
||||
help="Directory to use for template lookup (multiple "
|
||||
"directories may be provided). If not given then if the "
|
||||
"template is read from stdin, the value defaults to be "
|
||||
"the current directory, otherwise it defaults to be the "
|
||||
"parent directory of the file provided.")
|
||||
parser.add_argument('input', nargs='?', default='-')
|
||||
|
||||
options = parser.parse_args(argv)
|
||||
if options.input == '-':
|
||||
lookup_dirs = options.template_dir or ["."]
|
||||
lookup = TemplateLookup(lookup_dirs)
|
||||
try:
|
||||
template = Template(sys.stdin.read(), lookup=lookup)
|
||||
except:
|
||||
_exit()
|
||||
else:
|
||||
filename = options.input
|
||||
if not isfile(filename):
|
||||
raise SystemExit("error: can't find %s" % filename)
|
||||
lookup_dirs = options.template_dir or [dirname(filename)]
|
||||
lookup = TemplateLookup(lookup_dirs)
|
||||
try:
|
||||
template = Template(filename=filename, lookup=lookup)
|
||||
except:
|
||||
_exit()
|
||||
|
||||
kw = dict([varsplit(var) for var in options.var])
|
||||
try:
|
||||
print(template.render(**kw))
|
||||
except:
|
||||
_exit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cmdline()
|
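cmdline() is the console entry point defined above; it can also be driven programmatically by passing an argv list. A sketch with a hypothetical template file name:

# Driving the command-line front end defined above without a shell.
# 'hello.txt' is a hypothetical template file used only for illustration.
from mako.cmd import cmdline

# equivalent to: python -m mako.cmd --var name=World hello.txt
cmdline(["--var", "name=World", "hello.txt"])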
File diff suppressed because it is too large
@@ -0,0 +1,200 @@
|
|||
import sys
|
||||
import time
|
||||
|
||||
py3k = sys.version_info >= (3, 0)
|
||||
py33 = sys.version_info >= (3, 3)
|
||||
py2k = sys.version_info < (3,)
|
||||
py26 = sys.version_info >= (2, 6)
|
||||
jython = sys.platform.startswith('java')
|
||||
win32 = sys.platform.startswith('win')
|
||||
pypy = hasattr(sys, 'pypy_version_info')
|
||||
|
||||
if py3k:
|
||||
# create a "getargspec" from getfullargspec(), which is not deprecated
|
||||
# in Py3K; getargspec() has started to emit warnings as of Py3.5.
|
||||
# As of Py3.4, now they are trying to move from getfullargspec()
|
||||
# to "signature()", but getfullargspec() is not deprecated, so stick
|
||||
# with that for now.
|
||||
|
||||
import collections
|
||||
ArgSpec = collections.namedtuple(
|
||||
"ArgSpec",
|
||||
["args", "varargs", "keywords", "defaults"])
|
||||
from inspect import getfullargspec as inspect_getfullargspec
|
||||
|
||||
def inspect_getargspec(func):
|
||||
return ArgSpec(
|
||||
*inspect_getfullargspec(func)[0:4]
|
||||
)
|
||||
else:
|
||||
from inspect import getargspec as inspect_getargspec # noqa
|
||||
|
||||
|
||||
if py3k:
|
||||
from io import StringIO
|
||||
import builtins as compat_builtins
|
||||
from urllib.parse import quote_plus, unquote_plus
|
||||
from html.entities import codepoint2name, name2codepoint
|
||||
string_types = str,
|
||||
binary_type = bytes
|
||||
text_type = str
|
||||
|
||||
from io import BytesIO as byte_buffer
|
||||
|
||||
def u(s):
|
||||
return s
|
||||
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
|
||||
def octal(lit):
|
||||
return eval("0o" + lit)
|
||||
|
||||
else:
|
||||
import __builtin__ as compat_builtins # noqa
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
except:
|
||||
from StringIO import StringIO
|
||||
|
||||
byte_buffer = StringIO
|
||||
|
||||
from urllib import quote_plus, unquote_plus # noqa
|
||||
from htmlentitydefs import codepoint2name, name2codepoint # noqa
|
||||
string_types = basestring, # noqa
|
||||
binary_type = str
|
||||
text_type = unicode # noqa
|
||||
|
||||
def u(s):
|
||||
return unicode(s, "utf-8") # noqa
|
||||
|
||||
def b(s):
|
||||
return s
|
||||
|
||||
def octal(lit):
|
||||
return eval("0" + lit)
|
||||
|
||||
|
||||
if py33:
|
||||
from importlib import machinery
|
||||
|
||||
def load_module(module_id, path):
|
||||
return machinery.SourceFileLoader(module_id, path).load_module()
|
||||
else:
|
||||
import imp
|
||||
|
||||
def load_module(module_id, path):
|
||||
fp = open(path, 'rb')
|
||||
try:
|
||||
return imp.load_source(module_id, path, fp)
|
||||
finally:
|
||||
fp.close()
|
||||
|
||||
|
||||
if py3k:
|
||||
def reraise(tp, value, tb=None, cause=None):
|
||||
if cause is not None:
|
||||
value.__cause__ = cause
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
else:
|
||||
exec("def reraise(tp, value, tb=None, cause=None):\n"
|
||||
" raise tp, value, tb\n")
|
||||
|
||||
|
||||
def exception_as():
|
||||
return sys.exc_info()[1]
|
||||
|
||||
try:
|
||||
import threading
|
||||
if py3k:
|
||||
import _thread as thread
|
||||
else:
|
||||
import thread
|
||||
except ImportError:
|
||||
import dummy_threading as threading # noqa
|
||||
if py3k:
|
||||
import _dummy_thread as thread
|
||||
else:
|
||||
import dummy_thread as thread # noqa
|
||||
|
||||
if win32 or jython:
|
||||
time_func = time.clock
|
||||
else:
|
||||
time_func = time.time
|
||||
|
||||
try:
|
||||
from functools import partial
|
||||
except:
|
||||
def partial(func, *args, **keywords):
|
||||
def newfunc(*fargs, **fkeywords):
|
||||
newkeywords = keywords.copy()
|
||||
newkeywords.update(fkeywords)
|
||||
return func(*(args + fargs), **newkeywords)
|
||||
return newfunc
|
||||
|
||||
|
||||
all = all
|
||||
import json # noqa
|
||||
|
||||
|
||||
def exception_name(exc):
|
||||
return exc.__class__.__name__
|
||||
|
||||
try:
|
||||
from inspect import CO_VARKEYWORDS, CO_VARARGS
|
||||
|
||||
def inspect_func_args(fn):
|
||||
if py3k:
|
||||
co = fn.__code__
|
||||
else:
|
||||
co = fn.func_code
|
||||
|
||||
nargs = co.co_argcount
|
||||
names = co.co_varnames
|
||||
args = list(names[:nargs])
|
||||
|
||||
varargs = None
|
||||
if co.co_flags & CO_VARARGS:
|
||||
varargs = co.co_varnames[nargs]
|
||||
nargs = nargs + 1
|
||||
varkw = None
|
||||
if co.co_flags & CO_VARKEYWORDS:
|
||||
varkw = co.co_varnames[nargs]
|
||||
|
||||
if py3k:
|
||||
return args, varargs, varkw, fn.__defaults__
|
||||
else:
|
||||
return args, varargs, varkw, fn.func_defaults
|
||||
except ImportError:
|
||||
import inspect
|
||||
|
||||
def inspect_func_args(fn):
|
||||
return inspect.getargspec(fn)
|
||||
|
||||
if py3k:
|
||||
def callable(fn):
|
||||
return hasattr(fn, '__call__')
|
||||
else:
|
||||
callable = callable
|
||||
|
||||
|
||||
################################################
|
||||
# cross-compatible metaclass implementation
|
||||
# Copyright (c) 2010-2012 Benjamin Peterson
|
||||
def with_metaclass(meta, base=object):
|
||||
"""Create a base class with a metaclass."""
|
||||
return meta("%sBase" % meta.__name__, (base,), {})
|
||||
################################################
|
||||
|
||||
|
||||
def arg_stringname(func_arg):
|
||||
"""Gets the string name of a kwarg or vararg
|
||||
In Python3.4 a function's args are
|
||||
of _ast.arg type not _ast.name
|
||||
"""
|
||||
if hasattr(func_arg, 'arg'):
|
||||
return func_arg.arg
|
||||
else:
|
||||
return str(func_arg)
|
|
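As a quick, non-authoritative illustration of how the compatibility shims above are typically consumed (the class and metaclass names here are invented for the sketch):

from mako import compat


class _Meta(type):
    pass


class Widget(compat.with_metaclass(_Meta)):
    # runs unchanged on Python 2 and 3; with_metaclass() builds the
    # intermediate "<meta>Base" class shown in the module above
    pass


print(compat.py3k, isinstance(Widget, _Meta))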
@ -0,0 +1,394 @@
|
|||
# mako/exceptions.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""exception classes"""
|
||||
|
||||
import traceback
|
||||
import sys
|
||||
from mako import util, compat
|
||||
|
||||
|
||||
class MakoException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class RuntimeException(MakoException):
|
||||
pass
|
||||
|
||||
|
||||
def _format_filepos(lineno, pos, filename):
|
||||
if filename is None:
|
||||
return " at line: %d char: %d" % (lineno, pos)
|
||||
else:
|
||||
return " in file '%s' at line: %d char: %d" % (filename, lineno, pos)
|
||||
|
||||
|
||||
class CompileException(MakoException):
|
||||
|
||||
def __init__(self, message, source, lineno, pos, filename):
|
||||
MakoException.__init__(
|
||||
self,
|
||||
message + _format_filepos(lineno, pos, filename))
|
||||
self.lineno = lineno
|
||||
self.pos = pos
|
||||
self.filename = filename
|
||||
self.source = source
|
||||
|
||||
|
||||
class SyntaxException(MakoException):
|
||||
|
||||
def __init__(self, message, source, lineno, pos, filename):
|
||||
MakoException.__init__(
|
||||
self,
|
||||
message + _format_filepos(lineno, pos, filename))
|
||||
self.lineno = lineno
|
||||
self.pos = pos
|
||||
self.filename = filename
|
||||
self.source = source
|
||||
|
||||
|
||||
class UnsupportedError(MakoException):
|
||||
|
||||
"""raised when a retired feature is used."""
|
||||
|
||||
|
||||
class NameConflictError(MakoException):
|
||||
|
||||
"""raised when a reserved word is used inappropriately"""
|
||||
|
||||
|
||||
class TemplateLookupException(MakoException):
|
||||
pass
|
||||
|
||||
|
||||
class TopLevelLookupException(TemplateLookupException):
|
||||
pass
|
||||
|
||||
|
||||
class RichTraceback(object):
|
||||
|
||||
"""Pull the current exception from the ``sys`` traceback and extracts
|
||||
Mako-specific template information.
|
||||
|
||||
See the usage examples in :ref:`handling_exceptions`.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, error=None, traceback=None):
|
||||
self.source, self.lineno = "", 0
|
||||
|
||||
if error is None or traceback is None:
|
||||
t, value, tback = sys.exc_info()
|
||||
|
||||
if error is None:
|
||||
error = value or t
|
||||
|
||||
if traceback is None:
|
||||
traceback = tback
|
||||
|
||||
self.error = error
|
||||
self.records = self._init(traceback)
|
||||
|
||||
if isinstance(self.error, (CompileException, SyntaxException)):
|
||||
self.source = self.error.source
|
||||
self.lineno = self.error.lineno
|
||||
self._has_source = True
|
||||
|
||||
self._init_message()
|
||||
|
||||
@property
|
||||
def errorname(self):
|
||||
return compat.exception_name(self.error)
|
||||
|
||||
def _init_message(self):
|
||||
"""Find a unicode representation of self.error"""
|
||||
try:
|
||||
self.message = compat.text_type(self.error)
|
||||
except UnicodeError:
|
||||
try:
|
||||
self.message = str(self.error)
|
||||
except UnicodeEncodeError:
|
||||
# Fallback to args as neither unicode nor
|
||||
# str(Exception(u'\xe6')) work in Python < 2.6
|
||||
self.message = self.error.args[0]
|
||||
if not isinstance(self.message, compat.text_type):
|
||||
self.message = compat.text_type(self.message, 'ascii', 'replace')
|
||||
|
||||
def _get_reformatted_records(self, records):
|
||||
for rec in records:
|
||||
if rec[6] is not None:
|
||||
yield (rec[4], rec[5], rec[2], rec[6])
|
||||
else:
|
||||
yield tuple(rec[0:4])
|
||||
|
||||
@property
|
||||
def traceback(self):
|
||||
"""Return a list of 4-tuple traceback records (i.e. normal python
|
||||
format) with template-corresponding lines remapped to the originating
|
||||
template.
|
||||
|
||||
"""
|
||||
return list(self._get_reformatted_records(self.records))
|
||||
|
||||
@property
|
||||
def reverse_records(self):
|
||||
return reversed(self.records)
|
||||
|
||||
@property
|
||||
def reverse_traceback(self):
|
||||
"""Return the same data as traceback, except in reverse order.
|
||||
"""
|
||||
|
||||
return list(self._get_reformatted_records(self.reverse_records))
|
||||
|
||||
def _init(self, trcback):
|
||||
"""format a traceback from sys.exc_info() into 7-item tuples,
|
||||
containing the regular four traceback tuple items, plus the original
|
||||
template filename, the line number adjusted relative to the template
|
||||
source, and code line from that line number of the template."""
|
||||
|
||||
import mako.template
|
||||
mods = {}
|
||||
rawrecords = traceback.extract_tb(trcback)
|
||||
new_trcback = []
|
||||
for filename, lineno, function, line in rawrecords:
|
||||
if not line:
|
||||
line = ''
|
||||
try:
|
||||
(line_map, template_lines) = mods[filename]
|
||||
except KeyError:
|
||||
try:
|
||||
info = mako.template._get_module_info(filename)
|
||||
module_source = info.code
|
||||
template_source = info.source
|
||||
template_filename = info.template_filename or filename
|
||||
except KeyError:
|
||||
# A normal .py file (not a Template)
|
||||
if not compat.py3k:
|
||||
try:
|
||||
fp = open(filename, 'rb')
|
||||
encoding = util.parse_encoding(fp)
|
||||
fp.close()
|
||||
except IOError:
|
||||
encoding = None
|
||||
if encoding:
|
||||
line = line.decode(encoding)
|
||||
else:
|
||||
line = line.decode('ascii', 'replace')
|
||||
new_trcback.append((filename, lineno, function, line,
|
||||
None, None, None, None))
|
||||
continue
|
||||
|
||||
template_ln = 1
|
||||
|
||||
source_map = mako.template.ModuleInfo.\
|
||||
get_module_source_metadata(
|
||||
module_source, full_line_map=True)
|
||||
line_map = source_map['full_line_map']
|
||||
|
||||
template_lines = [line_ for line_ in
|
||||
template_source.split("\n")]
|
||||
mods[filename] = (line_map, template_lines)
|
||||
|
||||
template_ln = line_map[lineno - 1]
|
||||
|
||||
if template_ln <= len(template_lines):
|
||||
template_line = template_lines[template_ln - 1]
|
||||
else:
|
||||
template_line = None
|
||||
new_trcback.append((filename, lineno, function,
|
||||
line, template_filename, template_ln,
|
||||
template_line, template_source))
|
||||
if not self.source:
|
||||
for l in range(len(new_trcback) - 1, 0, -1):
|
||||
if new_trcback[l][5]:
|
||||
self.source = new_trcback[l][7]
|
||||
self.lineno = new_trcback[l][5]
|
||||
break
|
||||
else:
|
||||
if new_trcback:
|
||||
try:
|
||||
# A normal .py file (not a Template)
|
||||
fp = open(new_trcback[-1][0], 'rb')
|
||||
encoding = util.parse_encoding(fp)
|
||||
fp.seek(0)
|
||||
self.source = fp.read()
|
||||
fp.close()
|
||||
if encoding:
|
||||
self.source = self.source.decode(encoding)
|
||||
except IOError:
|
||||
self.source = ''
|
||||
self.lineno = new_trcback[-1][1]
|
||||
return new_trcback
|
||||
|
||||
|
||||
def text_error_template(lookup=None):
|
||||
"""Provides a template that renders a stack trace in a similar format to
|
||||
the Python interpreter, substituting source template filenames, line
|
||||
numbers and code for that of the originating source template, as
|
||||
applicable.
|
||||
|
||||
"""
|
||||
import mako.template
|
||||
return mako.template.Template(r"""
|
||||
<%page args="error=None, traceback=None"/>
|
||||
<%!
|
||||
from mako.exceptions import RichTraceback
|
||||
%>\
|
||||
<%
|
||||
tback = RichTraceback(error=error, traceback=traceback)
|
||||
%>\
|
||||
Traceback (most recent call last):
|
||||
% for (filename, lineno, function, line) in tback.traceback:
|
||||
File "${filename}", line ${lineno}, in ${function or '?'}
|
||||
${line | trim}
|
||||
% endfor
|
||||
${tback.errorname}: ${tback.message}
|
||||
""")
|
||||
|
||||
|
||||
def _install_pygments():
|
||||
global syntax_highlight, pygments_html_formatter
|
||||
from mako.ext.pygmentplugin import syntax_highlight # noqa
|
||||
from mako.ext.pygmentplugin import pygments_html_formatter # noqa
|
||||
|
||||
|
||||
def _install_fallback():
|
||||
global syntax_highlight, pygments_html_formatter
|
||||
from mako.filters import html_escape
|
||||
pygments_html_formatter = None
|
||||
|
||||
def syntax_highlight(filename='', language=None):
|
||||
return html_escape
|
||||
|
||||
|
||||
def _install_highlighting():
|
||||
try:
|
||||
_install_pygments()
|
||||
except ImportError:
|
||||
_install_fallback()
|
||||
_install_highlighting()
|
||||
|
||||
|
||||
def html_error_template():
|
||||
"""Provides a template that renders a stack trace in an HTML format,
|
||||
providing an excerpt of code as well as substituting source template
|
||||
filenames, line numbers and code for that of the originating source
|
||||
template, as applicable.
|
||||
|
||||
The template's default ``encoding_errors`` value is
|
||||
``'htmlentityreplace'``. The template has two options. With the
|
||||
``full`` option disabled, only a section of an HTML document is
|
||||
returned. With the ``css`` option disabled, the default stylesheet
|
||||
won't be included.
|
||||
|
||||
"""
|
||||
import mako.template
|
||||
return mako.template.Template(r"""
|
||||
<%!
|
||||
from mako.exceptions import RichTraceback, syntax_highlight,\
|
||||
pygments_html_formatter
|
||||
%>
|
||||
<%page args="full=True, css=True, error=None, traceback=None"/>
|
||||
% if full:
|
||||
<html>
|
||||
<head>
|
||||
<title>Mako Runtime Error</title>
|
||||
% endif
|
||||
% if css:
|
||||
<style>
|
||||
body { font-family:verdana; margin:10px 30px 10px 30px;}
|
||||
.stacktrace { margin:5px 5px 5px 5px; }
|
||||
.highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
|
||||
.nonhighlight { padding:0px; background-color:#DFDFDF; }
|
||||
.sample { padding:10px; margin:10px 10px 10px 10px;
|
||||
font-family:monospace; }
|
||||
.sampleline { padding:0px 10px 0px 10px; }
|
||||
.sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
|
||||
.location { font-size:80%; }
|
||||
.highlight { white-space:pre; }
|
||||
.sampleline { white-space:pre; }
|
||||
|
||||
% if pygments_html_formatter:
|
||||
${pygments_html_formatter.get_style_defs()}
|
||||
.linenos { min-width: 2.5em; text-align: right; }
|
||||
pre { margin: 0; }
|
||||
.syntax-highlighted { padding: 0 10px; }
|
||||
.syntax-highlightedtable { border-spacing: 1px; }
|
||||
.nonhighlight { border-top: 1px solid #DFDFDF;
|
||||
border-bottom: 1px solid #DFDFDF; }
|
||||
.stacktrace .nonhighlight { margin: 5px 15px 10px; }
|
||||
.sourceline { margin: 0 0; font-family:monospace; }
|
||||
.code { background-color: #F8F8F8; width: 100%; }
|
||||
.error .code { background-color: #FFBDBD; }
|
||||
.error .syntax-highlighted { background-color: #FFBDBD; }
|
||||
% endif
|
||||
|
||||
</style>
|
||||
% endif
|
||||
% if full:
|
||||
</head>
|
||||
<body>
|
||||
% endif
|
||||
|
||||
<h2>Error !</h2>
|
||||
<%
|
||||
tback = RichTraceback(error=error, traceback=traceback)
|
||||
src = tback.source
|
||||
line = tback.lineno
|
||||
if src:
|
||||
lines = src.split('\n')
|
||||
else:
|
||||
lines = None
|
||||
%>
|
||||
<h3>${tback.errorname}: ${tback.message|h}</h3>
|
||||
|
||||
% if lines:
|
||||
<div class="sample">
|
||||
<div class="nonhighlight">
|
||||
% for index in range(max(0, line-4),min(len(lines), line+5)):
|
||||
<%
|
||||
if pygments_html_formatter:
|
||||
pygments_html_formatter.linenostart = index + 1
|
||||
%>
|
||||
% if index + 1 == line:
|
||||
<%
|
||||
if pygments_html_formatter:
|
||||
old_cssclass = pygments_html_formatter.cssclass
|
||||
pygments_html_formatter.cssclass = 'error ' + old_cssclass
|
||||
%>
|
||||
${lines[index] | syntax_highlight(language='mako')}
|
||||
<%
|
||||
if pygments_html_formatter:
|
||||
pygments_html_formatter.cssclass = old_cssclass
|
||||
%>
|
||||
% else:
|
||||
${lines[index] | syntax_highlight(language='mako')}
|
||||
% endif
|
||||
% endfor
|
||||
</div>
|
||||
</div>
|
||||
% endif
|
||||
|
||||
<div class="stacktrace">
|
||||
% for (filename, lineno, function, line) in tback.reverse_traceback:
|
||||
<div class="location">${filename}, line ${lineno}:</div>
|
||||
<div class="nonhighlight">
|
||||
<%
|
||||
if pygments_html_formatter:
|
||||
pygments_html_formatter.linenostart = lineno
|
||||
%>
|
||||
<div class="sourceline">${line | syntax_highlight(filename)}</div>
|
||||
</div>
|
||||
% endfor
|
||||
</div>
|
||||
|
||||
% if full:
|
||||
</body>
|
||||
</html>
|
||||
% endif
|
||||
""", output_encoding=sys.getdefaultencoding(),
|
||||
encoding_errors='htmlentityreplace')
|
|
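A short usage sketch for the error-template helpers above, following the pattern the RichTraceback docstring refers to; the inline template source is invented and the missing variable is deliberate.

from mako.template import Template
from mako import exceptions

try:
    Template("hello ${name}").render()   # 'name' is deliberately left undefined
except Exception:
    # plain-text traceback with lines remapped to the originating template
    print(exceptions.text_error_template().render())
    # or, for a full HTML page:
    # html = exceptions.html_error_template().render()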
@ -0,0 +1,68 @@
|
|||
# ext/autohandler.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""adds autohandler functionality to Mako templates.
|
||||
|
||||
requires that the TemplateLookup class is used with templates.
|
||||
|
||||
usage:
|
||||
|
||||
<%!
|
||||
from mako.ext.autohandler import autohandler
|
||||
%>
|
||||
<%inherit file="${autohandler(template, context)}"/>
|
||||
|
||||
|
||||
or with custom autohandler filename:
|
||||
|
||||
<%!
|
||||
from mako.ext.autohandler import autohandler
|
||||
%>
|
||||
<%inherit file="${autohandler(template, context, name='somefilename')}"/>
|
||||
|
||||
"""
|
||||
|
||||
import posixpath
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def autohandler(template, context, name='autohandler'):
|
||||
lookup = context.lookup
|
||||
_template_uri = template.module._template_uri
|
||||
if not lookup.filesystem_checks:
|
||||
try:
|
||||
return lookup._uri_cache[(autohandler, _template_uri, name)]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
|
||||
while len(tokens):
|
||||
path = '/' + '/'.join(tokens)
|
||||
if path != _template_uri and _file_exists(lookup, path):
|
||||
if not lookup.filesystem_checks:
|
||||
return lookup._uri_cache.setdefault(
|
||||
(autohandler, _template_uri, name), path)
|
||||
else:
|
||||
return path
|
||||
if len(tokens) == 1:
|
||||
break
|
||||
tokens[-2:] = [name]
|
||||
|
||||
if not lookup.filesystem_checks:
|
||||
return lookup._uri_cache.setdefault(
|
||||
(autohandler, _template_uri, name), None)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def _file_exists(lookup, path):
|
||||
psub = re.sub(r'^/', '', path)
|
||||
for d in lookup.directories:
|
||||
if os.path.exists(d + '/' + psub):
|
||||
return True
|
||||
else:
|
||||
return False
|
|
@ -0,0 +1,50 @@
# ext/babelplugin.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from babel.messages.extract import extract_python
from mako.ext.extract import MessageExtractor


class BabelMakoExtractor(MessageExtractor):

    def __init__(self, keywords, comment_tags, options):
        self.keywords = keywords
        self.options = options
        self.config = {
            'comment-tags': u' '.join(comment_tags),
            'encoding': options.get('input_encoding',
                                    options.get('encoding', None)),
        }
        super(BabelMakoExtractor, self).__init__()

    def __call__(self, fileobj):
        return self.process_file(fileobj)

    def process_python(self, code, code_lineno, translator_strings):
        comment_tags = self.config['comment-tags']
        for lineno, funcname, messages, python_translator_comments \
                in extract_python(code,
                                  self.keywords, comment_tags, self.options):
            yield (code_lineno + (lineno - 1), funcname, messages,
                   translator_strings + python_translator_comments)


def extract(fileobj, keywords, comment_tags, options):
    """Extract messages from Mako templates.

    :param fileobj: the file-like object the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
                     recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
                         in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
    :rtype: ``iterator``
    """
    extractor = BabelMakoExtractor(keywords, comment_tags, options)
    for message in extractor(fileobj):
        yield message
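A small, hedged example of calling the extractor above directly; in a real project the same function is normally wired in through a Babel mapping file instead. The template bytes and the keyword list are invented for the sketch.

from io import BytesIO
from mako.ext.babelplugin import extract

source = BytesIO(b'<p>${_("Hello, world")}</p>\n')
for lineno, funcname, message, comments in extract(source, ['_'], [], {}):
    print(lineno, funcname, message, comments)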
@ -0,0 +1,76 @@
"""Provide a :class:`.CacheImpl` for the Beaker caching system."""

from mako import exceptions

from mako.cache import CacheImpl

try:
    from beaker import cache as beaker_cache
except:
    has_beaker = False
else:
    has_beaker = True

_beaker_cache = None


class BeakerCacheImpl(CacheImpl):

    """A :class:`.CacheImpl` provided for the Beaker caching system.

    This plugin is used by default, based on the default
    value of ``'beaker'`` for the ``cache_impl`` parameter of the
    :class:`.Template` or :class:`.TemplateLookup` classes.

    """

    def __init__(self, cache):
        if not has_beaker:
            raise exceptions.RuntimeException(
                "Can't initialize Beaker plugin; Beaker is not installed.")
        global _beaker_cache
        if _beaker_cache is None:
            if 'manager' in cache.template.cache_args:
                _beaker_cache = cache.template.cache_args['manager']
            else:
                _beaker_cache = beaker_cache.CacheManager()
        super(BeakerCacheImpl, self).__init__(cache)

    def _get_cache(self, **kw):
        expiretime = kw.pop('timeout', None)
        if 'dir' in kw:
            kw['data_dir'] = kw.pop('dir')
        elif self.cache.template.module_directory:
            kw['data_dir'] = self.cache.template.module_directory

        if 'manager' in kw:
            kw.pop('manager')

        if kw.get('type') == 'memcached':
            kw['type'] = 'ext:memcached'

        if 'region' in kw:
            region = kw.pop('region')
            cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
        else:
            cache = _beaker_cache.get_cache(self.cache.id, **kw)
        cache_args = {'starttime': self.cache.starttime}
        if expiretime:
            cache_args['expiretime'] = expiretime
        return cache, cache_args

    def get_or_create(self, key, creation_function, **kw):
        cache, kw = self._get_cache(**kw)
        return cache.get(key, createfunc=creation_function, **kw)

    def put(self, key, value, **kw):
        cache, kw = self._get_cache(**kw)
        cache.put(key, value, **kw)

    def get(self, key, **kw):
        cache, kw = self._get_cache(**kw)
        return cache.get(key, **kw)

    def invalidate(self, key, **kw):
        cache, kw = self._get_cache(**kw)
        cache.remove_value(key, **kw)
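For orientation, a hedged sketch of how this backend is reached from template code. It assumes Beaker is installed and uses the standard ``cached`` page arguments; nothing here is specific to this repository.

import time
from mako.template import Template

t = Template("""
<%page cached="True" cache_type="memory" cache_timeout="60"/>
rendered at ${when}
""", cache_impl='beaker')   # 'beaker' is already the default cache_impl

print(t.render(when=time.time()))
print(t.render(when=time.time()))  # second call is served from the Beaker cache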
@ -0,0 +1,108 @@
|
|||
import re
|
||||
from mako import compat
|
||||
from mako import lexer
|
||||
from mako import parsetree
|
||||
|
||||
|
||||
class MessageExtractor(object):
|
||||
|
||||
def process_file(self, fileobj):
|
||||
template_node = lexer.Lexer(
|
||||
fileobj.read(),
|
||||
input_encoding=self.config['encoding']).parse()
|
||||
for extracted in self.extract_nodes(template_node.get_children()):
|
||||
yield extracted
|
||||
|
||||
def extract_nodes(self, nodes):
|
||||
translator_comments = []
|
||||
in_translator_comments = False
|
||||
input_encoding = self.config['encoding'] or 'ascii'
|
||||
comment_tags = list(
|
||||
filter(None, re.split(r'\s+', self.config['comment-tags'])))
|
||||
|
||||
for node in nodes:
|
||||
child_nodes = None
|
||||
if in_translator_comments and \
|
||||
isinstance(node, parsetree.Text) and \
|
||||
not node.content.strip():
|
||||
# Ignore whitespace within translator comments
|
||||
continue
|
||||
|
||||
if isinstance(node, parsetree.Comment):
|
||||
value = node.text.strip()
|
||||
if in_translator_comments:
|
||||
translator_comments.extend(
|
||||
self._split_comment(node.lineno, value))
|
||||
continue
|
||||
for comment_tag in comment_tags:
|
||||
if value.startswith(comment_tag):
|
||||
in_translator_comments = True
|
||||
translator_comments.extend(
|
||||
self._split_comment(node.lineno, value))
|
||||
continue
|
||||
|
||||
if isinstance(node, parsetree.DefTag):
|
||||
code = node.function_decl.code
|
||||
child_nodes = node.nodes
|
||||
elif isinstance(node, parsetree.BlockTag):
|
||||
code = node.body_decl.code
|
||||
child_nodes = node.nodes
|
||||
elif isinstance(node, parsetree.CallTag):
|
||||
code = node.code.code
|
||||
child_nodes = node.nodes
|
||||
elif isinstance(node, parsetree.PageTag):
|
||||
code = node.body_decl.code
|
||||
elif isinstance(node, parsetree.CallNamespaceTag):
|
||||
code = node.expression
|
||||
child_nodes = node.nodes
|
||||
elif isinstance(node, parsetree.ControlLine):
|
||||
if node.isend:
|
||||
in_translator_comments = False
|
||||
continue
|
||||
code = node.text
|
||||
elif isinstance(node, parsetree.Code):
|
||||
in_translator_comments = False
|
||||
code = node.code.code
|
||||
elif isinstance(node, parsetree.Expression):
|
||||
code = node.code.code
|
||||
else:
|
||||
continue
|
||||
|
||||
# Comments don't apply unless they immediately precede the message
|
||||
if translator_comments and \
|
||||
translator_comments[-1][0] < node.lineno - 1:
|
||||
translator_comments = []
|
||||
|
||||
translator_strings = [
|
||||
comment[1] for comment in translator_comments]
|
||||
|
||||
if isinstance(code, compat.text_type):
|
||||
code = code.encode(input_encoding, 'backslashreplace')
|
||||
|
||||
used_translator_comments = False
|
||||
# We add extra newline to work around a pybabel bug
|
||||
# (see python-babel/babel#274, parse_encoding dies if the first
|
||||
# input string of the input is non-ascii)
|
||||
# Also, because we added it, we have to subtract one from
|
||||
# node.lineno
|
||||
code = compat.byte_buffer(compat.b('\n') + code)
|
||||
|
||||
for message in self.process_python(
|
||||
code, node.lineno - 1, translator_strings):
|
||||
yield message
|
||||
used_translator_comments = True
|
||||
|
||||
if used_translator_comments:
|
||||
translator_comments = []
|
||||
in_translator_comments = False
|
||||
|
||||
if child_nodes:
|
||||
for extracted in self.extract_nodes(child_nodes):
|
||||
yield extracted
|
||||
|
||||
@staticmethod
|
||||
def _split_comment(lineno, comment):
|
||||
"""Return the multiline comment at lineno split into a list of
|
||||
comment line numbers and the accompanying comment line"""
|
||||
return [(lineno + index, line) for index, line in
|
||||
enumerate(comment.splitlines())]
|
|
@ -0,0 +1,43 @@
import io
from lingua.extractors import Extractor
from lingua.extractors import Message
from lingua.extractors import get_extractor
from mako.ext.extract import MessageExtractor
from mako import compat


class LinguaMakoExtractor(Extractor, MessageExtractor):

    '''Mako templates'''
    extensions = ['.mako']
    default_config = {
        'encoding': 'utf-8',
        'comment-tags': '',
    }

    def __call__(self, filename, options, fileobj=None):
        self.options = options
        self.filename = filename
        self.python_extractor = get_extractor('x.py')
        if fileobj is None:
            fileobj = open(filename, 'rb')
        return self.process_file(fileobj)

    def process_python(self, code, code_lineno, translator_strings):
        source = code.getvalue().strip()
        if source.endswith(compat.b(':')):
            if source in (compat.b('try:'), compat.b('else:')) or source.startswith(compat.b('except')):
                source = compat.b('')  # Ignore try/except and else
            elif source.startswith(compat.b('elif')):
                source = source[2:]  # Replace "elif" with "if"
            source += compat.b('pass')
        code = io.BytesIO(source)
        for msg in self.python_extractor(
                self.filename, self.options, code, code_lineno - 1):
            if translator_strings:
                msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural,
                              msg.flags,
                              compat.u(' ').join(
                                  translator_strings + [msg.comment]),
                              msg.tcomment, msg.location)
            yield msg
@ -0,0 +1,20 @@
# ext/preprocessors.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""preprocessing functions, used with the 'preprocessor'
argument on Template, TemplateLookup"""

import re


def convert_comments(text):
    """preprocess old style comments.

    example:

    from mako.ext.preprocessors import convert_comments
    t = Template(..., preprocessor=convert_comments)"""
    return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
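A short, hedged sketch of wiring the preprocessor above into a template; the template text is invented.

from mako.template import Template
from mako.ext.preprocessors import convert_comments

# old-style single-# comment lines are rewritten to modern ## comments
# before the lexer ever sees the source
source = "hello ${name}\n# this used to be a comment\n"
t = Template(source, preprocessor=convert_comments)
print(t.render(name="world"))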
@ -0,0 +1,127 @@
|
|||
# ext/pygmentplugin.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from pygments.lexers.web import \
|
||||
HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
|
||||
from pygments.lexers.agile import PythonLexer, Python3Lexer
|
||||
from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
|
||||
include, using
|
||||
from pygments.token import \
|
||||
Text, Comment, Operator, Keyword, Name, String, Other
|
||||
from pygments.formatters.html import HtmlFormatter
|
||||
from pygments import highlight
|
||||
from mako import compat
|
||||
|
||||
|
||||
class MakoLexer(RegexLexer):
|
||||
name = 'Mako'
|
||||
aliases = ['mako']
|
||||
filenames = ['*.mao']
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
|
||||
bygroups(Text, Comment.Preproc, Keyword, Other)),
|
||||
(r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
|
||||
bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
|
||||
(r'(\s*)(##[^\n]*)(\n|\Z)',
|
||||
bygroups(Text, Comment.Preproc, Other)),
|
||||
(r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
|
||||
(r'(<%)([\w\.\:]+)',
|
||||
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
|
||||
(r'(</%)([\w\.\:]+)(>)',
|
||||
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
|
||||
(r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
|
||||
(r'(<%(?:!?))(.*?)(%>)(?s)',
|
||||
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
|
||||
(r'(\$\{)(.*?)(\})',
|
||||
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
|
||||
(r'''(?sx)
|
||||
(.+?) # anything, followed by:
|
||||
(?:
|
||||
(?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line
|
||||
(?=\#\*) | # multiline comment
|
||||
(?=</?%) | # a python block
|
||||
# call start or end
|
||||
(?=\$\{) | # a substitution
|
||||
(?<=\n)(?=\s*%) |
|
||||
# - don't consume
|
||||
(\\\n) | # an escaped newline
|
||||
\Z # end of string
|
||||
)
|
||||
''', bygroups(Other, Operator)),
|
||||
(r'\s+', Text),
|
||||
],
|
||||
'ondeftags': [
|
||||
(r'<%', Comment.Preproc),
|
||||
(r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
|
||||
include('tag'),
|
||||
],
|
||||
'tag': [
|
||||
(r'((?:\w+)\s*=)\s*(".*?")',
|
||||
bygroups(Name.Attribute, String)),
|
||||
(r'/?\s*>', Comment.Preproc, '#pop'),
|
||||
(r'\s+', Text),
|
||||
],
|
||||
'attr': [
|
||||
('".*?"', String, '#pop'),
|
||||
("'.*?'", String, '#pop'),
|
||||
(r'[^\s>]+', String, '#pop'),
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
class MakoHtmlLexer(DelegatingLexer):
|
||||
name = 'HTML+Mako'
|
||||
aliases = ['html+mako']
|
||||
|
||||
def __init__(self, **options):
|
||||
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
|
||||
**options)
|
||||
|
||||
|
||||
class MakoXmlLexer(DelegatingLexer):
|
||||
name = 'XML+Mako'
|
||||
aliases = ['xml+mako']
|
||||
|
||||
def __init__(self, **options):
|
||||
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
|
||||
**options)
|
||||
|
||||
|
||||
class MakoJavascriptLexer(DelegatingLexer):
|
||||
name = 'JavaScript+Mako'
|
||||
aliases = ['js+mako', 'javascript+mako']
|
||||
|
||||
def __init__(self, **options):
|
||||
super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
|
||||
MakoLexer, **options)
|
||||
|
||||
|
||||
class MakoCssLexer(DelegatingLexer):
|
||||
name = 'CSS+Mako'
|
||||
aliases = ['css+mako']
|
||||
|
||||
def __init__(self, **options):
|
||||
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
|
||||
**options)
|
||||
|
||||
|
||||
pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
|
||||
linenos=True)
|
||||
|
||||
|
||||
def syntax_highlight(filename='', language=None):
|
||||
mako_lexer = MakoLexer()
|
||||
if compat.py3k:
|
||||
python_lexer = Python3Lexer()
|
||||
else:
|
||||
python_lexer = PythonLexer()
|
||||
if filename.startswith('memory:') or language == 'mako':
|
||||
return lambda string: highlight(string, mako_lexer,
|
||||
pygments_html_formatter)
|
||||
return lambda string: highlight(string, python_lexer,
|
||||
pygments_html_formatter)
|
|
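As a hedged illustration (requires Pygments; the sample template text is invented), the lexers above plug straight into the normal Pygments API:

from pygments import highlight
from pygments.formatters.html import HtmlFormatter
from mako.ext.pygmentplugin import MakoHtmlLexer

source = """<ul>
% for item in items:
    <li>${item | h}</li>
% endfor
</ul>
"""
print(highlight(source, MakoHtmlLexer(), HtmlFormatter()))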
@ -0,0 +1,58 @@
# ext/turbogears.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from mako import compat
from mako.lookup import TemplateLookup
from mako.template import Template


class TGPlugin(object):

    """TurboGears compatible Template Plugin."""

    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
        self.extra_vars_func = extra_vars_func
        self.extension = extension
        if not options:
            options = {}

        # Pull the options out and initialize the lookup
        lookup_options = {}
        for k, v in options.items():
            if k.startswith('mako.'):
                lookup_options[k[5:]] = v
            elif k in ['directories', 'filesystem_checks', 'module_directory']:
                lookup_options[k] = v
        self.lookup = TemplateLookup(**lookup_options)

        self.tmpl_options = {}
        # transfer lookup args to template args, based on those available
        # in getargspec
        for kw in compat.inspect_getargspec(Template.__init__)[0]:
            if kw in lookup_options:
                self.tmpl_options[kw] = lookup_options[kw]

    def load_template(self, templatename, template_string=None):
        """Loads a template from a file or a string"""
        if template_string is not None:
            return Template(template_string, **self.tmpl_options)
        # Translate TG dot notation to normal / template path
        if '/' not in templatename:
            templatename = '/' + templatename.replace('.', '/') + '.' +\
                self.extension

        # Lookup template
        return self.lookup.get_template(templatename)

    def render(self, info, format="html", fragment=False, template=None):
        if isinstance(template, compat.string_types):
            template = self.load_template(template)

        # Load extra vars func if provided
        if self.extra_vars_func:
            info.update(self.extra_vars_func())

        return template.render(**info)
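A hedged usage sketch for the plugin above; the directory name and the dotted template name are invented and the corresponding file would have to exist on disk.

from mako.ext.turbogears import TGPlugin

plugin = TGPlugin(options={'mako.directories': ['templates']},
                  extension='mak')
# TurboGears dot notation resolves to templates/site/index.mak
html = plugin.render({'title': 'Home'}, template='site.index')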
@ -0,0 +1,209 @@
|
|||
# mako/filters.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
|
||||
import re
|
||||
import codecs
|
||||
|
||||
from mako.compat import quote_plus, unquote_plus, codepoint2name, \
|
||||
name2codepoint
|
||||
|
||||
from mako import compat
|
||||
|
||||
xml_escapes = {
|
||||
'&': '&',
|
||||
'>': '>',
|
||||
'<': '<',
|
||||
'"': '"', # also " in html-only
|
||||
"'": ''' # also ' in html-only
|
||||
}
|
||||
|
||||
# XXX: " is valid in HTML and XML
|
||||
# ' is not valid HTML, but is valid XML
|
||||
|
||||
|
||||
def legacy_html_escape(s):
|
||||
"""legacy HTML escape for non-unicode mode."""
|
||||
s = s.replace("&", "&")
|
||||
s = s.replace(">", ">")
|
||||
s = s.replace("<", "<")
|
||||
s = s.replace('"', """)
|
||||
s = s.replace("'", "'")
|
||||
return s
|
||||
|
||||
|
||||
try:
|
||||
import markupsafe
|
||||
html_escape = markupsafe.escape
|
||||
except ImportError:
|
||||
html_escape = legacy_html_escape
|
||||
|
||||
|
||||
def xml_escape(string):
|
||||
return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
|
||||
|
||||
|
||||
def url_escape(string):
|
||||
# convert into a list of octets
|
||||
string = string.encode("utf8")
|
||||
return quote_plus(string)
|
||||
|
||||
|
||||
def legacy_url_escape(string):
|
||||
# convert into a list of octets
|
||||
return quote_plus(string)
|
||||
|
||||
|
||||
def url_unescape(string):
|
||||
text = unquote_plus(string)
|
||||
if not is_ascii_str(text):
|
||||
text = text.decode("utf8")
|
||||
return text
|
||||
|
||||
|
||||
def trim(string):
|
||||
return string.strip()
|
||||
|
||||
|
||||
class Decode(object):
|
||||
|
||||
def __getattr__(self, key):
|
||||
def decode(x):
|
||||
if isinstance(x, compat.text_type):
|
||||
return x
|
||||
elif not isinstance(x, compat.binary_type):
|
||||
return decode(str(x))
|
||||
else:
|
||||
return compat.text_type(x, encoding=key)
|
||||
return decode
|
||||
decode = Decode()
|
||||
|
||||
|
||||
_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
|
||||
|
||||
|
||||
def is_ascii_str(text):
|
||||
return isinstance(text, str) and _ASCII_re.match(text)
|
||||
|
||||
################################################################
|
||||
|
||||
|
||||
class XMLEntityEscaper(object):
|
||||
|
||||
def __init__(self, codepoint2name, name2codepoint):
|
||||
self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
|
||||
for c, n in codepoint2name.items()])
|
||||
self.name2codepoint = name2codepoint
|
||||
|
||||
def escape_entities(self, text):
|
||||
"""Replace characters with their character entity references.
|
||||
|
||||
Only characters corresponding to a named entity are replaced.
|
||||
"""
|
||||
return compat.text_type(text).translate(self.codepoint2entity)
|
||||
|
||||
def __escape(self, m):
|
||||
codepoint = ord(m.group())
|
||||
try:
|
||||
return self.codepoint2entity[codepoint]
|
||||
except (KeyError, IndexError):
|
||||
return '&#x%X;' % codepoint
|
||||
|
||||
__escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
|
||||
|
||||
def escape(self, text):
|
||||
"""Replace characters with their character references.
|
||||
|
||||
Replace characters by their named entity references.
|
||||
Non-ASCII characters, if they do not have a named entity reference,
|
||||
are replaced by numerical character references.
|
||||
|
||||
The return value is guaranteed to be ASCII.
|
||||
"""
|
||||
return self.__escapable.sub(self.__escape, compat.text_type(text)
|
||||
).encode('ascii')
|
||||
|
||||
# XXX: This regexp will not match all valid XML entity names__.
|
||||
# (It punts on details involving CombiningChars and Extenders.)
|
||||
#
|
||||
# .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
|
||||
__characterrefs = re.compile(r'''& (?:
|
||||
\#(\d+)
|
||||
| \#x([\da-f]+)
|
||||
| ( (?!\d) [:\w] [-.:\w]+ )
|
||||
) ;''',
|
||||
re.X | re.UNICODE)
|
||||
|
||||
def __unescape(self, m):
|
||||
dval, hval, name = m.groups()
|
||||
if dval:
|
||||
codepoint = int(dval)
|
||||
elif hval:
|
||||
codepoint = int(hval, 16)
|
||||
else:
|
||||
codepoint = self.name2codepoint.get(name, 0xfffd)
|
||||
# U+FFFD = "REPLACEMENT CHARACTER"
|
||||
if codepoint < 128:
|
||||
return chr(codepoint)
|
||||
return chr(codepoint)
|
||||
|
||||
def unescape(self, text):
|
||||
"""Unescape character references.
|
||||
|
||||
All character references (both entity references and numerical
|
||||
character references) are unescaped.
|
||||
"""
|
||||
return self.__characterrefs.sub(self.__unescape, text)
|
||||
|
||||
|
||||
_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
|
||||
|
||||
html_entities_escape = _html_entities_escaper.escape_entities
|
||||
html_entities_unescape = _html_entities_escaper.unescape
|
||||
|
||||
|
||||
def htmlentityreplace_errors(ex):
|
||||
"""An encoding error handler.
|
||||
|
||||
This python `codecs`_ error handler replaces unencodable
|
||||
characters with HTML entities, or, if no HTML entity exists for
|
||||
the character, XML character references.
|
||||
|
||||
>>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
|
||||
'The cost was €12.'
|
||||
"""
|
||||
if isinstance(ex, UnicodeEncodeError):
|
||||
# Handle encoding errors
|
||||
bad_text = ex.object[ex.start:ex.end]
|
||||
text = _html_entities_escaper.escape(bad_text)
|
||||
return (compat.text_type(text), ex.end)
|
||||
raise ex
|
||||
|
||||
codecs.register_error('htmlentityreplace', htmlentityreplace_errors)
|
||||
|
||||
|
||||
# TODO: options to make this dynamic per-compilation will be added in a later
|
||||
# release
|
||||
DEFAULT_ESCAPES = {
|
||||
'x': 'filters.xml_escape',
|
||||
'h': 'filters.html_escape',
|
||||
'u': 'filters.url_escape',
|
||||
'trim': 'filters.trim',
|
||||
'entity': 'filters.html_entities_escape',
|
||||
'unicode': 'unicode',
|
||||
'decode': 'decode',
|
||||
'str': 'str',
|
||||
'n': 'n'
|
||||
}
|
||||
|
||||
if compat.py3k:
|
||||
DEFAULT_ESCAPES.update({
|
||||
'unicode': 'str'
|
||||
})
|
||||
|
||||
NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
|
||||
NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape'
|
||||
NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape'
|
|
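A hedged sketch exercising the default filter table and the ``htmlentityreplace`` codec error handler defined above; the template text is invented.

from mako.template import Template
import mako.filters  # importing the module registers 'htmlentityreplace'

print(Template("${greeting | h} ${link | u}").render(
    greeting='<b>5 > 3</b>', link='a b&c'))

# unencodable characters fall back to HTML entities / character references
print(u'The cost was \u20ac12.'.encode('latin-1', 'htmlentityreplace'))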
@ -0,0 +1,443 @@
|
|||
# mako/lexer.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""provides the Lexer class for parsing template strings into parse trees."""
|
||||
|
||||
import re
|
||||
import codecs
|
||||
from mako import parsetree, exceptions, compat
|
||||
from mako.pygen import adjust_whitespace
|
||||
|
||||
_regexp_cache = {}
|
||||
|
||||
|
||||
class Lexer(object):
|
||||
|
||||
def __init__(self, text, filename=None,
|
||||
disable_unicode=False,
|
||||
input_encoding=None, preprocessor=None):
|
||||
self.text = text
|
||||
self.filename = filename
|
||||
self.template = parsetree.TemplateNode(self.filename)
|
||||
self.matched_lineno = 1
|
||||
self.matched_charpos = 0
|
||||
self.lineno = 1
|
||||
self.match_position = 0
|
||||
self.tag = []
|
||||
self.control_line = []
|
||||
self.ternary_stack = []
|
||||
self.disable_unicode = disable_unicode
|
||||
self.encoding = input_encoding
|
||||
|
||||
if compat.py3k and disable_unicode:
|
||||
raise exceptions.UnsupportedError(
|
||||
"Mako for Python 3 does not "
|
||||
"support disabling Unicode")
|
||||
|
||||
if preprocessor is None:
|
||||
self.preprocessor = []
|
||||
elif not hasattr(preprocessor, '__iter__'):
|
||||
self.preprocessor = [preprocessor]
|
||||
else:
|
||||
self.preprocessor = preprocessor
|
||||
|
||||
@property
|
||||
def exception_kwargs(self):
|
||||
return {'source': self.text,
|
||||
'lineno': self.matched_lineno,
|
||||
'pos': self.matched_charpos,
|
||||
'filename': self.filename}
|
||||
|
||||
def match(self, regexp, flags=None):
|
||||
"""compile the given regexp, cache the reg, and call match_reg()."""
|
||||
|
||||
try:
|
||||
reg = _regexp_cache[(regexp, flags)]
|
||||
except KeyError:
|
||||
if flags:
|
||||
reg = re.compile(regexp, flags)
|
||||
else:
|
||||
reg = re.compile(regexp)
|
||||
_regexp_cache[(regexp, flags)] = reg
|
||||
|
||||
return self.match_reg(reg)
|
||||
|
||||
def match_reg(self, reg):
|
||||
"""match the given regular expression object to the current text
|
||||
position.
|
||||
|
||||
if a match occurs, update the current text and line position.
|
||||
|
||||
"""
|
||||
|
||||
mp = self.match_position
|
||||
|
||||
match = reg.match(self.text, self.match_position)
|
||||
if match:
|
||||
(start, end) = match.span()
|
||||
if end == start:
|
||||
self.match_position = end + 1
|
||||
else:
|
||||
self.match_position = end
|
||||
self.matched_lineno = self.lineno
|
||||
lines = re.findall(r"\n", self.text[mp:self.match_position])
|
||||
cp = mp - 1
|
||||
while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'):
|
||||
cp -= 1
|
||||
self.matched_charpos = mp - cp
|
||||
self.lineno += len(lines)
|
||||
# print "MATCHED:", match.group(0), "LINE START:",
|
||||
# self.matched_lineno, "LINE END:", self.lineno
|
||||
# print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
|
||||
# (match and "TRUE" or "FALSE")
|
||||
return match
|
||||
|
||||
def parse_until_text(self, *text):
|
||||
startpos = self.match_position
|
||||
text_re = r'|'.join(text)
|
||||
brace_level = 0
|
||||
while True:
|
||||
match = self.match(r'#.*\n')
|
||||
if match:
|
||||
continue
|
||||
match = self.match(r'(\"\"\"|\'\'\'|\"|\')((?<!\\)\\\1|.)*?\1',
|
||||
re.S)
|
||||
if match:
|
||||
continue
|
||||
match = self.match(r'(%s)' % text_re)
|
||||
if match:
|
||||
if match.group(1) == '}' and brace_level > 0:
|
||||
brace_level -= 1
|
||||
continue
|
||||
return \
|
||||
self.text[startpos:
|
||||
self.match_position - len(match.group(1))],\
|
||||
match.group(1)
|
||||
match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S)
|
||||
if match:
|
||||
brace_level += match.group(1).count('{')
|
||||
brace_level -= match.group(1).count('}')
|
||||
continue
|
||||
raise exceptions.SyntaxException(
|
||||
"Expected: %s" %
|
||||
','.join(text),
|
||||
**self.exception_kwargs)
|
||||
|
||||
def append_node(self, nodecls, *args, **kwargs):
|
||||
kwargs.setdefault('source', self.text)
|
||||
kwargs.setdefault('lineno', self.matched_lineno)
|
||||
kwargs.setdefault('pos', self.matched_charpos)
|
||||
kwargs['filename'] = self.filename
|
||||
node = nodecls(*args, **kwargs)
|
||||
if len(self.tag):
|
||||
self.tag[-1].nodes.append(node)
|
||||
else:
|
||||
self.template.nodes.append(node)
|
||||
# build a set of child nodes for the control line
|
||||
# (used for loop variable detection)
|
||||
# also build a set of child nodes on ternary control lines
|
||||
# (used for determining if a pass needs to be auto-inserted
|
||||
if self.control_line:
|
||||
control_frame = self.control_line[-1]
|
||||
control_frame.nodes.append(node)
|
||||
if not (isinstance(node, parsetree.ControlLine) and
|
||||
control_frame.is_ternary(node.keyword)):
|
||||
if self.ternary_stack and self.ternary_stack[-1]:
|
||||
self.ternary_stack[-1][-1].nodes.append(node)
|
||||
if isinstance(node, parsetree.Tag):
|
||||
if len(self.tag):
|
||||
node.parent = self.tag[-1]
|
||||
self.tag.append(node)
|
||||
elif isinstance(node, parsetree.ControlLine):
|
||||
if node.isend:
|
||||
self.control_line.pop()
|
||||
self.ternary_stack.pop()
|
||||
elif node.is_primary:
|
||||
self.control_line.append(node)
|
||||
self.ternary_stack.append([])
|
||||
elif self.control_line and \
|
||||
self.control_line[-1].is_ternary(node.keyword):
|
||||
self.ternary_stack[-1].append(node)
|
||||
elif self.control_line and \
|
||||
not self.control_line[-1].is_ternary(node.keyword):
|
||||
raise exceptions.SyntaxException(
|
||||
"Keyword '%s' not a legal ternary for keyword '%s'" %
|
||||
(node.keyword, self.control_line[-1].keyword),
|
||||
**self.exception_kwargs)
|
||||
|
||||
_coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
|
||||
|
||||
def decode_raw_stream(self, text, decode_raw, known_encoding, filename):
|
||||
"""given string/unicode or bytes/string, determine encoding
|
||||
from magic encoding comment, return body as unicode
|
||||
or raw if decode_raw=False
|
||||
|
||||
"""
|
||||
if isinstance(text, compat.text_type):
|
||||
m = self._coding_re.match(text)
|
||||
encoding = m and m.group(1) or known_encoding or 'ascii'
|
||||
return encoding, text
|
||||
|
||||
if text.startswith(codecs.BOM_UTF8):
|
||||
text = text[len(codecs.BOM_UTF8):]
|
||||
parsed_encoding = 'utf-8'
|
||||
m = self._coding_re.match(text.decode('utf-8', 'ignore'))
|
||||
if m is not None and m.group(1) != 'utf-8':
|
||||
raise exceptions.CompileException(
|
||||
"Found utf-8 BOM in file, with conflicting "
|
||||
"magic encoding comment of '%s'" % m.group(1),
|
||||
text.decode('utf-8', 'ignore'),
|
||||
0, 0, filename)
|
||||
else:
|
||||
m = self._coding_re.match(text.decode('utf-8', 'ignore'))
|
||||
if m:
|
||||
parsed_encoding = m.group(1)
|
||||
else:
|
||||
parsed_encoding = known_encoding or 'ascii'
|
||||
|
||||
if decode_raw:
|
||||
try:
|
||||
text = text.decode(parsed_encoding)
|
||||
except UnicodeDecodeError:
|
||||
raise exceptions.CompileException(
|
||||
"Unicode decode operation of encoding '%s' failed" %
|
||||
parsed_encoding,
|
||||
text.decode('utf-8', 'ignore'),
|
||||
0, 0, filename)
|
||||
|
||||
return parsed_encoding, text
|
||||
|
||||
def parse(self):
|
||||
self.encoding, self.text = self.decode_raw_stream(
|
||||
self.text,
|
||||
not self.disable_unicode,
|
||||
self.encoding,
|
||||
self.filename)
|
||||
|
||||
for preproc in self.preprocessor:
|
||||
self.text = preproc(self.text)
|
||||
|
||||
# push the match marker past the
|
||||
# encoding comment.
|
||||
self.match_reg(self._coding_re)
|
||||
|
||||
self.textlength = len(self.text)
|
||||
|
||||
while (True):
|
||||
if self.match_position > self.textlength:
|
||||
break
|
||||
|
||||
if self.match_end():
|
||||
break
|
||||
if self.match_expression():
|
||||
continue
|
||||
if self.match_control_line():
|
||||
continue
|
||||
if self.match_comment():
|
||||
continue
|
||||
if self.match_tag_start():
|
||||
continue
|
||||
if self.match_tag_end():
|
||||
continue
|
||||
if self.match_python_block():
|
||||
continue
|
||||
if self.match_text():
|
||||
continue
|
||||
|
||||
if self.match_position > self.textlength:
|
||||
break
|
||||
raise exceptions.CompileException("assertion failed")
|
||||
|
||||
if len(self.tag):
|
||||
raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
|
||||
self.tag[-1].keyword,
|
||||
**self.exception_kwargs)
|
||||
if len(self.control_line):
|
||||
raise exceptions.SyntaxException(
|
||||
"Unterminated control keyword: '%s'" %
|
||||
self.control_line[-1].keyword,
|
||||
self.text,
|
||||
self.control_line[-1].lineno,
|
||||
self.control_line[-1].pos, self.filename)
|
||||
return self.template
|
||||
|
||||
def match_tag_start(self):
|
||||
match = self.match(r'''
|
||||
\<% # opening tag
|
||||
|
||||
([\w\.\:]+) # keyword
|
||||
|
||||
((?:\s+\w+|\s*=\s*|".*?"|'.*?')*) # attrname, = \
|
||||
# sign, string expression
|
||||
|
||||
\s* # more whitespace
|
||||
|
||||
(/)?> # closing
|
||||
|
||||
''',
|
||||
|
||||
re.I | re.S | re.X)
|
||||
|
||||
if match:
|
||||
keyword, attr, isend = match.groups()
|
||||
self.keyword = keyword
|
||||
attributes = {}
|
||||
if attr:
|
||||
for att in re.findall(
|
||||
r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
|
||||
key, val1, val2 = att
|
||||
text = val1 or val2
|
||||
text = text.replace('\r\n', '\n')
|
||||
attributes[key] = text
|
||||
self.append_node(parsetree.Tag, keyword, attributes)
|
||||
if isend:
|
||||
self.tag.pop()
|
||||
else:
|
||||
if keyword == 'text':
|
||||
match = self.match(r'(.*?)(?=\</%text>)', re.S)
|
||||
if not match:
|
||||
raise exceptions.SyntaxException(
|
||||
"Unclosed tag: <%%%s>" %
|
||||
self.tag[-1].keyword,
|
||||
**self.exception_kwargs)
|
||||
self.append_node(parsetree.Text, match.group(1))
|
||||
return self.match_tag_end()
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def match_tag_end(self):
|
||||
match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
|
||||
if match:
|
||||
if not len(self.tag):
|
||||
raise exceptions.SyntaxException(
|
||||
"Closing tag without opening tag: </%%%s>" %
|
||||
match.group(1),
|
||||
**self.exception_kwargs)
|
||||
elif self.tag[-1].keyword != match.group(1):
|
||||
raise exceptions.SyntaxException(
|
||||
"Closing tag </%%%s> does not match tag: <%%%s>" %
|
||||
(match.group(1), self.tag[-1].keyword),
|
||||
**self.exception_kwargs)
|
||||
self.tag.pop()
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def match_end(self):
|
||||
match = self.match(r'\Z', re.S)
|
||||
if match:
|
||||
string = match.group()
|
||||
if string:
|
||||
return string
|
||||
else:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def match_text(self):
|
||||
match = self.match(r"""
|
||||
(.*?) # anything, followed by:
|
||||
(
|
||||
(?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
|
||||
# comment preceded by a
|
||||
# consumed newline and whitespace
|
||||
|
|
||||
(?=\${) # an expression
|
||||
|
|
||||
(?=</?[%&]) # a substitution or block or call start or end
|
||||
# - don't consume
|
||||
|
|
||||
(\\\r?\n) # an escaped newline - throw away
|
||||
|
|
||||
\Z # end of string
|
||||
)""", re.X | re.S)
|
||||
|
||||
if match:
|
||||
text = match.group(1)
|
||||
if text:
|
||||
self.append_node(parsetree.Text, text)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def match_python_block(self):
|
||||
match = self.match(r"<%(!)?")
|
||||
if match:
|
||||
line, pos = self.matched_lineno, self.matched_charpos
|
||||
text, end = self.parse_until_text(r'%>')
|
||||
# the trailing newline helps
|
||||
# compiler.parse() not complain about indentation
|
||||
text = adjust_whitespace(text) + "\n"
|
||||
self.append_node(
|
||||
parsetree.Code,
|
||||
text,
|
||||
match.group(1) == '!', lineno=line, pos=pos)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def match_expression(self):
|
||||
match = self.match(r"\${")
|
||||
if match:
|
||||
line, pos = self.matched_lineno, self.matched_charpos
|
||||
text, end = self.parse_until_text(r'\|', r'}')
|
||||
if end == '|':
|
||||
escapes, end = self.parse_until_text(r'}')
|
||||
else:
|
||||
escapes = ""
|
||||
text = text.replace('\r\n', '\n')
|
||||
self.append_node(
|
||||
parsetree.Expression,
|
||||
text, escapes.strip(),
|
||||
lineno=line, pos=pos)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def match_control_line(self):
|
||||
match = self.match(
|
||||
r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
|
||||
r"(?:\r?\n|\Z)", re.M)
|
||||
if match:
|
||||
operator = match.group(1)
|
||||
text = match.group(2)
|
||||
if operator == '%':
|
||||
m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
|
||||
if not m2:
|
||||
raise exceptions.SyntaxException(
|
||||
"Invalid control line: '%s'" %
|
||||
text,
|
||||
**self.exception_kwargs)
|
||||
isend, keyword = m2.group(1, 2)
|
||||
isend = (isend is not None)
|
||||
|
||||
if isend:
|
||||
if not len(self.control_line):
|
||||
raise exceptions.SyntaxException(
|
||||
"No starting keyword '%s' for '%s'" %
|
||||
(keyword, text),
|
||||
**self.exception_kwargs)
|
||||
elif self.control_line[-1].keyword != keyword:
|
||||
raise exceptions.SyntaxException(
|
||||
"Keyword '%s' doesn't match keyword '%s'" %
|
||||
(text, self.control_line[-1].keyword),
|
||||
**self.exception_kwargs)
|
||||
self.append_node(parsetree.ControlLine, keyword, isend, text)
|
||||
else:
|
||||
self.append_node(parsetree.Comment, text)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def match_comment(self):
|
||||
"""matches the multiline version of a comment"""
|
||||
match = self.match(r"<%doc>(.*?)</%doc>", re.S)
|
||||
if match:
|
||||
self.append_node(parsetree.Comment, match.group(1))
|
||||
return True
|
||||
else:
|
||||
return False
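A minimal sketch, not part of the vendored Mako source: the template text below is invented to exercise the constructs the match_*() methods above recognize (<%doc> comments, <% %> code blocks, % control lines, ${...} expressions), driven through the public Template API.

from mako.template import Template

source = (
    "<%doc>multiline comment, consumed by match_comment()</%doc>\n"
    "<% greeting = 'Hello' %>\n"      # match_python_block
    "% for name in names:\n"          # match_control_line
    "${greeting}, ${name | h}!\n"     # match_expression, with an escape filter
    "% endfor\n"
)
print(Template(source).render(names=["alice", "bob"]))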
@@ -0,0 +1,367 @@
# mako/lookup.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import os
|
||||
import stat
|
||||
import posixpath
|
||||
import re
|
||||
from mako import exceptions, util
|
||||
from mako.template import Template
|
||||
|
||||
try:
|
||||
import threading
|
||||
except:
|
||||
import dummy_threading as threading
|
||||
|
||||
|
||||
class TemplateCollection(object):
|
||||
|
||||
"""Represent a collection of :class:`.Template` objects,
|
||||
identifiable via URI.
|
||||
|
||||
A :class:`.TemplateCollection` is linked to the usage of
|
||||
all template tags that address other templates, such
|
||||
as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
|
||||
The ``file`` attribute of each of those tags refers
|
||||
to a string URI that is passed to that :class:`.Template`
|
||||
object's :class:`.TemplateCollection` for resolution.
|
||||
|
||||
:class:`.TemplateCollection` is an abstract class,
|
||||
with the usual default implementation being :class:`.TemplateLookup`.
|
||||
|
||||
"""
|
||||
|
||||
def has_template(self, uri):
|
||||
"""Return ``True`` if this :class:`.TemplateLookup` is
|
||||
capable of returning a :class:`.Template` object for the
|
||||
given ``uri``.
|
||||
|
||||
:param uri: String URI of the template to be resolved.
|
||||
|
||||
"""
|
||||
try:
|
||||
self.get_template(uri)
|
||||
return True
|
||||
except exceptions.TemplateLookupException:
|
||||
return False
|
||||
|
||||
def get_template(self, uri, relativeto=None):
|
||||
"""Return a :class:`.Template` object corresponding to the given
|
||||
``uri``.
|
||||
|
||||
The default implementation raises
|
||||
:class:`.NotImplementedError`. Implementations should
|
||||
raise :class:`.TemplateLookupException` if the given ``uri``
|
||||
cannot be resolved.
|
||||
|
||||
:param uri: String URI of the template to be resolved.
|
||||
:param relativeto: if present, the given ``uri`` is assumed to
|
||||
be relative to this URI.
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def filename_to_uri(self, uri, filename):
|
||||
"""Convert the given ``filename`` to a URI relative to
|
||||
this :class:`.TemplateCollection`."""
|
||||
|
||||
return uri
|
||||
|
||||
def adjust_uri(self, uri, filename):
|
||||
"""Adjust the given ``uri`` based on the calling ``filename``.
|
||||
|
||||
When this method is called from the runtime, the
|
||||
``filename`` parameter is taken directly to the ``filename``
|
||||
attribute of the calling template. Therefore a custom
|
||||
:class:`.TemplateCollection` subclass can place any string
|
||||
identifier desired in the ``filename`` parameter of the
|
||||
:class:`.Template` objects it constructs and have them come back
|
||||
here.
|
||||
|
||||
"""
|
||||
return uri
|
||||
|
||||
|
||||
class TemplateLookup(TemplateCollection):
|
||||
|
||||
"""Represent a collection of templates that locates template source files
|
||||
from the local filesystem.
|
||||
|
||||
The primary argument is the ``directories`` argument, the list of
|
||||
directories to search:
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
lookup = TemplateLookup(["/path/to/templates"])
|
||||
some_template = lookup.get_template("/admin_index.mako")
|
||||
|
||||
The :class:`.TemplateLookup` can also be given :class:`.Template` objects
|
||||
programmatically using :meth:`.put_string` or :meth:`.put_template`:
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
lookup = TemplateLookup()
|
||||
lookup.put_string("base.html", '''
|
||||
<html><body>${self.next()}</body></html>
|
||||
''')
|
||||
lookup.put_string("hello.html", '''
|
||||
<%include file='base.html'/>
|
||||
|
||||
Hello, world !
|
||||
''')
|
||||
|
||||
|
||||
:param directories: A list of directory names which will be
|
||||
searched for a particular template URI. The URI is appended
|
||||
to each directory and the filesystem checked.
|
||||
|
||||
:param collection_size: Approximate size of the collection used
|
||||
to store templates. If left at its default of ``-1``, the size
|
||||
is unbounded, and a plain Python dictionary is used to
|
||||
relate URI strings to :class:`.Template` instances.
|
||||
Otherwise, a least-recently-used cache object is used which
|
||||
will maintain the size of the collection approximately to
|
||||
the number given.
|
||||
|
||||
:param filesystem_checks: When at its default value of ``True``,
|
||||
each call to :meth:`.TemplateLookup.get_template()` will
|
||||
compare the filesystem last modified time to the time in
|
||||
which an existing :class:`.Template` object was created.
|
||||
This allows the :class:`.TemplateLookup` to regenerate a
|
||||
new :class:`.Template` whenever the original source has
|
||||
been updated. Set this to ``False`` for a very minor
|
||||
performance increase.
|
||||
|
||||
:param modulename_callable: A callable which, when present,
|
||||
is passed the path of the source file as well as the
|
||||
requested URI, and then returns the full path of the
|
||||
generated Python module file. This is used to inject
|
||||
alternate schemes for Python module location. If left at
|
||||
its default of ``None``, the built in system of generation
|
||||
based on ``module_directory`` plus ``uri`` is used.
|
||||
|
||||
All other keyword parameters available for
|
||||
:class:`.Template` are mirrored here. When new
|
||||
:class:`.Template` objects are created, the keywords
|
||||
established with this :class:`.TemplateLookup` are passed on
|
||||
to each new :class:`.Template`.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
directories=None,
|
||||
module_directory=None,
|
||||
filesystem_checks=True,
|
||||
collection_size=-1,
|
||||
format_exceptions=False,
|
||||
error_handler=None,
|
||||
disable_unicode=False,
|
||||
bytestring_passthrough=False,
|
||||
output_encoding=None,
|
||||
encoding_errors='strict',
|
||||
|
||||
cache_args=None,
|
||||
cache_impl='beaker',
|
||||
cache_enabled=True,
|
||||
cache_type=None,
|
||||
cache_dir=None,
|
||||
cache_url=None,
|
||||
|
||||
modulename_callable=None,
|
||||
module_writer=None,
|
||||
default_filters=None,
|
||||
buffer_filters=(),
|
||||
strict_undefined=False,
|
||||
imports=None,
|
||||
future_imports=None,
|
||||
enable_loop=True,
|
||||
input_encoding=None,
|
||||
preprocessor=None,
|
||||
lexer_cls=None):
|
||||
|
||||
self.directories = [posixpath.normpath(d) for d in
|
||||
util.to_list(directories, ())
|
||||
]
|
||||
self.module_directory = module_directory
|
||||
self.modulename_callable = modulename_callable
|
||||
self.filesystem_checks = filesystem_checks
|
||||
self.collection_size = collection_size
|
||||
|
||||
if cache_args is None:
|
||||
cache_args = {}
|
||||
# transfer deprecated cache_* args
|
||||
if cache_dir:
|
||||
cache_args.setdefault('dir', cache_dir)
|
||||
if cache_url:
|
||||
cache_args.setdefault('url', cache_url)
|
||||
if cache_type:
|
||||
cache_args.setdefault('type', cache_type)
|
||||
|
||||
self.template_args = {
|
||||
'format_exceptions': format_exceptions,
|
||||
'error_handler': error_handler,
|
||||
'disable_unicode': disable_unicode,
|
||||
'bytestring_passthrough': bytestring_passthrough,
|
||||
'output_encoding': output_encoding,
|
||||
'cache_impl': cache_impl,
|
||||
'encoding_errors': encoding_errors,
|
||||
'input_encoding': input_encoding,
|
||||
'module_directory': module_directory,
|
||||
'module_writer': module_writer,
|
||||
'cache_args': cache_args,
|
||||
'cache_enabled': cache_enabled,
|
||||
'default_filters': default_filters,
|
||||
'buffer_filters': buffer_filters,
|
||||
'strict_undefined': strict_undefined,
|
||||
'imports': imports,
|
||||
'future_imports': future_imports,
|
||||
'enable_loop': enable_loop,
|
||||
'preprocessor': preprocessor,
|
||||
'lexer_cls': lexer_cls
|
||||
}
|
||||
|
||||
if collection_size == -1:
|
||||
self._collection = {}
|
||||
self._uri_cache = {}
|
||||
else:
|
||||
self._collection = util.LRUCache(collection_size)
|
||||
self._uri_cache = util.LRUCache(collection_size)
|
||||
self._mutex = threading.Lock()
|
||||
|
||||
def get_template(self, uri):
|
||||
"""Return a :class:`.Template` object corresponding to the given
|
||||
``uri``.
|
||||
|
||||
.. note:: The ``relativeto`` argument is not supported here at
|
||||
the moment.
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
if self.filesystem_checks:
|
||||
return self._check(uri, self._collection[uri])
|
||||
else:
|
||||
return self._collection[uri]
|
||||
except KeyError:
|
||||
u = re.sub(r'^\/+', '', uri)
|
||||
for dir in self.directories:
|
||||
# make sure the path separators are posix - os.altsep is empty
|
||||
# on POSIX and cannot be used.
|
||||
dir = dir.replace(os.path.sep, posixpath.sep)
|
||||
srcfile = posixpath.normpath(posixpath.join(dir, u))
|
||||
if os.path.isfile(srcfile):
|
||||
return self._load(srcfile, uri)
|
||||
else:
|
||||
raise exceptions.TopLevelLookupException(
|
||||
"Cant locate template for uri %r" % uri)
|
||||
|
||||
def adjust_uri(self, uri, relativeto):
|
||||
"""Adjust the given ``uri`` based on the given relative URI."""
|
||||
|
||||
key = (uri, relativeto)
|
||||
if key in self._uri_cache:
|
||||
return self._uri_cache[key]
|
||||
|
||||
if uri[0] != '/':
|
||||
if relativeto is not None:
|
||||
v = self._uri_cache[key] = posixpath.join(
|
||||
posixpath.dirname(relativeto), uri)
|
||||
else:
|
||||
v = self._uri_cache[key] = '/' + uri
|
||||
else:
|
||||
v = self._uri_cache[key] = uri
|
||||
return v
|
||||
|
||||
def filename_to_uri(self, filename):
|
||||
"""Convert the given ``filename`` to a URI relative to
|
||||
this :class:`.TemplateCollection`."""
|
||||
|
||||
try:
|
||||
return self._uri_cache[filename]
|
||||
except KeyError:
|
||||
value = self._relativeize(filename)
|
||||
self._uri_cache[filename] = value
|
||||
return value
|
||||
|
||||
def _relativeize(self, filename):
|
||||
"""Return the portion of a filename that is 'relative'
|
||||
to the directories in this lookup.
|
||||
|
||||
"""
|
||||
|
||||
filename = posixpath.normpath(filename)
|
||||
for dir in self.directories:
|
||||
if filename[0:len(dir)] == dir:
|
||||
return filename[len(dir):]
|
||||
else:
|
||||
return None
|
||||
|
||||
def _load(self, filename, uri):
|
||||
self._mutex.acquire()
|
||||
try:
|
||||
try:
|
||||
# try returning from collection one
|
||||
# more time in case concurrent thread already loaded
|
||||
return self._collection[uri]
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
if self.modulename_callable is not None:
|
||||
module_filename = self.modulename_callable(filename, uri)
|
||||
else:
|
||||
module_filename = None
|
||||
self._collection[uri] = template = Template(
|
||||
uri=uri,
|
||||
filename=posixpath.normpath(filename),
|
||||
lookup=self,
|
||||
module_filename=module_filename,
|
||||
**self.template_args)
|
||||
return template
|
||||
except:
|
||||
# if compilation fails etc, ensure
|
||||
# template is removed from collection,
|
||||
# re-raise
|
||||
self._collection.pop(uri, None)
|
||||
raise
|
||||
finally:
|
||||
self._mutex.release()
|
||||
|
||||
def _check(self, uri, template):
|
||||
if template.filename is None:
|
||||
return template
|
||||
|
||||
try:
|
||||
template_stat = os.stat(template.filename)
|
||||
if template.module._modified_time < \
|
||||
template_stat[stat.ST_MTIME]:
|
||||
self._collection.pop(uri, None)
|
||||
return self._load(template.filename, uri)
|
||||
else:
|
||||
return template
|
||||
except OSError:
|
||||
self._collection.pop(uri, None)
|
||||
raise exceptions.TemplateLookupException(
|
||||
"Cant locate template for uri %r" % uri)
|
||||
|
||||
def put_string(self, uri, text):
|
||||
"""Place a new :class:`.Template` object into this
|
||||
:class:`.TemplateLookup`, based on the given string of
|
||||
``text``.
|
||||
|
||||
"""
|
||||
self._collection[uri] = Template(
|
||||
text,
|
||||
lookup=self,
|
||||
uri=uri,
|
||||
**self.template_args)
|
||||
|
||||
def put_template(self, uri, template):
|
||||
"""Place a new :class:`.Template` object into this
|
||||
:class:`.TemplateLookup`, based on the given
|
||||
:class:`.Template` object.
|
||||
|
||||
"""
|
||||
self._collection[uri] = template
|
@@ -0,0 +1,616 @@
# mako/parsetree.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""defines the parse tree components for Mako templates."""
|
||||
|
||||
from mako import exceptions, ast, util, filters, compat
|
||||
import re
|
||||
|
||||
|
||||
class Node(object):
|
||||
|
||||
"""base class for a Node in the parse tree."""
|
||||
|
||||
def __init__(self, source, lineno, pos, filename):
|
||||
self.source = source
|
||||
self.lineno = lineno
|
||||
self.pos = pos
|
||||
self.filename = filename
|
||||
|
||||
@property
|
||||
def exception_kwargs(self):
|
||||
return {'source': self.source, 'lineno': self.lineno,
|
||||
'pos': self.pos, 'filename': self.filename}
|
||||
|
||||
def get_children(self):
|
||||
return []
|
||||
|
||||
def accept_visitor(self, visitor):
|
||||
def traverse(node):
|
||||
for n in node.get_children():
|
||||
n.accept_visitor(visitor)
|
||||
|
||||
method = getattr(visitor, "visit" + self.__class__.__name__, traverse)
|
||||
method(self)
|
||||
|
||||
|
||||
class TemplateNode(Node):
|
||||
|
||||
"""a 'container' node that stores the overall collection of nodes."""
|
||||
|
||||
def __init__(self, filename):
|
||||
super(TemplateNode, self).__init__('', 0, 0, filename)
|
||||
self.nodes = []
|
||||
self.page_attributes = {}
|
||||
|
||||
def get_children(self):
|
||||
return self.nodes
|
||||
|
||||
def __repr__(self):
|
||||
return "TemplateNode(%s, %r)" % (
|
||||
util.sorted_dict_repr(self.page_attributes),
|
||||
self.nodes)
|
||||
|
||||
|
||||
class ControlLine(Node):
|
||||
|
||||
"""defines a control line, a line-oriented python line or end tag.
|
||||
|
||||
e.g.::
|
||||
|
||||
% if foo:
|
||||
(markup)
|
||||
% endif
|
||||
|
||||
"""
|
||||
|
||||
has_loop_context = False
|
||||
|
||||
def __init__(self, keyword, isend, text, **kwargs):
|
||||
super(ControlLine, self).__init__(**kwargs)
|
||||
self.text = text
|
||||
self.keyword = keyword
|
||||
self.isend = isend
|
||||
self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with']
|
||||
self.nodes = []
|
||||
if self.isend:
|
||||
self._declared_identifiers = []
|
||||
self._undeclared_identifiers = []
|
||||
else:
|
||||
code = ast.PythonFragment(text, **self.exception_kwargs)
|
||||
self._declared_identifiers = code.declared_identifiers
|
||||
self._undeclared_identifiers = code.undeclared_identifiers
|
||||
|
||||
def get_children(self):
|
||||
return self.nodes
|
||||
|
||||
def declared_identifiers(self):
|
||||
return self._declared_identifiers
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
return self._undeclared_identifiers
|
||||
|
||||
def is_ternary(self, keyword):
|
||||
"""return true if the given keyword is a ternary keyword
|
||||
for this ControlLine"""
|
||||
|
||||
return keyword in {
|
||||
'if': set(['else', 'elif']),
|
||||
'try': set(['except', 'finally']),
|
||||
'for': set(['else'])
|
||||
}.get(self.keyword, [])
|
||||
|
||||
def __repr__(self):
|
||||
return "ControlLine(%r, %r, %r, %r)" % (
|
||||
self.keyword,
|
||||
self.text,
|
||||
self.isend,
|
||||
(self.lineno, self.pos)
|
||||
)
|
||||
|
||||
|
||||
class Text(Node):
|
||||
|
||||
"""defines plain text in the template."""
|
||||
|
||||
def __init__(self, content, **kwargs):
|
||||
super(Text, self).__init__(**kwargs)
|
||||
self.content = content
|
||||
|
||||
def __repr__(self):
|
||||
return "Text(%r, %r)" % (self.content, (self.lineno, self.pos))
|
||||
|
||||
|
||||
class Code(Node):
|
||||
|
||||
"""defines a Python code block, either inline or module level.
|
||||
|
||||
e.g.::
|
||||
|
||||
inline:
|
||||
<%
|
||||
x = 12
|
||||
%>
|
||||
|
||||
module level:
|
||||
<%!
|
||||
import logger
|
||||
%>
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, text, ismodule, **kwargs):
|
||||
super(Code, self).__init__(**kwargs)
|
||||
self.text = text
|
||||
self.ismodule = ismodule
|
||||
self.code = ast.PythonCode(text, **self.exception_kwargs)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return self.code.declared_identifiers
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
return self.code.undeclared_identifiers
|
||||
|
||||
def __repr__(self):
|
||||
return "Code(%r, %r, %r)" % (
|
||||
self.text,
|
||||
self.ismodule,
|
||||
(self.lineno, self.pos)
|
||||
)
|
||||
|
||||
|
||||
class Comment(Node):
|
||||
|
||||
"""defines a comment line.
|
||||
|
||||
# this is a comment
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, text, **kwargs):
|
||||
super(Comment, self).__init__(**kwargs)
|
||||
self.text = text
|
||||
|
||||
def __repr__(self):
|
||||
return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
|
||||
|
||||
|
||||
class Expression(Node):
|
||||
|
||||
"""defines an inline expression.
|
||||
|
||||
${x+y}
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, text, escapes, **kwargs):
|
||||
super(Expression, self).__init__(**kwargs)
|
||||
self.text = text
|
||||
self.escapes = escapes
|
||||
self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
|
||||
self.code = ast.PythonCode(text, **self.exception_kwargs)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return []
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
# TODO: make the "filter" shortcut list configurable at parse/gen time
|
||||
return self.code.undeclared_identifiers.union(
|
||||
self.escapes_code.undeclared_identifiers.difference(
|
||||
set(filters.DEFAULT_ESCAPES.keys())
|
||||
)
|
||||
).difference(self.code.declared_identifiers)
|
||||
|
||||
def __repr__(self):
|
||||
return "Expression(%r, %r, %r)" % (
|
||||
self.text,
|
||||
self.escapes_code.args,
|
||||
(self.lineno, self.pos)
|
||||
)
|
||||
|
||||
|
||||
class _TagMeta(type):
|
||||
|
||||
"""metaclass to allow Tag to produce a subclass according to
|
||||
its keyword"""
|
||||
|
||||
_classmap = {}
|
||||
|
||||
def __init__(cls, clsname, bases, dict):
|
||||
if getattr(cls, '__keyword__', None) is not None:
|
||||
cls._classmap[cls.__keyword__] = cls
|
||||
super(_TagMeta, cls).__init__(clsname, bases, dict)
|
||||
|
||||
def __call__(cls, keyword, attributes, **kwargs):
|
||||
if ":" in keyword:
|
||||
ns, defname = keyword.split(':')
|
||||
return type.__call__(CallNamespaceTag, ns, defname,
|
||||
attributes, **kwargs)
|
||||
|
||||
try:
|
||||
cls = _TagMeta._classmap[keyword]
|
||||
except KeyError:
|
||||
raise exceptions.CompileException(
|
||||
"No such tag: '%s'" % keyword,
|
||||
source=kwargs['source'],
|
||||
lineno=kwargs['lineno'],
|
||||
pos=kwargs['pos'],
|
||||
filename=kwargs['filename']
|
||||
)
|
||||
return type.__call__(cls, keyword, attributes, **kwargs)
|
||||
|
||||
|
||||
class Tag(compat.with_metaclass(_TagMeta, Node)):
|
||||
|
||||
"""abstract base class for tags.
|
||||
|
||||
<%sometag/>
|
||||
|
||||
<%someothertag>
|
||||
stuff
|
||||
</%someothertag>
|
||||
|
||||
"""
|
||||
__keyword__ = None
|
||||
|
||||
def __init__(self, keyword, attributes, expressions,
|
||||
nonexpressions, required, **kwargs):
|
||||
"""construct a new Tag instance.
|
||||
|
||||
this constructor is not called directly, and is only called
|
||||
by subclasses.
|
||||
|
||||
:param keyword: the tag keyword
|
||||
|
||||
:param attributes: raw dictionary of attribute key/value pairs
|
||||
|
||||
:param expressions: a set of identifiers that are legal attributes,
|
||||
which can also contain embedded expressions
|
||||
|
||||
:param nonexpressions: a set of identifiers that are legal
|
||||
attributes, which cannot contain embedded expressions
|
||||
|
||||
:param \**kwargs:
|
||||
other arguments passed to the Node superclass (lineno, pos)
|
||||
|
||||
"""
|
||||
super(Tag, self).__init__(**kwargs)
|
||||
self.keyword = keyword
|
||||
self.attributes = attributes
|
||||
self._parse_attributes(expressions, nonexpressions)
|
||||
missing = [r for r in required if r not in self.parsed_attributes]
|
||||
if len(missing):
|
||||
raise exceptions.CompileException(
|
||||
"Missing attribute(s): %s" %
|
||||
",".join([repr(m) for m in missing]),
|
||||
**self.exception_kwargs)
|
||||
self.parent = None
|
||||
self.nodes = []
|
||||
|
||||
def is_root(self):
|
||||
return self.parent is None
|
||||
|
||||
def get_children(self):
|
||||
return self.nodes
|
||||
|
||||
def _parse_attributes(self, expressions, nonexpressions):
|
||||
undeclared_identifiers = set()
|
||||
self.parsed_attributes = {}
|
||||
for key in self.attributes:
|
||||
if key in expressions:
|
||||
expr = []
|
||||
for x in re.compile(r'(\${.+?})',
|
||||
re.S).split(self.attributes[key]):
|
||||
m = re.compile(r'^\${(.+?)}$', re.S).match(x)
|
||||
if m:
|
||||
code = ast.PythonCode(m.group(1).rstrip(),
|
||||
**self.exception_kwargs)
|
||||
# we aren't discarding "declared_identifiers" here,
|
||||
# which we do so that list comprehension-declared
|
||||
# variables aren't counted. As yet can't find a
|
||||
# condition that requires it here.
|
||||
undeclared_identifiers = \
|
||||
undeclared_identifiers.union(
|
||||
code.undeclared_identifiers)
|
||||
expr.append('(%s)' % m.group(1))
|
||||
else:
|
||||
if x:
|
||||
expr.append(repr(x))
|
||||
self.parsed_attributes[key] = " + ".join(expr) or repr('')
|
||||
elif key in nonexpressions:
|
||||
if re.search(r'\${.+?}', self.attributes[key]):
|
||||
raise exceptions.CompileException(
|
||||
"Attibute '%s' in tag '%s' does not allow embedded "
|
||||
"expressions" % (key, self.keyword),
|
||||
**self.exception_kwargs)
|
||||
self.parsed_attributes[key] = repr(self.attributes[key])
|
||||
else:
|
||||
raise exceptions.CompileException(
|
||||
"Invalid attribute for tag '%s': '%s'" %
|
||||
(self.keyword, key),
|
||||
**self.exception_kwargs)
|
||||
self.expression_undeclared_identifiers = undeclared_identifiers
|
||||
|
||||
def declared_identifiers(self):
|
||||
return []
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
return self.expression_undeclared_identifiers
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
|
||||
self.keyword,
|
||||
util.sorted_dict_repr(self.attributes),
|
||||
(self.lineno, self.pos),
|
||||
self.nodes
|
||||
)
|
||||
|
||||
|
||||
class IncludeTag(Tag):
|
||||
__keyword__ = 'include'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
super(IncludeTag, self).__init__(
|
||||
keyword,
|
||||
attributes,
|
||||
('file', 'import', 'args'),
|
||||
(), ('file',), **kwargs)
|
||||
self.page_args = ast.PythonCode(
|
||||
"__DUMMY(%s)" % attributes.get('args', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return []
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
identifiers = self.page_args.undeclared_identifiers.\
|
||||
difference(set(["__DUMMY"])).\
|
||||
difference(self.page_args.declared_identifiers)
|
||||
return identifiers.union(super(IncludeTag, self).
|
||||
undeclared_identifiers())
|
||||
|
||||
|
||||
class NamespaceTag(Tag):
|
||||
__keyword__ = 'namespace'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
super(NamespaceTag, self).__init__(
|
||||
keyword, attributes,
|
||||
('file',),
|
||||
('name', 'inheritable',
|
||||
'import', 'module'),
|
||||
(), **kwargs)
|
||||
|
||||
self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
|
||||
if 'name' not in attributes and 'import' not in attributes:
|
||||
raise exceptions.CompileException(
|
||||
"'name' and/or 'import' attributes are required "
|
||||
"for <%namespace>",
|
||||
**self.exception_kwargs)
|
||||
if 'file' in attributes and 'module' in attributes:
|
||||
raise exceptions.CompileException(
|
||||
"<%namespace> may only have one of 'file' or 'module'",
|
||||
**self.exception_kwargs
|
||||
)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return []
|
||||
|
||||
|
||||
class TextTag(Tag):
|
||||
__keyword__ = 'text'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
super(TextTag, self).__init__(
|
||||
keyword,
|
||||
attributes, (),
|
||||
('filter'), (), **kwargs)
|
||||
self.filter_args = ast.ArgumentList(
|
||||
attributes.get('filter', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
return self.filter_args.\
|
||||
undeclared_identifiers.\
|
||||
difference(filters.DEFAULT_ESCAPES.keys()).union(
|
||||
self.expression_undeclared_identifiers
|
||||
)
|
||||
|
||||
|
||||
class DefTag(Tag):
|
||||
__keyword__ = 'def'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
expressions = ['buffered', 'cached'] + [
|
||||
c for c in attributes if c.startswith('cache_')]
|
||||
|
||||
super(DefTag, self).__init__(
|
||||
keyword,
|
||||
attributes,
|
||||
expressions,
|
||||
('name', 'filter', 'decorator'),
|
||||
('name',),
|
||||
**kwargs)
|
||||
name = attributes['name']
|
||||
if re.match(r'^[\w_]+$', name):
|
||||
raise exceptions.CompileException(
|
||||
"Missing parenthesis in %def",
|
||||
**self.exception_kwargs)
|
||||
self.function_decl = ast.FunctionDecl("def " + name + ":pass",
|
||||
**self.exception_kwargs)
|
||||
self.name = self.function_decl.funcname
|
||||
self.decorator = attributes.get('decorator', '')
|
||||
self.filter_args = ast.ArgumentList(
|
||||
attributes.get('filter', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
is_anonymous = False
|
||||
is_block = False
|
||||
|
||||
@property
|
||||
def funcname(self):
|
||||
return self.function_decl.funcname
|
||||
|
||||
def get_argument_expressions(self, **kw):
|
||||
return self.function_decl.get_argument_expressions(**kw)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return self.function_decl.allargnames
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
res = []
|
||||
for c in self.function_decl.defaults:
|
||||
res += list(ast.PythonCode(c, **self.exception_kwargs).
|
||||
undeclared_identifiers)
|
||||
return set(res).union(
|
||||
self.filter_args.
|
||||
undeclared_identifiers.
|
||||
difference(filters.DEFAULT_ESCAPES.keys())
|
||||
).union(
|
||||
self.expression_undeclared_identifiers
|
||||
).difference(
|
||||
self.function_decl.allargnames
|
||||
)
|
||||
|
||||
|
||||
class BlockTag(Tag):
|
||||
__keyword__ = 'block'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
expressions = ['buffered', 'cached', 'args'] + [
|
||||
c for c in attributes if c.startswith('cache_')]
|
||||
|
||||
super(BlockTag, self).__init__(
|
||||
keyword,
|
||||
attributes,
|
||||
expressions,
|
||||
('name', 'filter', 'decorator'),
|
||||
(),
|
||||
**kwargs)
|
||||
name = attributes.get('name')
|
||||
if name and not re.match(r'^[\w_]+$', name):
|
||||
raise exceptions.CompileException(
|
||||
"%block may not specify an argument signature",
|
||||
**self.exception_kwargs)
|
||||
if not name and attributes.get('args', None):
|
||||
raise exceptions.CompileException(
|
||||
"Only named %blocks may specify args",
|
||||
**self.exception_kwargs
|
||||
)
|
||||
self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
self.name = name
|
||||
self.decorator = attributes.get('decorator', '')
|
||||
self.filter_args = ast.ArgumentList(
|
||||
attributes.get('filter', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
is_block = True
|
||||
|
||||
@property
|
||||
def is_anonymous(self):
|
||||
return self.name is None
|
||||
|
||||
@property
|
||||
def funcname(self):
|
||||
return self.name or "__M_anon_%d" % (self.lineno, )
|
||||
|
||||
def get_argument_expressions(self, **kw):
|
||||
return self.body_decl.get_argument_expressions(**kw)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return self.body_decl.allargnames
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
return (self.filter_args.
|
||||
undeclared_identifiers.
|
||||
difference(filters.DEFAULT_ESCAPES.keys())
|
||||
).union(self.expression_undeclared_identifiers)
|
||||
|
||||
|
||||
class CallTag(Tag):
|
||||
__keyword__ = 'call'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
super(CallTag, self).__init__(keyword, attributes,
|
||||
('args'), ('expr',), ('expr',), **kwargs)
|
||||
self.expression = attributes['expr']
|
||||
self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
|
||||
self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return self.code.declared_identifiers.union(self.body_decl.allargnames)
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
return self.code.undeclared_identifiers.\
|
||||
difference(self.code.declared_identifiers)
|
||||
|
||||
|
||||
class CallNamespaceTag(Tag):
|
||||
|
||||
def __init__(self, namespace, defname, attributes, **kwargs):
|
||||
super(CallNamespaceTag, self).__init__(
|
||||
namespace + ":" + defname,
|
||||
attributes,
|
||||
tuple(attributes.keys()) + ('args', ),
|
||||
(),
|
||||
(),
|
||||
**kwargs)
|
||||
|
||||
self.expression = "%s.%s(%s)" % (
|
||||
namespace,
|
||||
defname,
|
||||
",".join(["%s=%s" % (k, v) for k, v in
|
||||
self.parsed_attributes.items()
|
||||
if k != 'args'])
|
||||
)
|
||||
self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
|
||||
self.body_decl = ast.FunctionArgs(
|
||||
attributes.get('args', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return self.code.declared_identifiers.union(self.body_decl.allargnames)
|
||||
|
||||
def undeclared_identifiers(self):
|
||||
return self.code.undeclared_identifiers.\
|
||||
difference(self.code.declared_identifiers)
|
||||
|
||||
|
||||
class InheritTag(Tag):
|
||||
__keyword__ = 'inherit'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
super(InheritTag, self).__init__(
|
||||
keyword, attributes,
|
||||
('file',), (), ('file',), **kwargs)
|
||||
|
||||
|
||||
class PageTag(Tag):
|
||||
__keyword__ = 'page'
|
||||
|
||||
def __init__(self, keyword, attributes, **kwargs):
|
||||
expressions = \
|
||||
['cached', 'args', 'expression_filter', 'enable_loop'] + \
|
||||
[c for c in attributes if c.startswith('cache_')]
|
||||
|
||||
super(PageTag, self).__init__(
|
||||
keyword,
|
||||
attributes,
|
||||
expressions,
|
||||
(),
|
||||
(),
|
||||
**kwargs)
|
||||
self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
|
||||
**self.exception_kwargs)
|
||||
self.filter_args = ast.ArgumentList(
|
||||
attributes.get('expression_filter', ''),
|
||||
**self.exception_kwargs)
|
||||
|
||||
def declared_identifiers(self):
|
||||
return self.body_decl.allargnames
|
@@ -0,0 +1,303 @@
# mako/pygen.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""utilities for generating and formatting literal Python code."""
|
||||
|
||||
import re
|
||||
from mako import exceptions
|
||||
|
||||
|
||||
class PythonPrinter(object):
|
||||
|
||||
def __init__(self, stream):
|
||||
# indentation counter
|
||||
self.indent = 0
|
||||
|
||||
# a stack storing information about why we incremented
|
||||
# the indentation counter, to help us determine if we
|
||||
# should decrement it
|
||||
self.indent_detail = []
|
||||
|
||||
# the string of whitespace multiplied by the indent
|
||||
# counter to produce a line
|
||||
self.indentstring = " "
|
||||
|
||||
# the stream we are writing to
|
||||
self.stream = stream
|
||||
|
||||
# current line number
|
||||
self.lineno = 1
|
||||
|
||||
# a list of lines that represents a buffered "block" of code,
|
||||
# which can be later printed relative to an indent level
|
||||
self.line_buffer = []
|
||||
|
||||
self.in_indent_lines = False
|
||||
|
||||
self._reset_multi_line_flags()
|
||||
|
||||
# mapping of generated python lines to template
|
||||
# source lines
|
||||
self.source_map = {}
|
||||
|
||||
def _update_lineno(self, num):
|
||||
self.lineno += num
|
||||
|
||||
def start_source(self, lineno):
|
||||
if self.lineno not in self.source_map:
|
||||
self.source_map[self.lineno] = lineno
|
||||
|
||||
def write_blanks(self, num):
|
||||
self.stream.write("\n" * num)
|
||||
self._update_lineno(num)
|
||||
|
||||
def write_indented_block(self, block):
|
||||
"""print a line or lines of python which already contain indentation.
|
||||
|
||||
The indentation of the total block of lines will be adjusted to that of
|
||||
the current indent level."""
|
||||
self.in_indent_lines = False
|
||||
for l in re.split(r'\r?\n', block):
|
||||
self.line_buffer.append(l)
|
||||
self._update_lineno(1)
|
||||
|
||||
def writelines(self, *lines):
|
||||
"""print a series of lines of python."""
|
||||
for line in lines:
|
||||
self.writeline(line)
|
||||
|
||||
def writeline(self, line):
|
||||
"""print a line of python, indenting it according to the current
|
||||
indent level.
|
||||
|
||||
this also adjusts the indentation counter according to the
|
||||
content of the line.
|
||||
|
||||
"""
|
||||
|
||||
if not self.in_indent_lines:
|
||||
self._flush_adjusted_lines()
|
||||
self.in_indent_lines = True
|
||||
|
||||
if (
|
||||
line is None or
|
||||
re.match(r"^\s*#", line) or
|
||||
re.match(r"^\s*$", line)
|
||||
):
|
||||
hastext = False
|
||||
else:
|
||||
hastext = True
|
||||
|
||||
is_comment = line and len(line) and line[0] == '#'
|
||||
|
||||
# see if this line should decrease the indentation level
|
||||
if (
|
||||
not is_comment and
|
||||
(not hastext or self._is_unindentor(line))
|
||||
):
|
||||
|
||||
if self.indent > 0:
|
||||
self.indent -= 1
|
||||
# if the indent_detail stack is empty, the user
|
||||
# probably put extra closures - the resulting
|
||||
# module won't compile.
|
||||
if len(self.indent_detail) == 0:
|
||||
raise exceptions.SyntaxException(
|
||||
"Too many whitespace closures")
|
||||
self.indent_detail.pop()
|
||||
|
||||
if line is None:
|
||||
return
|
||||
|
||||
# write the line
|
||||
self.stream.write(self._indent_line(line) + "\n")
|
||||
self._update_lineno(len(line.split("\n")))
|
||||
|
||||
# see if this line should increase the indentation level.
|
||||
# note that a line can both decrease (before printing) and
|
||||
# then increase (after printing) the indentation level.
|
||||
|
||||
if re.search(r":[ \t]*(?:#.*)?$", line):
|
||||
# increment indentation count, and also
|
||||
# keep track of what the keyword was that indented us,
|
||||
# if it is a python compound statement keyword
|
||||
# where we might have to look for an "unindent" keyword
|
||||
match = re.match(r"^\s*(if|try|elif|while|for|with)", line)
|
||||
if match:
|
||||
# its a "compound" keyword, so we will check for "unindentors"
|
||||
indentor = match.group(1)
|
||||
self.indent += 1
|
||||
self.indent_detail.append(indentor)
|
||||
else:
|
||||
indentor = None
|
||||
# it's not a "compound" keyword, but let's also
|
||||
# test for valid Python keywords that might be indenting us,
|
||||
# else assume its a non-indenting line
|
||||
m2 = re.match(r"^\s*(def|class|else|elif|except|finally)",
|
||||
line)
|
||||
if m2:
|
||||
self.indent += 1
|
||||
self.indent_detail.append(indentor)
|
||||
|
||||
def close(self):
|
||||
"""close this printer, flushing any remaining lines."""
|
||||
self._flush_adjusted_lines()
|
||||
|
||||
def _is_unindentor(self, line):
|
||||
"""return true if the given line is an 'unindentor',
|
||||
relative to the last 'indent' event received.
|
||||
|
||||
"""
|
||||
|
||||
# no indentation detail has been pushed on; return False
|
||||
if len(self.indent_detail) == 0:
|
||||
return False
|
||||
|
||||
indentor = self.indent_detail[-1]
|
||||
|
||||
# the last indent keyword we grabbed is not a
|
||||
# compound statement keyword; return False
|
||||
if indentor is None:
|
||||
return False
|
||||
|
||||
# if the current line doesn't have one of the "unindentor" keywords,
|
||||
# return False
|
||||
match = re.match(r"^\s*(else|elif|except|finally).*\:", line)
|
||||
if not match:
|
||||
return False
|
||||
|
||||
# whitespace matches up, we have a compound indentor,
|
||||
# and this line has an unindentor, this
|
||||
# is probably good enough
|
||||
return True
|
||||
|
||||
# should we decide that it's not good enough, here's
|
||||
# more stuff to check.
|
||||
# keyword = match.group(1)
|
||||
|
||||
# match the original indent keyword
|
||||
# for crit in [
|
||||
# (r'if|elif', r'else|elif'),
|
||||
# (r'try', r'except|finally|else'),
|
||||
# (r'while|for', r'else'),
|
||||
# ]:
|
||||
# if re.match(crit[0], indentor) and re.match(crit[1], keyword):
|
||||
# return True
|
||||
|
||||
# return False
|
||||
|
||||
def _indent_line(self, line, stripspace=''):
|
||||
"""indent the given line according to the current indent level.
|
||||
|
||||
stripspace is a string of space that will be truncated from the
|
||||
start of the line before indenting."""
|
||||
|
||||
return re.sub(r"^%s" % stripspace, self.indentstring
|
||||
* self.indent, line)
|
||||
|
||||
def _reset_multi_line_flags(self):
|
||||
"""reset the flags which would indicate we are in a backslashed
|
||||
or triple-quoted section."""
|
||||
|
||||
self.backslashed, self.triplequoted = False, False
|
||||
|
||||
def _in_multi_line(self, line):
|
||||
"""return true if the given line is part of a multi-line block,
|
||||
via backslash or triple-quote."""
|
||||
|
||||
# we are only looking for explicitly joined lines here, not
|
||||
# implicit ones (i.e. brackets, braces etc.). this is just to
|
||||
# guard against the possibility of modifying the space inside of
|
||||
# a literal multiline string with unfortunately placed
|
||||
# whitespace
|
||||
|
||||
current_state = (self.backslashed or self.triplequoted)
|
||||
|
||||
if re.search(r"\\$", line):
|
||||
self.backslashed = True
|
||||
else:
|
||||
self.backslashed = False
|
||||
|
||||
triples = len(re.findall(r"\"\"\"|\'\'\'", line))
|
||||
if triples == 1 or triples % 2 != 0:
|
||||
self.triplequoted = not self.triplequoted
|
||||
|
||||
return current_state
|
||||
|
||||
def _flush_adjusted_lines(self):
|
||||
stripspace = None
|
||||
self._reset_multi_line_flags()
|
||||
|
||||
for entry in self.line_buffer:
|
||||
if self._in_multi_line(entry):
|
||||
self.stream.write(entry + "\n")
|
||||
else:
|
||||
entry = entry.expandtabs()
|
||||
if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry):
|
||||
stripspace = re.match(r"^([ \t]*)", entry).group(1)
|
||||
self.stream.write(self._indent_line(entry, stripspace) + "\n")
|
||||
|
||||
self.line_buffer = []
|
||||
self._reset_multi_line_flags()
|
||||
|
||||
|
||||
def adjust_whitespace(text):
|
||||
"""remove the left-whitespace margin of a block of Python code."""
|
||||
|
||||
state = [False, False]
|
||||
(backslashed, triplequoted) = (0, 1)
|
||||
|
||||
def in_multi_line(line):
|
||||
start_state = (state[backslashed] or state[triplequoted])
|
||||
|
||||
if re.search(r"\\$", line):
|
||||
state[backslashed] = True
|
||||
else:
|
||||
state[backslashed] = False
|
||||
|
||||
def match(reg, t):
|
||||
m = re.match(reg, t)
|
||||
if m:
|
||||
return m, t[len(m.group(0)):]
|
||||
else:
|
||||
return None, t
|
||||
|
||||
while line:
|
||||
if state[triplequoted]:
|
||||
m, line = match(r"%s" % state[triplequoted], line)
|
||||
if m:
|
||||
state[triplequoted] = False
|
||||
else:
|
||||
m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
|
||||
else:
|
||||
m, line = match(r'#', line)
|
||||
if m:
|
||||
return start_state
|
||||
|
||||
m, line = match(r"\"\"\"|\'\'\'", line)
|
||||
if m:
|
||||
state[triplequoted] = m.group(0)
|
||||
continue
|
||||
|
||||
m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line)
|
||||
|
||||
return start_state
|
||||
|
||||
def _indent_line(line, stripspace=''):
|
||||
return re.sub(r"^%s" % stripspace, '', line)
|
||||
|
||||
lines = []
|
||||
stripspace = None
|
||||
|
||||
for line in re.split(r'\r?\n', text):
|
||||
if in_multi_line(line):
|
||||
lines.append(line)
|
||||
else:
|
||||
line = line.expandtabs()
|
||||
if stripspace is None and re.search(r"^[ \t]*[^# \t]", line):
|
||||
stripspace = re.match(r"^([ \t]*)", line).group(1)
|
||||
lines.append(_indent_line(line, stripspace))
|
||||
return "\n".join(lines)
|
@@ -0,0 +1,233 @@
# mako/pyparser.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Handles parsing of Python code.
|
||||
|
||||
Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
|
||||
module is used.
|
||||
"""
|
||||
|
||||
from mako import exceptions, util, compat
|
||||
from mako.compat import arg_stringname
|
||||
import operator
|
||||
|
||||
if compat.py3k:
|
||||
# words that cannot be assigned to (notably
|
||||
# smaller than the total keys in __builtins__)
|
||||
reserved = set(['True', 'False', 'None', 'print'])
|
||||
|
||||
# the "id" attribute on a function node
|
||||
arg_id = operator.attrgetter('arg')
|
||||
else:
|
||||
# words that cannot be assigned to (notably
|
||||
# smaller than the total keys in __builtins__)
|
||||
reserved = set(['True', 'False', 'None'])
|
||||
|
||||
# the "id" attribute on a function node
|
||||
arg_id = operator.attrgetter('id')
|
||||
|
||||
import _ast
|
||||
util.restore__ast(_ast)
|
||||
from mako import _ast_util
|
||||
|
||||
|
||||
def parse(code, mode='exec', **exception_kwargs):
|
||||
"""Parse an expression into AST"""
|
||||
|
||||
try:
|
||||
return _ast_util.parse(code, '<unknown>', mode)
|
||||
except Exception:
|
||||
raise exceptions.SyntaxException(
|
||||
"(%s) %s (%r)" % (
|
||||
compat.exception_as().__class__.__name__,
|
||||
compat.exception_as(),
|
||||
code[0:50]
|
||||
), **exception_kwargs)
|
||||
|
||||
|
||||
class FindIdentifiers(_ast_util.NodeVisitor):
|
||||
|
||||
def __init__(self, listener, **exception_kwargs):
|
||||
self.in_function = False
|
||||
self.in_assign_targets = False
|
||||
self.local_ident_stack = set()
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
|
||||
def _add_declared(self, name):
|
||||
if not self.in_function:
|
||||
self.listener.declared_identifiers.add(name)
|
||||
else:
|
||||
self.local_ident_stack.add(name)
|
||||
|
||||
def visit_ClassDef(self, node):
|
||||
self._add_declared(node.name)
|
||||
|
||||
def visit_Assign(self, node):
|
||||
|
||||
# flip around the visiting of Assign so the expression gets
|
||||
# evaluated first, in the case of a clause like "x=x+5" (x
|
||||
# is undeclared)
|
||||
|
||||
self.visit(node.value)
|
||||
in_a = self.in_assign_targets
|
||||
self.in_assign_targets = True
|
||||
for n in node.targets:
|
||||
self.visit(n)
|
||||
self.in_assign_targets = in_a
|
||||
|
||||
if compat.py3k:
|
||||
|
||||
# ExceptHandler is in Python 2, but this block only works in
|
||||
# Python 3 (and is required there)
|
||||
|
||||
def visit_ExceptHandler(self, node):
|
||||
if node.name is not None:
|
||||
self._add_declared(node.name)
|
||||
if node.type is not None:
|
||||
self.visit(node.type)
|
||||
for statement in node.body:
|
||||
self.visit(statement)
|
||||
|
||||
def visit_Lambda(self, node, *args):
|
||||
self._visit_function(node, True)
|
||||
|
||||
def visit_FunctionDef(self, node):
|
||||
self._add_declared(node.name)
|
||||
self._visit_function(node, False)
|
||||
|
||||
def _expand_tuples(self, args):
|
||||
for arg in args:
|
||||
if isinstance(arg, _ast.Tuple):
|
||||
for n in arg.elts:
|
||||
yield n
|
||||
else:
|
||||
yield arg
|
||||
|
||||
def _visit_function(self, node, islambda):
|
||||
|
||||
# push function state onto stack. dont log any more
|
||||
# identifiers as "declared" until outside of the function,
|
||||
# but keep logging identifiers as "undeclared". track
|
||||
# argument names in each function header so they arent
|
||||
# counted as "undeclared"
|
||||
|
||||
inf = self.in_function
|
||||
self.in_function = True
|
||||
|
||||
local_ident_stack = self.local_ident_stack
|
||||
self.local_ident_stack = local_ident_stack.union([
|
||||
arg_id(arg) for arg in self._expand_tuples(node.args.args)
|
||||
])
|
||||
if islambda:
|
||||
self.visit(node.body)
|
||||
else:
|
||||
for n in node.body:
|
||||
self.visit(n)
|
||||
self.in_function = inf
|
||||
self.local_ident_stack = local_ident_stack
|
||||
|
||||
def visit_For(self, node):
|
||||
|
||||
# flip around visit
|
||||
|
||||
self.visit(node.iter)
|
||||
self.visit(node.target)
|
||||
for statement in node.body:
|
||||
self.visit(statement)
|
||||
for statement in node.orelse:
|
||||
self.visit(statement)
|
||||
|
||||
def visit_Name(self, node):
|
||||
if isinstance(node.ctx, _ast.Store):
|
||||
# this is equivalent to visit_AssName in
|
||||
# compiler
|
||||
self._add_declared(node.id)
|
||||
elif node.id not in reserved and node.id \
|
||||
not in self.listener.declared_identifiers and node.id \
|
||||
not in self.local_ident_stack:
|
||||
self.listener.undeclared_identifiers.add(node.id)
|
||||
|
||||
def visit_Import(self, node):
|
||||
for name in node.names:
|
||||
if name.asname is not None:
|
||||
self._add_declared(name.asname)
|
||||
else:
|
||||
self._add_declared(name.name.split('.')[0])
|
||||
|
||||
def visit_ImportFrom(self, node):
|
||||
for name in node.names:
|
||||
if name.asname is not None:
|
||||
self._add_declared(name.asname)
|
||||
else:
|
||||
if name.name == '*':
|
||||
raise exceptions.CompileException(
|
||||
"'import *' is not supported, since all identifier "
|
||||
"names must be explicitly declared. Please use the "
|
||||
"form 'from <modulename> import <name1>, <name2>, "
|
||||
"...' instead.", **self.exception_kwargs)
|
||||
self._add_declared(name.name)
|
||||
|
||||
|
||||
class FindTuple(_ast_util.NodeVisitor):
|
||||
|
||||
def __init__(self, listener, code_factory, **exception_kwargs):
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
self.code_factory = code_factory
|
||||
|
||||
def visit_Tuple(self, node):
|
||||
for n in node.elts:
|
||||
p = self.code_factory(n, **self.exception_kwargs)
|
||||
self.listener.codeargs.append(p)
|
||||
self.listener.args.append(ExpressionGenerator(n).value())
|
||||
self.listener.declared_identifiers = \
|
||||
self.listener.declared_identifiers.union(
|
||||
p.declared_identifiers)
|
||||
self.listener.undeclared_identifiers = \
|
||||
self.listener.undeclared_identifiers.union(
|
||||
p.undeclared_identifiers)
|
||||
|
||||
|
||||
class ParseFunc(_ast_util.NodeVisitor):
|
||||
|
||||
def __init__(self, listener, **exception_kwargs):
|
||||
self.listener = listener
|
||||
self.exception_kwargs = exception_kwargs
|
||||
|
||||
def visit_FunctionDef(self, node):
|
||||
self.listener.funcname = node.name
|
||||
|
||||
argnames = [arg_id(arg) for arg in node.args.args]
|
||||
if node.args.vararg:
|
||||
argnames.append(arg_stringname(node.args.vararg))
|
||||
|
||||
if compat.py2k:
|
||||
# kw-only args don't exist in Python 2
|
||||
kwargnames = []
|
||||
else:
|
||||
kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs]
|
||||
if node.args.kwarg:
|
||||
kwargnames.append(arg_stringname(node.args.kwarg))
|
||||
self.listener.argnames = argnames
|
||||
self.listener.defaults = node.args.defaults # ast
|
||||
self.listener.kwargnames = kwargnames
|
||||
if compat.py2k:
|
||||
self.listener.kwdefaults = []
|
||||
else:
|
||||
self.listener.kwdefaults = node.args.kw_defaults
|
||||
self.listener.varargs = node.args.vararg
|
||||
self.listener.kwargs = node.args.kwarg
|
||||
|
||||
|
||||
class ExpressionGenerator(object):
|
||||
|
||||
def __init__(self, astnode):
|
||||
self.generator = _ast_util.SourceGenerator(' ' * 4)
|
||||
self.generator.visit(astnode)
|
||||
|
||||
def value(self):
|
||||
return ''.join(self.generator.result)
|
@@ -0,0 +1,909 @@
# mako/runtime.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""provides runtime services for templates, including Context,
|
||||
Namespace, and various helper functions."""
|
||||
|
||||
from mako import exceptions, util, compat
|
||||
from mako.compat import compat_builtins
|
||||
import sys
|
||||
|
||||
|
||||
class Context(object):
|
||||
|
||||
"""Provides runtime namespace, output buffer, and various
|
||||
callstacks for templates.
|
||||
|
||||
See :ref:`runtime_toplevel` for detail on the usage of
|
||||
:class:`.Context`.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, buffer, **data):
|
||||
self._buffer_stack = [buffer]
|
||||
|
||||
self._data = data
|
||||
|
||||
self._kwargs = data.copy()
|
||||
self._with_template = None
|
||||
self._outputting_as_unicode = None
|
||||
self.namespaces = {}
|
||||
|
||||
# "capture" function which proxies to the
|
||||
# generic "capture" function
|
||||
self._data['capture'] = compat.partial(capture, self)
|
||||
|
||||
# "caller" stack used by def calls with content
|
||||
self.caller_stack = self._data['caller'] = CallerStack()
|
||||
|
||||
def _set_with_template(self, t):
|
||||
self._with_template = t
|
||||
illegal_names = t.reserved_names.intersection(self._data)
|
||||
if illegal_names:
|
||||
raise exceptions.NameConflictError(
|
||||
"Reserved words passed to render(): %s" %
|
||||
", ".join(illegal_names))
|
||||
|
||||
@property
|
||||
def lookup(self):
|
||||
"""Return the :class:`.TemplateLookup` associated
|
||||
with this :class:`.Context`.
|
||||
|
||||
"""
|
||||
return self._with_template.lookup
|
||||
|
||||
@property
|
||||
def kwargs(self):
|
||||
"""Return the dictionary of top level keyword arguments associated
|
||||
with this :class:`.Context`.
|
||||
|
||||
This dictionary only includes the top-level arguments passed to
|
||||
:meth:`.Template.render`. It does not include names produced within
|
||||
the template execution such as local variable names or special names
|
||||
such as ``self``, ``next``, etc.
|
||||
|
||||
The purpose of this dictionary is primarily for the case that
|
||||
a :class:`.Template` accepts arguments via its ``<%page>`` tag,
|
||||
which are normally expected to be passed via :meth:`.Template.render`,
|
||||
except the template is being called in an inheritance context,
|
||||
using the ``body()`` method. :attr:`.Context.kwargs` can then be
|
||||
used to propagate these arguments to the inheriting template::
|
||||
|
||||
${next.body(**context.kwargs)}
|
||||
|
||||
"""
|
||||
return self._kwargs.copy()
|
||||
|
||||
def push_caller(self, caller):
|
||||
"""Push a ``caller`` callable onto the callstack for
|
||||
this :class:`.Context`."""
|
||||
|
||||
self.caller_stack.append(caller)
|
||||
|
||||
def pop_caller(self):
|
||||
"""Pop a ``caller`` callable onto the callstack for this
|
||||
:class:`.Context`."""
|
||||
|
||||
del self.caller_stack[-1]
|
||||
|
||||
def keys(self):
|
||||
"""Return a list of all names established in this :class:`.Context`."""
|
||||
|
||||
return list(self._data.keys())
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key in self._data:
|
||||
return self._data[key]
|
||||
else:
|
||||
return compat_builtins.__dict__[key]
|
||||
|
||||
def _push_writer(self):
|
||||
"""push a capturing buffer onto this Context and return
|
||||
the new writer function."""
|
||||
|
||||
buf = util.FastEncodingBuffer()
|
||||
self._buffer_stack.append(buf)
|
||||
return buf.write
|
||||
|
||||
def _pop_buffer_and_writer(self):
|
||||
"""pop the most recent capturing buffer from this Context
|
||||
and return the current writer after the pop.
|
||||
|
||||
"""
|
||||
|
||||
buf = self._buffer_stack.pop()
|
||||
return buf, self._buffer_stack[-1].write
|
||||
|
||||
def _push_buffer(self):
|
||||
"""push a capturing buffer onto this Context."""
|
||||
|
||||
self._push_writer()
|
||||
|
||||
def _pop_buffer(self):
|
||||
"""pop the most recent capturing buffer from this Context."""
|
||||
|
||||
return self._buffer_stack.pop()
|
||||
|
||||
def get(self, key, default=None):
|
||||
"""Return a value from this :class:`.Context`."""
|
||||
|
||||
return self._data.get(key, compat_builtins.__dict__.get(key, default))
|
||||
|
||||
def write(self, string):
|
||||
"""Write a string to this :class:`.Context` object's
|
||||
underlying output buffer."""
|
||||
|
||||
self._buffer_stack[-1].write(string)
|
||||
|
||||
def writer(self):
|
||||
"""Return the current writer function."""
|
||||
|
||||
return self._buffer_stack[-1].write
|
||||
|
||||
def _copy(self):
|
||||
c = Context.__new__(Context)
|
||||
c._buffer_stack = self._buffer_stack
|
||||
c._data = self._data.copy()
|
||||
c._kwargs = self._kwargs
|
||||
c._with_template = self._with_template
|
||||
c._outputting_as_unicode = self._outputting_as_unicode
|
||||
c.namespaces = self.namespaces
|
||||
c.caller_stack = self.caller_stack
|
||||
return c
|
||||
|
||||
def _locals(self, d):
|
||||
"""Create a new :class:`.Context` with a copy of this
|
||||
:class:`.Context`'s current state,
|
||||
updated with the given dictionary.
|
||||
|
||||
The :attr:`.Context.kwargs` collection remains
|
||||
unaffected.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
if not d:
|
||||
return self
|
||||
c = self._copy()
|
||||
c._data.update(d)
|
||||
return c
|
||||
|
||||
def _clean_inheritance_tokens(self):
|
||||
"""create a new copy of this :class:`.Context`. with
|
||||
tokens related to inheritance state removed."""
|
||||
|
||||
c = self._copy()
|
||||
x = c._data
|
||||
x.pop('self', None)
|
||||
x.pop('parent', None)
|
||||
x.pop('next', None)
|
||||
return c
|
||||
|
||||
|
||||
class CallerStack(list):
|
||||
|
||||
def __init__(self):
|
||||
self.nextcaller = None
|
||||
|
||||
def __nonzero__(self):
|
||||
return self.__bool__()
|
||||
|
||||
def __bool__(self):
|
||||
return len(self) and self._get_caller() and True or False
|
||||
|
||||
def _get_caller(self):
|
||||
# this method can be removed once
|
||||
# codegen MAGIC_NUMBER moves past 7
|
||||
return self[-1]
|
||||
|
||||
def __getattr__(self, key):
|
||||
return getattr(self._get_caller(), key)
|
||||
|
||||
def _push_frame(self):
|
||||
frame = self.nextcaller or None
|
||||
self.append(frame)
|
||||
self.nextcaller = None
|
||||
return frame
|
||||
|
||||
def _pop_frame(self):
|
||||
self.nextcaller = self.pop()
|
||||
|
||||
|
||||
class Undefined(object):
|
||||
|
||||
"""Represents an undefined value in a template.
|
||||
|
||||
All template modules have a constant value
|
||||
``UNDEFINED`` present which is an instance of this
|
||||
object.
|
||||
|
||||
"""
|
||||
|
||||
def __str__(self):
|
||||
raise NameError("Undefined")
|
||||
|
||||
def __nonzero__(self):
|
||||
return self.__bool__()
|
||||
|
||||
def __bool__(self):
|
||||
return False
|
||||
|
||||
UNDEFINED = Undefined()
|
||||
STOP_RENDERING = ""
|
||||
|
||||
|
||||
class LoopStack(object):
|
||||
|
||||
"""a stack for LoopContexts that implements the context manager protocol
|
||||
to automatically pop off the top of the stack on context exit
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.stack = []
|
||||
|
||||
def _enter(self, iterable):
|
||||
self._push(iterable)
|
||||
return self._top
|
||||
|
||||
def _exit(self):
|
||||
self._pop()
|
||||
return self._top
|
||||
|
||||
@property
|
||||
def _top(self):
|
||||
if self.stack:
|
||||
return self.stack[-1]
|
||||
else:
|
||||
return self
|
||||
|
||||
def _pop(self):
|
||||
return self.stack.pop()
|
||||
|
||||
def _push(self, iterable):
|
||||
new = LoopContext(iterable)
|
||||
if self.stack:
|
||||
new.parent = self.stack[-1]
|
||||
return self.stack.append(new)
|
||||
|
||||
def __getattr__(self, key):
|
||||
raise exceptions.RuntimeException("No loop context is established")
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._top)
|
||||
|
||||
|
||||
class LoopContext(object):
|
||||
|
||||
"""A magic loop variable.
|
||||
Automatically accessible in any ``% for`` block.
|
||||
|
||||
See the section :ref:`loop_context` for usage
|
||||
notes.
|
||||
|
||||
:attr:`parent` -> :class:`.LoopContext` or ``None``
|
||||
The parent loop, if one exists.
|
||||
:attr:`index` -> `int`
|
||||
The 0-based iteration count.
|
||||
:attr:`reverse_index` -> `int`
|
||||
The number of iterations remaining.
|
||||
:attr:`first` -> `bool`
|
||||
``True`` on the first iteration, ``False`` otherwise.
|
||||
:attr:`last` -> `bool`
|
||||
``True`` on the last iteration, ``False`` otherwise.
|
||||
:attr:`even` -> `bool`
|
||||
``True`` when ``index`` is even.
|
||||
:attr:`odd` -> `bool`
|
||||
``True`` when ``index`` is odd.
|
||||
"""
|
||||
|
||||
def __init__(self, iterable):
|
||||
self._iterable = iterable
|
||||
self.index = 0
|
||||
self.parent = None
|
||||
|
||||
def __iter__(self):
|
||||
for i in self._iterable:
|
||||
yield i
|
||||
self.index += 1
|
||||
|
||||
@util.memoized_instancemethod
|
||||
def __len__(self):
|
||||
return len(self._iterable)
|
||||
|
||||
@property
|
||||
def reverse_index(self):
|
||||
return len(self) - self.index - 1
|
||||
|
||||
@property
|
||||
def first(self):
|
||||
return self.index == 0
|
||||
|
||||
@property
|
||||
def last(self):
|
||||
return self.index == len(self) - 1
|
||||
|
||||
@property
|
||||
def even(self):
|
||||
return not self.odd
|
||||
|
||||
@property
|
||||
def odd(self):
|
||||
return bool(self.index % 2)
|
||||
|
||||
def cycle(self, *values):
|
||||
"""Cycle through values as the loop progresses.
|
||||
"""
|
||||
if not values:
|
||||
raise ValueError("You must provide values to cycle through")
|
||||
return values[self.index % len(values)]
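# A short, hedged sketch of the ``loop`` variable that LoopStack/LoopContext
# back inside ``% for`` blocks; it relies only on the public Template API.
from mako.template import Template

tmpl = Template(
    "% for item in items:\n"
    "${loop.index}/${loop.reverse_index} ${item} ${loop.cycle('odd', 'even')}\n"
    "% endfor\n")
print(tmpl.render(items=["a", "b", "c"]))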
|
||||
|
||||
|
||||
class _NSAttr(object):
|
||||
|
||||
def __init__(self, parent):
|
||||
self.__parent = parent
|
||||
|
||||
def __getattr__(self, key):
|
||||
ns = self.__parent
|
||||
while ns:
|
||||
if hasattr(ns.module, key):
|
||||
return getattr(ns.module, key)
|
||||
else:
|
||||
ns = ns.inherits
|
||||
raise AttributeError(key)
|
||||
|
||||
|
||||
class Namespace(object):
|
||||
|
||||
"""Provides access to collections of rendering methods, which
|
||||
can be local, from other templates, or from imported modules.
|
||||
|
||||
To access a particular rendering method referenced by a
|
||||
:class:`.Namespace`, use plain attribute access:
|
||||
|
||||
.. sourcecode:: mako
|
||||
|
||||
${some_namespace.foo(x, y, z)}
|
||||
|
||||
:class:`.Namespace` also contains several built-in attributes
|
||||
described here.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, name, context,
|
||||
callables=None, inherits=None,
|
||||
populate_self=True, calling_uri=None):
|
||||
self.name = name
|
||||
self.context = context
|
||||
self.inherits = inherits
|
||||
if callables is not None:
|
||||
self.callables = dict([(c.__name__, c) for c in callables])
|
||||
|
||||
callables = ()
|
||||
|
||||
module = None
|
||||
"""The Python module referenced by this :class:`.Namespace`.
|
||||
|
||||
If the namespace references a :class:`.Template`, then
|
||||
this module is the equivalent of ``template.module``,
|
||||
i.e. the generated module for the template.
|
||||
|
||||
"""
|
||||
|
||||
template = None
|
||||
"""The :class:`.Template` object referenced by this
|
||||
:class:`.Namespace`, if any.
|
||||
|
||||
"""
|
||||
|
||||
context = None
|
||||
"""The :class:`.Context` object for this :class:`.Namespace`.
|
||||
|
||||
Namespaces are often created with copies of contexts that
|
||||
contain slightly different data, particularly in inheritance
|
||||
scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one
|
||||
can traverse an entire chain of templates that inherit from
|
||||
one another.
|
||||
|
||||
"""
|
||||
|
||||
filename = None
|
||||
"""The path of the filesystem file used for this
|
||||
:class:`.Namespace`'s module or template.
|
||||
|
||||
If this is a pure module-based
|
||||
:class:`.Namespace`, this evaluates to ``module.__file__``. If a
|
||||
template-based namespace, it evaluates to the original
|
||||
template file location.
|
||||
|
||||
"""
|
||||
|
||||
uri = None
|
||||
"""The URI for this :class:`.Namespace`'s template.
|
||||
|
||||
I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
|
||||
|
||||
This is the equivalent of :attr:`.Template.uri`.
|
||||
|
||||
"""
|
||||
|
||||
_templateuri = None
|
||||
|
||||
@util.memoized_property
|
||||
def attr(self):
|
||||
"""Access module level attributes by name.
|
||||
|
||||
This accessor allows templates to supply "scalar"
|
||||
attributes which are particularly handy in inheritance
|
||||
relationships.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`inheritance_attr`
|
||||
|
||||
:ref:`namespace_attr_for_includes`
|
||||
|
||||
"""
|
||||
return _NSAttr(self)
|
||||
|
||||
def get_namespace(self, uri):
|
||||
"""Return a :class:`.Namespace` corresponding to the given ``uri``.
|
||||
|
||||
If the given ``uri`` is a relative URI (i.e. it does not
|
||||
contain a leading slash ``/``), the ``uri`` is adjusted to
|
||||
be relative to the ``uri`` of the namespace itself. This
|
||||
method is therefore mostly useful off of the built-in
|
||||
``local`` namespace, described in :ref:`namespace_local`.
|
||||
|
||||
In
|
||||
most cases, a template wouldn't need this function, and
|
||||
should instead use the ``<%namespace>`` tag to load
|
||||
namespaces. However, since all ``<%namespace>`` tags are
|
||||
evaluated before the body of a template ever runs,
|
||||
this method can be used to locate namespaces using
|
||||
expressions that were generated within the body code of
|
||||
the template, or to conditionally use a particular
|
||||
namespace.
|
||||
|
||||
"""
|
||||
key = (self, uri)
|
||||
if key in self.context.namespaces:
|
||||
return self.context.namespaces[key]
|
||||
else:
|
||||
ns = TemplateNamespace(uri, self.context._copy(),
|
||||
templateuri=uri,
|
||||
calling_uri=self._templateuri)
|
||||
self.context.namespaces[key] = ns
|
||||
return ns
|
||||
|
||||
def get_template(self, uri):
|
||||
"""Return a :class:`.Template` from the given ``uri``.
|
||||
|
||||
The ``uri`` resolution is relative to the ``uri`` of this
|
||||
:class:`.Namespace` object's :class:`.Template`.
|
||||
|
||||
"""
|
||||
return _lookup_template(self.context, uri, self._templateuri)
|
||||
|
||||
def get_cached(self, key, **kwargs):
|
||||
"""Return a value from the :class:`.Cache` referenced by this
|
||||
:class:`.Namespace` object's :class:`.Template`.
|
||||
|
||||
The advantage to this method versus direct access to the
|
||||
:class:`.Cache` is that the configuration parameters
|
||||
declared in ``<%page>`` take effect here, thereby calling
|
||||
up the same configured backend as that configured
|
||||
by ``<%page>``.
|
||||
|
||||
"""
|
||||
|
||||
return self.cache.get(key, **kwargs)
|
||||
|
||||
@property
|
||||
def cache(self):
|
||||
"""Return the :class:`.Cache` object referenced
|
||||
by this :class:`.Namespace` object's
|
||||
:class:`.Template`.
|
||||
|
||||
"""
|
||||
return self.template.cache
|
||||
|
||||
def include_file(self, uri, **kwargs):
|
||||
"""Include a file at the given ``uri``."""
|
||||
|
||||
_include_file(self.context, uri, self._templateuri, **kwargs)
|
||||
|
||||
def _populate(self, d, l):
|
||||
for ident in l:
|
||||
if ident == '*':
|
||||
for (k, v) in self._get_star():
|
||||
d[k] = v
|
||||
else:
|
||||
d[ident] = getattr(self, ident)
|
||||
|
||||
def _get_star(self):
|
||||
if self.callables:
|
||||
for key in self.callables:
|
||||
yield (key, self.callables[key])
|
||||
|
||||
def __getattr__(self, key):
|
||||
if key in self.callables:
|
||||
val = self.callables[key]
|
||||
elif self.inherits:
|
||||
val = getattr(self.inherits, key)
|
||||
else:
|
||||
raise AttributeError(
|
||||
"Namespace '%s' has no member '%s'" %
|
||||
(self.name, key))
|
||||
setattr(self, key, val)
|
||||
return val
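# Hedged sketch: calling a def through a namespace, as in the
# ``${some_namespace.foo(x, y, z)}`` form mentioned in the docstring above.
# Here the built-in ``local`` namespace of the template itself is used;
# the def name is illustrative.
from mako.template import Template

tmpl = Template(
    "<%def name='greet(name)'>Hello, ${name}!</%def>"
    "${local.greet('world')}")
print(tmpl.render())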
|
||||
|
||||
|
||||
class TemplateNamespace(Namespace):
|
||||
|
||||
"""A :class:`.Namespace` specific to a :class:`.Template` instance."""
|
||||
|
||||
def __init__(self, name, context, template=None, templateuri=None,
|
||||
callables=None, inherits=None,
|
||||
populate_self=True, calling_uri=None):
|
||||
self.name = name
|
||||
self.context = context
|
||||
self.inherits = inherits
|
||||
if callables is not None:
|
||||
self.callables = dict([(c.__name__, c) for c in callables])
|
||||
|
||||
if templateuri is not None:
|
||||
self.template = _lookup_template(context, templateuri,
|
||||
calling_uri)
|
||||
self._templateuri = self.template.module._template_uri
|
||||
elif template is not None:
|
||||
self.template = template
|
||||
self._templateuri = template.module._template_uri
|
||||
else:
|
||||
raise TypeError("'template' argument is required.")
|
||||
|
||||
if populate_self:
|
||||
lclcallable, lclcontext = \
|
||||
_populate_self_namespace(context, self.template,
|
||||
self_ns=self)
|
||||
|
||||
@property
|
||||
def module(self):
|
||||
"""The Python module referenced by this :class:`.Namespace`.
|
||||
|
||||
If the namespace references a :class:`.Template`, then
|
||||
this module is the equivalent of ``template.module``,
|
||||
i.e. the generated module for the template.
|
||||
|
||||
"""
|
||||
return self.template.module
|
||||
|
||||
@property
|
||||
def filename(self):
|
||||
"""The path of the filesystem file used for this
|
||||
:class:`.Namespace`'s module or template.
|
||||
"""
|
||||
return self.template.filename
|
||||
|
||||
@property
|
||||
def uri(self):
|
||||
"""The URI for this :class:`.Namespace`'s template.
|
||||
|
||||
I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
|
||||
|
||||
This is the equivalent of :attr:`.Template.uri`.
|
||||
|
||||
"""
|
||||
return self.template.uri
|
||||
|
||||
def _get_star(self):
|
||||
if self.callables:
|
||||
for key in self.callables:
|
||||
yield (key, self.callables[key])
|
||||
|
||||
def get(key):
|
||||
callable_ = self.template._get_def_callable(key)
|
||||
return compat.partial(callable_, self.context)
|
||||
for k in self.template.module._exports:
|
||||
yield (k, get(k))
|
||||
|
||||
def __getattr__(self, key):
|
||||
if key in self.callables:
|
||||
val = self.callables[key]
|
||||
elif self.template.has_def(key):
|
||||
callable_ = self.template._get_def_callable(key)
|
||||
val = compat.partial(callable_, self.context)
|
||||
elif self.inherits:
|
||||
val = getattr(self.inherits, key)
|
||||
|
||||
else:
|
||||
raise AttributeError(
|
||||
"Namespace '%s' has no member '%s'" %
|
||||
(self.name, key))
|
||||
setattr(self, key, val)
|
||||
return val
|
||||
|
||||
|
||||
class ModuleNamespace(Namespace):
|
||||
|
||||
"""A :class:`.Namespace` specific to a Python module instance."""
|
||||
|
||||
def __init__(self, name, context, module,
|
||||
callables=None, inherits=None,
|
||||
populate_self=True, calling_uri=None):
|
||||
self.name = name
|
||||
self.context = context
|
||||
self.inherits = inherits
|
||||
if callables is not None:
|
||||
self.callables = dict([(c.__name__, c) for c in callables])
|
||||
|
||||
mod = __import__(module)
|
||||
for token in module.split('.')[1:]:
|
||||
mod = getattr(mod, token)
|
||||
self.module = mod
|
||||
|
||||
@property
|
||||
def filename(self):
|
||||
"""The path of the filesystem file used for this
|
||||
:class:`.Namespace`'s module or template.
|
||||
"""
|
||||
return self.module.__file__
|
||||
|
||||
def _get_star(self):
|
||||
if self.callables:
|
||||
for key in self.callables:
|
||||
yield (key, self.callables[key])
|
||||
for key in dir(self.module):
|
||||
if key[0] != '_':
|
||||
callable_ = getattr(self.module, key)
|
||||
if compat.callable(callable_):
|
||||
yield key, compat.partial(callable_, self.context)
|
||||
|
||||
def __getattr__(self, key):
|
||||
if key in self.callables:
|
||||
val = self.callables[key]
|
||||
elif hasattr(self.module, key):
|
||||
callable_ = getattr(self.module, key)
|
||||
val = compat.partial(callable_, self.context)
|
||||
elif self.inherits:
|
||||
val = getattr(self.inherits, key)
|
||||
else:
|
||||
raise AttributeError(
|
||||
"Namespace '%s' has no member '%s'" %
|
||||
(self.name, key))
|
||||
setattr(self, key, val)
|
||||
return val
|
||||
|
||||
|
||||
def supports_caller(func):
|
||||
"""Apply a caller_stack compatibility decorator to a plain
|
||||
Python function.
|
||||
|
||||
See the example in :ref:`namespaces_python_modules`.
|
||||
|
||||
"""
|
||||
|
||||
def wrap_stackframe(context, *args, **kwargs):
|
||||
context.caller_stack._push_frame()
|
||||
try:
|
||||
return func(context, *args, **kwargs)
|
||||
finally:
|
||||
context.caller_stack._pop_frame()
|
||||
return wrap_stackframe
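# Hedged sketch of the pattern referred to above: a plain Python function,
# usable from a template via <%namespace module="..."/> and <%call>, wrapped
# with supports_caller so the caller stack is maintained. Names are
# illustrative, not taken from this commit.
from mako.runtime import supports_caller

@supports_caller
def layout(context):
    context.write("<div class='box'>")
    context['caller'].body()
    context.write("</div>")
    return ''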
|
||||
|
||||
|
||||
def capture(context, callable_, *args, **kwargs):
|
||||
"""Execute the given template def, capturing the output into
|
||||
a buffer.
|
||||
|
||||
See the example in :ref:`namespaces_python_modules`.
|
||||
|
||||
"""
|
||||
|
||||
if not compat.callable(callable_):
|
||||
raise exceptions.RuntimeException(
|
||||
"capture() function expects a callable as "
|
||||
"its argument (i.e. capture(func, *args, **kwargs))"
|
||||
)
|
||||
context._push_buffer()
|
||||
try:
|
||||
callable_(*args, **kwargs)
|
||||
finally:
|
||||
buf = context._pop_buffer()
|
||||
return buf.getvalue()
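# Hedged sketch of capture() used from a module-level def: the caller's body
# is rendered into a buffer and returned as a string instead of being written
# straight to the output. Names are illustrative.
from mako.runtime import capture, supports_caller

@supports_caller
def shout(context):
    inner = capture(context, context['caller'].body)
    context.write(inner.upper())
    return ''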
|
||||
|
||||
|
||||
def _decorate_toplevel(fn):
|
||||
def decorate_render(render_fn):
|
||||
def go(context, *args, **kw):
|
||||
def y(*args, **kw):
|
||||
return render_fn(context, *args, **kw)
|
||||
try:
|
||||
y.__name__ = render_fn.__name__[7:]
|
||||
except TypeError:
|
||||
# < Python 2.4
|
||||
pass
|
||||
return fn(y)(context, *args, **kw)
|
||||
return go
|
||||
return decorate_render
|
||||
|
||||
|
||||
def _decorate_inline(context, fn):
|
||||
def decorate_render(render_fn):
|
||||
dec = fn(render_fn)
|
||||
|
||||
def go(*args, **kw):
|
||||
return dec(context, *args, **kw)
|
||||
return go
|
||||
return decorate_render
|
||||
|
||||
|
||||
def _include_file(context, uri, calling_uri, **kwargs):
|
||||
"""locate the template from the given uri and include it in
|
||||
the current output."""
|
||||
|
||||
template = _lookup_template(context, uri, calling_uri)
|
||||
(callable_, ctx) = _populate_self_namespace(
|
||||
context._clean_inheritance_tokens(),
|
||||
template)
|
||||
callable_(ctx, **_kwargs_for_include(callable_, context._data, **kwargs))
|
||||
|
||||
|
||||
def _inherit_from(context, uri, calling_uri):
|
||||
"""called by the _inherit method in template modules to set
|
||||
up the inheritance chain at the start of a template's
|
||||
execution."""
|
||||
|
||||
if uri is None:
|
||||
return None
|
||||
template = _lookup_template(context, uri, calling_uri)
|
||||
self_ns = context['self']
|
||||
ih = self_ns
|
||||
while ih.inherits is not None:
|
||||
ih = ih.inherits
|
||||
lclcontext = context._locals({'next': ih})
|
||||
ih.inherits = TemplateNamespace("self:%s" % template.uri,
|
||||
lclcontext,
|
||||
template=template,
|
||||
populate_self=False)
|
||||
context._data['parent'] = lclcontext._data['local'] = ih.inherits
|
||||
callable_ = getattr(template.module, '_mako_inherit', None)
|
||||
if callable_ is not None:
|
||||
ret = callable_(template, lclcontext)
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
|
||||
if gen_ns is not None:
|
||||
gen_ns(context)
|
||||
return (template.callable_, lclcontext)
|
||||
|
||||
|
||||
def _lookup_template(context, uri, relativeto):
|
||||
lookup = context._with_template.lookup
|
||||
if lookup is None:
|
||||
raise exceptions.TemplateLookupException(
|
||||
"Template '%s' has no TemplateLookup associated" %
|
||||
context._with_template.uri)
|
||||
uri = lookup.adjust_uri(uri, relativeto)
|
||||
try:
|
||||
return lookup.get_template(uri)
|
||||
except exceptions.TopLevelLookupException:
|
||||
raise exceptions.TemplateLookupException(str(compat.exception_as()))
|
||||
|
||||
|
||||
def _populate_self_namespace(context, template, self_ns=None):
|
||||
if self_ns is None:
|
||||
self_ns = TemplateNamespace('self:%s' % template.uri,
|
||||
context, template=template,
|
||||
populate_self=False)
|
||||
context._data['self'] = context._data['local'] = self_ns
|
||||
if hasattr(template.module, '_mako_inherit'):
|
||||
ret = template.module._mako_inherit(template, context)
|
||||
if ret:
|
||||
return ret
|
||||
return (template.callable_, context)
|
||||
|
||||
|
||||
def _render(template, callable_, args, data, as_unicode=False):
|
||||
"""create a Context and return the string
|
||||
output of the given template and template callable."""
|
||||
|
||||
if as_unicode:
|
||||
buf = util.FastEncodingBuffer(as_unicode=True)
|
||||
elif template.bytestring_passthrough:
|
||||
buf = compat.StringIO()
|
||||
else:
|
||||
buf = util.FastEncodingBuffer(
|
||||
as_unicode=as_unicode,
|
||||
encoding=template.output_encoding,
|
||||
errors=template.encoding_errors)
|
||||
context = Context(buf, **data)
|
||||
context._outputting_as_unicode = as_unicode
|
||||
context._set_with_template(template)
|
||||
|
||||
_render_context(template, callable_, context, *args,
|
||||
**_kwargs_for_callable(callable_, data))
|
||||
return context._pop_buffer().getvalue()
|
||||
|
||||
|
||||
def _kwargs_for_callable(callable_, data):
|
||||
argspec = compat.inspect_func_args(callable_)
|
||||
# for normal pages, **pageargs is usually present
|
||||
if argspec[2]:
|
||||
return data
|
||||
|
||||
# for rendering defs from the top level, figure out the args
|
||||
namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
|
||||
kwargs = {}
|
||||
for arg in namedargs:
|
||||
if arg != 'context' and arg in data and arg not in kwargs:
|
||||
kwargs[arg] = data[arg]
|
||||
return kwargs
|
||||
|
||||
|
||||
def _kwargs_for_include(callable_, data, **kwargs):
|
||||
argspec = compat.inspect_func_args(callable_)
|
||||
namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
|
||||
for arg in namedargs:
|
||||
if arg != 'context' and arg in data and arg not in kwargs:
|
||||
kwargs[arg] = data[arg]
|
||||
return kwargs
|
||||
|
||||
|
||||
def _render_context(tmpl, callable_, context, *args, **kwargs):
|
||||
import mako.template as template
|
||||
# create polymorphic 'self' namespace for this
|
||||
# template with possibly updated context
|
||||
if not isinstance(tmpl, template.DefTemplate):
|
||||
# if main render method, call from the base of the inheritance stack
|
||||
(inherit, lclcontext) = _populate_self_namespace(context, tmpl)
|
||||
_exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
|
||||
else:
|
||||
# otherwise, call the actual rendering method specified
|
||||
(inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent)
|
||||
_exec_template(callable_, context, args=args, kwargs=kwargs)
|
||||
|
||||
|
||||
def _exec_template(callable_, context, args=None, kwargs=None):
|
||||
"""execute a rendering callable given the callable, a
|
||||
Context, and optional explicit arguments
|
||||
|
||||
the contextual Template will be located if it exists, and
|
||||
the error handling options specified on that Template will
|
||||
be interpreted here.
|
||||
"""
|
||||
template = context._with_template
|
||||
if template is not None and \
|
||||
(template.format_exceptions or template.error_handler):
|
||||
try:
|
||||
callable_(context, *args, **kwargs)
|
||||
except Exception:
|
||||
_render_error(template, context, compat.exception_as())
|
||||
except:
|
||||
e = sys.exc_info()[0]
|
||||
_render_error(template, context, e)
|
||||
else:
|
||||
callable_(context, *args, **kwargs)
|
||||
|
||||
|
||||
def _render_error(template, context, error):
|
||||
if template.error_handler:
|
||||
result = template.error_handler(context, error)
|
||||
if not result:
|
||||
compat.reraise(*sys.exc_info())
|
||||
else:
|
||||
error_template = exceptions.html_error_template()
|
||||
if context._outputting_as_unicode:
|
||||
context._buffer_stack[:] = [
|
||||
util.FastEncodingBuffer(as_unicode=True)]
|
||||
else:
|
||||
context._buffer_stack[:] = [util.FastEncodingBuffer(
|
||||
error_template.output_encoding,
|
||||
error_template.encoding_errors)]
|
||||
|
||||
context._set_with_template(error_template)
|
||||
error_template.render_context(context, error=error)
|
|
@ -0,0 +1,718 @@
|
|||
# mako/template.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Provides the Template class, a facade for parsing, generating and executing
|
||||
template strings, as well as template runtime operations."""
|
||||
|
||||
from mako.lexer import Lexer
|
||||
from mako import runtime, util, exceptions, codegen, cache, compat
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import tempfile
|
||||
import types
|
||||
import weakref
|
||||
|
||||
|
||||
class Template(object):
|
||||
|
||||
"""Represents a compiled template.
|
||||
|
||||
:class:`.Template` includes a reference to the original
|
||||
template source (via the :attr:`.source` attribute)
|
||||
as well as the source code of the
|
||||
generated Python module (i.e. the :attr:`.code` attribute),
|
||||
as well as a reference to an actual Python module.
|
||||
|
||||
:class:`.Template` is constructed using either a literal string
|
||||
representing the template text, or a filename representing a filesystem
|
||||
path to a source file.
|
||||
|
||||
:param text: textual template source. This argument is mutually
|
||||
exclusive versus the ``filename`` parameter.
|
||||
|
||||
:param filename: filename of the source template. This argument is
|
||||
mutually exclusive versus the ``text`` parameter.
|
||||
|
||||
:param buffer_filters: string list of filters to be applied
|
||||
to the output of ``%def``\ s which are buffered, cached, or otherwise
|
||||
filtered, after all filters
|
||||
defined with the ``%def`` itself have been applied. Allows the
|
||||
creation of default expression filters that let the output
|
||||
of return-valued ``%def``\ s "opt out" of that filtering via
|
||||
passing special attributes or objects.
|
||||
|
||||
:param bytestring_passthrough: When ``True``, and ``output_encoding`` is
|
||||
set to ``None``, and :meth:`.Template.render` is used to render,
|
||||
the `StringIO` or `cStringIO` buffer will be used instead of the
|
||||
default "fast" buffer. This allows raw bytestrings in the
|
||||
output stream, such as in expressions, to pass straight
|
||||
through to the buffer. This flag is forced
|
||||
to ``True`` if ``disable_unicode`` is also configured.
|
||||
|
||||
.. versionadded:: 0.4
|
||||
Added to provide the same behavior as that of the previous series.
|
||||
|
||||
:param cache_args: Dictionary of cache configuration arguments that
|
||||
will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`.
|
||||
|
||||
:param cache_dir:
|
||||
|
||||
.. deprecated:: 0.6
|
||||
Use the ``'dir'`` argument in the ``cache_args`` dictionary.
|
||||
See :ref:`caching_toplevel`.
|
||||
|
||||
:param cache_enabled: Boolean flag which enables caching of this
|
||||
template. See :ref:`caching_toplevel`.
|
||||
|
||||
:param cache_impl: String name of a :class:`.CacheImpl` caching
|
||||
implementation to use. Defaults to ``'beaker'``.
|
||||
|
||||
:param cache_type:
|
||||
|
||||
.. deprecated:: 0.6
|
||||
Use the ``'type'`` argument in the ``cache_args`` dictionary.
|
||||
See :ref:`caching_toplevel`.
|
||||
|
||||
:param cache_url:
|
||||
|
||||
.. deprecated:: 0.6
|
||||
Use the ``'url'`` argument in the ``cache_args`` dictionary.
|
||||
See :ref:`caching_toplevel`.
|
||||
|
||||
:param default_filters: List of string filter names that will
|
||||
be applied to all expressions. See :ref:`filtering_default_filters`.
|
||||
|
||||
:param disable_unicode: Disables all awareness of Python Unicode
|
||||
objects. See :ref:`unicode_disabled`.
|
||||
|
||||
:param enable_loop: When ``True``, enable the ``loop`` context variable.
|
||||
This can be set to ``False`` to support templates that may
|
||||
be making usage of the name "``loop``". Individual templates can
|
||||
re-enable the "loop" context by placing the directive
|
||||
``enable_loop="True"`` inside the ``<%page>`` tag -- see
|
||||
:ref:`migrating_loop`.
|
||||
|
||||
:param encoding_errors: Error parameter passed to ``encode()`` when
|
||||
string encoding is performed. See :ref:`usage_unicode`.
|
||||
|
||||
:param error_handler: Python callable which is called whenever
|
||||
compile or runtime exceptions occur. The callable is passed
|
||||
the current context as well as the exception. If the
|
||||
callable returns ``True``, the exception is considered to
|
||||
be handled, else it is re-raised after the function
|
||||
completes. Is used to provide custom error-rendering
|
||||
functions.
|
||||
|
||||
:param format_exceptions: if ``True``, exceptions which occur during
|
||||
the render phase of this template will be caught and
|
||||
formatted into an HTML error page, which then becomes the
|
||||
rendered result of the :meth:`.render` call. Otherwise,
|
||||
runtime exceptions are propagated outwards.
|
||||
|
||||
:param imports: String list of Python statements, typically individual
|
||||
"import" lines, which will be placed into the module level
|
||||
preamble of all generated Python modules. See the example
|
||||
in :ref:`filtering_default_filters`.
|
||||
|
||||
:param future_imports: String list of names to import from `__future__`.
|
||||
These will be concatenated into a comma-separated string and inserted
|
||||
into the beginning of the template, e.g. ``future_imports=['FOO',
|
||||
'BAR']`` results in ``from __future__ import FOO, BAR``. If you're
|
||||
interested in using features like the new division operator, you must
|
||||
use future_imports to convey that to the renderer, as otherwise the
|
||||
import will not appear as the first executed statement in the generated
|
||||
code and will therefore not have the desired effect.
|
||||
|
||||
:param input_encoding: Encoding of the template's source code. Can
|
||||
be used in lieu of the coding comment. See
|
||||
:ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
|
||||
details on source encoding.
|
||||
|
||||
:param lookup: a :class:`.TemplateLookup` instance that will be used
|
||||
for all file lookups via the ``<%namespace>``,
|
||||
``<%include>``, and ``<%inherit>`` tags. See
|
||||
:ref:`usage_templatelookup`.
|
||||
|
||||
:param module_directory: Filesystem location where generated
|
||||
Python module files will be placed.
|
||||
|
||||
:param module_filename: Overrides the filename of the generated
|
||||
Python module file. For advanced usage only.
|
||||
|
||||
:param module_writer: A callable which overrides how the Python
|
||||
module is written entirely. The callable is passed the
|
||||
encoded source content of the module and the destination
|
||||
path to be written to. The default behavior of module writing
|
||||
uses a tempfile in conjunction with a file move in order
|
||||
to make the operation atomic. So a user-defined module
|
||||
writing function that mimics the default behavior would be:
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
import tempfile
|
||||
import os
|
||||
import shutil
|
||||
|
||||
def module_writer(source, outputpath):
|
||||
(dest, name) = \\
|
||||
tempfile.mkstemp(
|
||||
dir=os.path.dirname(outputpath)
|
||||
)
|
||||
|
||||
os.write(dest, source)
|
||||
os.close(dest)
|
||||
shutil.move(name, outputpath)
|
||||
|
||||
from mako.template import Template
|
||||
mytemplate = Template(
|
||||
filename="admin_index.mako",
|
||||
module_directory="/path/to/modules",
|
||||
module_writer=module_writer
|
||||
)
|
||||
|
||||
The function is provided for unusual configurations where
|
||||
certain platform-specific permissions or other special
|
||||
steps are needed.
|
||||
|
||||
:param output_encoding: The encoding to use when :meth:`.render`
|
||||
is called.
|
||||
See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`.
|
||||
|
||||
:param preprocessor: Python callable which will be passed
|
||||
the full template source before it is parsed. The return
|
||||
result of the callable will be used as the template source
|
||||
code.
|
||||
|
||||
:param lexer_cls: A :class:`.Lexer` class used to parse
|
||||
the template. The :class:`.Lexer` class is used by
|
||||
default.
|
||||
|
||||
.. versionadded:: 0.7.4
|
||||
|
||||
:param strict_undefined: Replaces the automatic usage of
|
||||
``UNDEFINED`` for any undeclared variables not located in
|
||||
the :class:`.Context` with an immediate raise of
|
||||
``NameError``. The advantage is immediate reporting of
|
||||
missing variables which include the name.
|
||||
|
||||
.. versionadded:: 0.3.6
|
||||
|
||||
:param uri: string URI or other identifier for this template.
|
||||
If not provided, the ``uri`` is generated from the filesystem
|
||||
path, or from the in-memory identity of a non-file-based
|
||||
template. The primary usage of the ``uri`` is to provide a key
|
||||
within :class:`.TemplateLookup`, as well as to generate the
|
||||
file path of the generated Python module file, if
|
||||
``module_directory`` is specified.
|
||||
|
||||
"""
|
||||
|
||||
lexer_cls = Lexer
|
||||
|
||||
def __init__(self,
|
||||
text=None,
|
||||
filename=None,
|
||||
uri=None,
|
||||
format_exceptions=False,
|
||||
error_handler=None,
|
||||
lookup=None,
|
||||
output_encoding=None,
|
||||
encoding_errors='strict',
|
||||
module_directory=None,
|
||||
cache_args=None,
|
||||
cache_impl='beaker',
|
||||
cache_enabled=True,
|
||||
cache_type=None,
|
||||
cache_dir=None,
|
||||
cache_url=None,
|
||||
module_filename=None,
|
||||
input_encoding=None,
|
||||
disable_unicode=False,
|
||||
module_writer=None,
|
||||
bytestring_passthrough=False,
|
||||
default_filters=None,
|
||||
buffer_filters=(),
|
||||
strict_undefined=False,
|
||||
imports=None,
|
||||
future_imports=None,
|
||||
enable_loop=True,
|
||||
preprocessor=None,
|
||||
lexer_cls=None):
|
||||
if uri:
|
||||
self.module_id = re.sub(r'\W', "_", uri)
|
||||
self.uri = uri
|
||||
elif filename:
|
||||
self.module_id = re.sub(r'\W', "_", filename)
|
||||
drive, path = os.path.splitdrive(filename)
|
||||
path = os.path.normpath(path).replace(os.path.sep, "/")
|
||||
self.uri = path
|
||||
else:
|
||||
self.module_id = "memory:" + hex(id(self))
|
||||
self.uri = self.module_id
|
||||
|
||||
u_norm = self.uri
|
||||
if u_norm.startswith("/"):
|
||||
u_norm = u_norm[1:]
|
||||
u_norm = os.path.normpath(u_norm)
|
||||
if u_norm.startswith(".."):
|
||||
raise exceptions.TemplateLookupException(
|
||||
"Template uri \"%s\" is invalid - "
|
||||
"it cannot be relative outside "
|
||||
"of the root path." % self.uri)
|
||||
|
||||
self.input_encoding = input_encoding
|
||||
self.output_encoding = output_encoding
|
||||
self.encoding_errors = encoding_errors
|
||||
self.disable_unicode = disable_unicode
|
||||
self.bytestring_passthrough = bytestring_passthrough or disable_unicode
|
||||
self.enable_loop = enable_loop
|
||||
self.strict_undefined = strict_undefined
|
||||
self.module_writer = module_writer
|
||||
|
||||
if compat.py3k and disable_unicode:
|
||||
raise exceptions.UnsupportedError(
|
||||
"Mako for Python 3 does not "
|
||||
"support disabling Unicode")
|
||||
elif output_encoding and disable_unicode:
|
||||
raise exceptions.UnsupportedError(
|
||||
"output_encoding must be set to "
|
||||
"None when disable_unicode is used.")
|
||||
if default_filters is None:
|
||||
if compat.py3k or self.disable_unicode:
|
||||
self.default_filters = ['str']
|
||||
else:
|
||||
self.default_filters = ['unicode']
|
||||
else:
|
||||
self.default_filters = default_filters
|
||||
self.buffer_filters = buffer_filters
|
||||
|
||||
self.imports = imports
|
||||
self.future_imports = future_imports
|
||||
self.preprocessor = preprocessor
|
||||
|
||||
if lexer_cls is not None:
|
||||
self.lexer_cls = lexer_cls
|
||||
|
||||
# if plain text, compile code in memory only
|
||||
if text is not None:
|
||||
(code, module) = _compile_text(self, text, filename)
|
||||
self._code = code
|
||||
self._source = text
|
||||
ModuleInfo(module, None, self, filename, code, text)
|
||||
elif filename is not None:
|
||||
# if template filename and a module directory, load
|
||||
# a filesystem-based module file, generating if needed
|
||||
if module_filename is not None:
|
||||
path = module_filename
|
||||
elif module_directory is not None:
|
||||
path = os.path.abspath(
|
||||
os.path.join(
|
||||
os.path.normpath(module_directory),
|
||||
u_norm + ".py"
|
||||
)
|
||||
)
|
||||
else:
|
||||
path = None
|
||||
module = self._compile_from_file(path, filename)
|
||||
else:
|
||||
raise exceptions.RuntimeException(
|
||||
"Template requires text or filename")
|
||||
|
||||
self.module = module
|
||||
self.filename = filename
|
||||
self.callable_ = self.module.render_body
|
||||
self.format_exceptions = format_exceptions
|
||||
self.error_handler = error_handler
|
||||
self.lookup = lookup
|
||||
|
||||
self.module_directory = module_directory
|
||||
|
||||
self._setup_cache_args(
|
||||
cache_impl, cache_enabled, cache_args,
|
||||
cache_type, cache_dir, cache_url
|
||||
)
|
||||
|
||||
@util.memoized_property
|
||||
def reserved_names(self):
|
||||
if self.enable_loop:
|
||||
return codegen.RESERVED_NAMES
|
||||
else:
|
||||
return codegen.RESERVED_NAMES.difference(['loop'])
|
||||
|
||||
def _setup_cache_args(self,
|
||||
cache_impl, cache_enabled, cache_args,
|
||||
cache_type, cache_dir, cache_url):
|
||||
self.cache_impl = cache_impl
|
||||
self.cache_enabled = cache_enabled
|
||||
if cache_args:
|
||||
self.cache_args = cache_args
|
||||
else:
|
||||
self.cache_args = {}
|
||||
|
||||
# transfer deprecated cache_* args
|
||||
if cache_type:
|
||||
self.cache_args['type'] = cache_type
|
||||
if cache_dir:
|
||||
self.cache_args['dir'] = cache_dir
|
||||
if cache_url:
|
||||
self.cache_args['url'] = cache_url
|
||||
|
||||
def _compile_from_file(self, path, filename):
|
||||
if path is not None:
|
||||
util.verify_directory(os.path.dirname(path))
|
||||
filemtime = os.stat(filename)[stat.ST_MTIME]
|
||||
if not os.path.exists(path) or \
|
||||
os.stat(path)[stat.ST_MTIME] < filemtime:
|
||||
data = util.read_file(filename)
|
||||
_compile_module_file(
|
||||
self,
|
||||
data,
|
||||
filename,
|
||||
path,
|
||||
self.module_writer)
|
||||
module = compat.load_module(self.module_id, path)
|
||||
del sys.modules[self.module_id]
|
||||
if module._magic_number != codegen.MAGIC_NUMBER:
|
||||
data = util.read_file(filename)
|
||||
_compile_module_file(
|
||||
self,
|
||||
data,
|
||||
filename,
|
||||
path,
|
||||
self.module_writer)
|
||||
module = compat.load_module(self.module_id, path)
|
||||
del sys.modules[self.module_id]
|
||||
ModuleInfo(module, path, self, filename, None, None)
|
||||
else:
|
||||
# template filename and no module directory, compile code
|
||||
# in memory
|
||||
data = util.read_file(filename)
|
||||
code, module = _compile_text(
|
||||
self,
|
||||
data,
|
||||
filename)
|
||||
self._source = None
|
||||
self._code = code
|
||||
ModuleInfo(module, None, self, filename, code, None)
|
||||
return module
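# Hedged sketch of the file-based path handled by _compile_from_file() above:
# the generated module is cached under module_directory and regenerated when
# the template file is newer or the MAGIC_NUMBER changes. Paths are
# illustrative.
from mako.template import Template

tmpl = Template(filename="/path/to/templates/hello.mako",
                module_directory="/tmp/mako_modules")
print(tmpl.render(name="world"))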
|
||||
|
||||
@property
|
||||
def source(self):
|
||||
"""Return the template source code for this :class:`.Template`."""
|
||||
|
||||
return _get_module_info_from_callable(self.callable_).source
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
"""Return the module source code for this :class:`.Template`."""
|
||||
|
||||
return _get_module_info_from_callable(self.callable_).code
|
||||
|
||||
@util.memoized_property
|
||||
def cache(self):
|
||||
return cache.Cache(self)
|
||||
|
||||
@property
|
||||
def cache_dir(self):
|
||||
return self.cache_args['dir']
|
||||
|
||||
@property
|
||||
def cache_url(self):
|
||||
return self.cache_args['url']
|
||||
|
||||
@property
|
||||
def cache_type(self):
|
||||
return self.cache_args['type']
|
||||
|
||||
def render(self, *args, **data):
|
||||
"""Render the output of this template as a string.
|
||||
|
||||
If the template specifies an output encoding, the string
|
||||
will be encoded accordingly, else the output is raw (raw
|
||||
output uses `cStringIO` and can't handle multibyte
|
||||
characters). A :class:`.Context` object is created corresponding
|
||||
to the given data. Arguments that are explicitly declared
|
||||
by this template's internal rendering method are also
|
||||
pulled from the given ``*args``, ``**data`` members.
|
||||
|
||||
"""
|
||||
return runtime._render(self, self.callable_, args, data)
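# Hedged sketch contrasting render() and render_unicode(), per the methods
# defined here.
from mako.template import Template

t = Template("hello, ${name}!", output_encoding="utf-8")
print(t.render(name="world"))          # encoded bytes, per output_encoding
print(t.render_unicode(name="world"))  # text, regardless of output_encoding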
|
||||
|
||||
def render_unicode(self, *args, **data):
|
||||
"""Render the output of this template as a unicode object."""
|
||||
|
||||
return runtime._render(self,
|
||||
self.callable_,
|
||||
args,
|
||||
data,
|
||||
as_unicode=True)
|
||||
|
||||
def render_context(self, context, *args, **kwargs):
|
||||
"""Render this :class:`.Template` with the given context.
|
||||
|
||||
The data is written to the context's buffer.
|
||||
|
||||
"""
|
||||
if getattr(context, '_with_template', None) is None:
|
||||
context._set_with_template(self)
|
||||
runtime._render_context(self,
|
||||
self.callable_,
|
||||
context,
|
||||
*args,
|
||||
**kwargs)
|
||||
|
||||
def has_def(self, name):
|
||||
return hasattr(self.module, "render_%s" % name)
|
||||
|
||||
def get_def(self, name):
|
||||
"""Return a def of this template as a :class:`.DefTemplate`."""
|
||||
|
||||
return DefTemplate(self, getattr(self.module, "render_%s" % name))
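# Hedged sketch: rendering a single def via get_def(), which returns the
# DefTemplate wrapper defined later in this file.
from mako.template import Template

t = Template(
    "<%def name='title(text)'>== ${text} ==</%def>"
    "body content")
print(t.get_def("title").render(text="Report"))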
|
||||
|
||||
def _get_def_callable(self, name):
|
||||
return getattr(self.module, "render_%s" % name)
|
||||
|
||||
@property
|
||||
def last_modified(self):
|
||||
return self.module._modified_time
|
||||
|
||||
|
||||
class ModuleTemplate(Template):
|
||||
|
||||
"""A Template which is constructed given an existing Python module.
|
||||
|
||||
e.g.::
|
||||
|
||||
t = Template("this is a template")
|
||||
f = file("mymodule.py", "w")
|
||||
f.write(t.code)
|
||||
f.close()
|
||||
|
||||
import mymodule
|
||||
|
||||
t = ModuleTemplate(mymodule)
|
||||
print t.render()
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, module,
|
||||
module_filename=None,
|
||||
template=None,
|
||||
template_filename=None,
|
||||
module_source=None,
|
||||
template_source=None,
|
||||
output_encoding=None,
|
||||
encoding_errors='strict',
|
||||
disable_unicode=False,
|
||||
bytestring_passthrough=False,
|
||||
format_exceptions=False,
|
||||
error_handler=None,
|
||||
lookup=None,
|
||||
cache_args=None,
|
||||
cache_impl='beaker',
|
||||
cache_enabled=True,
|
||||
cache_type=None,
|
||||
cache_dir=None,
|
||||
cache_url=None,
|
||||
):
|
||||
self.module_id = re.sub(r'\W', "_", module._template_uri)
|
||||
self.uri = module._template_uri
|
||||
self.input_encoding = module._source_encoding
|
||||
self.output_encoding = output_encoding
|
||||
self.encoding_errors = encoding_errors
|
||||
self.disable_unicode = disable_unicode
|
||||
self.bytestring_passthrough = bytestring_passthrough or disable_unicode
|
||||
self.enable_loop = module._enable_loop
|
||||
|
||||
if compat.py3k and disable_unicode:
|
||||
raise exceptions.UnsupportedError(
|
||||
"Mako for Python 3 does not "
|
||||
"support disabling Unicode")
|
||||
elif output_encoding and disable_unicode:
|
||||
raise exceptions.UnsupportedError(
|
||||
"output_encoding must be set to "
|
||||
"None when disable_unicode is used.")
|
||||
|
||||
self.module = module
|
||||
self.filename = template_filename
|
||||
ModuleInfo(module,
|
||||
module_filename,
|
||||
self,
|
||||
template_filename,
|
||||
module_source,
|
||||
template_source)
|
||||
|
||||
self.callable_ = self.module.render_body
|
||||
self.format_exceptions = format_exceptions
|
||||
self.error_handler = error_handler
|
||||
self.lookup = lookup
|
||||
self._setup_cache_args(
|
||||
cache_impl, cache_enabled, cache_args,
|
||||
cache_type, cache_dir, cache_url
|
||||
)
|
||||
|
||||
|
||||
class DefTemplate(Template):
|
||||
|
||||
"""A :class:`.Template` which represents a callable def in a parent
|
||||
template."""
|
||||
|
||||
def __init__(self, parent, callable_):
|
||||
self.parent = parent
|
||||
self.callable_ = callable_
|
||||
self.output_encoding = parent.output_encoding
|
||||
self.module = parent.module
|
||||
self.encoding_errors = parent.encoding_errors
|
||||
self.format_exceptions = parent.format_exceptions
|
||||
self.error_handler = parent.error_handler
|
||||
self.enable_loop = parent.enable_loop
|
||||
self.lookup = parent.lookup
|
||||
self.bytestring_passthrough = parent.bytestring_passthrough
|
||||
|
||||
def get_def(self, name):
|
||||
return self.parent.get_def(name)
|
||||
|
||||
|
||||
class ModuleInfo(object):
|
||||
|
||||
"""Stores information about a module currently loaded into
|
||||
memory, provides reverse lookups of template source, module
|
||||
source code based on a module's identifier.
|
||||
|
||||
"""
|
||||
_modules = weakref.WeakValueDictionary()
|
||||
|
||||
def __init__(self,
|
||||
module,
|
||||
module_filename,
|
||||
template,
|
||||
template_filename,
|
||||
module_source,
|
||||
template_source):
|
||||
self.module = module
|
||||
self.module_filename = module_filename
|
||||
self.template_filename = template_filename
|
||||
self.module_source = module_source
|
||||
self.template_source = template_source
|
||||
self._modules[module.__name__] = template._mmarker = self
|
||||
if module_filename:
|
||||
self._modules[module_filename] = self
|
||||
|
||||
@classmethod
|
||||
def get_module_source_metadata(cls, module_source, full_line_map=False):
|
||||
source_map = re.search(
|
||||
r"__M_BEGIN_METADATA(.+?)__M_END_METADATA",
|
||||
module_source, re.S).group(1)
|
||||
source_map = compat.json.loads(source_map)
|
||||
source_map['line_map'] = dict(
|
||||
(int(k), int(v))
|
||||
for k, v in source_map['line_map'].items())
|
||||
if full_line_map:
|
||||
f_line_map = source_map['full_line_map'] = []
|
||||
line_map = source_map['line_map']
|
||||
|
||||
curr_templ_line = 1
|
||||
for mod_line in range(1, max(line_map)):
|
||||
if mod_line in line_map:
|
||||
curr_templ_line = line_map[mod_line]
|
||||
f_line_map.append(curr_templ_line)
|
||||
return source_map
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
if self.module_source is not None:
|
||||
return self.module_source
|
||||
else:
|
||||
return util.read_python_file(self.module_filename)
|
||||
|
||||
@property
|
||||
def source(self):
|
||||
if self.template_source is not None:
|
||||
if self.module._source_encoding and \
|
||||
not isinstance(self.template_source, compat.text_type):
|
||||
return self.template_source.decode(
|
||||
self.module._source_encoding)
|
||||
else:
|
||||
return self.template_source
|
||||
else:
|
||||
data = util.read_file(self.template_filename)
|
||||
if self.module._source_encoding:
|
||||
return data.decode(self.module._source_encoding)
|
||||
else:
|
||||
return data
|
||||
|
||||
|
||||
def _compile(template, text, filename, generate_magic_comment):
|
||||
lexer = template.lexer_cls(text,
|
||||
filename,
|
||||
disable_unicode=template.disable_unicode,
|
||||
input_encoding=template.input_encoding,
|
||||
preprocessor=template.preprocessor)
|
||||
node = lexer.parse()
|
||||
source = codegen.compile(node,
|
||||
template.uri,
|
||||
filename,
|
||||
default_filters=template.default_filters,
|
||||
buffer_filters=template.buffer_filters,
|
||||
imports=template.imports,
|
||||
future_imports=template.future_imports,
|
||||
source_encoding=lexer.encoding,
|
||||
generate_magic_comment=generate_magic_comment,
|
||||
disable_unicode=template.disable_unicode,
|
||||
strict_undefined=template.strict_undefined,
|
||||
enable_loop=template.enable_loop,
|
||||
reserved_names=template.reserved_names)
|
||||
return source, lexer
|
||||
|
||||
|
||||
def _compile_text(template, text, filename):
|
||||
identifier = template.module_id
|
||||
source, lexer = _compile(template, text, filename,
|
||||
generate_magic_comment=template.disable_unicode)
|
||||
|
||||
cid = identifier
|
||||
if not compat.py3k and isinstance(cid, compat.text_type):
|
||||
cid = cid.encode()
|
||||
module = types.ModuleType(cid)
|
||||
code = compile(source, cid, 'exec')
|
||||
|
||||
# this exec() works for 2.4->3.3.
|
||||
exec(code, module.__dict__, module.__dict__)
|
||||
return (source, module)
|
||||
|
||||
|
||||
def _compile_module_file(template, text, filename, outputpath, module_writer):
|
||||
source, lexer = _compile(template, text, filename,
|
||||
generate_magic_comment=True)
|
||||
|
||||
if isinstance(source, compat.text_type):
|
||||
source = source.encode(lexer.encoding or 'ascii')
|
||||
|
||||
if module_writer:
|
||||
module_writer(source, outputpath)
|
||||
else:
|
||||
# make tempfiles in the same location as the ultimate
|
||||
# location. this ensures they're on the same filesystem,
|
||||
# avoiding synchronization issues.
|
||||
(dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath))
|
||||
|
||||
os.write(dest, source)
|
||||
os.close(dest)
|
||||
shutil.move(name, outputpath)
|
||||
|
||||
|
||||
def _get_module_info_from_callable(callable_):
|
||||
if compat.py3k:
|
||||
return _get_module_info(callable_.__globals__['__name__'])
|
||||
else:
|
||||
return _get_module_info(callable_.func_globals['__name__'])
|
||||
|
||||
|
||||
def _get_module_info(filename):
|
||||
return ModuleInfo._modules[filename]
|
|
@ -0,0 +1,382 @@
|
|||
# mako/util.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import re
|
||||
import collections
|
||||
import codecs
|
||||
import os
|
||||
from mako import compat
|
||||
import operator
|
||||
|
||||
|
||||
def update_wrapper(decorated, fn):
|
||||
decorated.__wrapped__ = fn
|
||||
decorated.__name__ = fn.__name__
|
||||
return decorated
|
||||
|
||||
|
||||
class PluginLoader(object):
|
||||
|
||||
def __init__(self, group):
|
||||
self.group = group
|
||||
self.impls = {}
|
||||
|
||||
def load(self, name):
|
||||
if name in self.impls:
|
||||
return self.impls[name]()
|
||||
else:
|
||||
import pkg_resources
|
||||
for impl in pkg_resources.iter_entry_points(
|
||||
self.group,
|
||||
name):
|
||||
self.impls[name] = impl.load
|
||||
return impl.load()
|
||||
else:
|
||||
from mako import exceptions
|
||||
raise exceptions.RuntimeException(
|
||||
"Can't load plugin %s %s" %
|
||||
(self.group, name))
|
||||
|
||||
def register(self, name, modulepath, objname):
|
||||
def load():
|
||||
mod = __import__(modulepath)
|
||||
for token in modulepath.split(".")[1:]:
|
||||
mod = getattr(mod, token)
|
||||
return getattr(mod, objname)
|
||||
self.impls[name] = load
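# Hedged sketch of PluginLoader.register()/load() as defined above; the group
# name and registration below are illustrative and use only the stdlib.
from mako.util import PluginLoader

loader = PluginLoader("mako.cache")
loader.register("odict", "collections", "OrderedDict")
OrderedDict = loader.load("odict")        # returns collections.OrderedDict
print(OrderedDict([("a", 1)]))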
|
||||
|
||||
|
||||
def verify_directory(dir):
|
||||
"""create and/or verify a filesystem directory."""
|
||||
|
||||
tries = 0
|
||||
|
||||
while not os.path.exists(dir):
|
||||
try:
|
||||
tries += 1
|
||||
os.makedirs(dir, compat.octal("0775"))
|
||||
except:
|
||||
if tries > 5:
|
||||
raise
|
||||
|
||||
|
||||
def to_list(x, default=None):
|
||||
if x is None:
|
||||
return default
|
||||
if not isinstance(x, (list, tuple)):
|
||||
return [x]
|
||||
else:
|
||||
return x
|
||||
|
||||
|
||||
class memoized_property(object):
|
||||
|
||||
"""A read-only @property that is only evaluated once."""
|
||||
|
||||
def __init__(self, fget, doc=None):
|
||||
self.fget = fget
|
||||
self.__doc__ = doc or fget.__doc__
|
||||
self.__name__ = fget.__name__
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None:
|
||||
return self
|
||||
obj.__dict__[self.__name__] = result = self.fget(obj)
|
||||
return result
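# Hedged sketch of memoized_property: the wrapped function runs once per
# instance, after which the computed value shadows the descriptor.
from mako.util import memoized_property

class Report(object):
    @memoized_property
    def data(self):
        print("computing once")
        return [1, 2, 3]

r = Report()
print(r.data)  # triggers fget and stores the result in r.__dict__
print(r.data)  # served from the instance dict; fget is not called again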
|
||||
|
||||
|
||||
class memoized_instancemethod(object):
|
||||
|
||||
"""Decorate a method memoize its return value.
|
||||
|
||||
Best applied to no-arg methods: memoization is not sensitive to
|
||||
argument values, and will always return the same value even when
|
||||
called with different arguments.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, fget, doc=None):
|
||||
self.fget = fget
|
||||
self.__doc__ = doc or fget.__doc__
|
||||
self.__name__ = fget.__name__
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None:
|
||||
return self
|
||||
|
||||
def oneshot(*args, **kw):
|
||||
result = self.fget(obj, *args, **kw)
|
||||
memo = lambda *a, **kw: result
|
||||
memo.__name__ = self.__name__
|
||||
memo.__doc__ = self.__doc__
|
||||
obj.__dict__[self.__name__] = memo
|
||||
return result
|
||||
oneshot.__name__ = self.__name__
|
||||
oneshot.__doc__ = self.__doc__
|
||||
return oneshot
|
||||
|
||||
|
||||
class SetLikeDict(dict):
|
||||
|
||||
"""a dictionary that has some setlike methods on it"""
|
||||
|
||||
def union(self, other):
|
||||
"""produce a 'union' of this dict and another (at the key level).
|
||||
|
||||
values in the second dict take precedence over that of the first"""
|
||||
x = SetLikeDict(**self)
|
||||
x.update(other)
|
||||
return x
|
||||
|
||||
|
||||
class FastEncodingBuffer(object):
|
||||
|
||||
"""a very rudimentary buffer that is faster than StringIO,
|
||||
but doesn't crash on unicode data like cStringIO."""
|
||||
|
||||
def __init__(self, encoding=None, errors='strict', as_unicode=False):
|
||||
self.data = collections.deque()
|
||||
self.encoding = encoding
|
||||
if as_unicode:
|
||||
self.delim = compat.u('')
|
||||
else:
|
||||
self.delim = ''
|
||||
self.as_unicode = as_unicode
|
||||
self.errors = errors
|
||||
self.write = self.data.append
|
||||
|
||||
def truncate(self):
|
||||
self.data = collections.deque()
|
||||
self.write = self.data.append
|
||||
|
||||
def getvalue(self):
|
||||
if self.encoding:
|
||||
return self.delim.join(self.data).encode(self.encoding,
|
||||
self.errors)
|
||||
else:
|
||||
return self.delim.join(self.data)
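# Hedged sketch of FastEncodingBuffer: writes are appended to a deque and
# joined (and optionally encoded) only when getvalue() is called.
from mako.util import FastEncodingBuffer

buf = FastEncodingBuffer(encoding="utf-8")
buf.write(u"h\u00e9llo ")
buf.write(u"w\u00f6rld")
print(buf.getvalue())  # b'h\xc3\xa9llo w\xc3\xb6rld'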
|
||||
|
||||
|
||||
class LRUCache(dict):
|
||||
|
||||
"""A dictionary-like object that stores a limited number of items,
|
||||
discarding lesser used items periodically.
|
||||
|
||||
This is a rewrite of LRUCache from Myghty to use a periodic timestamp-based
paradigm so that synchronization is not really needed. The size management
is inexact.
|
||||
"""
|
||||
|
||||
class _Item(object):
|
||||
|
||||
def __init__(self, key, value):
|
||||
self.key = key
|
||||
self.value = value
|
||||
self.timestamp = compat.time_func()
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self.value)
|
||||
|
||||
def __init__(self, capacity, threshold=.5):
|
||||
self.capacity = capacity
|
||||
self.threshold = threshold
|
||||
|
||||
def __getitem__(self, key):
|
||||
item = dict.__getitem__(self, key)
|
||||
item.timestamp = compat.time_func()
|
||||
return item.value
|
||||
|
||||
def values(self):
|
||||
return [i.value for i in dict.values(self)]
|
||||
|
||||
def setdefault(self, key, value):
|
||||
if key in self:
|
||||
return self[key]
|
||||
else:
|
||||
self[key] = value
|
||||
return value
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
item = dict.get(self, key)
|
||||
if item is None:
|
||||
item = self._Item(key, value)
|
||||
dict.__setitem__(self, key, item)
|
||||
else:
|
||||
item.value = value
|
||||
self._manage_size()
|
||||
|
||||
def _manage_size(self):
|
||||
while len(self) > self.capacity + self.capacity * self.threshold:
|
||||
bytime = sorted(dict.values(self),
|
||||
key=operator.attrgetter('timestamp'), reverse=True)
|
||||
for item in bytime[self.capacity:]:
|
||||
try:
|
||||
del self[item.key]
|
||||
except KeyError:
|
||||
# if we couldn't find a key, most likely some other thread
|
||||
# broke in on us. loop around and try again
|
||||
break
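# Hedged sketch of LRUCache: entries are timestamped on access, and the dict
# is trimmed back toward `capacity` once it grows past
# capacity + capacity * threshold.
from mako.util import LRUCache

cache = LRUCache(capacity=2, threshold=0.5)
for i in range(10):
    cache["key%d" % i] = i
print(len(cache) <= 3)          # True: size is managed, though inexactly
print(sorted(cache.values()))   # only the most recently used entries remain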
|
||||
|
||||
# Regexp to match python magic encoding line
|
||||
_PYTHON_MAGIC_COMMENT_re = re.compile(
|
||||
r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
|
||||
re.VERBOSE)
|
||||
|
||||
|
||||
def parse_encoding(fp):
|
||||
"""Deduce the encoding of a Python source file (binary mode) from magic
|
||||
comment.
|
||||
|
||||
It does this in the same way as the `Python interpreter`__
|
||||
|
||||
.. __: http://docs.python.org/ref/encodings.html
|
||||
|
||||
The ``fp`` argument should be a seekable file object in binary mode.
|
||||
"""
|
||||
pos = fp.tell()
|
||||
fp.seek(0)
|
||||
try:
|
||||
line1 = fp.readline()
|
||||
has_bom = line1.startswith(codecs.BOM_UTF8)
|
||||
if has_bom:
|
||||
line1 = line1[len(codecs.BOM_UTF8):]
|
||||
|
||||
m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore'))
|
||||
if not m:
|
||||
try:
|
||||
import parser
|
||||
parser.suite(line1.decode('ascii', 'ignore'))
|
||||
except (ImportError, SyntaxError):
|
||||
# Either it's a real syntax error, in which case the source
|
||||
# is not valid python source, or line2 is a continuation of
|
||||
# line1, in which case we don't want to scan line2 for a magic
|
||||
# comment.
|
||||
pass
|
||||
else:
|
||||
line2 = fp.readline()
|
||||
m = _PYTHON_MAGIC_COMMENT_re.match(
|
||||
line2.decode('ascii', 'ignore'))
|
||||
|
||||
if has_bom:
|
||||
if m:
|
||||
raise SyntaxError(
|
||||
"python refuses to compile code with both a UTF8"
|
||||
" byte-order-mark and a magic encoding comment")
|
||||
return 'utf_8'
|
||||
elif m:
|
||||
return m.group(1)
|
||||
else:
|
||||
return None
|
||||
finally:
|
||||
fp.seek(pos)
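# Hedged sketch of parse_encoding(): it expects a seekable binary-mode file
# object and restores the file position afterwards. The path is illustrative.
from mako.util import parse_encoding

with open("some_module.py", "rb") as fp:
    encoding = parse_encoding(fp)
print(encoding or "no magic encoding comment / BOM found")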
|
||||
|
||||
|
||||
def sorted_dict_repr(d):
|
||||
"""repr() a dictionary with the keys in order.
|
||||
|
||||
Used by the lexer unit test to compare parse trees based on strings.
|
||||
|
||||
"""
|
||||
keys = list(d.keys())
|
||||
keys.sort()
|
||||
return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}"
|
||||
|
||||
|
||||
def restore__ast(_ast):
|
||||
"""Attempt to restore the required classes to the _ast module if it
|
||||
appears to be missing them
|
||||
"""
|
||||
if hasattr(_ast, 'AST'):
|
||||
return
|
||||
_ast.PyCF_ONLY_AST = 2 << 9
|
||||
m = compile("""\
|
||||
def foo(): pass
|
||||
class Bar(object): pass
|
||||
if False: pass
|
||||
baz = 'mako'
|
||||
1 + 2 - 3 * 4 / 5
|
||||
6 // 7 % 8 << 9 >> 10
|
||||
11 & 12 ^ 13 | 14
|
||||
15 and 16 or 17
|
||||
-baz + (not +18) - ~17
|
||||
baz and 'foo' or 'bar'
|
||||
(mako is baz == baz) is not baz != mako
|
||||
mako > baz < mako >= baz <= mako
|
||||
mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
|
||||
_ast.Module = type(m)
|
||||
|
||||
for cls in _ast.Module.__mro__:
|
||||
if cls.__name__ == 'mod':
|
||||
_ast.mod = cls
|
||||
elif cls.__name__ == 'AST':
|
||||
_ast.AST = cls
|
||||
|
||||
_ast.FunctionDef = type(m.body[0])
|
||||
_ast.ClassDef = type(m.body[1])
|
||||
_ast.If = type(m.body[2])
|
||||
|
||||
_ast.Name = type(m.body[3].targets[0])
|
||||
_ast.Store = type(m.body[3].targets[0].ctx)
|
||||
_ast.Str = type(m.body[3].value)
|
||||
|
||||
_ast.Sub = type(m.body[4].value.op)
|
||||
_ast.Add = type(m.body[4].value.left.op)
|
||||
_ast.Div = type(m.body[4].value.right.op)
|
||||
_ast.Mult = type(m.body[4].value.right.left.op)
|
||||
|
||||
_ast.RShift = type(m.body[5].value.op)
|
||||
_ast.LShift = type(m.body[5].value.left.op)
|
||||
_ast.Mod = type(m.body[5].value.left.left.op)
|
||||
_ast.FloorDiv = type(m.body[5].value.left.left.left.op)
|
||||
|
||||
_ast.BitOr = type(m.body[6].value.op)
|
||||
_ast.BitXor = type(m.body[6].value.left.op)
|
||||
_ast.BitAnd = type(m.body[6].value.left.left.op)
|
||||
|
||||
_ast.Or = type(m.body[7].value.op)
|
||||
_ast.And = type(m.body[7].value.values[0].op)
|
||||
|
||||
_ast.Invert = type(m.body[8].value.right.op)
|
||||
_ast.Not = type(m.body[8].value.left.right.op)
|
||||
_ast.UAdd = type(m.body[8].value.left.right.operand.op)
|
||||
_ast.USub = type(m.body[8].value.left.left.op)
|
||||
|
||||
_ast.Or = type(m.body[9].value.op)
|
||||
_ast.And = type(m.body[9].value.values[0].op)
|
||||
|
||||
_ast.IsNot = type(m.body[10].value.ops[0])
|
||||
_ast.NotEq = type(m.body[10].value.ops[1])
|
||||
_ast.Is = type(m.body[10].value.left.ops[0])
|
||||
_ast.Eq = type(m.body[10].value.left.ops[1])
|
||||
|
||||
_ast.Gt = type(m.body[11].value.ops[0])
|
||||
_ast.Lt = type(m.body[11].value.ops[1])
|
||||
_ast.GtE = type(m.body[11].value.ops[2])
|
||||
_ast.LtE = type(m.body[11].value.ops[3])
|
||||
|
||||
_ast.In = type(m.body[12].value.ops[0])
|
||||
_ast.NotIn = type(m.body[12].value.ops[1])
|
||||
|
||||
|
||||
def read_file(path, mode='rb'):
|
||||
fp = open(path, mode)
|
||||
try:
|
||||
data = fp.read()
|
||||
return data
|
||||
finally:
|
||||
fp.close()
|
||||
|
||||
|
||||
def read_python_file(path):
|
||||
fp = open(path, "rb")
|
||||
try:
|
||||
encoding = parse_encoding(fp)
|
||||
data = fp.read()
|
||||
if encoding:
|
||||
data = data.decode(encoding)
|
||||
return data
|
||||
finally:
|
||||
fp.close()
|
|
@ -0,0 +1 @@
__version__ = '1.3.5'

@ -0,0 +1,12 @@
# API Backwards compatibility

from pymemcache.client.base import Client  # noqa
from pymemcache.client.base import PooledClient  # noqa

from pymemcache.exceptions import MemcacheError  # noqa
from pymemcache.exceptions import MemcacheClientError  # noqa
from pymemcache.exceptions import MemcacheUnknownCommandError  # noqa
from pymemcache.exceptions import MemcacheIllegalInputError  # noqa
from pymemcache.exceptions import MemcacheServerError  # noqa
from pymemcache.exceptions import MemcacheUnknownError  # noqa
from pymemcache.exceptions import MemcacheUnexpectedCloseError  # noqa
|
|
@ -0,0 +1,333 @@
|
|||
import socket
|
||||
import time
|
||||
import logging
|
||||
|
||||
from pymemcache.client.base import Client, PooledClient, _check_key
|
||||
from pymemcache.client.rendezvous import RendezvousHash
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HashClient(object):
|
||||
"""
|
||||
A client for communicating with a cluster of memcached servers
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
servers,
|
||||
hasher=RendezvousHash,
|
||||
serializer=None,
|
||||
deserializer=None,
|
||||
connect_timeout=None,
|
||||
timeout=None,
|
||||
no_delay=False,
|
||||
socket_module=socket,
|
||||
key_prefix=b'',
|
||||
max_pool_size=None,
|
||||
lock_generator=None,
|
||||
retry_attempts=2,
|
||||
retry_timeout=1,
|
||||
dead_timeout=60,
|
||||
use_pooling=False,
|
||||
ignore_exc=False,
|
||||
):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
Args:
|
||||
servers: list(tuple(hostname, port))
|
||||
hasher: optional class three functions ``get_node``, ``add_node``,
|
||||
and ``remove_node``
|
||||
defaults to Rendezvous (HRW) hash.
|
||||
|
||||
use_pooling: use py:class:`.PooledClient` as the default underlying
|
||||
class. ``max_pool_size`` and ``lock_generator`` can
|
||||
be used with this. default: False
|
||||
|
||||
retry_attempts: Amount of times a client should be tried before it
|
||||
is marked dead and removed from the pool.
|
||||
retry_timeout (float): Time in seconds that should pass between retry
|
||||
attempts.
|
||||
dead_timeout (float): Time in seconds before attempting to add a node
|
||||
back in the pool.
|
||||
|
||||
Further arguments are interpreted as for :py:class:`.Client`
|
||||
constructor.
|
||||
|
||||
The default ``hasher`` is using a pure python implementation that can
|
||||
be significantly improved performance wise by switching to a C based
|
||||
version. We recommend using ``python-clandestined`` if having a C
|
||||
dependency is acceptable.
|
||||
"""
|
||||
self.clients = {}
|
||||
self.retry_attempts = retry_attempts
|
||||
self.retry_timeout = retry_timeout
|
||||
self.dead_timeout = dead_timeout
|
||||
self.use_pooling = use_pooling
|
||||
self.key_prefix = key_prefix
|
||||
self.ignore_exc = ignore_exc
|
||||
self._failed_clients = {}
|
||||
self._dead_clients = {}
|
||||
self._last_dead_check_time = time.time()
|
||||
|
||||
self.hasher = hasher()
|
||||
|
||||
self.default_kwargs = {
|
||||
'connect_timeout': connect_timeout,
|
||||
'timeout': timeout,
|
||||
'no_delay': no_delay,
|
||||
'socket_module': socket_module,
|
||||
'key_prefix': key_prefix,
|
||||
'serializer': serializer,
|
||||
'deserializer': deserializer,
|
||||
}
|
||||
|
||||
if use_pooling is True:
|
||||
self.default_kwargs.update({
|
||||
'max_pool_size': max_pool_size,
|
||||
'lock_generator': lock_generator
|
||||
})
|
||||
|
||||
for server, port in servers:
|
||||
self.add_server(server, port)
|
||||
|
||||
def add_server(self, server, port):
|
||||
key = '%s:%s' % (server, port)
|
||||
|
||||
if self.use_pooling:
|
||||
client = PooledClient(
|
||||
(server, port),
|
||||
**self.default_kwargs
|
||||
)
|
||||
else:
|
||||
client = Client((server, port), **self.default_kwargs)
|
||||
|
||||
self.clients[key] = client
|
||||
self.hasher.add_node(key)
|
||||
|
||||
def remove_server(self, server, port):
|
||||
dead_time = time.time()
|
||||
self._failed_clients.pop((server, port))
|
||||
self._dead_clients[(server, port)] = dead_time
|
||||
key = '%s:%s' % (server, port)
|
||||
self.hasher.remove_node(key)
|
||||
|
||||
def _get_client(self, key):
|
||||
_check_key(key, self.key_prefix)
|
||||
if len(self._dead_clients) > 0:
|
||||
current_time = time.time()
|
||||
ldc = self._last_dead_check_time
|
||||
# we have dead clients and we have reached the
|
||||
# timeout retry
|
||||
if current_time - ldc > self.dead_timeout:
|
||||
for server, dead_time in self._dead_clients.items():
|
||||
if current_time - dead_time > self.dead_timeout:
|
||||
logger.debug(
|
||||
'bringing server back into rotation %s',
|
||||
server
|
||||
)
|
||||
self.add_server(*server)
|
||||
self._last_dead_check_time = current_time
|
||||
|
||||
server = self.hasher.get_node(key)
|
||||
# We've run out of servers to try
|
||||
if server is None:
|
||||
if self.ignore_exc is True:
|
||||
return
|
||||
raise Exception('All servers seem to be down right now')
|
||||
|
||||
client = self.clients[server]
|
||||
return client
|
||||
|
||||
def _safely_run_func(self, client, func, default_val, *args, **kwargs):
|
||||
try:
|
||||
if client.server in self._failed_clients:
|
||||
# This server is currently failing, lets check if it is in
|
||||
# retry or marked as dead
|
||||
failed_metadata = self._failed_clients[client.server]
|
||||
|
||||
# we haven't tried our max amount yet, if it has been enough
|
||||
# time lets just retry using it
|
||||
if failed_metadata['attempts'] < self.retry_attempts:
|
||||
failed_time = failed_metadata['failed_time']
|
||||
if time.time() - failed_time > self.retry_timeout:
|
||||
logger.debug(
|
||||
'retrying failed server: %s', client.server
|
||||
)
|
||||
result = func(*args, **kwargs)
|
||||
# we were successful, lets remove it from the failed
|
||||
# clients
|
||||
self._failed_clients.pop(client.server)
|
||||
return result
|
||||
return default_val
|
||||
else:
|
||||
# We've reached our max retry attempts, we need to mark
|
||||
# the server as dead
|
||||
logger.debug('marking server as dead: %s', client.server)
|
||||
self.remove_server(*client.server)
|
||||
|
||||
result = func(*args, **kwargs)
|
||||
return result
|
||||
|
||||
# Connecting to the server failed, so we should enter
|
||||
# retry mode
|
||||
except socket.error:
|
||||
# This client has never failed, lets mark it for failure
|
||||
if (
|
||||
client.server not in self._failed_clients and
|
||||
self.retry_attempts > 0
|
||||
):
|
||||
self._failed_clients[client.server] = {
|
||||
'failed_time': time.time(),
|
||||
'attempts': 0,
|
||||
}
|
||||
# We aren't allowing any retries, we should mark the server as
|
||||
# dead immediately
|
||||
elif (
|
||||
client.server not in self._failed_clients and
|
||||
self.retry_attempts <= 0
|
||||
):
|
||||
self._failed_clients[client.server] = {
|
||||
'failed_time': time.time(),
|
||||
'attempts': 0,
|
||||
}
|
||||
logger.debug("marking server as dead %s", client.server)
|
||||
self.remove_server(*client.server)
|
||||
# This client has failed previously, we need to update the metadata
|
||||
# to reflect that we have attempted it again
|
||||
else:
|
||||
failed_metadata = self._failed_clients[client.server]
|
||||
failed_metadata['attempts'] += 1
|
||||
failed_metadata['failed_time'] = time.time()
|
||||
self._failed_clients[client.server] = failed_metadata
|
||||
|
||||
# if we haven't enabled ignore_exc, don't move on gracefully, just
|
||||
# raise the exception
|
||||
if not self.ignore_exc:
|
||||
raise
|
||||
|
||||
return default_val
|
||||
except:
|
||||
# any exceptions that aren't socket.error we need to handle
|
||||
# gracefully as well
|
||||
if not self.ignore_exc:
|
||||
raise
|
||||
|
||||
return default_val
|
||||
|
||||
def _run_cmd(self, cmd, key, default_val, *args, **kwargs):
|
||||
client = self._get_client(key)
|
||||
|
||||
if client is None:
|
||||
return False
|
||||
|
||||
func = getattr(client, cmd)
|
||||
args = list(args)
|
||||
args.insert(0, key)
|
||||
return self._safely_run_func(
|
||||
client, func, default_val, *args, **kwargs
|
||||
)
|
||||
|
||||
def set(self, key, *args, **kwargs):
|
||||
return self._run_cmd('set', key, False, *args, **kwargs)
|
||||
|
||||
def get(self, key, *args, **kwargs):
|
||||
return self._run_cmd('get', key, None, *args, **kwargs)
|
||||
|
||||
def incr(self, key, *args, **kwargs):
|
||||
return self._run_cmd('incr', key, False, *args, **kwargs)
|
||||
|
||||
def decr(self, key, *args, **kwargs):
|
||||
return self._run_cmd('decr', key, False, *args, **kwargs)
|
||||
|
||||
def set_many(self, values, *args, **kwargs):
|
||||
client_batches = {}
|
||||
end = []
|
||||
|
||||
for key, value in values.items():
|
||||
client = self._get_client(key)
|
||||
|
||||
if client is None:
|
||||
end.append(False)
|
||||
continue
|
||||
|
||||
if client.server not in client_batches:
|
||||
client_batches[client.server] = {}
|
||||
|
||||
client_batches[client.server][key] = value
|
||||
|
||||
for server, values in client_batches.items():
|
||||
client = self.clients['%s:%s' % server]
|
||||
new_args = list(args)
|
||||
new_args.insert(0, values)
|
||||
result = self._safely_run_func(
|
||||
client,
|
||||
client.set_many, False, *new_args, **kwargs
|
||||
)
|
||||
end.append(result)
|
||||
|
||||
return all(end)
|
||||
|
||||
set_multi = set_many
|
||||
|
||||
def get_many(self, keys, *args, **kwargs):
|
||||
client_batches = {}
|
||||
end = {}
|
||||
|
||||
for key in keys:
|
||||
client = self._get_client(key)
|
||||
|
||||
if client is None:
|
||||
end[key] = False
|
||||
continue
|
||||
|
||||
if client.server not in client_batches:
|
||||
client_batches[client.server] = []
|
||||
|
||||
client_batches[client.server].append(key)
|
||||
|
||||
for server, keys in client_batches.items():
|
||||
client = self.clients['%s:%s' % server]
|
||||
new_args = list(args)
|
||||
new_args.insert(0, keys)
|
||||
result = self._safely_run_func(
|
||||
client,
|
||||
client.get_many, {}, *new_args, **kwargs
|
||||
)
|
||||
end.update(result)
|
||||
|
||||
return end
|
||||
|
||||
get_multi = get_many
|
||||
|
||||
def gets(self, key, *args, **kwargs):
|
||||
return self._run_cmd('gets', key, None, *args, **kwargs)
|
||||
|
||||
def add(self, key, *args, **kwargs):
|
||||
return self._run_cmd('add', key, False, *args, **kwargs)
|
||||
|
||||
def prepend(self, key, *args, **kwargs):
|
||||
return self._run_cmd('prepend', key, False, *args, **kwargs)
|
||||
|
||||
def append(self, key, *args, **kwargs):
|
||||
return self._run_cmd('append', key, False, *args, **kwargs)
|
||||
|
||||
def delete(self, key, *args, **kwargs):
|
||||
return self._run_cmd('delete', key, False, *args, **kwargs)
|
||||
|
||||
def delete_many(self, keys, *args, **kwargs):
|
||||
for key in keys:
|
||||
self._run_cmd('delete', key, False, *args, **kwargs)
|
||||
return True
|
||||
|
||||
delete_multi = delete_many
|
||||
|
||||
def cas(self, key, *args, **kwargs):
|
||||
return self._run_cmd('cas', key, False, *args, **kwargs)
|
||||
|
||||
def replace(self, key, *args, **kwargs):
|
||||
return self._run_cmd('replace', key, False, *args, **kwargs)
|
||||
|
||||
def flush_all(self):
|
||||
for _, client in self.clients.items():
|
||||
self._safely_run_func(client, client.flush_all, False)
|
|
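A minimal usage sketch of the HashClient defined above; the server addresses and option values below are illustrative assumptions, not part of the original file.

from pymemcache.client.hash import HashClient

# Two hypothetical memcached nodes; keys are routed between them via rendezvous hashing.
client = HashClient(
    servers=[('127.0.0.1', 11211), ('127.0.0.1', 11212)],
    use_pooling=True,   # wrap each node in a PooledClient
    ignore_exc=True,    # treat node failures as cache misses
)
client.set('greeting', b'hello')
print(client.get('greeting'))   # b'hello', or None if the owning node is down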
@ -0,0 +1,51 @@
|
|||
def murmur3_32(data, seed=0):
|
||||
"""MurmurHash3 was written by Austin Appleby, and is placed in the
|
||||
public domain. The author hereby disclaims copyright to this source
|
||||
code."""
|
||||
|
||||
c1 = 0xcc9e2d51
|
||||
c2 = 0x1b873593
|
||||
|
||||
length = len(data)
|
||||
h1 = seed
|
||||
roundedEnd = (length & 0xfffffffc) # round down to 4 byte block
|
||||
for i in range(0, roundedEnd, 4):
|
||||
# little endian load order
|
||||
k1 = (ord(data[i]) & 0xff) | ((ord(data[i + 1]) & 0xff) << 8) | \
|
||||
((ord(data[i + 2]) & 0xff) << 16) | (ord(data[i + 3]) << 24)
|
||||
k1 *= c1
|
||||
k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17) # ROTL32(k1,15)
|
||||
k1 *= c2
|
||||
|
||||
h1 ^= k1
|
||||
h1 = (h1 << 13) | ((h1 & 0xffffffff) >> 19) # ROTL32(h1,13)
|
||||
h1 = h1 * 5 + 0xe6546b64
|
||||
|
||||
# tail
|
||||
k1 = 0
|
||||
|
||||
val = length & 0x03
|
||||
if val == 3:
|
||||
k1 = (ord(data[roundedEnd + 2]) & 0xff) << 16
|
||||
# fallthrough
|
||||
if val in [2, 3]:
|
||||
k1 |= (ord(data[roundedEnd + 1]) & 0xff) << 8
|
||||
# fallthrough
|
||||
if val in [1, 2, 3]:
|
||||
k1 |= ord(data[roundedEnd]) & 0xff
|
||||
k1 *= c1
|
||||
k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17) # ROTL32(k1,15)
|
||||
k1 *= c2
|
||||
h1 ^= k1
|
||||
|
||||
# finalization
|
||||
h1 ^= length
|
||||
|
||||
# fmix(h1)
|
||||
h1 ^= ((h1 & 0xffffffff) >> 16)
|
||||
h1 *= 0x85ebca6b
|
||||
h1 ^= ((h1 & 0xffffffff) >> 13)
|
||||
h1 *= 0xc2b2ae35
|
||||
h1 ^= ((h1 & 0xffffffff) >> 16)
|
||||
|
||||
return h1 & 0xffffffff
|
|
@ -0,0 +1,46 @@
from pymemcache.client.murmur3 import murmur3_32


class RendezvousHash(object):
    """
    Implements the Highest Random Weight (HRW) hashing algorithm most
    commonly referred to as rendezvous hashing.

    Originally developed as part of python-clandestined.

    Copyright (c) 2014 Ernest W. Durbin III
    """
    def __init__(self, nodes=None, seed=0, hash_function=murmur3_32):
        """
        Constructor.
        """
        self.nodes = []
        self.seed = seed
        if nodes is not None:
            self.nodes = nodes
        self.hash_function = lambda x: hash_function(x, seed)

    def add_node(self, node):
        if node not in self.nodes:
            self.nodes.append(node)

    def remove_node(self, node):
        if node in self.nodes:
            self.nodes.remove(node)
        else:
            raise ValueError("No such node %s to remove" % (node))

    def get_node(self, key):
        high_score = -1
        winner = None

        for node in self.nodes:
            score = self.hash_function(
                "%s-%s" % (str(node), str(key)))

            if score > high_score:
                (high_score, winner) = (score, node)
            elif score == high_score:
                (high_score, winner) = (score, max(str(node), str(winner)))

        return winner
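A short sketch of how the RendezvousHash above picks an owner for a key; the node names are placeholders.

hasher = RendezvousHash(nodes=['10.0.0.1:11211', '10.0.0.2:11211'])
owner = hasher.get_node('user:42')   # node with the highest murmur3_32 score wins
hasher.remove_node(owner)            # only keys owned by the removed node remap
fallback_owner = hasher.get_node('user:42')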
@ -0,0 +1,40 @@
class MemcacheError(Exception):
    "Base exception class"
    pass


class MemcacheClientError(MemcacheError):
    """Raised when memcached fails to parse the arguments to a request, likely
    due to a malformed key and/or value, a bug in this library, or a version
    mismatch with memcached."""
    pass


class MemcacheUnknownCommandError(MemcacheClientError):
    """Raised when memcached fails to parse a request, likely due to a bug in
    this library or a version mismatch with memcached."""
    pass


class MemcacheIllegalInputError(MemcacheClientError):
    """Raised when a key or value is not legal for Memcache (see the class docs
    for Client for more details)."""
    pass


class MemcacheServerError(MemcacheError):
    """Raised when memcached reports a failure while processing a request,
    likely due to a bug or transient issue in memcached."""
    pass


class MemcacheUnknownError(MemcacheError):
    """Raised when this library receives a response from memcached that it
    cannot parse, likely due to a bug in this library or a version mismatch
    with memcached."""
    pass


class MemcacheUnexpectedCloseError(MemcacheServerError):
    "Raised when the connection with memcached closes unexpectedly."
    pass
|
|
@ -0,0 +1,123 @@
|
|||
# Copyright 2012 Pinterest.com
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
A client for falling back to older memcached servers when performing reads.
|
||||
|
||||
It is sometimes necessary to deploy memcached on new servers, or with a
|
||||
different configuration. In these cases, it is undesirable to start up an
|
||||
empty memcached server and point traffic to it, since the cache will be cold,
|
||||
and the backing store will have a large increase in traffic.
|
||||
|
||||
This class attempts to solve that problem by providing an interface identical
|
||||
to the Client interface, but which can fall back to older memcached servers
|
||||
when reads to the primary server fail. The approach for upgrading memcached
|
||||
servers or configuration then becomes:
|
||||
|
||||
1. Deploy a new host (or fleet) with memcached, possibly with a new
|
||||
configuration.
|
||||
2. From your application servers, use FallbackClient to write and read from
|
||||
the new cluster, and to read from the old cluster when there is a miss in
|
||||
the new cluster.
|
||||
3. Wait until the new cache is warm enough to support the load.
|
||||
4. Switch from FallbackClient to a regular Client library for doing all
|
||||
reads and writes to the new cluster.
|
||||
5. Take down the old cluster.
|
||||
|
||||
Best Practices:
|
||||
---------------
|
||||
- Make sure that the old client has "ignore_exc" set to True, so that it
|
||||
treats failures like cache misses. That will allow you to take down the
|
||||
old cluster before you switch away from FallbackClient.
|
||||
"""
|
||||
|
||||
|
||||
class FallbackClient(object):
|
||||
def __init__(self, caches):
|
||||
assert len(caches) > 0
|
||||
self.caches = caches
|
||||
|
||||
def close(self):
|
||||
"Close each of the memcached clients"
|
||||
for cache in self.caches:
|
||||
cache.close()
|
||||
|
||||
def set(self, key, value, expire=0, noreply=True):
|
||||
self.caches[0].set(key, value, expire, noreply)
|
||||
|
||||
def add(self, key, value, expire=0, noreply=True):
|
||||
self.caches[0].add(key, value, expire, noreply)
|
||||
|
||||
def replace(self, key, value, expire=0, noreply=True):
|
||||
self.caches[0].replace(key, value, expire, noreply)
|
||||
|
||||
def append(self, key, value, expire=0, noreply=True):
|
||||
self.caches[0].append(key, value, expire, noreply)
|
||||
|
||||
def prepend(self, key, value, expire=0, noreply=True):
|
||||
self.caches[0].prepend(key, value, expire, noreply)
|
||||
|
||||
def cas(self, key, value, cas, expire=0, noreply=True):
|
||||
self.caches[0].cas(key, value, cas, expire, noreply)
|
||||
|
||||
def get(self, key):
|
||||
for cache in self.caches:
|
||||
result = cache.get(key)
|
||||
if result is not None:
|
||||
return result
|
||||
return None
|
||||
|
||||
def get_many(self, keys):
|
||||
for cache in self.caches:
|
||||
result = cache.get_many(keys)
|
||||
if result:
|
||||
return result
|
||||
return []
|
||||
|
||||
def gets(self, key):
|
||||
for cache in self.caches:
|
||||
result = cache.gets(key)
|
||||
if result is not None:
|
||||
return result
|
||||
return None
|
||||
|
||||
def gets_many(self, keys):
|
||||
for cache in self.caches:
|
||||
result = cache.gets_many(keys)
|
||||
if result:
|
||||
return result
|
||||
return []
|
||||
|
||||
def delete(self, key, noreply=True):
|
||||
self.caches[0].delete(key, noreply)
|
||||
|
||||
def incr(self, key, value, noreply=True):
|
||||
self.caches[0].incr(key, value, noreply)
|
||||
|
||||
def decr(self, key, value, noreply=True):
|
||||
self.caches[0].decr(key, value, noreply)
|
||||
|
||||
def touch(self, key, expire=0, noreply=True):
|
||||
self.caches[0].touch(key, expire, noreply)
|
||||
|
||||
def stats(self):
|
||||
# TODO: ??
|
||||
pass
|
||||
|
||||
def flush_all(self, delay=0, noreply=True):
|
||||
self.caches[0].flush_all(delay, noreply)
|
||||
|
||||
def quit(self):
|
||||
# TODO: ??
|
||||
pass
|
|
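A sketch of the migration flow described in the module docstring above, using hypothetical host names; the old client sets ignore_exc so its failures read as cache misses.

from pymemcache.client.base import Client

new = Client(('new-cache.internal', 11211))
old = Client(('old-cache.internal', 11211), ignore_exc=True)

cache = FallbackClient([new, old])
cache.set('k', b'v')       # writes go only to the new cluster
value = cache.get('k')     # reads fall back to the old cluster on a miss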
@ -0,0 +1,114 @@
|
|||
# Copyright 2015 Yahoo.com
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import collections
|
||||
import contextlib
|
||||
import sys
|
||||
import threading
|
||||
|
||||
import six
|
||||
|
||||
|
||||
class ObjectPool(object):
|
||||
"""A pool of objects that release/creates/destroys as needed."""
|
||||
|
||||
def __init__(self, obj_creator,
|
||||
after_remove=None, max_size=None,
|
||||
lock_generator=None):
|
||||
self._used_objs = collections.deque()
|
||||
self._free_objs = collections.deque()
|
||||
self._obj_creator = obj_creator
|
||||
if lock_generator is None:
|
||||
self._lock = threading.Lock()
|
||||
else:
|
||||
self._lock = lock_generator()
|
||||
self._after_remove = after_remove
|
||||
max_size = max_size or 2 ** 31
|
||||
if not isinstance(max_size, six.integer_types) or max_size < 0:
|
||||
raise ValueError('"max_size" must be a positive integer')
|
||||
self.max_size = max_size
|
||||
|
||||
@property
|
||||
def used(self):
|
||||
return tuple(self._used_objs)
|
||||
|
||||
@property
|
||||
def free(self):
|
||||
return tuple(self._free_objs)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def get_and_release(self, destroy_on_fail=False):
|
||||
obj = self.get()
|
||||
try:
|
||||
yield obj
|
||||
except Exception:
|
||||
exc_info = sys.exc_info()
|
||||
if not destroy_on_fail:
|
||||
self.release(obj)
|
||||
else:
|
||||
self.destroy(obj)
|
||||
six.reraise(exc_info[0], exc_info[1], exc_info[2])
|
||||
self.release(obj)
|
||||
|
||||
def get(self):
|
||||
with self._lock:
|
||||
if not self._free_objs:
|
||||
curr_count = len(self._used_objs)
|
||||
if curr_count >= self.max_size:
|
||||
raise RuntimeError("Too many objects,"
|
||||
" %s >= %s" % (curr_count,
|
||||
self.max_size))
|
||||
obj = self._obj_creator()
|
||||
self._used_objs.append(obj)
|
||||
return obj
|
||||
else:
|
||||
obj = self._free_objs.pop()
|
||||
self._used_objs.append(obj)
|
||||
return obj
|
||||
|
||||
def destroy(self, obj, silent=True):
|
||||
was_dropped = False
|
||||
with self._lock:
|
||||
try:
|
||||
self._used_objs.remove(obj)
|
||||
was_dropped = True
|
||||
except ValueError:
|
||||
if not silent:
|
||||
raise
|
||||
if was_dropped and self._after_remove is not None:
|
||||
self._after_remove(obj)
|
||||
|
||||
def release(self, obj, silent=True):
|
||||
with self._lock:
|
||||
try:
|
||||
self._used_objs.remove(obj)
|
||||
self._free_objs.append(obj)
|
||||
except ValueError:
|
||||
if not silent:
|
||||
raise
|
||||
|
||||
def clear(self):
|
||||
if self._after_remove is not None:
|
||||
needs_destroy = []
|
||||
with self._lock:
|
||||
needs_destroy.extend(self._used_objs)
|
||||
needs_destroy.extend(self._free_objs)
|
||||
self._free_objs.clear()
|
||||
self._used_objs.clear()
|
||||
for obj in needs_destroy:
|
||||
self._after_remove(obj)
|
||||
else:
|
||||
with self._lock:
|
||||
self._free_objs.clear()
|
||||
self._used_objs.clear()
|
|
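A minimal sketch of using the ObjectPool above to pool TCP connections; the factory function and address are assumptions for illustration.

import socket

def make_conn():
    return socket.create_connection(('127.0.0.1', 11211))

pool = ObjectPool(make_conn, after_remove=lambda c: c.close(), max_size=4)

with pool.get_and_release(destroy_on_fail=True) as conn:
    conn.sendall(b'version\r\n')   # returned to the pool on success, destroyed on error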
@ -0,0 +1,69 @@
|
|||
# Copyright 2012 Pinterest.com
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import pickle
|
||||
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
except ImportError:
|
||||
from StringIO import StringIO
|
||||
|
||||
|
||||
FLAG_PICKLE = 1 << 0
|
||||
FLAG_INTEGER = 1 << 1
|
||||
FLAG_LONG = 1 << 2
|
||||
|
||||
|
||||
def python_memcache_serializer(key, value):
|
||||
flags = 0
|
||||
|
||||
if isinstance(value, str):
|
||||
pass
|
||||
elif isinstance(value, int):
|
||||
flags |= FLAG_INTEGER
|
||||
value = "%d" % value
|
||||
elif isinstance(value, long):
|
||||
flags |= FLAG_LONG
|
||||
value = "%d" % value
|
||||
else:
|
||||
flags |= FLAG_PICKLE
|
||||
output = StringIO()
|
||||
pickler = pickle.Pickler(output, 0)
|
||||
pickler.dump(value)
|
||||
value = output.getvalue()
|
||||
|
||||
return value, flags
|
||||
|
||||
|
||||
def python_memcache_deserializer(key, value, flags):
|
||||
if flags == 0:
|
||||
return value
|
||||
|
||||
if flags & FLAG_INTEGER:
|
||||
return int(value)
|
||||
|
||||
if flags & FLAG_LONG:
|
||||
return long(value)
|
||||
|
||||
if flags & FLAG_PICKLE:
|
||||
try:
|
||||
buf = StringIO(value)
|
||||
unpickler = pickle.Unpickler(buf)
|
||||
return unpickler.load()
|
||||
except Exception:
|
||||
logging.info('Pickle error', exc_info=True)
|
||||
return None
|
||||
|
||||
return value
|
|
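A sketch of plugging the flag-based serializer/deserializer above into a pymemcache Client (Python 2 semantics, matching this module's use of long and cStringIO); the address is a placeholder.

from pymemcache.client.base import Client

client = Client(
    ('127.0.0.1', 11211),
    serializer=python_memcache_serializer,
    deserializer=python_memcache_deserializer,
)
client.set('answer', 42)        # stored with FLAG_INTEGER
client.set('blob', {'a': 1})    # pickled and stored with FLAG_PICKLE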
@ -0,0 +1,133 @@
|
|||
'''
|
||||
PyMySQL: A pure-Python MySQL client library.
|
||||
|
||||
Copyright (c) 2010, 2013 PyMySQL contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
'''
|
||||
|
||||
VERSION = (0, 6, 7, None)
|
||||
|
||||
from ._compat import text_type, JYTHON, IRONPYTHON
|
||||
from .constants import FIELD_TYPE
|
||||
from .converters import escape_dict, escape_sequence, escape_string
|
||||
from .err import Warning, Error, InterfaceError, DataError, \
|
||||
DatabaseError, OperationalError, IntegrityError, InternalError, \
|
||||
NotSupportedError, ProgrammingError, MySQLError
|
||||
from .times import Date, Time, Timestamp, \
|
||||
DateFromTicks, TimeFromTicks, TimestampFromTicks
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
threadsafety = 1
|
||||
apilevel = "2.0"
|
||||
paramstyle = "format"
|
||||
|
||||
class DBAPISet(frozenset):
|
||||
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, set):
|
||||
return frozenset.__ne__(self, other)
|
||||
else:
|
||||
return other not in self
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, frozenset):
|
||||
return frozenset.__eq__(self, other)
|
||||
else:
|
||||
return other in self
|
||||
|
||||
def __hash__(self):
|
||||
return frozenset.__hash__(self)
|
||||
|
||||
|
||||
STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING,
|
||||
FIELD_TYPE.VAR_STRING])
|
||||
BINARY = DBAPISet([FIELD_TYPE.BLOB, FIELD_TYPE.LONG_BLOB,
|
||||
FIELD_TYPE.MEDIUM_BLOB, FIELD_TYPE.TINY_BLOB])
|
||||
NUMBER = DBAPISet([FIELD_TYPE.DECIMAL, FIELD_TYPE.DOUBLE, FIELD_TYPE.FLOAT,
|
||||
FIELD_TYPE.INT24, FIELD_TYPE.LONG, FIELD_TYPE.LONGLONG,
|
||||
FIELD_TYPE.TINY, FIELD_TYPE.YEAR])
|
||||
DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE])
|
||||
TIME = DBAPISet([FIELD_TYPE.TIME])
|
||||
TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
|
||||
DATETIME = TIMESTAMP
|
||||
ROWID = DBAPISet()
|
||||
|
||||
def Binary(x):
|
||||
"""Return x as a binary type."""
|
||||
if isinstance(x, text_type) and not (JYTHON or IRONPYTHON):
|
||||
return x.encode()
|
||||
return bytes(x)
|
||||
|
||||
def Connect(*args, **kwargs):
|
||||
"""
|
||||
Connect to the database; see connections.Connection.__init__() for
|
||||
more information.
|
||||
"""
|
||||
from .connections import Connection
|
||||
return Connection(*args, **kwargs)
|
||||
|
||||
from pymysql import connections as _orig_conn
|
||||
if _orig_conn.Connection.__init__.__doc__ is not None:
|
||||
Connect.__doc__ = _orig_conn.Connection.__init__.__doc__ + ("""
|
||||
See connections.Connection.__init__() for information about defaults.
|
||||
""")
|
||||
del _orig_conn
|
||||
|
||||
def get_client_info(): # for MySQLdb compatibility
|
||||
return '.'.join(map(str, VERSION))
|
||||
|
||||
connect = Connection = Connect
|
||||
|
||||
# we include a doctored version_info here for MySQLdb compatibility
|
||||
version_info = (1,2,2,"final",0)
|
||||
|
||||
NULL = "NULL"
|
||||
|
||||
__version__ = get_client_info()
|
||||
|
||||
def thread_safe():
|
||||
return True # match MySQLdb.thread_safe()
|
||||
|
||||
def install_as_MySQLdb():
|
||||
"""
|
||||
After this function is called, any application that imports MySQLdb or
|
||||
_mysql will unwittingly actually use pymysql.
|
||||
"""
|
||||
sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"]
|
||||
|
||||
__all__ = [
|
||||
'BINARY', 'Binary', 'Connect', 'Connection', 'DATE', 'Date',
|
||||
'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks',
|
||||
'DataError', 'DatabaseError', 'Error', 'FIELD_TYPE', 'IntegrityError',
|
||||
'InterfaceError', 'InternalError', 'MySQLError', 'NULL', 'NUMBER',
|
||||
'NotSupportedError', 'DBAPISet', 'OperationalError', 'ProgrammingError',
|
||||
'ROWID', 'STRING', 'TIME', 'TIMESTAMP', 'Warning', 'apilevel', 'connect',
|
||||
'connections', 'constants', 'converters', 'cursors',
|
||||
'escape_dict', 'escape_sequence', 'escape_string', 'get_client_info',
|
||||
'paramstyle', 'threadsafety', 'version_info',
|
||||
|
||||
"install_as_MySQLdb",
|
||||
|
||||
"NULL","__version__",
|
||||
]
|
|
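A minimal usage sketch of the connect alias defined above; the connection parameters are placeholders.

import pymysql

conn = pymysql.connect(host='127.0.0.1', user='root', passwd='', db='test')
try:
    cur = conn.cursor()
    cur.execute("SELECT VERSION()")
    print(cur.fetchone())
finally:
    conn.close()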
@ -0,0 +1,18 @@
|
|||
import sys
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PYPY = hasattr(sys, 'pypy_translation_info')
|
||||
JYTHON = sys.platform.startswith('java')
|
||||
IRONPYTHON = sys.platform == 'cli'
|
||||
CPYTHON = not PYPY and not JYTHON and not IRONPYTHON
|
||||
|
||||
if PY2:
|
||||
range_type = xrange
|
||||
text_type = unicode
|
||||
long_type = long
|
||||
str_type = basestring
|
||||
else:
|
||||
range_type = range
|
||||
text_type = str
|
||||
long_type = int
|
||||
str_type = str
|
|
@ -0,0 +1,134 @@
|
|||
"""
|
||||
SocketIO imported from socket module in Python 3.
|
||||
|
||||
Copyright (c) 2001-2013 Python Software Foundation; All Rights Reserved.
|
||||
"""
|
||||
|
||||
from socket import *
|
||||
import io
|
||||
import errno
|
||||
|
||||
__all__ = ['SocketIO']
|
||||
|
||||
EINTR = errno.EINTR
|
||||
_blocking_errnos = (errno.EAGAIN, errno.EWOULDBLOCK)
|
||||
|
||||
class SocketIO(io.RawIOBase):
|
||||
|
||||
"""Raw I/O implementation for stream sockets.
|
||||
|
||||
This class supports the makefile() method on sockets. It provides
|
||||
the raw I/O interface on top of a socket object.
|
||||
"""
|
||||
|
||||
# One might wonder why not let FileIO do the job instead. There are two
|
||||
# main reasons why FileIO is not adapted:
|
||||
# - it wouldn't work under Windows (where you can't use read() and
|
||||
# write() on a socket handle)
|
||||
# - it wouldn't work with socket timeouts (FileIO would ignore the
|
||||
# timeout and consider the socket non-blocking)
|
||||
|
||||
# XXX More docs
|
||||
|
||||
def __init__(self, sock, mode):
|
||||
if mode not in ("r", "w", "rw", "rb", "wb", "rwb"):
|
||||
raise ValueError("invalid mode: %r" % mode)
|
||||
io.RawIOBase.__init__(self)
|
||||
self._sock = sock
|
||||
if "b" not in mode:
|
||||
mode += "b"
|
||||
self._mode = mode
|
||||
self._reading = "r" in mode
|
||||
self._writing = "w" in mode
|
||||
self._timeout_occurred = False
|
||||
|
||||
def readinto(self, b):
|
||||
"""Read up to len(b) bytes into the writable buffer *b* and return
|
||||
the number of bytes read. If the socket is non-blocking and no bytes
|
||||
are available, None is returned.
|
||||
|
||||
If *b* is non-empty, a 0 return value indicates that the connection
|
||||
was shutdown at the other end.
|
||||
"""
|
||||
self._checkClosed()
|
||||
self._checkReadable()
|
||||
if self._timeout_occurred:
|
||||
raise IOError("cannot read from timed out object")
|
||||
while True:
|
||||
try:
|
||||
return self._sock.recv_into(b)
|
||||
except timeout:
|
||||
self._timeout_occurred = True
|
||||
raise
|
||||
except error as e:
|
||||
n = e.args[0]
|
||||
if n == EINTR:
|
||||
continue
|
||||
if n in _blocking_errnos:
|
||||
return None
|
||||
raise
|
||||
|
||||
def write(self, b):
|
||||
"""Write the given bytes or bytearray object *b* to the socket
|
||||
and return the number of bytes written. This can be less than
|
||||
len(b) if not all data could be written. If the socket is
|
||||
non-blocking and no bytes could be written None is returned.
|
||||
"""
|
||||
self._checkClosed()
|
||||
self._checkWritable()
|
||||
try:
|
||||
return self._sock.send(b)
|
||||
except error as e:
|
||||
# XXX what about EINTR?
|
||||
if e.args[0] in _blocking_errnos:
|
||||
return None
|
||||
raise
|
||||
|
||||
def readable(self):
|
||||
"""True if the SocketIO is open for reading.
|
||||
"""
|
||||
if self.closed:
|
||||
raise ValueError("I/O operation on closed socket.")
|
||||
return self._reading
|
||||
|
||||
def writable(self):
|
||||
"""True if the SocketIO is open for writing.
|
||||
"""
|
||||
if self.closed:
|
||||
raise ValueError("I/O operation on closed socket.")
|
||||
return self._writing
|
||||
|
||||
def seekable(self):
|
||||
"""True if the SocketIO is open for seeking.
|
||||
"""
|
||||
if self.closed:
|
||||
raise ValueError("I/O operation on closed socket.")
|
||||
return super().seekable()
|
||||
|
||||
def fileno(self):
|
||||
"""Return the file descriptor of the underlying socket.
|
||||
"""
|
||||
self._checkClosed()
|
||||
return self._sock.fileno()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
if not self.closed:
|
||||
return self.fileno()
|
||||
else:
|
||||
return -1
|
||||
|
||||
@property
|
||||
def mode(self):
|
||||
return self._mode
|
||||
|
||||
def close(self):
|
||||
"""Close the SocketIO object. This doesn't close the underlying
|
||||
socket, except if all references to it have disappeared.
|
||||
"""
|
||||
if self.closed:
|
||||
return
|
||||
io.RawIOBase.close(self)
|
||||
self._sock._decref_socketios()
|
||||
self._sock = None
|
||||
|
|
@ -0,0 +1,262 @@
|
|||
MBLENGTH = {
|
||||
8:1,
|
||||
33:3,
|
||||
88:2,
|
||||
91:2
|
||||
}
|
||||
|
||||
|
||||
class Charset(object):
|
||||
def __init__(self, id, name, collation, is_default):
|
||||
self.id, self.name, self.collation = id, name, collation
|
||||
self.is_default = is_default == 'Yes'
|
||||
|
||||
@property
|
||||
def encoding(self):
|
||||
name = self.name
|
||||
if name == 'utf8mb4':
|
||||
return 'utf8'
|
||||
return name
|
||||
|
||||
@property
|
||||
def is_binary(self):
|
||||
return self.id == 63
|
||||
|
||||
|
||||
class Charsets:
|
||||
def __init__(self):
|
||||
self._by_id = {}
|
||||
|
||||
def add(self, c):
|
||||
self._by_id[c.id] = c
|
||||
|
||||
def by_id(self, id):
|
||||
return self._by_id[id]
|
||||
|
||||
def by_name(self, name):
|
||||
name = name.lower()
|
||||
for c in self._by_id.values():
|
||||
if c.name == name and c.is_default:
|
||||
return c
|
||||
|
||||
_charsets = Charsets()
|
||||
"""
|
||||
Generated with:
|
||||
|
||||
mysql -N -s -e "select id, character_set_name, collation_name, is_default
|
||||
from information_schema.collations order by id;" | python -c "import sys
|
||||
for l in sys.stdin.readlines():
|
||||
id, name, collation, is_default = l.split(chr(9))
|
||||
print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \
|
||||
% (id, name, collation, is_default.strip())
|
||||
"
|
||||
|
||||
"""
|
||||
_charsets.add(Charset(1, 'big5', 'big5_chinese_ci', 'Yes'))
|
||||
_charsets.add(Charset(2, 'latin2', 'latin2_czech_cs', ''))
|
||||
_charsets.add(Charset(3, 'dec8', 'dec8_swedish_ci', 'Yes'))
|
||||
_charsets.add(Charset(4, 'cp850', 'cp850_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(5, 'latin1', 'latin1_german1_ci', ''))
|
||||
_charsets.add(Charset(6, 'hp8', 'hp8_english_ci', 'Yes'))
|
||||
_charsets.add(Charset(7, 'koi8r', 'koi8r_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(8, 'latin1', 'latin1_swedish_ci', 'Yes'))
|
||||
_charsets.add(Charset(9, 'latin2', 'latin2_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(10, 'swe7', 'swe7_swedish_ci', 'Yes'))
|
||||
_charsets.add(Charset(11, 'ascii', 'ascii_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(12, 'ujis', 'ujis_japanese_ci', 'Yes'))
|
||||
_charsets.add(Charset(13, 'sjis', 'sjis_japanese_ci', 'Yes'))
|
||||
_charsets.add(Charset(14, 'cp1251', 'cp1251_bulgarian_ci', ''))
|
||||
_charsets.add(Charset(15, 'latin1', 'latin1_danish_ci', ''))
|
||||
_charsets.add(Charset(16, 'hebrew', 'hebrew_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(18, 'tis620', 'tis620_thai_ci', 'Yes'))
|
||||
_charsets.add(Charset(19, 'euckr', 'euckr_korean_ci', 'Yes'))
|
||||
_charsets.add(Charset(20, 'latin7', 'latin7_estonian_cs', ''))
|
||||
_charsets.add(Charset(21, 'latin2', 'latin2_hungarian_ci', ''))
|
||||
_charsets.add(Charset(22, 'koi8u', 'koi8u_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(23, 'cp1251', 'cp1251_ukrainian_ci', ''))
|
||||
_charsets.add(Charset(24, 'gb2312', 'gb2312_chinese_ci', 'Yes'))
|
||||
_charsets.add(Charset(25, 'greek', 'greek_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(26, 'cp1250', 'cp1250_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(27, 'latin2', 'latin2_croatian_ci', ''))
|
||||
_charsets.add(Charset(28, 'gbk', 'gbk_chinese_ci', 'Yes'))
|
||||
_charsets.add(Charset(29, 'cp1257', 'cp1257_lithuanian_ci', ''))
|
||||
_charsets.add(Charset(30, 'latin5', 'latin5_turkish_ci', 'Yes'))
|
||||
_charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', ''))
|
||||
_charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(34, 'cp1250', 'cp1250_czech_cs', ''))
|
||||
_charsets.add(Charset(35, 'ucs2', 'ucs2_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(39, 'macroman', 'macroman_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(40, 'cp852', 'cp852_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(41, 'latin7', 'latin7_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(42, 'latin7', 'latin7_general_cs', ''))
|
||||
_charsets.add(Charset(43, 'macce', 'macce_bin', ''))
|
||||
_charsets.add(Charset(44, 'cp1250', 'cp1250_croatian_ci', ''))
|
||||
_charsets.add(Charset(45, 'utf8mb4', 'utf8mb4_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(46, 'utf8mb4', 'utf8mb4_bin', ''))
|
||||
_charsets.add(Charset(47, 'latin1', 'latin1_bin', ''))
|
||||
_charsets.add(Charset(48, 'latin1', 'latin1_general_ci', ''))
|
||||
_charsets.add(Charset(49, 'latin1', 'latin1_general_cs', ''))
|
||||
_charsets.add(Charset(50, 'cp1251', 'cp1251_bin', ''))
|
||||
_charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', ''))
|
||||
_charsets.add(Charset(53, 'macroman', 'macroman_bin', ''))
|
||||
_charsets.add(Charset(54, 'utf16', 'utf16_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(55, 'utf16', 'utf16_bin', ''))
|
||||
_charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(58, 'cp1257', 'cp1257_bin', ''))
|
||||
_charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(60, 'utf32', 'utf32_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(61, 'utf32', 'utf32_bin', ''))
|
||||
_charsets.add(Charset(63, 'binary', 'binary', 'Yes'))
|
||||
_charsets.add(Charset(64, 'armscii8', 'armscii8_bin', ''))
|
||||
_charsets.add(Charset(65, 'ascii', 'ascii_bin', ''))
|
||||
_charsets.add(Charset(66, 'cp1250', 'cp1250_bin', ''))
|
||||
_charsets.add(Charset(67, 'cp1256', 'cp1256_bin', ''))
|
||||
_charsets.add(Charset(68, 'cp866', 'cp866_bin', ''))
|
||||
_charsets.add(Charset(69, 'dec8', 'dec8_bin', ''))
|
||||
_charsets.add(Charset(70, 'greek', 'greek_bin', ''))
|
||||
_charsets.add(Charset(71, 'hebrew', 'hebrew_bin', ''))
|
||||
_charsets.add(Charset(72, 'hp8', 'hp8_bin', ''))
|
||||
_charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', ''))
|
||||
_charsets.add(Charset(74, 'koi8r', 'koi8r_bin', ''))
|
||||
_charsets.add(Charset(75, 'koi8u', 'koi8u_bin', ''))
|
||||
_charsets.add(Charset(77, 'latin2', 'latin2_bin', ''))
|
||||
_charsets.add(Charset(78, 'latin5', 'latin5_bin', ''))
|
||||
_charsets.add(Charset(79, 'latin7', 'latin7_bin', ''))
|
||||
_charsets.add(Charset(80, 'cp850', 'cp850_bin', ''))
|
||||
_charsets.add(Charset(81, 'cp852', 'cp852_bin', ''))
|
||||
_charsets.add(Charset(82, 'swe7', 'swe7_bin', ''))
|
||||
_charsets.add(Charset(83, 'utf8', 'utf8_bin', ''))
|
||||
_charsets.add(Charset(84, 'big5', 'big5_bin', ''))
|
||||
_charsets.add(Charset(85, 'euckr', 'euckr_bin', ''))
|
||||
_charsets.add(Charset(86, 'gb2312', 'gb2312_bin', ''))
|
||||
_charsets.add(Charset(87, 'gbk', 'gbk_bin', ''))
|
||||
_charsets.add(Charset(88, 'sjis', 'sjis_bin', ''))
|
||||
_charsets.add(Charset(89, 'tis620', 'tis620_bin', ''))
|
||||
_charsets.add(Charset(90, 'ucs2', 'ucs2_bin', ''))
|
||||
_charsets.add(Charset(91, 'ujis', 'ujis_bin', ''))
|
||||
_charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes'))
|
||||
_charsets.add(Charset(93, 'geostd8', 'geostd8_bin', ''))
|
||||
_charsets.add(Charset(94, 'latin1', 'latin1_spanish_ci', ''))
|
||||
_charsets.add(Charset(95, 'cp932', 'cp932_japanese_ci', 'Yes'))
|
||||
_charsets.add(Charset(96, 'cp932', 'cp932_bin', ''))
|
||||
_charsets.add(Charset(97, 'eucjpms', 'eucjpms_japanese_ci', 'Yes'))
|
||||
_charsets.add(Charset(98, 'eucjpms', 'eucjpms_bin', ''))
|
||||
_charsets.add(Charset(99, 'cp1250', 'cp1250_polish_ci', ''))
|
||||
_charsets.add(Charset(101, 'utf16', 'utf16_unicode_ci', ''))
|
||||
_charsets.add(Charset(102, 'utf16', 'utf16_icelandic_ci', ''))
|
||||
_charsets.add(Charset(103, 'utf16', 'utf16_latvian_ci', ''))
|
||||
_charsets.add(Charset(104, 'utf16', 'utf16_romanian_ci', ''))
|
||||
_charsets.add(Charset(105, 'utf16', 'utf16_slovenian_ci', ''))
|
||||
_charsets.add(Charset(106, 'utf16', 'utf16_polish_ci', ''))
|
||||
_charsets.add(Charset(107, 'utf16', 'utf16_estonian_ci', ''))
|
||||
_charsets.add(Charset(108, 'utf16', 'utf16_spanish_ci', ''))
|
||||
_charsets.add(Charset(109, 'utf16', 'utf16_swedish_ci', ''))
|
||||
_charsets.add(Charset(110, 'utf16', 'utf16_turkish_ci', ''))
|
||||
_charsets.add(Charset(111, 'utf16', 'utf16_czech_ci', ''))
|
||||
_charsets.add(Charset(112, 'utf16', 'utf16_danish_ci', ''))
|
||||
_charsets.add(Charset(113, 'utf16', 'utf16_lithuanian_ci', ''))
|
||||
_charsets.add(Charset(114, 'utf16', 'utf16_slovak_ci', ''))
|
||||
_charsets.add(Charset(115, 'utf16', 'utf16_spanish2_ci', ''))
|
||||
_charsets.add(Charset(116, 'utf16', 'utf16_roman_ci', ''))
|
||||
_charsets.add(Charset(117, 'utf16', 'utf16_persian_ci', ''))
|
||||
_charsets.add(Charset(118, 'utf16', 'utf16_esperanto_ci', ''))
|
||||
_charsets.add(Charset(119, 'utf16', 'utf16_hungarian_ci', ''))
|
||||
_charsets.add(Charset(120, 'utf16', 'utf16_sinhala_ci', ''))
|
||||
_charsets.add(Charset(128, 'ucs2', 'ucs2_unicode_ci', ''))
|
||||
_charsets.add(Charset(129, 'ucs2', 'ucs2_icelandic_ci', ''))
|
||||
_charsets.add(Charset(130, 'ucs2', 'ucs2_latvian_ci', ''))
|
||||
_charsets.add(Charset(131, 'ucs2', 'ucs2_romanian_ci', ''))
|
||||
_charsets.add(Charset(132, 'ucs2', 'ucs2_slovenian_ci', ''))
|
||||
_charsets.add(Charset(133, 'ucs2', 'ucs2_polish_ci', ''))
|
||||
_charsets.add(Charset(134, 'ucs2', 'ucs2_estonian_ci', ''))
|
||||
_charsets.add(Charset(135, 'ucs2', 'ucs2_spanish_ci', ''))
|
||||
_charsets.add(Charset(136, 'ucs2', 'ucs2_swedish_ci', ''))
|
||||
_charsets.add(Charset(137, 'ucs2', 'ucs2_turkish_ci', ''))
|
||||
_charsets.add(Charset(138, 'ucs2', 'ucs2_czech_ci', ''))
|
||||
_charsets.add(Charset(139, 'ucs2', 'ucs2_danish_ci', ''))
|
||||
_charsets.add(Charset(140, 'ucs2', 'ucs2_lithuanian_ci', ''))
|
||||
_charsets.add(Charset(141, 'ucs2', 'ucs2_slovak_ci', ''))
|
||||
_charsets.add(Charset(142, 'ucs2', 'ucs2_spanish2_ci', ''))
|
||||
_charsets.add(Charset(143, 'ucs2', 'ucs2_roman_ci', ''))
|
||||
_charsets.add(Charset(144, 'ucs2', 'ucs2_persian_ci', ''))
|
||||
_charsets.add(Charset(145, 'ucs2', 'ucs2_esperanto_ci', ''))
|
||||
_charsets.add(Charset(146, 'ucs2', 'ucs2_hungarian_ci', ''))
|
||||
_charsets.add(Charset(147, 'ucs2', 'ucs2_sinhala_ci', ''))
|
||||
_charsets.add(Charset(159, 'ucs2', 'ucs2_general_mysql500_ci', ''))
|
||||
_charsets.add(Charset(160, 'utf32', 'utf32_unicode_ci', ''))
|
||||
_charsets.add(Charset(161, 'utf32', 'utf32_icelandic_ci', ''))
|
||||
_charsets.add(Charset(162, 'utf32', 'utf32_latvian_ci', ''))
|
||||
_charsets.add(Charset(163, 'utf32', 'utf32_romanian_ci', ''))
|
||||
_charsets.add(Charset(164, 'utf32', 'utf32_slovenian_ci', ''))
|
||||
_charsets.add(Charset(165, 'utf32', 'utf32_polish_ci', ''))
|
||||
_charsets.add(Charset(166, 'utf32', 'utf32_estonian_ci', ''))
|
||||
_charsets.add(Charset(167, 'utf32', 'utf32_spanish_ci', ''))
|
||||
_charsets.add(Charset(168, 'utf32', 'utf32_swedish_ci', ''))
|
||||
_charsets.add(Charset(169, 'utf32', 'utf32_turkish_ci', ''))
|
||||
_charsets.add(Charset(170, 'utf32', 'utf32_czech_ci', ''))
|
||||
_charsets.add(Charset(171, 'utf32', 'utf32_danish_ci', ''))
|
||||
_charsets.add(Charset(172, 'utf32', 'utf32_lithuanian_ci', ''))
|
||||
_charsets.add(Charset(173, 'utf32', 'utf32_slovak_ci', ''))
|
||||
_charsets.add(Charset(174, 'utf32', 'utf32_spanish2_ci', ''))
|
||||
_charsets.add(Charset(175, 'utf32', 'utf32_roman_ci', ''))
|
||||
_charsets.add(Charset(176, 'utf32', 'utf32_persian_ci', ''))
|
||||
_charsets.add(Charset(177, 'utf32', 'utf32_esperanto_ci', ''))
|
||||
_charsets.add(Charset(178, 'utf32', 'utf32_hungarian_ci', ''))
|
||||
_charsets.add(Charset(179, 'utf32', 'utf32_sinhala_ci', ''))
|
||||
_charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', ''))
|
||||
_charsets.add(Charset(193, 'utf8', 'utf8_icelandic_ci', ''))
|
||||
_charsets.add(Charset(194, 'utf8', 'utf8_latvian_ci', ''))
|
||||
_charsets.add(Charset(195, 'utf8', 'utf8_romanian_ci', ''))
|
||||
_charsets.add(Charset(196, 'utf8', 'utf8_slovenian_ci', ''))
|
||||
_charsets.add(Charset(197, 'utf8', 'utf8_polish_ci', ''))
|
||||
_charsets.add(Charset(198, 'utf8', 'utf8_estonian_ci', ''))
|
||||
_charsets.add(Charset(199, 'utf8', 'utf8_spanish_ci', ''))
|
||||
_charsets.add(Charset(200, 'utf8', 'utf8_swedish_ci', ''))
|
||||
_charsets.add(Charset(201, 'utf8', 'utf8_turkish_ci', ''))
|
||||
_charsets.add(Charset(202, 'utf8', 'utf8_czech_ci', ''))
|
||||
_charsets.add(Charset(203, 'utf8', 'utf8_danish_ci', ''))
|
||||
_charsets.add(Charset(204, 'utf8', 'utf8_lithuanian_ci', ''))
|
||||
_charsets.add(Charset(205, 'utf8', 'utf8_slovak_ci', ''))
|
||||
_charsets.add(Charset(206, 'utf8', 'utf8_spanish2_ci', ''))
|
||||
_charsets.add(Charset(207, 'utf8', 'utf8_roman_ci', ''))
|
||||
_charsets.add(Charset(208, 'utf8', 'utf8_persian_ci', ''))
|
||||
_charsets.add(Charset(209, 'utf8', 'utf8_esperanto_ci', ''))
|
||||
_charsets.add(Charset(210, 'utf8', 'utf8_hungarian_ci', ''))
|
||||
_charsets.add(Charset(211, 'utf8', 'utf8_sinhala_ci', ''))
|
||||
_charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', ''))
|
||||
_charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', ''))
|
||||
_charsets.add(Charset(225, 'utf8mb4', 'utf8mb4_icelandic_ci', ''))
|
||||
_charsets.add(Charset(226, 'utf8mb4', 'utf8mb4_latvian_ci', ''))
|
||||
_charsets.add(Charset(227, 'utf8mb4', 'utf8mb4_romanian_ci', ''))
|
||||
_charsets.add(Charset(228, 'utf8mb4', 'utf8mb4_slovenian_ci', ''))
|
||||
_charsets.add(Charset(229, 'utf8mb4', 'utf8mb4_polish_ci', ''))
|
||||
_charsets.add(Charset(230, 'utf8mb4', 'utf8mb4_estonian_ci', ''))
|
||||
_charsets.add(Charset(231, 'utf8mb4', 'utf8mb4_spanish_ci', ''))
|
||||
_charsets.add(Charset(232, 'utf8mb4', 'utf8mb4_swedish_ci', ''))
|
||||
_charsets.add(Charset(233, 'utf8mb4', 'utf8mb4_turkish_ci', ''))
|
||||
_charsets.add(Charset(234, 'utf8mb4', 'utf8mb4_czech_ci', ''))
|
||||
_charsets.add(Charset(235, 'utf8mb4', 'utf8mb4_danish_ci', ''))
|
||||
_charsets.add(Charset(236, 'utf8mb4', 'utf8mb4_lithuanian_ci', ''))
|
||||
_charsets.add(Charset(237, 'utf8mb4', 'utf8mb4_slovak_ci', ''))
|
||||
_charsets.add(Charset(238, 'utf8mb4', 'utf8mb4_spanish2_ci', ''))
|
||||
_charsets.add(Charset(239, 'utf8mb4', 'utf8mb4_roman_ci', ''))
|
||||
_charsets.add(Charset(240, 'utf8mb4', 'utf8mb4_persian_ci', ''))
|
||||
_charsets.add(Charset(241, 'utf8mb4', 'utf8mb4_esperanto_ci', ''))
|
||||
_charsets.add(Charset(242, 'utf8mb4', 'utf8mb4_hungarian_ci', ''))
|
||||
_charsets.add(Charset(243, 'utf8mb4', 'utf8mb4_sinhala_ci', ''))
|
||||
|
||||
|
||||
charset_by_name = _charsets.by_name
|
||||
charset_by_id = _charsets.by_id
|
||||
|
||||
|
||||
def charset_to_encoding(name):
|
||||
"""Convert MySQL's charset name to Python's codec name"""
|
||||
if name == 'utf8mb4':
|
||||
return 'utf8'
|
||||
return name
|
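A quick sketch of the charset lookup helpers defined above.

cs = charset_by_name('utf8mb4')          # default collation: utf8mb4_general_ci
print(cs.id, cs.collation, cs.encoding)  # 45 utf8mb4_general_ci utf8
print(charset_by_id(63).is_binary)       # True
print(charset_to_encoding('utf8mb4'))    # 'utf8'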
|
@ -0,0 +1,19 @@
|
|||
LONG_PASSWORD = 1
|
||||
FOUND_ROWS = 1 << 1
|
||||
LONG_FLAG = 1 << 2
|
||||
CONNECT_WITH_DB = 1 << 3
|
||||
NO_SCHEMA = 1 << 4
|
||||
COMPRESS = 1 << 5
|
||||
ODBC = 1 << 6
|
||||
LOCAL_FILES = 1 << 7
|
||||
IGNORE_SPACE = 1 << 8
|
||||
PROTOCOL_41 = 1 << 9
|
||||
INTERACTIVE = 1 << 10
|
||||
SSL = 1 << 11
|
||||
IGNORE_SIGPIPE = 1 << 12
|
||||
TRANSACTIONS = 1 << 13
|
||||
SECURE_CONNECTION = 1 << 15
|
||||
MULTI_STATEMENTS = 1 << 16
|
||||
MULTI_RESULTS = 1 << 17
|
||||
CAPABILITIES = (LONG_PASSWORD | LONG_FLAG | TRANSACTIONS |
|
||||
PROTOCOL_41 | SECURE_CONNECTION)
|
|
@ -0,0 +1,33 @@
|
|||
|
||||
COM_SLEEP = 0x00
|
||||
COM_QUIT = 0x01
|
||||
COM_INIT_DB = 0x02
|
||||
COM_QUERY = 0x03
|
||||
COM_FIELD_LIST = 0x04
|
||||
COM_CREATE_DB = 0x05
|
||||
COM_DROP_DB = 0x06
|
||||
COM_REFRESH = 0x07
|
||||
COM_SHUTDOWN = 0x08
|
||||
COM_STATISTICS = 0x09
|
||||
COM_PROCESS_INFO = 0x0a
|
||||
COM_CONNECT = 0x0b
|
||||
COM_PROCESS_KILL = 0x0c
|
||||
COM_DEBUG = 0x0d
|
||||
COM_PING = 0x0e
|
||||
COM_TIME = 0x0f
|
||||
COM_DELAYED_INSERT = 0x10
|
||||
COM_CHANGE_USER = 0x11
|
||||
COM_BINLOG_DUMP = 0x12
|
||||
COM_TABLE_DUMP = 0x13
|
||||
COM_CONNECT_OUT = 0x14
|
||||
COM_REGISTER_SLAVE = 0x15
|
||||
COM_STMT_PREPARE = 0x16
|
||||
COM_STMT_EXECUTE = 0x17
|
||||
COM_STMT_SEND_LONG_DATA = 0x18
|
||||
COM_STMT_CLOSE = 0x19
|
||||
COM_STMT_RESET = 0x1a
|
||||
COM_SET_OPTION = 0x1b
|
||||
COM_STMT_FETCH = 0x1c
|
||||
COM_DAEMON = 0x1d
|
||||
COM_BINLOG_DUMP_GTID = 0x1e
|
||||
COM_END = 0x1f
|
|
@ -0,0 +1,67 @@
|
|||
# errmsg.h
|
||||
CR_ERROR_FIRST = 2000
|
||||
CR_UNKNOWN_ERROR = 2000
|
||||
CR_SOCKET_CREATE_ERROR = 2001
|
||||
CR_CONNECTION_ERROR = 2002
|
||||
CR_CONN_HOST_ERROR = 2003
|
||||
CR_IPSOCK_ERROR = 2004
|
||||
CR_UNKNOWN_HOST = 2005
|
||||
CR_SERVER_GONE_ERROR = 2006
|
||||
CR_VERSION_ERROR = 2007
|
||||
CR_OUT_OF_MEMORY = 2008
|
||||
CR_WRONG_HOST_INFO = 2009
|
||||
CR_LOCALHOST_CONNECTION = 2010
|
||||
CR_TCP_CONNECTION = 2011
|
||||
CR_SERVER_HANDSHAKE_ERR = 2012
|
||||
CR_SERVER_LOST = 2013
|
||||
CR_COMMANDS_OUT_OF_SYNC = 2014
|
||||
CR_NAMEDPIPE_CONNECTION = 2015
|
||||
CR_NAMEDPIPEWAIT_ERROR = 2016
|
||||
CR_NAMEDPIPEOPEN_ERROR = 2017
|
||||
CR_NAMEDPIPESETSTATE_ERROR = 2018
|
||||
CR_CANT_READ_CHARSET = 2019
|
||||
CR_NET_PACKET_TOO_LARGE = 2020
|
||||
CR_EMBEDDED_CONNECTION = 2021
|
||||
CR_PROBE_SLAVE_STATUS = 2022
|
||||
CR_PROBE_SLAVE_HOSTS = 2023
|
||||
CR_PROBE_SLAVE_CONNECT = 2024
|
||||
CR_PROBE_MASTER_CONNECT = 2025
|
||||
CR_SSL_CONNECTION_ERROR = 2026
|
||||
CR_MALFORMED_PACKET = 2027
|
||||
CR_WRONG_LICENSE = 2028
|
||||
|
||||
CR_NULL_POINTER = 2029
|
||||
CR_NO_PREPARE_STMT = 2030
|
||||
CR_PARAMS_NOT_BOUND = 2031
|
||||
CR_DATA_TRUNCATED = 2032
|
||||
CR_NO_PARAMETERS_EXISTS = 2033
|
||||
CR_INVALID_PARAMETER_NO = 2034
|
||||
CR_INVALID_BUFFER_USE = 2035
|
||||
CR_UNSUPPORTED_PARAM_TYPE = 2036
|
||||
|
||||
CR_SHARED_MEMORY_CONNECTION = 2037
|
||||
CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
|
||||
CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
|
||||
CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
|
||||
CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
|
||||
CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
|
||||
CR_SHARED_MEMORY_MAP_ERROR = 2043
|
||||
CR_SHARED_MEMORY_EVENT_ERROR = 2044
|
||||
CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
|
||||
CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
|
||||
CR_CONN_UNKNOW_PROTOCOL = 2047
|
||||
CR_INVALID_CONN_HANDLE = 2048
|
||||
CR_SECURE_AUTH = 2049
|
||||
CR_FETCH_CANCELED = 2050
|
||||
CR_NO_DATA = 2051
|
||||
CR_NO_STMT_METADATA = 2052
|
||||
CR_NO_RESULT_SET = 2053
|
||||
CR_NOT_IMPLEMENTED = 2054
|
||||
CR_SERVER_LOST_EXTENDED = 2055
|
||||
CR_STMT_CLOSED = 2056
|
||||
CR_NEW_STMT_METADATA = 2057
|
||||
CR_ALREADY_CONNECTED = 2058
|
||||
CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
|
||||
CR_DUPLICATE_CONNECTION_ATTR = 2060
|
||||
CR_AUTH_PLUGIN_ERR = 2061
|
||||
CR_ERROR_LAST = 2061
|
|
@ -0,0 +1,472 @@
|
|||
|
||||
ERROR_FIRST = 1000
|
||||
HASHCHK = 1000
|
||||
NISAMCHK = 1001
|
||||
NO = 1002
|
||||
YES = 1003
|
||||
CANT_CREATE_FILE = 1004
|
||||
CANT_CREATE_TABLE = 1005
|
||||
CANT_CREATE_DB = 1006
|
||||
DB_CREATE_EXISTS = 1007
|
||||
DB_DROP_EXISTS = 1008
|
||||
DB_DROP_DELETE = 1009
|
||||
DB_DROP_RMDIR = 1010
|
||||
CANT_DELETE_FILE = 1011
|
||||
CANT_FIND_SYSTEM_REC = 1012
|
||||
CANT_GET_STAT = 1013
|
||||
CANT_GET_WD = 1014
|
||||
CANT_LOCK = 1015
|
||||
CANT_OPEN_FILE = 1016
|
||||
FILE_NOT_FOUND = 1017
|
||||
CANT_READ_DIR = 1018
|
||||
CANT_SET_WD = 1019
|
||||
CHECKREAD = 1020
|
||||
DISK_FULL = 1021
|
||||
DUP_KEY = 1022
|
||||
ERROR_ON_CLOSE = 1023
|
||||
ERROR_ON_READ = 1024
|
||||
ERROR_ON_RENAME = 1025
|
||||
ERROR_ON_WRITE = 1026
|
||||
FILE_USED = 1027
|
||||
FILSORT_ABORT = 1028
|
||||
FORM_NOT_FOUND = 1029
|
||||
GET_ERRNO = 1030
|
||||
ILLEGAL_HA = 1031
|
||||
KEY_NOT_FOUND = 1032
|
||||
NOT_FORM_FILE = 1033
|
||||
NOT_KEYFILE = 1034
|
||||
OLD_KEYFILE = 1035
|
||||
OPEN_AS_READONLY = 1036
|
||||
OUTOFMEMORY = 1037
|
||||
OUT_OF_SORTMEMORY = 1038
|
||||
UNEXPECTED_EOF = 1039
|
||||
CON_COUNT_ERROR = 1040
|
||||
OUT_OF_RESOURCES = 1041
|
||||
BAD_HOST_ERROR = 1042
|
||||
HANDSHAKE_ERROR = 1043
|
||||
DBACCESS_DENIED_ERROR = 1044
|
||||
ACCESS_DENIED_ERROR = 1045
|
||||
NO_DB_ERROR = 1046
|
||||
UNKNOWN_COM_ERROR = 1047
|
||||
BAD_NULL_ERROR = 1048
|
||||
BAD_DB_ERROR = 1049
|
||||
TABLE_EXISTS_ERROR = 1050
|
||||
BAD_TABLE_ERROR = 1051
|
||||
NON_UNIQ_ERROR = 1052
|
||||
SERVER_SHUTDOWN = 1053
|
||||
BAD_FIELD_ERROR = 1054
|
||||
WRONG_FIELD_WITH_GROUP = 1055
|
||||
WRONG_GROUP_FIELD = 1056
|
||||
WRONG_SUM_SELECT = 1057
|
||||
WRONG_VALUE_COUNT = 1058
|
||||
TOO_LONG_IDENT = 1059
|
||||
DUP_FIELDNAME = 1060
|
||||
DUP_KEYNAME = 1061
|
||||
DUP_ENTRY = 1062
|
||||
WRONG_FIELD_SPEC = 1063
|
||||
PARSE_ERROR = 1064
|
||||
EMPTY_QUERY = 1065
|
||||
NONUNIQ_TABLE = 1066
|
||||
INVALID_DEFAULT = 1067
|
||||
MULTIPLE_PRI_KEY = 1068
|
||||
TOO_MANY_KEYS = 1069
|
||||
TOO_MANY_KEY_PARTS = 1070
|
||||
TOO_LONG_KEY = 1071
|
||||
KEY_COLUMN_DOES_NOT_EXITS = 1072
|
||||
BLOB_USED_AS_KEY = 1073
|
||||
TOO_BIG_FIELDLENGTH = 1074
|
||||
WRONG_AUTO_KEY = 1075
|
||||
READY = 1076
|
||||
NORMAL_SHUTDOWN = 1077
|
||||
GOT_SIGNAL = 1078
|
||||
SHUTDOWN_COMPLETE = 1079
|
||||
FORCING_CLOSE = 1080
|
||||
IPSOCK_ERROR = 1081
|
||||
NO_SUCH_INDEX = 1082
|
||||
WRONG_FIELD_TERMINATORS = 1083
|
||||
BLOBS_AND_NO_TERMINATED = 1084
|
||||
TEXTFILE_NOT_READABLE = 1085
|
||||
FILE_EXISTS_ERROR = 1086
|
||||
LOAD_INFO = 1087
|
||||
ALTER_INFO = 1088
|
||||
WRONG_SUB_KEY = 1089
|
||||
CANT_REMOVE_ALL_FIELDS = 1090
|
||||
CANT_DROP_FIELD_OR_KEY = 1091
|
||||
INSERT_INFO = 1092
|
||||
UPDATE_TABLE_USED = 1093
|
||||
NO_SUCH_THREAD = 1094
|
||||
KILL_DENIED_ERROR = 1095
|
||||
NO_TABLES_USED = 1096
|
||||
TOO_BIG_SET = 1097
|
||||
NO_UNIQUE_LOGFILE = 1098
|
||||
TABLE_NOT_LOCKED_FOR_WRITE = 1099
|
||||
TABLE_NOT_LOCKED = 1100
|
||||
BLOB_CANT_HAVE_DEFAULT = 1101
|
||||
WRONG_DB_NAME = 1102
|
||||
WRONG_TABLE_NAME = 1103
|
||||
TOO_BIG_SELECT = 1104
|
||||
UNKNOWN_ERROR = 1105
|
||||
UNKNOWN_PROCEDURE = 1106
|
||||
WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
|
||||
WRONG_PARAMETERS_TO_PROCEDURE = 1108
|
||||
UNKNOWN_TABLE = 1109
|
||||
FIELD_SPECIFIED_TWICE = 1110
|
||||
INVALID_GROUP_FUNC_USE = 1111
|
||||
UNSUPPORTED_EXTENSION = 1112
|
||||
TABLE_MUST_HAVE_COLUMNS = 1113
|
||||
RECORD_FILE_FULL = 1114
|
||||
UNKNOWN_CHARACTER_SET = 1115
|
||||
TOO_MANY_TABLES = 1116
|
||||
TOO_MANY_FIELDS = 1117
|
||||
TOO_BIG_ROWSIZE = 1118
|
||||
STACK_OVERRUN = 1119
|
||||
WRONG_OUTER_JOIN = 1120
|
||||
NULL_COLUMN_IN_INDEX = 1121
|
||||
CANT_FIND_UDF = 1122
|
||||
CANT_INITIALIZE_UDF = 1123
|
||||
UDF_NO_PATHS = 1124
|
||||
UDF_EXISTS = 1125
|
||||
CANT_OPEN_LIBRARY = 1126
|
||||
CANT_FIND_DL_ENTRY = 1127
|
||||
FUNCTION_NOT_DEFINED = 1128
|
||||
HOST_IS_BLOCKED = 1129
|
||||
HOST_NOT_PRIVILEGED = 1130
|
||||
PASSWORD_ANONYMOUS_USER = 1131
|
||||
PASSWORD_NOT_ALLOWED = 1132
|
||||
PASSWORD_NO_MATCH = 1133
|
||||
UPDATE_INFO = 1134
|
||||
CANT_CREATE_THREAD = 1135
|
||||
WRONG_VALUE_COUNT_ON_ROW = 1136
|
||||
CANT_REOPEN_TABLE = 1137
|
||||
INVALID_USE_OF_NULL = 1138
|
||||
REGEXP_ERROR = 1139
|
||||
MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
|
||||
NONEXISTING_GRANT = 1141
|
||||
TABLEACCESS_DENIED_ERROR = 1142
|
||||
COLUMNACCESS_DENIED_ERROR = 1143
|
||||
ILLEGAL_GRANT_FOR_TABLE = 1144
|
||||
GRANT_WRONG_HOST_OR_USER = 1145
|
||||
NO_SUCH_TABLE = 1146
|
||||
NONEXISTING_TABLE_GRANT = 1147
|
||||
NOT_ALLOWED_COMMAND = 1148
|
||||
SYNTAX_ERROR = 1149
|
||||
DELAYED_CANT_CHANGE_LOCK = 1150
|
||||
TOO_MANY_DELAYED_THREADS = 1151
|
||||
ABORTING_CONNECTION = 1152
|
||||
NET_PACKET_TOO_LARGE = 1153
|
||||
NET_READ_ERROR_FROM_PIPE = 1154
|
||||
NET_FCNTL_ERROR = 1155
|
||||
NET_PACKETS_OUT_OF_ORDER = 1156
|
||||
NET_UNCOMPRESS_ERROR = 1157
|
||||
NET_READ_ERROR = 1158
|
||||
NET_READ_INTERRUPTED = 1159
|
||||
NET_ERROR_ON_WRITE = 1160
|
||||
NET_WRITE_INTERRUPTED = 1161
|
||||
TOO_LONG_STRING = 1162
|
||||
TABLE_CANT_HANDLE_BLOB = 1163
|
||||
TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
|
||||
DELAYED_INSERT_TABLE_LOCKED = 1165
|
||||
WRONG_COLUMN_NAME = 1166
|
||||
WRONG_KEY_COLUMN = 1167
|
||||
WRONG_MRG_TABLE = 1168
|
||||
DUP_UNIQUE = 1169
|
||||
BLOB_KEY_WITHOUT_LENGTH = 1170
|
||||
PRIMARY_CANT_HAVE_NULL = 1171
|
||||
TOO_MANY_ROWS = 1172
|
||||
REQUIRES_PRIMARY_KEY = 1173
|
||||
NO_RAID_COMPILED = 1174
|
||||
UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
|
||||
KEY_DOES_NOT_EXITS = 1176
|
||||
CHECK_NO_SUCH_TABLE = 1177
|
||||
CHECK_NOT_IMPLEMENTED = 1178
|
||||
CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
|
||||
ERROR_DURING_COMMIT = 1180
|
||||
ERROR_DURING_ROLLBACK = 1181
|
||||
ERROR_DURING_FLUSH_LOGS = 1182
|
||||
ERROR_DURING_CHECKPOINT = 1183
|
||||
NEW_ABORTING_CONNECTION = 1184
|
||||
DUMP_NOT_IMPLEMENTED = 1185
|
||||
FLUSH_MASTER_BINLOG_CLOSED = 1186
|
||||
INDEX_REBUILD = 1187
|
||||
MASTER = 1188
|
||||
MASTER_NET_READ = 1189
|
||||
MASTER_NET_WRITE = 1190
|
||||
FT_MATCHING_KEY_NOT_FOUND = 1191
|
||||
LOCK_OR_ACTIVE_TRANSACTION = 1192
|
||||
UNKNOWN_SYSTEM_VARIABLE = 1193
|
||||
CRASHED_ON_USAGE = 1194
|
||||
CRASHED_ON_REPAIR = 1195
|
||||
WARNING_NOT_COMPLETE_ROLLBACK = 1196
|
||||
TRANS_CACHE_FULL = 1197
|
||||
SLAVE_MUST_STOP = 1198
|
||||
SLAVE_NOT_RUNNING = 1199
|
||||
BAD_SLAVE = 1200
|
||||
MASTER_INFO = 1201
|
||||
SLAVE_THREAD = 1202
|
||||
TOO_MANY_USER_CONNECTIONS = 1203
|
||||
SET_CONSTANTS_ONLY = 1204
|
||||
LOCK_WAIT_TIMEOUT = 1205
|
||||
LOCK_TABLE_FULL = 1206
|
||||
READ_ONLY_TRANSACTION = 1207
|
||||
DROP_DB_WITH_READ_LOCK = 1208
|
||||
CREATE_DB_WITH_READ_LOCK = 1209
|
||||
WRONG_ARGUMENTS = 1210
|
||||
NO_PERMISSION_TO_CREATE_USER = 1211
|
||||
UNION_TABLES_IN_DIFFERENT_DIR = 1212
|
||||
LOCK_DEADLOCK = 1213
|
||||
TABLE_CANT_HANDLE_FT = 1214
|
||||
CANNOT_ADD_FOREIGN = 1215
|
||||
NO_REFERENCED_ROW = 1216
|
||||
ROW_IS_REFERENCED = 1217
|
||||
CONNECT_TO_MASTER = 1218
|
||||
QUERY_ON_MASTER = 1219
|
||||
ERROR_WHEN_EXECUTING_COMMAND = 1220
|
||||
WRONG_USAGE = 1221
|
||||
WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
|
||||
CANT_UPDATE_WITH_READLOCK = 1223
|
||||
MIXING_NOT_ALLOWED = 1224
|
||||
DUP_ARGUMENT = 1225
|
||||
USER_LIMIT_REACHED = 1226
|
||||
SPECIFIC_ACCESS_DENIED_ERROR = 1227
|
||||
LOCAL_VARIABLE = 1228
|
||||
GLOBAL_VARIABLE = 1229
|
||||
NO_DEFAULT = 1230
|
||||
WRONG_VALUE_FOR_VAR = 1231
|
||||
WRONG_TYPE_FOR_VAR = 1232
|
||||
VAR_CANT_BE_READ = 1233
|
||||
CANT_USE_OPTION_HERE = 1234
|
||||
NOT_SUPPORTED_YET = 1235
|
||||
MASTER_FATAL_ERROR_READING_BINLOG = 1236
|
||||
SLAVE_IGNORED_TABLE = 1237
|
||||
INCORRECT_GLOBAL_LOCAL_VAR = 1238
|
||||
WRONG_FK_DEF = 1239
|
||||
KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
|
||||
OPERAND_COLUMNS = 1241
|
||||
SUBQUERY_NO_1_ROW = 1242
|
||||
UNKNOWN_STMT_HANDLER = 1243
|
||||
CORRUPT_HELP_DB = 1244
|
||||
CYCLIC_REFERENCE = 1245
|
||||
AUTO_CONVERT = 1246
|
||||
ILLEGAL_REFERENCE = 1247
|
||||
DERIVED_MUST_HAVE_ALIAS = 1248
|
||||
SELECT_REDUCED = 1249
|
||||
TABLENAME_NOT_ALLOWED_HERE = 1250
|
||||
NOT_SUPPORTED_AUTH_MODE = 1251
|
||||
SPATIAL_CANT_HAVE_NULL = 1252
|
||||
COLLATION_CHARSET_MISMATCH = 1253
|
||||
SLAVE_WAS_RUNNING = 1254
|
||||
SLAVE_WAS_NOT_RUNNING = 1255
|
||||
TOO_BIG_FOR_UNCOMPRESS = 1256
|
||||
ZLIB_Z_MEM_ERROR = 1257
|
||||
ZLIB_Z_BUF_ERROR = 1258
|
||||
ZLIB_Z_DATA_ERROR = 1259
|
||||
CUT_VALUE_GROUP_CONCAT = 1260
|
||||
WARN_TOO_FEW_RECORDS = 1261
|
||||
WARN_TOO_MANY_RECORDS = 1262
|
||||
WARN_NULL_TO_NOTNULL = 1263
|
||||
WARN_DATA_OUT_OF_RANGE = 1264
|
||||
WARN_DATA_TRUNCATED = 1265
|
||||
WARN_USING_OTHER_HANDLER = 1266
|
||||
CANT_AGGREGATE_2COLLATIONS = 1267
|
||||
DROP_USER = 1268
|
||||
REVOKE_GRANTS = 1269
|
||||
CANT_AGGREGATE_3COLLATIONS = 1270
|
||||
CANT_AGGREGATE_NCOLLATIONS = 1271
|
||||
VARIABLE_IS_NOT_STRUCT = 1272
|
||||
UNKNOWN_COLLATION = 1273
|
||||
SLAVE_IGNORED_SSL_PARAMS = 1274
|
||||
SERVER_IS_IN_SECURE_AUTH_MODE = 1275
|
||||
WARN_FIELD_RESOLVED = 1276
|
||||
BAD_SLAVE_UNTIL_COND = 1277
|
||||
MISSING_SKIP_SLAVE = 1278
|
||||
UNTIL_COND_IGNORED = 1279
|
||||
WRONG_NAME_FOR_INDEX = 1280
|
||||
WRONG_NAME_FOR_CATALOG = 1281
|
||||
WARN_QC_RESIZE = 1282
|
||||
BAD_FT_COLUMN = 1283
|
||||
UNKNOWN_KEY_CACHE = 1284
|
||||
WARN_HOSTNAME_WONT_WORK = 1285
|
||||
UNKNOWN_STORAGE_ENGINE = 1286
|
||||
WARN_DEPRECATED_SYNTAX = 1287
|
||||
NON_UPDATABLE_TABLE = 1288
|
||||
FEATURE_DISABLED = 1289
|
||||
OPTION_PREVENTS_STATEMENT = 1290
|
||||
DUPLICATED_VALUE_IN_TYPE = 1291
|
||||
TRUNCATED_WRONG_VALUE = 1292
|
||||
TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
|
||||
INVALID_ON_UPDATE = 1294
|
||||
UNSUPPORTED_PS = 1295
|
||||
GET_ERRMSG = 1296
|
||||
GET_TEMPORARY_ERRMSG = 1297
|
||||
UNKNOWN_TIME_ZONE = 1298
|
||||
WARN_INVALID_TIMESTAMP = 1299
|
||||
INVALID_CHARACTER_STRING = 1300
|
||||
WARN_ALLOWED_PACKET_OVERFLOWED = 1301
|
||||
CONFLICTING_DECLARATIONS = 1302
|
||||
SP_NO_RECURSIVE_CREATE = 1303
|
||||
SP_ALREADY_EXISTS = 1304
|
||||
SP_DOES_NOT_EXIST = 1305
|
||||
SP_DROP_FAILED = 1306
|
||||
SP_STORE_FAILED = 1307
|
||||
SP_LILABEL_MISMATCH = 1308
|
||||
SP_LABEL_REDEFINE = 1309
|
||||
SP_LABEL_MISMATCH = 1310
|
||||
SP_UNINIT_VAR = 1311
|
||||
SP_BADSELECT = 1312
|
||||
SP_BADRETURN = 1313
|
||||
SP_BADSTATEMENT = 1314
|
||||
UPDATE_LOG_DEPRECATED_IGNORED = 1315
|
||||
UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
|
||||
QUERY_INTERRUPTED = 1317
|
||||
SP_WRONG_NO_OF_ARGS = 1318
|
||||
SP_COND_MISMATCH = 1319
|
||||
SP_NORETURN = 1320
|
||||
SP_NORETURNEND = 1321
|
||||
SP_BAD_CURSOR_QUERY = 1322
|
||||
SP_BAD_CURSOR_SELECT = 1323
|
||||
SP_CURSOR_MISMATCH = 1324
|
||||
SP_CURSOR_ALREADY_OPEN = 1325
|
||||
SP_CURSOR_NOT_OPEN = 1326
|
||||
SP_UNDECLARED_VAR = 1327
|
||||
SP_WRONG_NO_OF_FETCH_ARGS = 1328
|
||||
SP_FETCH_NO_DATA = 1329
|
||||
SP_DUP_PARAM = 1330
|
||||
SP_DUP_VAR = 1331
|
||||
SP_DUP_COND = 1332
|
||||
SP_DUP_CURS = 1333
|
||||
SP_CANT_ALTER = 1334
|
||||
SP_SUBSELECT_NYI = 1335
|
||||
STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
|
||||
SP_VARCOND_AFTER_CURSHNDLR = 1337
|
||||
SP_CURSOR_AFTER_HANDLER = 1338
|
||||
SP_CASE_NOT_FOUND = 1339
|
||||
FPARSER_TOO_BIG_FILE = 1340
|
||||
FPARSER_BAD_HEADER = 1341
|
||||
FPARSER_EOF_IN_COMMENT = 1342
|
||||
FPARSER_ERROR_IN_PARAMETER = 1343
|
||||
FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
|
||||
VIEW_NO_EXPLAIN = 1345
|
||||
FRM_UNKNOWN_TYPE = 1346
|
||||
WRONG_OBJECT = 1347
|
||||
NONUPDATEABLE_COLUMN = 1348
|
||||
VIEW_SELECT_DERIVED = 1349
|
||||
VIEW_SELECT_CLAUSE = 1350
|
||||
VIEW_SELECT_VARIABLE = 1351
|
||||
VIEW_SELECT_TMPTABLE = 1352
|
||||
VIEW_WRONG_LIST = 1353
|
||||
WARN_VIEW_MERGE = 1354
|
||||
WARN_VIEW_WITHOUT_KEY = 1355
|
||||
VIEW_INVALID = 1356
|
||||
SP_NO_DROP_SP = 1357
|
||||
SP_GOTO_IN_HNDLR = 1358
|
||||
TRG_ALREADY_EXISTS = 1359
|
||||
TRG_DOES_NOT_EXIST = 1360
|
||||
TRG_ON_VIEW_OR_TEMP_TABLE = 1361
|
||||
TRG_CANT_CHANGE_ROW = 1362
|
||||
TRG_NO_SUCH_ROW_IN_TRG = 1363
|
||||
NO_DEFAULT_FOR_FIELD = 1364
|
||||
DIVISION_BY_ZERO = 1365
|
||||
TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
|
||||
ILLEGAL_VALUE_FOR_TYPE = 1367
|
||||
VIEW_NONUPD_CHECK = 1368
|
||||
VIEW_CHECK_FAILED = 1369
|
||||
PROCACCESS_DENIED_ERROR = 1370
|
||||
RELAY_LOG_FAIL = 1371
|
||||
PASSWD_LENGTH = 1372
|
||||
UNKNOWN_TARGET_BINLOG = 1373
|
||||
IO_ERR_LOG_INDEX_READ = 1374
|
||||
BINLOG_PURGE_PROHIBITED = 1375
|
||||
FSEEK_FAIL = 1376
|
||||
BINLOG_PURGE_FATAL_ERR = 1377
|
||||
LOG_IN_USE = 1378
|
||||
LOG_PURGE_UNKNOWN_ERR = 1379
|
||||
RELAY_LOG_INIT = 1380
|
||||
NO_BINARY_LOGGING = 1381
|
||||
RESERVED_SYNTAX = 1382
|
||||
WSAS_FAILED = 1383
|
||||
DIFF_GROUPS_PROC = 1384
|
||||
NO_GROUP_FOR_PROC = 1385
|
||||
ORDER_WITH_PROC = 1386
|
||||
LOGGING_PROHIBIT_CHANGING_OF = 1387
|
||||
NO_FILE_MAPPING = 1388
|
||||
WRONG_MAGIC = 1389
|
||||
PS_MANY_PARAM = 1390
|
||||
KEY_PART_0 = 1391
|
||||
VIEW_CHECKSUM = 1392
|
||||
VIEW_MULTIUPDATE = 1393
|
||||
VIEW_NO_INSERT_FIELD_LIST = 1394
|
||||
VIEW_DELETE_MERGE_VIEW = 1395
|
||||
CANNOT_USER = 1396
|
||||
XAER_NOTA = 1397
|
||||
XAER_INVAL = 1398
|
||||
XAER_RMFAIL = 1399
|
||||
XAER_OUTSIDE = 1400
|
||||
XAER_RMERR = 1401
|
||||
XA_RBROLLBACK = 1402
|
||||
NONEXISTING_PROC_GRANT = 1403
|
||||
PROC_AUTO_GRANT_FAIL = 1404
|
||||
PROC_AUTO_REVOKE_FAIL = 1405
|
||||
DATA_TOO_LONG = 1406
|
||||
SP_BAD_SQLSTATE = 1407
|
||||
STARTUP = 1408
|
||||
LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
|
||||
CANT_CREATE_USER_WITH_GRANT = 1410
|
||||
WRONG_VALUE_FOR_TYPE = 1411
|
||||
TABLE_DEF_CHANGED = 1412
|
||||
SP_DUP_HANDLER = 1413
|
||||
SP_NOT_VAR_ARG = 1414
|
||||
SP_NO_RETSET = 1415
|
||||
CANT_CREATE_GEOMETRY_OBJECT = 1416
|
||||
FAILED_ROUTINE_BREAK_BINLOG = 1417
|
||||
BINLOG_UNSAFE_ROUTINE = 1418
|
||||
BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
|
||||
EXEC_STMT_WITH_OPEN_CURSOR = 1420
|
||||
STMT_HAS_NO_OPEN_CURSOR = 1421
|
||||
COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
|
||||
NO_DEFAULT_FOR_VIEW_FIELD = 1423
|
||||
SP_NO_RECURSION = 1424
|
||||
TOO_BIG_SCALE = 1425
|
||||
TOO_BIG_PRECISION = 1426
|
||||
M_BIGGER_THAN_D = 1427
|
||||
WRONG_LOCK_OF_SYSTEM_TABLE = 1428
|
||||
CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
|
||||
QUERY_ON_FOREIGN_DATA_SOURCE = 1430
|
||||
FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
|
||||
FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
|
||||
FOREIGN_DATA_STRING_INVALID = 1433
|
||||
CANT_CREATE_FEDERATED_TABLE = 1434
|
||||
TRG_IN_WRONG_SCHEMA = 1435
|
||||
STACK_OVERRUN_NEED_MORE = 1436
|
||||
TOO_LONG_BODY = 1437
|
||||
WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
|
||||
TOO_BIG_DISPLAYWIDTH = 1439
|
||||
XAER_DUPID = 1440
|
||||
DATETIME_FUNCTION_OVERFLOW = 1441
|
||||
CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
|
||||
VIEW_PREVENT_UPDATE = 1443
|
||||
PS_NO_RECURSION = 1444
|
||||
SP_CANT_SET_AUTOCOMMIT = 1445
|
||||
MALFORMED_DEFINER = 1446
|
||||
VIEW_FRM_NO_USER = 1447
|
||||
VIEW_OTHER_USER = 1448
|
||||
NO_SUCH_USER = 1449
|
||||
FORBID_SCHEMA_CHANGE = 1450
|
||||
ROW_IS_REFERENCED_2 = 1451
|
||||
NO_REFERENCED_ROW_2 = 1452
|
||||
SP_BAD_VAR_SHADOW = 1453
|
||||
TRG_NO_DEFINER = 1454
|
||||
OLD_FILE_FORMAT = 1455
|
||||
SP_RECURSION_LIMIT = 1456
|
||||
SP_PROC_TABLE_CORRUPT = 1457
|
||||
SP_WRONG_NAME = 1458
|
||||
TABLE_NEEDS_UPGRADE = 1459
|
||||
SP_NO_AGGREGATE = 1460
|
||||
MAX_PREPARED_STMT_COUNT_REACHED = 1461
|
||||
VIEW_RECURSIVE = 1462
|
||||
NON_GROUPING_FIELD_USED = 1463
|
||||
TABLE_CANT_HANDLE_SPKEYS = 1464
|
||||
NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
|
||||
USERNAME = 1466
|
||||
HOSTNAME = 1467
|
||||
WRONG_STRING_LENGTH = 1468
|
||||
ERROR_LAST = 1468
|
|
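Note: the ER_* values above are MySQL server-side error codes; err.py later in this diff maps several of them onto DB-API exception classes. A hedged usage sketch (the connection object, table name and import paths are assumptions, not part of this diff):

from pymysql import err
from pymysql.constants import ER

def insert_user(conn, name):
    try:
        with conn.cursor() as cur:
            cur.execute("INSERT INTO user (name) VALUES (%s)", (name,))
        conn.commit()
    except err.IntegrityError as e:
        # _map_error() in err.py routes ER.DUP_ENTRY (1062) to IntegrityError,
        # so a duplicate key can be recognised by its numeric code.
        if e.args[0] == ER.DUP_ENTRY:
            print("user %r already exists" % (name,))
        else:
            raise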
@@ -0,0 +1,32 @@
|
|||
|
||||
|
||||
DECIMAL = 0
|
||||
TINY = 1
|
||||
SHORT = 2
|
||||
LONG = 3
|
||||
FLOAT = 4
|
||||
DOUBLE = 5
|
||||
NULL = 6
|
||||
TIMESTAMP = 7
|
||||
LONGLONG = 8
|
||||
INT24 = 9
|
||||
DATE = 10
|
||||
TIME = 11
|
||||
DATETIME = 12
|
||||
YEAR = 13
|
||||
NEWDATE = 14
|
||||
VARCHAR = 15
|
||||
BIT = 16
|
||||
NEWDECIMAL = 246
|
||||
ENUM = 247
|
||||
SET = 248
|
||||
TINY_BLOB = 249
|
||||
MEDIUM_BLOB = 250
|
||||
LONG_BLOB = 251
|
||||
BLOB = 252
|
||||
VAR_STRING = 253
|
||||
STRING = 254
|
||||
GEOMETRY = 255
|
||||
|
||||
CHAR = TINY
|
||||
INTERVAL = ENUM
|
|
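Note: FIELD_TYPE codes identify column types on the wire and key the decoder table defined in converters.py further down in this diff. A small sketch of overriding one decoder (import paths are assumptions):

from pymysql.constants import FIELD_TYPE
from pymysql import converters

# Copy the default decoders and keep DATE columns as plain strings instead of
# datetime.date objects (a purely illustrative override).
my_decoders = converters.decoders.copy()
my_decoders[FIELD_TYPE.DATE] = lambda value: value

print(FIELD_TYPE.DATE)                              # 10
print(my_decoders[FIELD_TYPE.DATE]("2007-02-26"))   # 2007-02-26 (left as a string)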
@@ -0,0 +1,15 @@
|
|||
NOT_NULL = 1
|
||||
PRI_KEY = 2
|
||||
UNIQUE_KEY = 4
|
||||
MULTIPLE_KEY = 8
|
||||
BLOB = 16
|
||||
UNSIGNED = 32
|
||||
ZEROFILL = 64
|
||||
BINARY = 128
|
||||
ENUM = 256
|
||||
AUTO_INCREMENT = 512
|
||||
TIMESTAMP = 1024
|
||||
SET = 2048
|
||||
PART_KEY = 16384
|
||||
GROUP = 32767
|
||||
UNIQUE = 65536
|
|
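Note: FLAG values are bit masks combined into a column's flags word; convert_characters() in converters.py below tests them the same way. A minimal sketch (the import path is an assumption):

from pymysql.constants import FLAG

def describe_field(flags):
    # Return the names of the flag bits that are set (illustrative helper).
    names = []
    for name in ("NOT_NULL", "PRI_KEY", "UNIQUE_KEY", "BLOB", "UNSIGNED",
                 "BINARY", "AUTO_INCREMENT", "TIMESTAMP"):
        if flags & getattr(FLAG, name):
            names.append(name)
    return names

print(describe_field(FLAG.NOT_NULL | FLAG.PRI_KEY | FLAG.AUTO_INCREMENT))
# -> ['NOT_NULL', 'PRI_KEY', 'AUTO_INCREMENT']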
@@ -0,0 +1,11 @@
|
|||
|
||||
SERVER_STATUS_IN_TRANS = 1
|
||||
SERVER_STATUS_AUTOCOMMIT = 2
|
||||
SERVER_MORE_RESULTS_EXISTS = 8
|
||||
SERVER_QUERY_NO_GOOD_INDEX_USED = 16
|
||||
SERVER_QUERY_NO_INDEX_USED = 32
|
||||
SERVER_STATUS_CURSOR_EXISTS = 64
|
||||
SERVER_STATUS_LAST_ROW_SENT = 128
|
||||
SERVER_STATUS_DB_DROPPED = 256
|
||||
SERVER_STATUS_NO_BACKSLASH_ESCAPES = 512
|
||||
SERVER_STATUS_METADATA_CHANGED = 1024
|
|
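Note: SERVER_STATUS values are bit flags reported in the server's status word; SERVER_MORE_RESULTS_EXISTS is the bit that signals another result set is pending. A tiny sketch (the import path is an assumption):

from pymysql.constants import SERVER_STATUS

def has_more_results(status_word):
    return bool(status_word & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS)

print(has_more_results(SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT))        # False
print(has_more_results(SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT |
                       SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS))      # True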
@@ -0,0 +1,356 @@
|
|||
from ._compat import PY2, text_type, long_type, JYTHON, IRONPYTHON
|
||||
|
||||
import sys
|
||||
import binascii
|
||||
import datetime
|
||||
from decimal import Decimal
|
||||
import re
|
||||
import time
|
||||
|
||||
from .constants import FIELD_TYPE, FLAG
|
||||
from .charset import charset_by_id, charset_to_encoding
|
||||
|
||||
|
||||
ESCAPE_REGEX = re.compile(r"[\0\n\r\032\'\"\\]")
|
||||
ESCAPE_MAP = {'\0': '\\0', '\n': '\\n', '\r': '\\r', '\032': '\\Z',
|
||||
'\'': '\\\'', '"': '\\"', '\\': '\\\\'}
|
||||
|
||||
def escape_item(val, charset, mapping=None):
|
||||
if mapping is None:
|
||||
mapping = encoders
|
||||
encoder = mapping.get(type(val))
|
||||
|
||||
# Fallback to default when no encoder found
|
||||
if not encoder:
|
||||
try:
|
||||
encoder = mapping[text_type]
|
||||
except KeyError:
|
||||
raise TypeError("no default type converter defined")
|
||||
|
||||
if encoder in (escape_dict, escape_sequence):
|
||||
val = encoder(val, charset, mapping)
|
||||
else:
|
||||
val = encoder(val, mapping)
|
||||
return val
|
||||
|
||||
def escape_dict(val, charset, mapping=None):
|
||||
n = {}
|
||||
for k, v in val.items():
|
||||
quoted = escape_item(v, charset, mapping)
|
||||
n[k] = quoted
|
||||
return n
|
||||
|
||||
def escape_sequence(val, charset, mapping=None):
|
||||
n = []
|
||||
for item in val:
|
||||
quoted = escape_item(item, charset, mapping)
|
||||
n.append(quoted)
|
||||
return "(" + ",".join(n) + ")"
|
||||
|
||||
def escape_set(val, charset, mapping=None):
|
||||
val = map(lambda x: escape_item(x, charset, mapping), val)
|
||||
return ','.join(val)
|
||||
|
||||
def escape_bool(value, mapping=None):
|
||||
return str(int(value))
|
||||
|
||||
def escape_object(value, mapping=None):
|
||||
return str(value)
|
||||
|
||||
def escape_int(value, mapping=None):
|
||||
return str(value)
|
||||
|
||||
def escape_float(value, mapping=None):
|
||||
return ('%.15g' % value)
|
||||
|
||||
def escape_string(value, mapping=None):
|
||||
return ("%s" % (ESCAPE_REGEX.sub(
|
||||
lambda match: ESCAPE_MAP.get(match.group(0)), value),))
|
||||
|
||||
def escape_str(value, mapping=None):
|
||||
return "'%s'" % escape_string(value, mapping)
|
||||
|
||||
def escape_unicode(value, mapping=None):
|
||||
return escape_str(value, mapping)
|
||||
|
||||
def escape_bytes(value, mapping=None):
|
||||
# escape_bytes is called only on Python 3.
|
||||
return escape_str(value.decode('ascii', 'surrogateescape'), mapping)
|
||||
|
||||
def escape_None(value, mapping=None):
|
||||
return 'NULL'
|
||||
|
||||
def escape_timedelta(obj, mapping=None):
|
||||
seconds = int(obj.seconds) % 60
|
||||
minutes = int(obj.seconds // 60) % 60
|
||||
hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24
|
||||
if obj.microseconds:
|
||||
fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'"
|
||||
else:
|
||||
fmt = "'{0:02d}:{1:02d}:{2:02d}'"
|
||||
return fmt.format(hours, minutes, seconds, obj.microseconds)
|
||||
|
||||
def escape_time(obj, mapping=None):
|
||||
if obj.microsecond:
|
||||
fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
|
||||
else:
|
||||
fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}'"
|
||||
return fmt.format(obj)
|
||||
|
||||
def escape_datetime(obj, mapping=None):
|
||||
if obj.microsecond:
|
||||
fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
|
||||
else:
|
||||
fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'"
|
||||
return fmt.format(obj)
|
||||
|
||||
def escape_date(obj, mapping=None):
|
||||
fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'"
|
||||
return fmt.format(obj)
|
||||
|
||||
def escape_struct_time(obj, mapping=None):
|
||||
return escape_datetime(datetime.datetime(*obj[:6]))
|
||||
|
||||
def convert_datetime(obj):
|
||||
"""Returns a DATETIME or TIMESTAMP column value as a datetime object:
|
||||
|
||||
>>> datetime_or_None('2007-02-25 23:06:20')
|
||||
datetime.datetime(2007, 2, 25, 23, 6, 20)
|
||||
>>> datetime_or_None('2007-02-25T23:06:20')
|
||||
datetime.datetime(2007, 2, 25, 23, 6, 20)
|
||||
|
||||
Illegal values are returned as None:
|
||||
|
||||
>>> datetime_or_None('2007-02-31T23:06:20') is None
|
||||
True
|
||||
>>> datetime_or_None('0000-00-00 00:00:00') is None
|
||||
True
|
||||
|
||||
"""
|
||||
if ' ' in obj:
|
||||
sep = ' '
|
||||
elif 'T' in obj:
|
||||
sep = 'T'
|
||||
else:
|
||||
return convert_date(obj)
|
||||
|
||||
try:
|
||||
ymd, hms = obj.split(sep, 1)
|
||||
usecs = '0'
|
||||
if '.' in hms:
|
||||
hms, usecs = hms.split('.')
|
||||
usecs = float('0.' + usecs) * 1e6
|
||||
return datetime.datetime(*[ int(x) for x in ymd.split('-')+hms.split(':')+[usecs] ])
|
||||
except ValueError:
|
||||
return convert_date(obj)
|
||||
|
||||
|
||||
def convert_timedelta(obj):
|
||||
"""Returns a TIME column as a timedelta object:
|
||||
|
||||
>>> timedelta_or_None('25:06:17')
|
||||
datetime.timedelta(1, 3977)
|
||||
>>> timedelta_or_None('-25:06:17')
|
||||
datetime.timedelta(-2, 83177)
|
||||
|
||||
Illegal values are returned as None:
|
||||
|
||||
>>> timedelta_or_None('random crap') is None
|
||||
True
|
||||
|
||||
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
|
||||
can accept values as (+|-)DD HH:MM:SS. The latter format will not
|
||||
be parsed correctly by this function.
|
||||
"""
|
||||
try:
|
||||
microseconds = 0
|
||||
if "." in obj:
|
||||
(obj, tail) = obj.split('.')
|
||||
microseconds = float('0.' + tail) * 1e6
|
||||
hours, minutes, seconds = obj.split(':')
|
||||
negate = 1
|
||||
if hours.startswith("-"):
|
||||
hours = hours[1:]
|
||||
negate = -1
|
||||
tdelta = datetime.timedelta(
|
||||
hours = int(hours),
|
||||
minutes = int(minutes),
|
||||
seconds = int(seconds),
|
||||
microseconds = int(microseconds)
|
||||
) * negate
|
||||
return tdelta
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
def convert_time(obj):
|
||||
"""Returns a TIME column as a time object:
|
||||
|
||||
>>> time_or_None('15:06:17')
|
||||
datetime.time(15, 6, 17)
|
||||
|
||||
Illegal values are returned as None:
|
||||
|
||||
>>> time_or_None('-25:06:17') is None
|
||||
True
|
||||
>>> time_or_None('random crap') is None
|
||||
True
|
||||
|
||||
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
|
||||
can accept values as (+|-)DD HH:MM:SS. The latter format will not
|
||||
be parsed correctly by this function.
|
||||
|
||||
Also note that MySQL's TIME column corresponds more closely to
|
||||
Python's timedelta and not time. However if you want TIME columns
|
||||
to be treated as time-of-day and not a time offset, then you can
|
||||
set this function as the converter for FIELD_TYPE.TIME.
|
||||
"""
|
||||
try:
|
||||
microseconds = 0
|
||||
if "." in obj:
|
||||
(obj, tail) = obj.split('.')
|
||||
microseconds = float('0.' + tail) * 1e6
|
||||
hours, minutes, seconds = obj.split(':')
|
||||
return datetime.time(hour=int(hours), minute=int(minutes),
|
||||
second=int(seconds), microsecond=int(microseconds))
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
def convert_date(obj):
|
||||
"""Returns a DATE column as a date object:
|
||||
|
||||
>>> date_or_None('2007-02-26')
|
||||
datetime.date(2007, 2, 26)
|
||||
|
||||
Illegal values are returned as None:
|
||||
|
||||
>>> date_or_None('2007-02-31') is None
|
||||
True
|
||||
>>> date_or_None('0000-00-00') is None
|
||||
True
|
||||
|
||||
"""
|
||||
try:
|
||||
return datetime.date(*[ int(x) for x in obj.split('-', 2) ])
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def convert_mysql_timestamp(timestamp):
|
||||
"""Convert a MySQL TIMESTAMP to a Timestamp object.
|
||||
|
||||
MySQL >= 4.1 returns TIMESTAMP in the same format as DATETIME:
|
||||
|
||||
>>> mysql_timestamp_converter('2007-02-25 22:32:17')
|
||||
datetime.datetime(2007, 2, 25, 22, 32, 17)
|
||||
|
||||
MySQL < 4.1 uses a big string of numbers:
|
||||
|
||||
>>> mysql_timestamp_converter('20070225223217')
|
||||
datetime.datetime(2007, 2, 25, 22, 32, 17)
|
||||
|
||||
Illegal values are returned as None:
|
||||
|
||||
>>> mysql_timestamp_converter('2007-02-31 22:32:17') is None
|
||||
True
|
||||
>>> mysql_timestamp_converter('00000000000000') is None
|
||||
True
|
||||
|
||||
"""
|
||||
if timestamp[4] == '-':
|
||||
return convert_datetime(timestamp)
|
||||
timestamp += "0"*(14-len(timestamp)) # padding
|
||||
year, month, day, hour, minute, second = \
|
||||
int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]), \
|
||||
int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14])
|
||||
try:
|
||||
return datetime.datetime(year, month, day, hour, minute, second)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
def convert_set(s):
|
||||
return set(s.split(","))
|
||||
|
||||
|
||||
def through(x):
|
||||
return x
|
||||
|
||||
|
||||
#def convert_bit(b):
|
||||
# b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
|
||||
# return struct.unpack(">Q", b)[0]
|
||||
#
|
||||
# the snippet above is right, but MySQLdb doesn't process bits,
|
||||
# so we shouldn't either
|
||||
convert_bit = through
|
||||
|
||||
|
||||
def convert_characters(connection, field, data):
|
||||
field_charset = charset_by_id(field.charsetnr).name
|
||||
encoding = charset_to_encoding(field_charset)
|
||||
if field.flags & FLAG.SET:
|
||||
return convert_set(data.decode(encoding))
|
||||
if field.flags & FLAG.BINARY:
|
||||
return data
|
||||
|
||||
if connection.use_unicode:
|
||||
data = data.decode(encoding)
|
||||
elif connection.charset != field_charset:
|
||||
data = data.decode(encoding)
|
||||
data = data.encode(connection.encoding)
|
||||
return data
|
||||
|
||||
encoders = {
|
||||
bool: escape_bool,
|
||||
int: escape_int,
|
||||
long_type: escape_int,
|
||||
float: escape_float,
|
||||
str: escape_str,
|
||||
text_type: escape_unicode,
|
||||
tuple: escape_sequence,
|
||||
list: escape_sequence,
|
||||
set: escape_sequence,
|
||||
dict: escape_dict,
|
||||
type(None): escape_None,
|
||||
datetime.date: escape_date,
|
||||
datetime.datetime: escape_datetime,
|
||||
datetime.timedelta: escape_timedelta,
|
||||
datetime.time: escape_time,
|
||||
time.struct_time: escape_struct_time,
|
||||
Decimal: escape_object,
|
||||
}
|
||||
|
||||
if not PY2 or JYTHON or IRONPYTHON:
|
||||
encoders[bytes] = escape_bytes
|
||||
|
||||
decoders = {
|
||||
FIELD_TYPE.BIT: convert_bit,
|
||||
FIELD_TYPE.TINY: int,
|
||||
FIELD_TYPE.SHORT: int,
|
||||
FIELD_TYPE.LONG: int,
|
||||
FIELD_TYPE.FLOAT: float,
|
||||
FIELD_TYPE.DOUBLE: float,
|
||||
FIELD_TYPE.LONGLONG: int,
|
||||
FIELD_TYPE.INT24: int,
|
||||
FIELD_TYPE.YEAR: int,
|
||||
FIELD_TYPE.TIMESTAMP: convert_mysql_timestamp,
|
||||
FIELD_TYPE.DATETIME: convert_datetime,
|
||||
FIELD_TYPE.TIME: convert_timedelta,
|
||||
FIELD_TYPE.DATE: convert_date,
|
||||
FIELD_TYPE.SET: convert_set,
|
||||
FIELD_TYPE.BLOB: through,
|
||||
FIELD_TYPE.TINY_BLOB: through,
|
||||
FIELD_TYPE.MEDIUM_BLOB: through,
|
||||
FIELD_TYPE.LONG_BLOB: through,
|
||||
FIELD_TYPE.STRING: through,
|
||||
FIELD_TYPE.VAR_STRING: through,
|
||||
FIELD_TYPE.VARCHAR: through,
|
||||
FIELD_TYPE.DECIMAL: Decimal,
|
||||
FIELD_TYPE.NEWDECIMAL: Decimal,
|
||||
}
|
||||
|
||||
|
||||
# for MySQLdb compatibility
|
||||
conversions = decoders
|
||||
|
||||
def Thing2Literal(obj):
|
||||
return escape_str(str(obj))
|
|
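A minimal usage sketch of the two tables defined above: encoders turn Python values into SQL literal fragments via escape_item(), and decoders turn column text back into Python values keyed by field type (the import paths are assumptions about how the package is bundled):

import datetime
from pymysql import converters
from pymysql.constants import FIELD_TYPE

# Encoding: Python value -> SQL literal fragment.
print(converters.escape_item(None, "utf8"))                         # NULL
print(converters.escape_item(3.14, "utf8"))                         # 3.14
print(converters.escape_item("O'Brien", "utf8"))                    # 'O\'Brien'
print(converters.escape_item(datetime.date(2007, 2, 26), "utf8"))   # '2007-02-26'

# Decoding: raw column text -> Python value, selected by FIELD_TYPE code.
convert = converters.decoders[FIELD_TYPE.DATETIME]
print(convert("2007-02-25 23:06:20"))   # 2007-02-25 23:06:20 (datetime.datetime)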
@@ -0,0 +1,485 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import print_function, absolute_import
|
||||
from functools import partial
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from ._compat import range_type, text_type, PY2
|
||||
|
||||
from . import err
|
||||
|
||||
|
||||
#: Regular expression for :meth:`Cursor.executemany`.
|
||||
#: executemany only supports simple bulk insert.
|
||||
#: You can use it to load large datasets.
|
||||
RE_INSERT_VALUES = re.compile(r"""(INSERT\s.+\sVALUES\s+)(\(\s*%s\s*(?:,\s*%s\s*)*\))(\s*(?:ON DUPLICATE.*)?)\Z""",
|
||||
re.IGNORECASE | re.DOTALL)
|
||||
|
||||
|
||||
class Cursor(object):
|
||||
'''
|
||||
This is the object you use to interact with the database.
|
||||
'''
|
||||
|
||||
#: Max statement size which :meth:`executemany` generates.
|
||||
#:
|
||||
#: Max size of allowed statement is max_allowed_packet - packet_header_size.
|
||||
#: Default value of max_allowed_packet is 1048576.
|
||||
max_stmt_length = 1024000
|
||||
|
||||
def __init__(self, connection):
|
||||
'''
|
||||
Do not create an instance of a Cursor yourself. Call
|
||||
connections.Connection.cursor().
|
||||
'''
|
||||
self.connection = connection
|
||||
self.description = None
|
||||
self.rownumber = 0
|
||||
self.rowcount = -1
|
||||
self.arraysize = 1
|
||||
self._executed = None
|
||||
self._result = None
|
||||
self._rows = None
|
||||
|
||||
def close(self):
|
||||
'''
|
||||
Closing a cursor just exhausts all remaining data.
|
||||
'''
|
||||
conn = self.connection
|
||||
if conn is None:
|
||||
return
|
||||
try:
|
||||
while self.nextset():
|
||||
pass
|
||||
finally:
|
||||
self.connection = None
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
del exc_info
|
||||
self.close()
|
||||
|
||||
def _get_db(self):
|
||||
if not self.connection:
|
||||
raise err.ProgrammingError("Cursor closed")
|
||||
return self.connection
|
||||
|
||||
def _check_executed(self):
|
||||
if not self._executed:
|
||||
raise err.ProgrammingError("execute() first")
|
||||
|
||||
def _conv_row(self, row):
|
||||
return row
|
||||
|
||||
def setinputsizes(self, *args):
|
||||
"""Does nothing, required by DB API."""
|
||||
|
||||
def setoutputsizes(self, *args):
|
||||
"""Does nothing, required by DB API."""
|
||||
|
||||
def _nextset(self, unbuffered=False):
|
||||
"""Get the next query set"""
|
||||
conn = self._get_db()
|
||||
current_result = self._result
|
||||
if current_result is None or current_result is not conn._result:
|
||||
return None
|
||||
if not current_result.has_next:
|
||||
return None
|
||||
conn.next_result(unbuffered=unbuffered)
|
||||
self._do_get_result()
|
||||
return True
|
||||
|
||||
def nextset(self):
|
||||
return self._nextset(False)
|
||||
|
||||
def _ensure_bytes(self, x, encoding=None):
|
||||
if isinstance(x, text_type):
|
||||
x = x.encode(encoding)
|
||||
elif isinstance(x, (tuple, list)):
|
||||
x = type(x)(self._ensure_bytes(v, encoding=encoding) for v in x)
|
||||
return x
|
||||
|
||||
def _escape_args(self, args, conn):
|
||||
ensure_bytes = partial(self._ensure_bytes, encoding=conn.encoding)
|
||||
|
||||
if isinstance(args, (tuple, list)):
|
||||
if PY2:
|
||||
args = tuple(map(ensure_bytes, args))
|
||||
return tuple(conn.escape(arg) for arg in args)
|
||||
elif isinstance(args, dict):
|
||||
if PY2:
|
||||
args = dict((ensure_bytes(key), ensure_bytes(val)) for
|
||||
(key, val) in args.items())
|
||||
return dict((key, conn.escape(val)) for (key, val) in args.items())
|
||||
else:
|
||||
# If it's not a dictionary, let's try escaping it anyway.
|
||||
# Worst case it will throw a ValueError.
|
||||
if PY2:
|
||||
ensure_bytes(args)
|
||||
return conn.escape(args)
|
||||
|
||||
def mogrify(self, query, args=None):
|
||||
"""
|
||||
Returns the exact string that is sent to the database by calling the
|
||||
execute() method.
|
||||
|
||||
This method follows the DB API 2.0 extension implemented by Psycopg.
|
||||
"""
|
||||
conn = self._get_db()
|
||||
if PY2: # Use bytes on Python 2 always
|
||||
query = self._ensure_bytes(query, encoding=conn.encoding)
|
||||
|
||||
if args is not None:
|
||||
query = query % self._escape_args(args, conn)
|
||||
|
||||
return query
|
||||
|
||||
def execute(self, query, args=None):
|
||||
'''Execute a query'''
|
||||
while self.nextset():
|
||||
pass
|
||||
|
||||
query = self.mogrify(query, args)
|
||||
|
||||
result = self._query(query)
|
||||
self._executed = query
|
||||
return result
|
||||
|
||||
def executemany(self, query, args):
|
||||
"""Run several data against one query
|
||||
|
||||
PyMySQL can execute a bulk INSERT for queries like 'INSERT ... VALUES (%s)'.
|
||||
For other forms of query, this simply runs :meth:`execute` many times.
|
||||
"""
|
||||
if not args:
|
||||
return
|
||||
|
||||
m = RE_INSERT_VALUES.match(query)
|
||||
if m:
|
||||
q_prefix = m.group(1)
|
||||
q_values = m.group(2).rstrip()
|
||||
q_postfix = m.group(3) or ''
|
||||
assert q_values[0] == '(' and q_values[-1] == ')'
|
||||
return self._do_execute_many(q_prefix, q_values, q_postfix, args,
|
||||
self.max_stmt_length,
|
||||
self._get_db().encoding)
|
||||
|
||||
self.rowcount = sum(self.execute(query, arg) for arg in args)
|
||||
return self.rowcount
|
||||
|
||||
def _do_execute_many(self, prefix, values, postfix, args, max_stmt_length, encoding):
|
||||
conn = self._get_db()
|
||||
escape = self._escape_args
|
||||
if isinstance(prefix, text_type):
|
||||
prefix = prefix.encode(encoding)
|
||||
if PY2 and isinstance(values, text_type):
|
||||
values = values.encode(encoding)
|
||||
if isinstance(postfix, text_type):
|
||||
postfix = postfix.encode(encoding)
|
||||
sql = bytearray(prefix)
|
||||
args = iter(args)
|
||||
v = values % escape(next(args), conn)
|
||||
if isinstance(v, text_type):
|
||||
if PY2:
|
||||
v = v.encode(encoding)
|
||||
else:
|
||||
v = v.encode(encoding, 'surrogateescape')
|
||||
sql += v
|
||||
rows = 0
|
||||
for arg in args:
|
||||
v = values % escape(arg, conn)
|
||||
if isinstance(v, text_type):
|
||||
if PY2:
|
||||
v = v.encode(encoding)
|
||||
else:
|
||||
v = v.encode(encoding, 'surrogateescape')
|
||||
if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
|
||||
rows += self.execute(sql + postfix)
|
||||
sql = bytearray(prefix)
|
||||
else:
|
||||
sql += b','
|
||||
sql += v
|
||||
rows += self.execute(sql + postfix)
|
||||
self.rowcount = rows
|
||||
return rows
|
||||
|
||||
def callproc(self, procname, args=()):
|
||||
"""Execute stored procedure procname with args
|
||||
|
||||
procname -- string, name of procedure to execute on server
|
||||
|
||||
args -- Sequence of parameters to use with procedure
|
||||
|
||||
Returns the original args.
|
||||
|
||||
Compatibility warning: PEP-249 specifies that any modified
|
||||
parameters must be returned. This is currently impossible
|
||||
as they are only available by storing them in a server
|
||||
variable and then retrieved by a query. Since stored
|
||||
procedures return zero or more result sets, there is no
|
||||
reliable way to get at OUT or INOUT parameters via callproc.
|
||||
The server variables are named @_procname_n, where procname
|
||||
is the parameter above and n is the position of the parameter
|
||||
(from zero). Once all result sets generated by the procedure
|
||||
have been fetched, you can issue a SELECT @_procname_0, ...
|
||||
query using .execute() to get any OUT or INOUT values.
|
||||
|
||||
Compatibility warning: The act of calling a stored procedure
|
||||
itself creates an empty result set. This appears after any
|
||||
result sets generated by the procedure. This is non-standard
|
||||
behavior with respect to the DB-API. Be sure to use nextset()
|
||||
to advance through all result sets; otherwise you may get
|
||||
disconnected.
|
||||
"""
|
||||
conn = self._get_db()
|
||||
for index, arg in enumerate(args):
|
||||
q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg))
|
||||
self._query(q)
|
||||
self.nextset()
|
||||
|
||||
q = "CALL %s(%s)" % (procname,
|
||||
','.join(['@_%s_%d' % (procname, i)
|
||||
for i in range_type(len(args))]))
|
||||
self._query(q)
|
||||
self._executed = q
|
||||
return args
|
||||
|
||||
def fetchone(self):
|
||||
''' Fetch the next row '''
|
||||
self._check_executed()
|
||||
if self._rows is None or self.rownumber >= len(self._rows):
|
||||
return None
|
||||
result = self._rows[self.rownumber]
|
||||
self.rownumber += 1
|
||||
return result
|
||||
|
||||
def fetchmany(self, size=None):
|
||||
''' Fetch several rows '''
|
||||
self._check_executed()
|
||||
if self._rows is None:
|
||||
return ()
|
||||
end = self.rownumber + (size or self.arraysize)
|
||||
result = self._rows[self.rownumber:end]
|
||||
self.rownumber = min(end, len(self._rows))
|
||||
return result
|
||||
|
||||
def fetchall(self):
|
||||
''' Fetch all the rows '''
|
||||
self._check_executed()
|
||||
if self._rows is None:
|
||||
return ()
|
||||
if self.rownumber:
|
||||
result = self._rows[self.rownumber:]
|
||||
else:
|
||||
result = self._rows
|
||||
self.rownumber = len(self._rows)
|
||||
return result
|
||||
|
||||
def scroll(self, value, mode='relative'):
|
||||
self._check_executed()
|
||||
if mode == 'relative':
|
||||
r = self.rownumber + value
|
||||
elif mode == 'absolute':
|
||||
r = value
|
||||
else:
|
||||
raise err.ProgrammingError("unknown scroll mode %s" % mode)
|
||||
|
||||
if not (0 <= r < len(self._rows)):
|
||||
raise IndexError("out of range")
|
||||
self.rownumber = r
|
||||
|
||||
def _query(self, q):
|
||||
conn = self._get_db()
|
||||
self._last_executed = q
|
||||
conn.query(q)
|
||||
self._do_get_result()
|
||||
return self.rowcount
|
||||
|
||||
def _do_get_result(self):
|
||||
conn = self._get_db()
|
||||
|
||||
self.rownumber = 0
|
||||
self._result = result = conn._result
|
||||
|
||||
self.rowcount = result.affected_rows
|
||||
self.description = result.description
|
||||
self.lastrowid = result.insert_id
|
||||
self._rows = result.rows
|
||||
|
||||
if result.warning_count > 0:
|
||||
self._show_warnings(conn)
|
||||
|
||||
def _show_warnings(self, conn):
|
||||
if self._result and self._result.has_next:
|
||||
return
|
||||
ws = conn.show_warnings()
|
||||
if ws is None:
|
||||
return
|
||||
for w in ws:
|
||||
msg = w[-1]
|
||||
if PY2:
|
||||
if isinstance(msg, unicode):
|
||||
msg = msg.encode('utf-8', 'replace')
|
||||
warnings.warn(str(msg), err.Warning, 4)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.fetchone, None)
|
||||
|
||||
Warning = err.Warning
|
||||
Error = err.Error
|
||||
InterfaceError = err.InterfaceError
|
||||
DatabaseError = err.DatabaseError
|
||||
DataError = err.DataError
|
||||
OperationalError = err.OperationalError
|
||||
IntegrityError = err.IntegrityError
|
||||
InternalError = err.InternalError
|
||||
ProgrammingError = err.ProgrammingError
|
||||
NotSupportedError = err.NotSupportedError
|
||||
|
||||
|
||||
class DictCursorMixin(object):
|
||||
# You can override this to use OrderedDict or other dict-like types.
|
||||
dict_type = dict
|
||||
|
||||
def _do_get_result(self):
|
||||
super(DictCursorMixin, self)._do_get_result()
|
||||
fields = []
|
||||
if self.description:
|
||||
for f in self._result.fields:
|
||||
name = f.name
|
||||
if name in fields:
|
||||
name = f.table_name + '.' + name
|
||||
fields.append(name)
|
||||
self._fields = fields
|
||||
|
||||
if fields and self._rows:
|
||||
self._rows = [self._conv_row(r) for r in self._rows]
|
||||
|
||||
def _conv_row(self, row):
|
||||
if row is None:
|
||||
return None
|
||||
return self.dict_type(zip(self._fields, row))
|
||||
|
||||
|
||||
class DictCursor(DictCursorMixin, Cursor):
|
||||
"""A cursor which returns results as a dictionary"""
|
||||
|
||||
|
||||
class SSCursor(Cursor):
|
||||
"""
|
||||
Unbuffered Cursor, mainly useful for queries that return a lot of data,
|
||||
or for connections to remote servers over a slow network.
|
||||
|
||||
Instead of copying every row of data into a buffer, this will fetch
|
||||
rows as needed. The upside of this is that the client uses much less memory,
|
||||
and rows are returned much faster when traveling over a slow network,
|
||||
or if the result set is very big.
|
||||
|
||||
There are limitations, though. The MySQL protocol doesn't support
|
||||
returning the total number of rows, so the only way to tell how many rows
|
||||
there are is to iterate over every row returned. Also, it currently isn't
|
||||
possible to scroll backwards, as only the current row is held in memory.
|
||||
"""
|
||||
|
||||
def _conv_row(self, row):
|
||||
return row
|
||||
|
||||
def close(self):
|
||||
conn = self.connection
|
||||
if conn is None:
|
||||
return
|
||||
|
||||
if self._result is not None and self._result is conn._result:
|
||||
self._result._finish_unbuffered_query()
|
||||
|
||||
try:
|
||||
while self.nextset():
|
||||
pass
|
||||
finally:
|
||||
self.connection = None
|
||||
|
||||
def _query(self, q):
|
||||
conn = self._get_db()
|
||||
self._last_executed = q
|
||||
conn.query(q, unbuffered=True)
|
||||
self._do_get_result()
|
||||
return self.rowcount
|
||||
|
||||
def nextset(self):
|
||||
return self._nextset(unbuffered=True)
|
||||
|
||||
def read_next(self):
|
||||
""" Read next row """
|
||||
return self._conv_row(self._result._read_rowdata_packet_unbuffered())
|
||||
|
||||
def fetchone(self):
|
||||
""" Fetch next row """
|
||||
self._check_executed()
|
||||
row = self.read_next()
|
||||
if row is None:
|
||||
return None
|
||||
self.rownumber += 1
|
||||
return row
|
||||
|
||||
def fetchall(self):
|
||||
"""
|
||||
Fetch all rows, as per MySQLdb. Not recommended for large queries, as
|
||||
it is buffered. See fetchall_unbuffered(), if you want an unbuffered
|
||||
generator version of this method.
|
||||
"""
|
||||
return list(self.fetchall_unbuffered())
|
||||
|
||||
def fetchall_unbuffered(self):
|
||||
"""
|
||||
Fetch all rows, implemented as a generator, which is not standard;
|
||||
however, it doesn't make sense to return everything in a list, as that
|
||||
would use ridiculous memory for large result sets.
|
||||
"""
|
||||
return iter(self.fetchone, None)
|
||||
|
||||
def __iter__(self):
|
||||
return self.fetchall_unbuffered()
|
||||
|
||||
def fetchmany(self, size=None):
|
||||
""" Fetch many """
|
||||
self._check_executed()
|
||||
if size is None:
|
||||
size = self.arraysize
|
||||
|
||||
rows = []
|
||||
for i in range_type(size):
|
||||
row = self.read_next()
|
||||
if row is None:
|
||||
break
|
||||
rows.append(row)
|
||||
self.rownumber += 1
|
||||
return rows
|
||||
|
||||
def scroll(self, value, mode='relative'):
|
||||
self._check_executed()
|
||||
|
||||
if mode == 'relative':
|
||||
if value < 0:
|
||||
raise err.NotSupportedError(
|
||||
"Backwards scrolling not supported by this cursor")
|
||||
|
||||
for _ in range_type(value):
|
||||
self.read_next()
|
||||
self.rownumber += value
|
||||
elif mode == 'absolute':
|
||||
if value < self.rownumber:
|
||||
raise err.NotSupportedError(
|
||||
"Backwards scrolling not supported by this cursor")
|
||||
|
||||
end = value - self.rownumber
|
||||
for _ in range_type(end):
|
||||
self.read_next()
|
||||
self.rownumber = value
|
||||
else:
|
||||
raise err.ProgrammingError("unknown scroll mode %s" % mode)
|
||||
|
||||
|
||||
class SSDictCursor(DictCursorMixin, SSCursor):
|
||||
""" An unbuffered cursor, which returns results as a dictionary """
|
|
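A usage sketch for the cursor classes above. The connect() call, its parameters and the table/column names are assumptions (the connection module is not part of this hunk); only the cursor behaviour is taken from the code above:

import pymysql
from pymysql.cursors import DictCursor

conn = pymysql.connect(host="127.0.0.1", user="tp", password="secret", db="teleport")
try:
    with conn.cursor(DictCursor) as cur:
        # executemany() recognises the INSERT ... VALUES (%s, %s) form via
        # RE_INSERT_VALUES and batches rows into large multi-row INSERT statements.
        cur.executemany(
            "INSERT INTO host (name, ip) VALUES (%s, %s)",
            [("web-1", "10.0.0.1"), ("web-2", "10.0.0.2")],
        )
        conn.commit()

        cur.execute("SELECT name, ip FROM host WHERE ip LIKE %s", ("10.0.0.%",))
        for row in cur:                        # Cursor.__iter__ yields rows until exhausted
            print(row["name"], row["ip"])      # DictCursor rows are dictionaries
finally:
    conn.close()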
@@ -0,0 +1,120 @@
|
|||
import struct
|
||||
|
||||
from .constants import ER
|
||||
|
||||
|
||||
class MySQLError(Exception):
|
||||
"""Exception related to operation with MySQL."""
|
||||
|
||||
|
||||
class Warning(Warning, MySQLError):
|
||||
"""Exception raised for important warnings like data truncations
|
||||
while inserting, etc."""
|
||||
|
||||
|
||||
class Error(MySQLError):
|
||||
"""Exception that is the base class of all other error exceptions
|
||||
(not Warning)."""
|
||||
|
||||
|
||||
class InterfaceError(Error):
|
||||
"""Exception raised for errors that are related to the database
|
||||
interface rather than the database itself."""
|
||||
|
||||
|
||||
class DatabaseError(Error):
|
||||
"""Exception raised for errors that are related to the
|
||||
database."""
|
||||
|
||||
|
||||
class DataError(DatabaseError):
|
||||
"""Exception raised for errors that are due to problems with the
|
||||
processed data like division by zero, numeric value out of range,
|
||||
etc."""
|
||||
|
||||
|
||||
class OperationalError(DatabaseError):
|
||||
"""Exception raised for errors that are related to the database's
|
||||
operation and not necessarily under the control of the programmer,
|
||||
e.g. an unexpected disconnect occurs, the data source name is not
|
||||
found, a transaction could not be processed, a memory allocation
|
||||
error occurred during processing, etc."""
|
||||
|
||||
|
||||
class IntegrityError(DatabaseError):
|
||||
"""Exception raised when the relational integrity of the database
|
||||
is affected, e.g. a foreign key check fails, duplicate key,
|
||||
etc."""
|
||||
|
||||
|
||||
class InternalError(DatabaseError):
|
||||
"""Exception raised when the database encounters an internal
|
||||
error, e.g. the cursor is not valid anymore, the transaction is
|
||||
out of sync, etc."""
|
||||
|
||||
|
||||
class ProgrammingError(DatabaseError):
|
||||
"""Exception raised for programming errors, e.g. table not found
|
||||
or already exists, syntax error in the SQL statement, wrong number
|
||||
of parameters specified, etc."""
|
||||
|
||||
|
||||
class NotSupportedError(DatabaseError):
|
||||
"""Exception raised in case a method or database API was used
|
||||
which is not supported by the database, e.g. requesting a
|
||||
.rollback() on a connection that does not support transactions or
|
||||
has transactions turned off."""
|
||||
|
||||
|
||||
error_map = {}
|
||||
|
||||
def _map_error(exc, *errors):
|
||||
for error in errors:
|
||||
error_map[error] = exc
|
||||
|
||||
_map_error(ProgrammingError, ER.DB_CREATE_EXISTS, ER.SYNTAX_ERROR,
|
||||
ER.PARSE_ERROR, ER.NO_SUCH_TABLE, ER.WRONG_DB_NAME,
|
||||
ER.WRONG_TABLE_NAME, ER.FIELD_SPECIFIED_TWICE,
|
||||
ER.INVALID_GROUP_FUNC_USE, ER.UNSUPPORTED_EXTENSION,
|
||||
ER.TABLE_MUST_HAVE_COLUMNS, ER.CANT_DO_THIS_DURING_AN_TRANSACTION)
|
||||
_map_error(DataError, ER.WARN_DATA_TRUNCATED, ER.WARN_NULL_TO_NOTNULL,
|
||||
ER.WARN_DATA_OUT_OF_RANGE, ER.NO_DEFAULT, ER.PRIMARY_CANT_HAVE_NULL,
|
||||
ER.DATA_TOO_LONG, ER.DATETIME_FUNCTION_OVERFLOW)
|
||||
_map_error(IntegrityError, ER.DUP_ENTRY, ER.NO_REFERENCED_ROW,
|
||||
ER.NO_REFERENCED_ROW_2, ER.ROW_IS_REFERENCED, ER.ROW_IS_REFERENCED_2,
|
||||
ER.CANNOT_ADD_FOREIGN, ER.BAD_NULL_ERROR)
|
||||
_map_error(NotSupportedError, ER.WARNING_NOT_COMPLETE_ROLLBACK,
|
||||
ER.NOT_SUPPORTED_YET, ER.FEATURE_DISABLED, ER.UNKNOWN_STORAGE_ENGINE)
|
||||
_map_error(OperationalError, ER.DBACCESS_DENIED_ERROR, ER.ACCESS_DENIED_ERROR,
|
||||
ER.CON_COUNT_ERROR, ER.TABLEACCESS_DENIED_ERROR,
|
||||
ER.COLUMNACCESS_DENIED_ERROR)
|
||||
|
||||
del _map_error, ER
|
||||
|
||||
|
||||
def _get_error_info(data):
|
||||
errno = struct.unpack('<h', data[1:3])[0]
|
||||
is_41 = data[3:4] == b"#"
|
||||
if is_41:
|
||||
# version 4.1
|
||||
sqlstate = data[4:9].decode("utf8", 'replace')
|
||||
errorvalue = data[9:].decode("utf8", 'replace')
|
||||
return (errno, sqlstate, errorvalue)
|
||||
else:
|
||||
# version 4.0
|
||||
return (errno, None, data[3:].decode("utf8", 'replace'))
|
||||
|
||||
|
||||
def _check_mysql_exception(errinfo):
|
||||
errno, sqlstate, errorvalue = errinfo
|
||||
errorclass = error_map.get(errno, None)
|
||||
if errorclass:
|
||||
raise errorclass(errno, errorvalue)
|
||||
|
||||
# couldn't find the right error number
|
||||
raise InternalError(errno, errorvalue)
|
||||
|
||||
|
||||
def raise_mysql_exception(data):
|
||||
errinfo = _get_error_info(data)
|
||||
_check_mysql_exception(errinfo)
|
|
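A hedged sketch of how the exception hierarchy above is meant to be consumed by calling code: catch the most specific class first and let everything else propagate (the connection object and SQL are illustrative; the import path is an assumption):

from pymysql import err

def run(conn, sql):
    try:
        with conn.cursor() as cur:
            cur.execute(sql)
            return cur.fetchall()
    except err.ProgrammingError as e:
        # _map_error() above routes ER.SYNTAX_ERROR, ER.NO_SUCH_TABLE, ... here.
        print("bad statement:", e.args)
        return ()
    except err.OperationalError as e:
        # ER.ACCESS_DENIED_ERROR and similar land here; client-side CR_* failures
        # are typically surfaced as OperationalError as well.
        print("server/connection problem:", e.args)
        raise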
@@ -0,0 +1,20 @@
|
|||
from ._compat import PY2
|
||||
|
||||
if PY2:
|
||||
import ConfigParser as configparser
|
||||
else:
|
||||
import configparser
|
||||
|
||||
|
||||
class Parser(configparser.RawConfigParser):
|
||||
|
||||
def __remove_quotes(self, value):
|
||||
quotes = ["'", "\""]
|
||||
for quote in quotes:
|
||||
if len(value) >= 2 and value[0] == value[-1] == quote:
|
||||
return value[1:-1]
|
||||
return value
|
||||
|
||||
def get(self, section, option):
|
||||
value = configparser.RawConfigParser.get(self, section, option)
|
||||
return self.__remove_quotes(value)
|
|
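A short sketch of the Parser above: it behaves like RawConfigParser but strips one level of matching quotes from option values, the way MySQL option files ([client] sections in my.cnf / my.ini) are usually written. The import path and the sample contents are assumptions:

from pymysql.optionfile import Parser

SAMPLE = """
[client]
user = "teleport"
password = 's3cret'
host = 127.0.0.1
"""

cfg = Parser()
cfg.read_string(SAMPLE)               # Python 3; on Python 2 use readfp(StringIO(SAMPLE))
print(cfg.get("client", "user"))      # teleport    (double quotes stripped)
print(cfg.get("client", "password"))  # s3cret      (single quotes stripped)
print(cfg.get("client", "host"))      # 127.0.0.1   (unquoted values pass through)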
@@ -0,0 +1,16 @@
|
|||
from time import localtime
|
||||
from datetime import date, datetime, time, timedelta
|
||||
|
||||
Date = date
|
||||
Time = time
|
||||
TimeDelta = timedelta
|
||||
Timestamp = datetime
|
||||
|
||||
def DateFromTicks(ticks):
|
||||
return date(*localtime(ticks)[:3])
|
||||
|
||||
def TimeFromTicks(ticks):
|
||||
return time(*localtime(ticks)[3:6])
|
||||
|
||||
def TimestampFromTicks(ticks):
|
||||
return datetime(*localtime(ticks)[:6])
|
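A quick sketch of the DB-API constructors above: each *FromTicks helper converts a Unix timestamp (seconds since the epoch, interpreted in local time) into the corresponding date/time object. The import path is an assumption:

from time import time
from pymysql.times import Date, DateFromTicks, TimeFromTicks, TimestampFromTicks

now = time()
print(DateFromTicks(now))        # e.g. 2017-05-03            (datetime.date)
print(TimeFromTicks(now))        # e.g. 14:07:09              (datetime.time)
print(TimestampFromTicks(now))   # e.g. 2017-05-03 14:07:09   (datetime.datetime)
print(Date(2017, 5, 3))          # Date is simply datetime.date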