diff --git a/build/build.py b/build/build.py
index 0415bca..cd45905 100644
--- a/build/build.py
+++ b/build/build.py
@@ -61,6 +61,9 @@ def main():
         if x == 'c':
             clean_all()
             continue
+        elif x == 'a':
+            clean_everything()
+            continue
 
         try:
             x = int(x)
@@ -90,8 +93,27 @@ def main():
 
 
 def clean_all():
-    cc.e('sorry, clean not implemented yet.')
-    # utils.remove(os.path.join(env.root_path, 'out'))
+    # cc.e('sorry, clean not implemented yet.')
+    utils.remove(os.path.join(env.root_path, 'out'))
+
+
+def clean_everything():
+    utils.remove(os.path.join(env.root_path, 'out'))
+    utils.remove(os.path.join(env.root_path, 'external', 'jsoncpp'))
+    utils.remove(os.path.join(env.root_path, 'external', 'libuv'))
+    utils.remove(os.path.join(env.root_path, 'external', 'mbedtls'))
+    utils.remove(os.path.join(env.root_path, 'external', 'mongoose'))
+    utils.remove(os.path.join(env.root_path, 'external', 'openssl'))
+    utils.remove(os.path.join(env.root_path, 'external', 'python'))
+    utils.remove(os.path.join(env.root_path, 'external', 'libssh-win-static', 'lib'))
+    utils.remove(os.path.join(env.root_path, 'external', 'libssh-win-static', 'src'))
+    utils.remove(os.path.join(env.root_path, 'external', 'linux', 'tmp'))
+    utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libmbedcrypto.a'))
+    utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libmbedtls.a'))
+    utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libmbedx509.a'))
+    utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libsqlite3.a'))
+    utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libssh.a'))
+    utils.remove(os.path.join(env.root_path, 'external', 'linux', 'release', 'lib', 'libuv.a'))
 
 
 def do_opt(opt):
@@ -210,7 +232,7 @@ def get_input(msg, log_func=cc.w):
 def show_logo():
     cc.v('[]=======================================================[]')
     cc.o((cc.CR_VERBOSE, ' | '), (cc.CR_INFO, 'Teleport Projects Builder'), (cc.CR_VERBOSE, ' |'))
-    cc.v(' | auth: apexliu@eomsoft.net |')
+    cc.v(' | auth: apex.liu@qq.com |')
     cc.v('[]=======================================================[]')
 
 
@@ -224,7 +246,8 @@ def show_menu():
         cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, '%2d' % options[o]['id']), (cc.CR_NORMAL, '] ', options[o]['disp']))
 
     cc.v(' -------------------------------------------------------')
-    cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' C'), (cc.CR_NORMAL, '] clean build and dist env.'))
+    cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' C'), (cc.CR_NORMAL, '] clean build and dist.'))
+    cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' A'), (cc.CR_NORMAL, '] clean everything.'))
     cc.v(' -------------------------------------------------------')
     cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' Q'), (cc.CR_NORMAL, '] exit'))
 
diff --git a/build/builder/build-installer.py b/build/builder/build-installer.py
index e7f4fa8..eb682d2 100644
--- a/build/builder/build-installer.py
+++ b/build/builder/build-installer.py
@@ -73,6 +73,7 @@ class BuilderWin(BuilderBase):
         utils.copy_ex(out_path, bin_path, 'tp_web.exe')
         utils.copy_ex(out_path, bin_path, 'tp_core.exe')
         utils.copy_ex(out_path, bin_path, 'tpssh.dll')
+        utils.copy_ex(out_path, bin_path, 'tprdp.dll')
 
         utils.copy_ex(os.path.join(env.root_path, 'out', 'pysrt'), bin_path, (ctx.dist_path, 'pysrt'))
 
diff --git a/build/builder/core/env.py b/build/builder/core/env.py
index a312dbc..4b2923e 100644
--- a/build/builder/core/env.py
+++ b/build/builder/core/env.py
@@ -1,224 +1,244 @@
-# -*- coding: utf8 -*-
-import os
-import platform
-import sys
-import configparser
-
-from . import colorconsole as cc
-
-if platform.system().lower() == 'windows':
-    try:
-        import winreg
-    except ImportError:
-        cc.e('Can not load module `winreg`, so I can not locate toolchain for you.')
-
-
-class Env(object):
-    BITS_32 = 32
-    BITS_64 = 64
-
-    def __init__(self):
-        _this_path = os.path.abspath(os.path.dirname(__file__))
-
-        self.root_path = os.path.abspath(os.path.join(_this_path, '..', '..', '..'))
-        self.build_path = os.path.abspath(os.path.join(_this_path, '..', '..'))
-        self.builder_path = os.path.join(self.build_path, 'builder')
-        self.win32_tools_path = os.path.join(self.build_path, 'tools', 'win32')
-
-        self.is_py2 = sys.version_info[0] == 2
-        self.is_py3 = sys.version_info[0] == 3
-
-        self.py_ver = platform.python_version_tuple()
-        self.py_ver_str = '%s%s' % (self.py_ver[0], self.py_ver[1])
-        self.py_exec = sys.executable
-
-        self.bits = self.BITS_32
-        self.bits_str = 'x86'
-
-        _bits = platform.architecture()[0]
-        if _bits == '64bit':
-            self.bits = self.BITS_64
-            self.bits_str = 'x64'
-
-        self.is_win = False
-        self.is_win_x64 = False
-        self.is_linux = False
-        self.is_macos = False
-
-        _os = platform.system().lower()
-        self.plat = ''
-        if _os == 'windows':
-            self.is_win = True
-            self.plat = 'windows'
-            self.is_win_x64 = 'PROGRAMFILES(X86)' in os.environ
-        elif _os == 'linux':
-            self.is_linux = True
-            self.plat = 'linux'
-        elif _os == 'darwin':
-            self.is_macos = True
-            self.plat = 'macos'
-
-    def init(self, warn_miss_tool=False):
-        if not self._load_config(warn_miss_tool):
-            return False
-
-        return True
-
-    def _load_config(self, warn_miss_tool):
-        _cfg_file = os.path.join(self.root_path, 'config.ini')
-        if not os.path.exists(_cfg_file):
-            cc.e('can not load configuration.\n\nplease copy `config.ini.in` into `config.ini` and modify it to fit your condition and try again.')
-            return False
-
-        _cfg = configparser.ConfigParser()
-        _cfg.read(_cfg_file)
-        if 'external_ver' not in _cfg.sections() or 'toolchain' not in _cfg.sections():
-            cc.e('invalid configuration file: need `external_ver` and `toolchain` section.')
-            return False
-
-        _tmp = _cfg['external_ver']
-        try:
-            _v_openssl = _tmp['openssl'].split(',')
-            self.ver_openssl = _v_openssl[0].strip()
-            self.ver_openssl_number = _v_openssl[1].strip()
-
-            self.ver_libuv = _tmp['libuv']
-            self.ver_mbedtls = _tmp['mbedtls']
-            self.ver_sqlite = _tmp['sqlite']
-            self.ver_libssh = _tmp['libssh']
-            self.ver_jsoncpp = _tmp['jsoncpp']
-            self.ver_mongoose = _tmp['mongoose']
-        except KeyError:
-            cc.e('invalid configuration file: not all necessary external version are set.')
-            return False
-
-        _tmp = _cfg['toolchain']
-        if self.is_win:
-            if 'wget' in _tmp:
-                self.wget = _tmp['wget']
-            else:
-                self.wget = None
-
-            if self.wget is None or not os.path.exists(self.wget):
-                if warn_miss_tool:
-                    cc.w(' - can not find `wget.exe`, you can get it at https://eternallybored.org/misc/wget/')
-
-            if '7z' in _tmp:
-                self.zip7 = _tmp['7z']
-            else:
-                self.zip7 = None
-            if self.zip7 is None or not os.path.exists(self.zip7):
-                if warn_miss_tool:
-                    cc.w(' - can not find `7z.exe`, you can get it at http://www.7-zip.org')
-
-            if 'nasm' in _tmp:
-                self.nasm = _tmp['nasm']
-            else:
-                self.nasm = self._get_nasm()
-
-            if self.nasm is None or not os.path.exists(self.nasm):
-                if warn_miss_tool:
-                    cc.w(' - can not locate `nasm`, so I can build openssl.')
-
-            if 'perl' in _tmp:
-                self.perl = _tmp['perl']
-            else:
-                self.perl = self._get_perl()
-
-            if self.perl is None or not os.path.exists(self.perl):
- if warn_miss_tool: - cc.w(' - can not locate `perl`, so I can build openssl.') - - self.visual_studio_path = self._get_visual_studio_path() - if self.visual_studio_path is None or not os.path.exists(self.visual_studio_path): - if warn_miss_tool: - cc.w(' - can not locate Visual Studio installation, so I can build openssl.') - - if 'msbuild' in _tmp: - self.msbuild = _tmp['msbuild'] - else: - self.msbuild = self._get_msbuild() - - if self.msbuild is None or not os.path.exists(self.msbuild): - if warn_miss_tool: - cc.w(' - can not locate `MSBuild`, so I can build nothing.') - - if 'nsis' in _tmp: - self.nsis = _tmp['nsis'] - else: - self.nsis = self._get_nsis() - - if self.nsis is None or not os.path.exists(self.nsis): - if warn_miss_tool: - cc.w(' - can not locate `nsis`, so I can not make installer.') - - elif self.is_linux: - if 'cmake' in _tmp: - self.cmake = _tmp['cmake'] - else: - self.cmake = '/usr/bin/cmake' - - if not os.path.exists(self.cmake): - if warn_miss_tool: - cc.e(' - can not locate `cmake`, so I can not build binary from source.') - - return True - - def _get_msbuild(self): - # 14.0 = VS2015 - # 12.0 = VS2012 - # 4.0 = VS2008 - chk = ['14.0', '12.0', '4.0'] - - p = None - for c in chk: - p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft\MSBuild\ToolsVersions\{}'.format(c), r'MSBuildToolsPath') - if p is not None: - break - - return os.path.join(p[0], 'MSBuild.exe') if p is not None else None - - def _get_visual_studio_path(self): - chk = ['14.0', '12.0', '4.0'] - p = None - for c in chk: - p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft\VisualStudio\{}'.format(c), r'ShellFolder') - if p is not None: - break - - return p[0] if p is not None else None - - def _get_perl(self): - p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\perl', 'BinDir') - return p[0] if p is not None else None - - def _get_nasm(self): - p = self._winreg_read(winreg.HKEY_CURRENT_USER, r'SOFTWARE\nasm', '') - return os.path.join(p[0], 'nasm.exe') if p is not None else None - - def _get_nsis(self): - p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\NSIS\Unicode', '') - if p is None: - p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\NSIS', '') - return os.path.join(p[0], 'makensis.exe') if p is not None else None - - def _winreg_read(self, base, path, key): - try: - if self.is_win_x64: - hkey = winreg.CreateKeyEx(base, path, 0, winreg.KEY_READ | winreg.KEY_WOW64_32KEY) - else: - hkey = winreg.CreateKeyEx(base, path, 0, winreg.KEY_READ) - - value = winreg.QueryValueEx(hkey, key) - return value - - except OSError: - return None - - -env = Env() -del Env - -if __name__ == '__main__': - pass +# -*- coding: utf8 -*- + +import os +import platform +import sys +import configparser + +from . 
import colorconsole as cc + +if platform.system().lower() == 'windows': + try: + import winreg + except ImportError: + cc.e('Can not load module `winreg`, so I can not locate toolchain for you.') + + +class Env(object): + BITS_32 = 32 + BITS_64 = 64 + + def __init__(self): + _this_path = os.path.abspath(os.path.dirname(__file__)) + + self.root_path = os.path.abspath(os.path.join(_this_path, '..', '..', '..')) + self.build_path = os.path.abspath(os.path.join(_this_path, '..', '..')) + self.builder_path = os.path.join(self.build_path, 'builder') + self.win32_tools_path = os.path.join(self.build_path, 'tools', 'win32') + + self.is_py2 = sys.version_info[0] == 2 + self.is_py3 = sys.version_info[0] == 3 + + self.py_ver = platform.python_version_tuple() + self.py_ver_str = '%s%s' % (self.py_ver[0], self.py_ver[1]) + self.py_exec = sys.executable + + self.bits = self.BITS_32 + self.bits_str = 'x86' + + _bits = platform.architecture()[0] + if _bits == '64bit': + self.bits = self.BITS_64 + self.bits_str = 'x64' + + self.is_win = False + self.is_win_x64 = False + self.is_linux = False + self.is_macos = False + + _os = platform.system().lower() + self.plat = '' + if _os == 'windows': + self.is_win = True + self.plat = 'windows' + self.is_win_x64 = 'PROGRAMFILES(X86)' in os.environ + elif _os == 'linux': + self.is_linux = True + self.plat = 'linux' + elif _os == 'darwin': + self.is_macos = True + self.plat = 'macos' + + def init(self, warn_miss_tool=False): + if not self._load_config(warn_miss_tool): + return False + + if not self._load_version(): + return False + + return True + + def _load_config(self, warn_miss_tool): + _cfg_file = os.path.join(self.root_path, 'config.ini') + if not os.path.exists(_cfg_file): + cc.e('can not load configuration.\n\nplease copy `config.ini.in` into `config.ini` and modify it to fit your condition and try again.') + return False + + _cfg = configparser.ConfigParser() + _cfg.read(_cfg_file) + if 'toolchain' not in _cfg.sections(): + cc.e('invalid configuration file: need `toolchain` section.') + return False + + _tmp = _cfg['toolchain'] + if self.is_win: + if 'wget' in _tmp: + self.wget = _tmp['wget'] + else: + self.wget = None + + if self.wget is None or not os.path.exists(self.wget): + if warn_miss_tool: + cc.w(' - can not find `wget.exe`, you can get it at https://eternallybored.org/misc/wget/') + + if '7z' in _tmp: + self.zip7 = _tmp['7z'] + else: + self.zip7 = None + if self.zip7 is None or not os.path.exists(self.zip7): + if warn_miss_tool: + cc.w(' - can not find `7z.exe`, you can get it at http://www.7-zip.org') + + if 'nasm' in _tmp: + self.nasm = _tmp['nasm'] + else: + self.nasm = self._get_nasm() + + if self.nasm is None or not os.path.exists(self.nasm): + if warn_miss_tool: + cc.w(' - can not locate `nasm`, so I can build openssl.') + else: + _nasm_path = os.path.abspath(os.path.join(self.nasm, '..')) + os.environ['path'] = os.environ['path'] + ';' + _nasm_path + + if 'perl' in _tmp: + self.perl = _tmp['perl'] + else: + self.perl = self._get_perl() + + if self.perl is None or not os.path.exists(self.perl): + if warn_miss_tool: + cc.w(' - can not locate `perl`, so I can build openssl.') + + self.visual_studio_path = self._get_visual_studio_path() + if self.visual_studio_path is None or not os.path.exists(self.visual_studio_path): + if warn_miss_tool: + cc.w(' - can not locate Visual Studio installation, so I can build openssl.') + + if 'msbuild' in _tmp: + self.msbuild = _tmp['msbuild'] + else: + self.msbuild = self._get_msbuild() + + if self.msbuild is None 
or not os.path.exists(self.msbuild): + if warn_miss_tool: + cc.w(' - can not locate `MSBuild`, so I can build nothing.') + + if 'nsis' in _tmp: + self.nsis = _tmp['nsis'] + else: + self.nsis = self._get_nsis() + + if self.nsis is None or not os.path.exists(self.nsis): + if warn_miss_tool: + cc.w(' - can not locate `nsis`, so I can not make installer.') + + elif self.is_linux: + if 'cmake' in _tmp: + self.cmake = _tmp['cmake'] + else: + self.cmake = '/usr/bin/cmake' + + if not os.path.exists(self.cmake): + if warn_miss_tool: + cc.e(' - can not locate `cmake`, so I can not build binary from source.') + + return True + + def _load_version(self): + _ver_file = os.path.join(self.root_path, 'external', 'version.ini') + if not os.path.exists(_ver_file): + cc.e('can not load version configuration for external.') + return False + + _cfg = configparser.ConfigParser() + _cfg.read(_ver_file) + if 'external_ver' not in _cfg.sections(): + cc.e('invalid configuration file: need `external_ver` section.') + return False + + _tmp = _cfg['external_ver'] + try: + _v_openssl = _tmp['openssl'].split(',') + self.ver_openssl = _v_openssl[0].strip() + self.ver_openssl_number = _v_openssl[1].strip() + + self.ver_libuv = _tmp['libuv'] + self.ver_mbedtls = _tmp['mbedtls'] + self.ver_sqlite = _tmp['sqlite'] + self.ver_libssh = _tmp['libssh'] + self.ver_jsoncpp = _tmp['jsoncpp'] + self.ver_mongoose = _tmp['mongoose'] + except KeyError: + cc.e('invalid configuration file: not all necessary external version are set.') + return False + + return True + + def _get_msbuild(self): + # 14.0 = VS2015 + # 12.0 = VS2012 + # 4.0 = VS2008 + chk = ['14.0', '12.0', '4.0'] + + p = None + for c in chk: + p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft\MSBuild\ToolsVersions\{}'.format(c), r'MSBuildToolsPath') + if p is not None: + break + + return os.path.join(p[0], 'MSBuild.exe') if p is not None else None + + def _get_visual_studio_path(self): + chk = ['14.0', '12.0', '4.0'] + p = None + for c in chk: + p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft\VisualStudio\{}'.format(c), r'ShellFolder') + if p is not None: + break + + return p[0] if p is not None else None + + def _get_perl(self): + p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\perl', 'BinDir') + return p[0] if p is not None else None + + def _get_nasm(self): + p = self._winreg_read(winreg.HKEY_CURRENT_USER, r'SOFTWARE\nasm', '') + return os.path.join(p[0], 'nasm.exe') if p is not None else None + + def _get_nsis(self): + p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\NSIS\Unicode', '') + if p is None: + p = self._winreg_read(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\NSIS', '') + return os.path.join(p[0], 'makensis.exe') if p is not None else None + + def _winreg_read(self, base, path, key): + try: + if self.is_win_x64: + hkey = winreg.CreateKeyEx(base, path, 0, winreg.KEY_READ | winreg.KEY_WOW64_32KEY) + else: + hkey = winreg.CreateKeyEx(base, path, 0, winreg.KEY_READ) + + value = winreg.QueryValueEx(hkey, key) + return value + + except OSError: + return None + + +env = Env() +del Env + +if __name__ == '__main__': + pass diff --git a/build/builder/core/ver.py b/build/builder/core/ver.py index 153b46d..3fd5bd2 100644 --- a/build/builder/core/ver.py +++ b/build/builder/core/ver.py @@ -1,3 +1,3 @@ -# -*- coding: utf8 -*- -VER_TELEPORT_SERVER = "2.2.5.1" -VER_TELEPORT_ASSIST = "2.2.5.1" +# -*- coding: utf8 -*- +VER_TELEPORT_SERVER = "2.2.6.1" +VER_TELEPORT_ASSIST = "2.2.5.1" diff --git a/config.ini.in 
b/config.ini.in index f327296..e8de23d 100644 --- a/config.ini.in +++ b/config.ini.in @@ -30,13 +30,3 @@ wget = C:\Program Files (x86)\wget\wget.exe # if not set cmake path, default to '/usr/bin/cmake' cmake = /opt/cmake/bin/cmake - -[external_ver] -openssl = 1.0.2h,1000208f -libuv = 1.11.0 -mbedtls = 2.3.0 -sqlite = 3170000 -libssh = 0.7.4 -jsoncpp = 0.10.6 -mongoose = 6.6 - diff --git a/dist/server/script/main.py b/dist/server/script/main.py index 3eac6d5..56c5b1a 100644 --- a/dist/server/script/main.py +++ b/dist/server/script/main.py @@ -517,7 +517,7 @@ class InstallerLinux(InstallerBase): def _fix_path(self): self._config_path = '/etc/teleport' - self._data_path = os.path.join('/var/teleport') + self._data_path = os.path.join('/var/lib/teleport') self._log_path = os.path.join('/var/log/teleport') def _copy_files(self): diff --git a/external/version.ini b/external/version.ini new file mode 100644 index 0000000..f327296 --- /dev/null +++ b/external/version.ini @@ -0,0 +1,42 @@ +[toolchain] +#============================================ +# for windows +#============================================ + +# Need wget to download necessary dependency files. +wget = C:\Program Files (x86)\wget\wget.exe + +# Need 7z to unzip downloaded files. +7z = C:\Program Files (x86)\7zip\7z.exe + +# need perl to build openssl on Windows, if not set, default to get it from register. +# suggest install ActivePerl. +#perl = C:\Perl\bin\perl.exe + +# need nasm to build openssl on Windows, if not set, default to locate it from register. +#nasm = C:\Users\username\AppData\Local\NASM\nasm.exe + +# if not set nsis path, default to get it by register. +#nsis = C:\Program Files (x86)\NSIS\Unicode\makensis.exe + +# if not set msbuild path, default to get it by register. +#msbuild = C:\Program Files (x86)\MSBuild\14.0\bin\MSBuild.exe + + +# ============================================ +# for linux +# ============================================ + +# if not set cmake path, default to '/usr/bin/cmake' +cmake = /opt/cmake/bin/cmake + + +[external_ver] +openssl = 1.0.2h,1000208f +libuv = 1.11.0 +mbedtls = 2.3.0 +sqlite = 3170000 +libssh = 0.7.4 +jsoncpp = 0.10.6 +mongoose = 6.6 + diff --git a/server/tp_core/core/tp_core.rc b/server/tp_core/core/tp_core.rc index 9cd0fbc..736ce78 100644 Binary files a/server/tp_core/core/tp_core.rc and b/server/tp_core/core/tp_core.rc differ diff --git a/server/tp_core/core/ts_ver.h b/server/tp_core/core/ts_ver.h index afb273e..69e47d2 100644 --- a/server/tp_core/core/ts_ver.h +++ b/server/tp_core/core/ts_ver.h @@ -1,6 +1,6 @@ -#ifndef __TS_SERVER_VER_H__ -#define __TS_SERVER_VER_H__ - -#define TP_SERVER_VER L"2.2.5.1" - -#endif // __TS_SERVER_VER_H__ +#ifndef __TS_SERVER_VER_H__ +#define __TS_SERVER_VER_H__ + +#define TP_SERVER_VER L"2.2.6.1" + +#endif // __TS_SERVER_VER_H__ diff --git a/server/tp_core/protocol/ssh/ssh_session.cpp b/server/tp_core/protocol/ssh/ssh_session.cpp index 00a82a1..a6763b5 100644 --- a/server/tp_core/protocol/ssh/ssh_session.cpp +++ b/server/tp_core/protocol/ssh/ssh_session.cpp @@ -965,55 +965,55 @@ int SshSession::_on_server_channel_data(ssh_session session, ssh_channel channel } else { - if (len > 5 && len < 256) - { - const ex_u8* _begin = ex_memmem((const ex_u8*)data, len, (const ex_u8*)"\033]0;", 4); - if (NULL != _begin) - { - size_t len_before = _begin - (const ex_u8*)data; - const ex_u8* _end = ex_memmem(_begin + 4, len - len_before, (const ex_u8*)"\007", 1); - if (NULL != _end) - { - _end++; - - // 这个包中含有改变标题的数据,将标题换为我们想要的 - size_t len_end = len - (_end - 
(const ex_u8*)data); - MemBuffer mbuf; - - if (len_before > 0) - mbuf.append((ex_u8*)data, len_before); - - mbuf.append((ex_u8*)"\033]0;tpssh://", 13); - mbuf.append((ex_u8*)_this->m_server_ip.c_str(), _this->m_server_ip.length()); - mbuf.append((ex_u8*)"\007", 1); - - if (len_end > 0) - mbuf.append((ex_u8*)_end, len_end); - - if(mbuf.size() > 0) - { - ret = ssh_channel_write(info->channel, mbuf.data(), mbuf.size()); - if (ret <= 0) - EXLOGE("[ssh] send to client failed (1).\n"); - else - ret = len; - } - else - { - ret = ssh_channel_write(info->channel, data, len); - } - } - else - { - ret = ssh_channel_write(info->channel, data, len); - } - } - else - { - ret = ssh_channel_write(info->channel, data, len); - } - } - else +// if (len > 5 && len < 256) +// { +// const ex_u8* _begin = ex_memmem((const ex_u8*)data, len, (const ex_u8*)"\033]0;", 4); +// if (NULL != _begin) +// { +// size_t len_before = _begin - (const ex_u8*)data; +// const ex_u8* _end = ex_memmem(_begin + 4, len - len_before, (const ex_u8*)"\007", 1); +// if (NULL != _end) +// { +// _end++; +// +// // 这个包中含有改变标题的数据,将标题换为我们想要的 +// size_t len_end = len - (_end - (const ex_u8*)data); +// MemBuffer mbuf; +// +// if (len_before > 0) +// mbuf.append((ex_u8*)data, len_before); +// +// mbuf.append((ex_u8*)"\033]0;tpssh://", 13); +// mbuf.append((ex_u8*)_this->m_server_ip.c_str(), _this->m_server_ip.length()); +// mbuf.append((ex_u8*)"\007", 1); +// +// if (len_end > 0) +// mbuf.append((ex_u8*)_end, len_end); +// +// if(mbuf.size() > 0) +// { +// ret = ssh_channel_write(info->channel, mbuf.data(), mbuf.size()); +// if (ret <= 0) +// EXLOGE("[ssh] send to client failed (1).\n"); +// else +// ret = len; +// } +// else +// { +// ret = ssh_channel_write(info->channel, data, len); +// } +// } +// else +// { +// ret = ssh_channel_write(info->channel, data, len); +// } +// } +// else +// { +// ret = ssh_channel_write(info->channel, data, len); +// } +// } +// else { ret = ssh_channel_write(info->channel, data, len); } diff --git a/server/tp_web/src/tp_web.rc b/server/tp_web/src/tp_web.rc index d8d1646..9e650a9 100644 Binary files a/server/tp_web/src/tp_web.rc and b/server/tp_web/src/tp_web.rc differ diff --git a/server/tp_web/src/ts_ver.h b/server/tp_web/src/ts_ver.h index afb273e..69e47d2 100644 --- a/server/tp_web/src/ts_ver.h +++ b/server/tp_web/src/ts_ver.h @@ -1,6 +1,6 @@ -#ifndef __TS_SERVER_VER_H__ -#define __TS_SERVER_VER_H__ - -#define TP_SERVER_VER L"2.2.5.1" - -#endif // __TS_SERVER_VER_H__ +#ifndef __TS_SERVER_VER_H__ +#define __TS_SERVER_VER_H__ + +#define TP_SERVER_VER L"2.2.6.1" + +#endif // __TS_SERVER_VER_H__ diff --git a/server/www/teleport/app/eom_app/app/db.py b/server/www/teleport/app/eom_app/app/db.py index 0d2be09..46d1f83 100644 --- a/server/www/teleport/app/eom_app/app/db.py +++ b/server/www/teleport/app/eom_app/app/db.py @@ -57,8 +57,8 @@ class TPDatabase: return False # 鐪嬬湅鏁版嵁搴撲腑鏄惁瀛樺湪鎸囧畾鐨勬暟鎹〃锛堝鏋滀笉瀛樺湪锛屽彲鑳芥槸涓涓┖鏁版嵁搴撴枃浠讹級锛屽垯鍙兘鏄竴涓柊瀹夎鐨勭郴缁 - # ret = self.query('SELECT COUNT(*) FROM `sqlite_master` WHERE `type`="table" AND `name`="{}account";'.format(self._table_prefix)) - ret = self.is_table_exists('{}group'.format(self._table_prefix)) + # ret = self.query('SELECT COUNT(*) FROM `sqlite_master` WHERE `type`="table" AND `name`="{}account";'.format(self._table_prefix)) + ret = self.is_table_exists('{}group'.format(self._table_prefix)) if ret is None or not ret: log.w('database need create.\n') self.need_create = True @@ -76,9 +76,9 @@ class TPDatabase: self.need_upgrade = True return True - # DO TEST - # 
self.alter_table('ts_account', [['account_id', 'id'], ['account_type', 'type']]) - + # DO TEST + # self.alter_table('ts_account', [['account_id', 'id'], ['account_type', 'type']]) + return True def is_table_exists(self, table_name): @@ -128,6 +128,14 @@ class TPDatabase: if not os.path.exists(db_path): log.e('can not create folder `{}` to store database file.\n'.format(db_path)) return False + # 鍒涘缓涓涓┖鏁版嵁鏂囦欢锛岃繖鏍锋墠鑳借繘琛宑onnect銆 + if not os.path.exists(self.db_source['file']): + try: + with open(self.db_source['file'], 'w') as f: + pass + except: + log.e('can not create db file `{}`.\n'.format(self.db_source['file'])) + return False if create_and_init(self, step_begin, step_end): log.v('database created.\n') @@ -154,54 +162,55 @@ class TPDatabase: fields_names: 濡傛灉涓篘one锛屽垯涓嶄慨鏀瑰瓧娈靛悕锛屽惁鍒欏簲璇ユ槸涓涓猯ist锛屽叾涓瘡涓厓绱犳槸鍖呭惈涓や釜str鐨刲ist锛岃〃绀哄皢姝ist绗竴涓寚瀹氱殑瀛楁鏀瑰悕涓虹浜屼釜鎸囧畾鐨勫悕绉 @return: None or Boolean """ + # TODO: 姝ゅ嚱鏁板皻鏈畬鎴 if self.db_source['type'] == self.DB_TYPE_SQLITE: if not isinstance(table_names, list) and field_names is None: log.w('nothing to do.\n') return False - if isinstance(table_names, str): - old_table_name = table_names - new_table_name = table_names - elif isinstance(table_names, list) and len(table_names) == 2: - old_table_name = table_names[0] - new_table_name = table_names[1] - else: - log.w('invalid param.\n') - return False - - if isinstance(field_names, list): - for i in field_names: - if not isinstance(i, list) or 2 != len(i): - log.w('invalid param.\n') - return False - - if field_names is None: - # 浠呮暟鎹〃鏀瑰悕 - return self.exec('ALTER TABLE `{}` RENAME TO `{}`;'.format(old_table_name, new_table_name)) - else: - # sqlite涓嶆敮鎸佸瓧娈垫敼鍚嶏紝鎵浠ラ渶瑕侀氳繃涓存椂琛ㄤ腑杞竴涓 - - # 鍏堣幏鍙栨暟鎹〃鐨勫瓧娈靛悕鍒楄〃 - ret = self.query('SELECT * FROM `sqlite_master` WHERE `type`="table" AND `name`="{}";'.format(old_table_name)) - log.w('-----\n') - log.w(ret[0][4]) - log.w('\n') - - # 鍏堝皢鏁版嵁琛ㄦ敼鍚嶏紝鎴愪负涓涓复鏃惰〃 - # tmp_table_name = '{}_sqlite_tmp'.format(old_table_name) - # ret = self.exec('ALTER TABLE `{}` RENAME TO `{}`;'.format(old_table_name, tmp_table_name)) - # if ret is None or not ret: - # return ret - - pass - elif self.db_source['type'] == self.DB_TYPE_MYSQL: - log.e('mysql not supported yet.\n') - return False - else: - log.e('Unknown database type.\n') - return False - - + if isinstance(table_names, str): + old_table_name = table_names + new_table_name = table_names + elif isinstance(table_names, list) and len(table_names) == 2: + old_table_name = table_names[0] + new_table_name = table_names[1] + else: + log.w('invalid param.\n') + return False + + if isinstance(field_names, list): + for i in field_names: + if not isinstance(i, list) or 2 != len(i): + log.w('invalid param.\n') + return False + + if field_names is None: + # 浠呮暟鎹〃鏀瑰悕 + return self.exec('ALTER TABLE `{}` RENAME TO `{}`;'.format(old_table_name, new_table_name)) + else: + # sqlite涓嶆敮鎸佸瓧娈垫敼鍚嶏紝鎵浠ラ渶瑕侀氳繃涓存椂琛ㄤ腑杞竴涓 + + # 鍏堣幏鍙栨暟鎹〃鐨勫瓧娈靛悕鍒楄〃 + ret = self.query('SELECT * FROM `sqlite_master` WHERE `type`="table" AND `name`="{}";'.format(old_table_name)) + log.w('-----\n') + log.w(ret[0][4]) + log.w('\n') + + # 鍏堝皢鏁版嵁琛ㄦ敼鍚嶏紝鎴愪负涓涓复鏃惰〃 + # tmp_table_name = '{}_sqlite_tmp'.format(old_table_name) + # ret = self.exec('ALTER TABLE `{}` RENAME TO `{}`;'.format(old_table_name, tmp_table_name)) + # if ret is None or not ret: + # return ret + + pass + elif self.db_source['type'] == self.DB_TYPE_MYSQL: + log.e('mysql not supported yet.\n') + return False + else: + log.e('Unknown database type.\n') + return False + + class TPDatabasePool: def __init__(self): self._locker = 
threading.RLock() @@ -224,7 +233,8 @@ class TPDatabasePool: thread_id = threading.get_ident() if thread_id not in self._connections: _conn = self._do_connect() - self._connections[thread_id] = _conn + if _conn is not None: + self._connections[thread_id] = _conn else: _conn = self._connections[thread_id] @@ -246,6 +256,10 @@ class TPSqlitePool(TPDatabasePool): self._db_file = db_file def _do_connect(self): + if not os.path.exists(self._db_file): + log.e('[sqlite] can not connect, database file not exists.\n') + return None + try: return sqlite3.connect(self._db_file) except: @@ -259,6 +273,7 @@ class TPSqlitePool(TPDatabasePool): db_ret = cursor.fetchall() return db_ret except sqlite3.OperationalError: + # log.e('_do_query() error.\n') return None finally: cursor.close() @@ -270,6 +285,7 @@ class TPSqlitePool(TPDatabasePool): conn.commit() return True except sqlite3.OperationalError: + # log.e('_do_exec() error.\n') return False finally: cursor.close() diff --git a/server/www/teleport/app/eom_app/controller/__init__.py b/server/www/teleport/app/eom_app/controller/__init__.py index 08416ba..9683e1f 100644 --- a/server/www/teleport/app/eom_app/controller/__init__.py +++ b/server/www/teleport/app/eom_app/controller/__init__.py @@ -50,9 +50,6 @@ controllers = [ (r'/user', user.IndexHandler), (r'/user/list', user.GetListHandler), - # add another path to static-path - - # todo: 閲嶆斁鏁版嵁璺緞鏄姩鎬佷粠core鏈嶅姟鐨刯son-rpc鎺ュ彛鑾峰彇鐨勶紝鍥犳杩欓噷鐨勬暟鎹幏鍙栨柟寮忛渶瑕佹敼鍙 #(r"/log/replay/(.*)", tornado.web.StaticFileHandler, {"path": os.path.join(cfg.data_path, 'replay')}), (r"/log/replay/(.*)", record.ReplayStaticFileHandler, {"path": os.path.join(cfg.data_path, 'replay')}), diff --git a/server/www/teleport/app/eom_app/controller/host.py b/server/www/teleport/app/eom_app/controller/host.py index 812f11b..cb33257 100644 --- a/server/www/teleport/app/eom_app/controller/host.py +++ b/server/www/teleport/app/eom_app/controller/host.py @@ -453,16 +453,8 @@ class ExportHostHandler(TPBaseAdminAuthHandler): class GetCertList(TPBaseUserAuthJsonHandler): def post(self): - # args = self.get_argument('args', None) - # if args is not None: - # args = json.loads(args) - # # print('args', args) - # else: - # # ret = {'code':-1} - # self.write_json(-1) - # return _certs = host.get_cert_list() - if _certs is None: + if _certs is None or len(_certs) == 0: self.write_json(-1) return else: @@ -900,8 +892,12 @@ class SysUserAdd(TPBaseUserAuthJsonHandler): args['user_pswd'] = return_data['data'] - if host.sys_user_add(args) < 0: - return self.write_json(-1) + user_id = host.sys_user_add(args) + if user_id < 0: + if user_id == -100: + return self.write_json(user_id, '鍚屽悕璐︽埛宸茬粡瀛樺湪锛') + else: + return self.write_json(user_id, '鏁版嵁搴撴搷浣滃け璐ワ紒') return self.write_json(0) diff --git a/server/www/teleport/app/eom_app/controller/maintenance.py b/server/www/teleport/app/eom_app/controller/maintenance.py index 7c9d1d7..dce00dd 100644 --- a/server/www/teleport/app/eom_app/controller/maintenance.py +++ b/server/www/teleport/app/eom_app/controller/maintenance.py @@ -78,7 +78,6 @@ class RpcThreadManage: 'steps': self._threads[task_id]['steps'] } if not self._threads[task_id]['running']: - print('remove task-id', task_id) del self._threads[task_id] return ret else: @@ -153,15 +152,12 @@ thread_mgr = RpcThreadManage() class RpcHandler(TPBaseAdminAuthJsonHandler): def post(self): args = self.get_argument('args', None) - # print('args', args) if args is not None: args = json.loads(args) else: self.write_json(-1) return - # print(args) - cmd = args['cmd'] if cmd == 
'create_db': if not get_db().need_create: @@ -176,7 +172,6 @@ class RpcHandler(TPBaseAdminAuthJsonHandler): return self.write_json(0, data={"task_id": task_id}) elif cmd == 'get_task_ret': - # return self.write_json(-1) r = thread_mgr.get_task(args['tid']) if r is None: return self.write_json(0, data={'running': False, 'steps': []}) diff --git a/server/www/teleport/app/eom_app/module/host.py b/server/www/teleport/app/eom_app/module/host.py index 8546662..2046971 100644 --- a/server/www/teleport/app/eom_app/module/host.py +++ b/server/www/teleport/app/eom_app/module/host.py @@ -51,6 +51,8 @@ def get_all_host_info_list(_filter, order, limit, with_pwd=False): '{};'.format(db.table_prefix, db.table_prefix, _where) db_ret = db.query(sql) + if db_ret is None: + return 0, list() total_count = db_ret[0][0] # 淇鍒嗛〉鏁版嵁 @@ -88,7 +90,7 @@ def get_all_host_info_list(_filter, order, limit, with_pwd=False): db_ret = db.query(sql) if db_ret is None: - return 0, None + return 0, list() ret = list() for item in db_ret: @@ -310,9 +312,11 @@ def get_cert_list(): sql = 'SELECT {} FROM `{}key` AS a;'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix) db_ret = db.query(sql) - if db_ret is None: - return None ret = list() + + if db_ret is None: + return ret + for item in db_ret: x = DbItem() diff --git a/server/www/teleport/app/eom_app/module/user.py b/server/www/teleport/app/eom_app/module/user.py index 5c615c6..7508e85 100644 --- a/server/www/teleport/app/eom_app/module/user.py +++ b/server/www/teleport/app/eom_app/module/user.py @@ -19,7 +19,7 @@ def verify_user(name, password): # 鍥犳鍙互鐗瑰埆鍦板鐞嗙敤鎴烽獙璇侊細鐢ㄦ埛鍚峚dmin锛屽瘑鐮乤dmin鍙互鐧诲綍涓虹鐞嗗憳 if cfg.app_mode == APP_MODE_MAINTENANCE: if name == 'admin' and password == 'admin': - return 1, 100, 'admin' + return 1, 100, 'admin', 0 return 0, 0, '', 0 if len(db_ret) != 1: diff --git a/server/www/teleport/app/eom_ver.py b/server/www/teleport/app/eom_ver.py index 625508a..ef4d6f5 100644 --- a/server/www/teleport/app/eom_ver.py +++ b/server/www/teleport/app/eom_ver.py @@ -1,4 +1,4 @@ -# -*- coding: utf8 -*- -TS_VER = "2.2.5.1" -TP_ASSIST_LAST_VER = "2.2.5.1" -TP_ASSIST_REQUIRE = "2.0.0.1" +# -*- coding: utf8 -*- +TS_VER = "2.2.6.1" +TP_ASSIST_LAST_VER = "2.2.5.1" +TP_ASSIST_REQUIRE = "2.0.0.1" diff --git a/server/www/teleport/static/js/ui/admin_host.js b/server/www/teleport/static/js/ui/admin_host.js index 9d44f66..f6f2792 100644 --- a/server/www/teleport/static/js/ui/admin_host.js +++ b/server/www/teleport/static/js/ui/admin_host.js @@ -11,8 +11,8 @@ var g_assist = null; var g_host_table = null; -var g_cert_list = {}; -var g_group_list = {}; +var g_cert_list = []; +var g_group_list = []; var g_dlg_edit_host = null; var g_dlg_edit_host_user = null; var g_dlg_sys_user = null; @@ -669,14 +669,11 @@ ywl.create_host_edit_dlg = function (tbl) { }; dlg_edit_host.on_sys_type_change = function () { dlg_edit_host.sys_type = parseInt($('#auth-sys-type').val()); - console.log('sys-type', dlg_edit_host.sys_type, 'protocol:', dlg_edit_host.protocol); if (dlg_edit_host.sys_type === OS_TYPE_WINDOWS) {// && dlg_edit_host.protocol === 0) { dlg_edit_host.protocol = PROTOCOL_TYPE_RDP; - console.log('--1', dlg_edit_host.protocol); } else if (dlg_edit_host.sys_type === OS_TYPE_LINUX) {// && dlg_edit_host.protocol === 0) { dlg_edit_host.protocol = PROTOCOL_TYPE_SSH; - console.log('--2', dlg_edit_host.protocol); } $('#host-protocol-type').val(dlg_edit_host.protocol); @@ -686,7 +683,6 @@ ywl.create_host_edit_dlg = function (tbl) { dlg_edit_host.on_protocol_change = function () 
{ dlg_edit_host.protocol = parseInt($('#host-protocol-type').val()); - console.log('xx', dlg_edit_host.protocol); if (dlg_edit_host.protocol === PROTOCOL_TYPE_RDP) $('#dlg-edit-host-protocol-port').val('3389'); else if (dlg_edit_host.protocol === PROTOCOL_TYPE_SSH) @@ -1266,7 +1262,7 @@ ywl.create_sys_user = function (tbl) { dlg_sys_user.check_args = function () { dlg_sys_user.auth_mode = parseInt($('#auth-user-type').val()); - dlg_sys_user.user_name = parseInt($('#auth-user-host-username').val()); + dlg_sys_user.user_name = $('#auth-user-host-username').val(); if (dlg_sys_user.auth_mode !== AUTH_NONE && dlg_sys_user.user_name.length === 0) { ywl.notify_error('璇疯緭鍏ョ郴缁熺敤鎴峰悕锛'); diff --git a/server/www/teleport/static/js/ui/log.js b/server/www/teleport/static/js/ui/log.js index 208bb14..1d34d2a 100644 --- a/server/www/teleport/static/js/ui/log.js +++ b/server/www/teleport/static/js/ui/log.js @@ -1,6 +1,4 @@ -/** - * Created by mi on 2016/7/4. - */ +"use strict"; ywl.on_init = function (cb_stack, cb_args) { var dom_id = '#ywl_log_list'; @@ -36,7 +34,6 @@ ywl.on_init = function (cb_stack, cb_args) { fields: {id: 'id'} }, {title: "ID", key: "id"}, -// {title: "Session", key: "session_id"}, {title: "鎿嶄綔鑰", key: "account_name"}, {title: "绯荤粺鐢ㄦ埛", key: "user_name"}, {title: "鍗忚", key: "protocol", render: 'protocol', fields: {protocol: 'protocol'}}, @@ -51,7 +48,7 @@ ywl.on_init = function (cb_stack, cb_args) { width: 160, header_align: 'left', cell_align: 'left', render: 'make_action_btn', - fields: {ID: 'id', sys_type: 'sys_type', cost_time: 'cost_time', protocol: 'protocol'} + fields: {ID: 'id', ret_code:'ret_code', sys_type: 'sys_type', cost_time: 'cost_time', protocol: 'protocol'} } ], paging: {selector: dom_id + " [ywl-paging='log-list']", per_page: paging_normal}, @@ -164,16 +161,14 @@ ywl.on_host_table_created = function (tbl) { //ywl.update_add_to_batch_btn(); }); - } else if (col_key == 'action') { + } else if (col_key === 'action') { var row_data = tbl.get_row(row_id); - //console.log('row_data', row_data); - var protocol = parseInt(row_data.protocol); - if (protocol == 1) { + if (protocol === PROTOCOL_TYPE_RDP) { $(cell_obj).find('[ywl-btn-record]').click(function () { - var ip = window.location.hostname;//ywl.page_options.ts_server.ip; - var port = parseInt(window.location.port);//ywl.page_options.ts_server.port; + var ip = window.location.hostname; + var port = parseInt(window.location.port); var url = 'http://' + ip + ':' + port + '/log/replay/rdp/' + row_data.id; var tail = 'log/replay/rdp/' + prefixInteger(row_data.id, 6); var args = {}; @@ -187,7 +182,7 @@ ywl.on_host_table_created = function (tbl) { ywl.notify_success('RDP 褰曞儚鎾斁鍣ㄦ垚鍔熷惎鍔紒'); }, function (code, msg) { - if (code == TPE_NO_ASSIST) + if (code === TPE_NO_ASSIST) g_assist.alert_assist_not_found(); else { ywl.notify_error(msg); @@ -196,7 +191,7 @@ ywl.on_host_table_created = function (tbl) { }); }); } - else if (protocol == 2) { + else if (protocol === PROTOCOL_TYPE_SSH) { $(cell_obj).find('[ywl-btn-record]').click(function () { window.open('/log/record/' + parseInt(row_data.protocol) + '/' + row_data.id); }); @@ -214,8 +209,8 @@ ywl.on_host_table_created = function (tbl) { var msg = ''; switch (fields.ret_code) { case 0: - // return '姝e湪浣跨敤涓' - return '-'; + return '浣跨敤涓' +// return '-'; case 9999: return '鎴愬姛'; case 1: @@ -261,7 +256,7 @@ ywl.on_host_table_created = function (tbl) { render.cost_time = function (row_id, fields) { if (fields.ret_code == 0) { - return '姝e湪浣跨敤涓'; + return '浣跨敤涓'; } else { return '' + 
second2str(fields.cost_time) + ''; } @@ -302,19 +297,17 @@ ywl.on_host_table_created = function (tbl) { render.make_action_btn = function (row_id, fields) { var ret = []; - if (fields.protocol == 1) { + if (fields.protocol === PROTOCOL_TYPE_RDP) { ret.push('褰曞儚鏌ョ湅 '); - } else if (fields.protocol == 2) { - if (fields.cost_time > 0) { + } else if (fields.protocol === PROTOCOL_TYPE_SSH) { + if (fields.ret_code === 9999 && fields.cost_time > 0) { ret.push('褰曞儚鏌ョ湅 '); ret.push('鏃ュ織鏌ョ湅 '); } - } return ret.join(''); } - }; }; @@ -378,7 +371,6 @@ ywl.create_table_filter_user_list = function (tbl, selector, on_created) { _tblf_st._on_select = function () { var user_name = $(this).html(); - var cb_stack = CALLBACK_STACK.create(); cb_stack .add(_tblf_st._table_ctrl.load_data) @@ -392,4 +384,3 @@ ywl.create_table_filter_user_list = function (tbl, selector, on_created) { return _tblf_st; }; - diff --git a/server/www/teleport/view/maintenance/install.mako b/server/www/teleport/view/maintenance/install.mako index 88fcacf..7b2c8ad 100644 --- a/server/www/teleport/view/maintenance/install.mako +++ b/server/www/teleport/view/maintenance/install.mako @@ -68,6 +68,11 @@
+
@@ -157,6 +162,7 @@
 
 
                     if (!ret.data.running) {
+                        $('#step2').show('fast');
                         return;
                     }
 
diff --git a/server/www/teleport/view/maintenance/upgrade.mako b/server/www/teleport/view/maintenance/upgrade.mako
index 2fd2b35..061a068 100644
--- a/server/www/teleport/view/maintenance/upgrade.mako
+++ b/server/www/teleport/view/maintenance/upgrade.mako
@@ -62,6 +62,11 @@
+
@@ -145,6 +150,7 @@
 
 
                     if (!ret.data.running) {
+                        $('#step2').show('fast');
                         return;
                     }
 
diff --git a/version.in b/version.in
index f0acd8c..a089cba 100644
--- a/version.in
+++ b/version.in
@@ -14,6 +14,6 @@ Build ：构建号。构建号用于表明此版本发布之前进行了多少
 
 
-TELEPORT_SERVER 2.2.5.1
+TELEPORT_SERVER 2.2.6.1
 TELEPORT_ASSIST 2.2.5.1
 TELEPORT_ASSIST_REQUIRE 2.0.0.1