diff --git a/.gitignore b/.gitignore
index 995382c..fc743ad 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,9 @@
# for CMake
CMakeFiles
+cmake_install.cmake
+Makefile
+cmake-build
# for Python
__pycache__
diff --git a/build.sh b/build.sh
new file mode 100755
index 0000000..de72f3d
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+PATH_ROOT=$(cd "$(dirname "$0")"; pwd)
+PYEXEC=${PATH_ROOT}/external/linux/release/bin/python3.4
+PYSTATIC=${PATH_ROOT}/external/linux/release/lib/libpython3.4m.a
+
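+# print a red error banner and abort the build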
+function on_error()
+{
+ echo -e "\033[01m\033[31m"
+ echo "==================[ !! ERROR !! ]=================="
+ echo -e $1
+ echo "==================================================="
+ echo -e "\033[0m"
+ exit 1
+}
+
+if [ ! -f "${PYSTATIC}" ]; then
+ echo "python static not found, now build it..."
+ "${PATH_ROOT}/build/build-py-static.sh"
+
+ if [ ! -f "${PYSTATIC}" ]; then
+ on_error "can not build python static."
+ fi
+fi
+
+
+"${PYEXEC}" -B "${PATH_ROOT}/build/build.py" "$@"
diff --git a/build/.gitignore b/build/.gitignore
new file mode 100644
index 0000000..31efa1c
--- /dev/null
+++ b/build/.gitignore
@@ -0,0 +1 @@
+/config.py
diff --git a/build/.idea/.name b/build/.idea/.name
new file mode 100644
index 0000000..c795b05
--- /dev/null
+++ b/build/.idea/.name
@@ -0,0 +1 @@
+build
\ No newline at end of file
diff --git a/build/.idea/build.iml b/build/.idea/build.iml
new file mode 100644
index 0000000..4462c8b
--- /dev/null
+++ b/build/.idea/build.iml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/build/.idea/vcs.xml b/build/.idea/vcs.xml
new file mode 100644
index 0000000..6c0b863
--- /dev/null
+++ b/build/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/build/linux/build-py-static.sh b/build/build-py-static.sh
similarity index 99%
rename from build/linux/build-py-static.sh
rename to build/build-py-static.sh
index c23ee67..2dcfb0b 100755
--- a/build/linux/build-py-static.sh
+++ b/build/build-py-static.sh
@@ -16,7 +16,7 @@ VER_PYTHON_LIB="${VER_PYTHON_SHORT}m"
FILE_PYTHON_STATIC_LIB="libpython${VER_PYTHON_LIB}.a"
-PATH_ROOT=$(cd "$(dirname "$0")"/../..; pwd)
+PATH_ROOT=$(cd "$(dirname "$0")"/..; pwd)
PATH_EXT=${PATH_ROOT}/external
PATH_DOWNLOAD=${PATH_EXT}/_download_
PATH_TMP=${PATH_EXT}/linux/tmp
diff --git a/build/build.py b/build/build.py
new file mode 100644
index 0000000..241ccc6
--- /dev/null
+++ b/build/build.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import getopt
+import json
+import os
+import platform
+import sys
+
+THIS_PATH = os.path.abspath(os.path.dirname(__file__))
+BUILDER_PATH = os.path.join(THIS_PATH, 'builder')
+
+sys.path.append(BUILDER_PATH)
+
+try:
+ import core.colorconsole as cc
+except ImportError:
+ print('can not import color console module.')
+ sys.exit(1)
+
+import core.utils as utils
+
+try:
+ from core.context import *
+except ImportError:
+ cc.e('can not import core context module.')
+ sys.exit(1)
+
+ctx = BuildContext()
+
+if ctx.is_py2:
+ _input = raw_input
+else:
+ _input = input
+
+if ctx.host_os == 'windows':
+ try:
+ import win32api, win32con
+ except:
+ cc.e('can not import module `win32api`.')
+ sys.exit(1)
+
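+# menu state: each entry is a dict {'id', 'name', 'disp', 'bits'} appended by add_option();
+# None entries are rendered as separator lines by show_menu().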
+options = list()
+options_idx = 0
+
+
+def main():
+ cc.set_default(sep='', end='\n')
+
+ action = None
+ argv = sys.argv[1:]
+ if len(argv) >= 1:
+ for i in range(len(argv)):
+ if 'debug' == argv[i]:
+ ctx.set_target(TARGET_DEBUG)
+ elif 'release' == argv[i]:
+ ctx.set_target(TARGET_RELEASE)
+ # elif 'x86' == argv[i]:
+ # ctx.set_bits(BITS_32)
+ # elif 'x64' == argv[i]:
+ # ctx.set_bits(BITS_64)
+ elif argv[i] in ctx.dist_all:
+ ctx.set_dist(argv[i])
+ else:
+ action = argv[i]
+
+ make_options()
+
+ if action is not None:
+ cc.v(action)
+ opt = select_option_by_name(action)
+ if opt is None:
+            cc.e('unknown action: ', action)
+ return
+
+ do_opt(opt)
+ return
+
+ show_logo()
+ while True:
+ x = show_menu()
+ if x == 'q':
+ break
+
+ if x == 'c':
+ clean_all()
+ continue
+
+ try:
+ x = int(x)
+ except:
+ cc.e('invalid input.')
+ continue
+
+        opt = select_option_by_id(int(x))
+        if opt is None:
+            cc.e('unknown selection: ', x)
+            continue
+
+        if 'config' == opt['name']:
+            if make_config():
+                make_options()
+            continue
+
+ do_opt(opt)
+
+ cc.w('\ntask finished, press Enter to continue or Q to quit...', end='')
+ try:
+ x = _input()
+ except EOFError:
+ x = 'q'
+ if x == 'q':
+ break
+
+
+def clean_all():
+ cc.v('remove compiler out path...')
+ utils.remove(os.path.join(ROOT_PATH, 'out'))
+ utils.remove(os.path.join(ROOT_PATH, 'waf_build'))
+ utils.remove(os.path.join(ROOT_PATH, '.lock-waf_linux_build'))
+
+
+def do_opt(opt):
+ cc.v(opt)
+ # PY_EXEC = cfg[opt['bits']]['PY_EXEC']
+
+ arg = ''
+ # if 'pysbase' == opt['name']:
+ # script = 'build-pysbase.py'
+
+ if 'ver' == opt['name']:
+ script = 'build-version.py'
+
+ elif 'pysrt' == opt['name']:
+ script = 'build-pysrt.py'
+
+ elif 'external' == opt['name']:
+ script = 'build-external.py'
+
+ # elif 'agent-runtime' == opt['name']:
+ # script = 'build-agent.py'
+ # arg = '%s %s runtime' % (ctx.target_path, opt['bits'])
+ elif 'server' == opt['name']:
+ script = 'build-server.py'
+ arg = '%s %s server' % (ctx.target_path, opt['bits'])
+
+ elif 'installer' == opt['name']:
+ script = 'build-installer.py'
+ # arg = 'installer'
+ arg = '%s %s installer' % (ctx.dist, opt['bits'])
+
+ elif 'installer-ubuntu' == opt['name']:
+ script = 'build-installer.py'
+ arg = '%s %s installer' % ('ubuntu', opt['bits'])
+
+ elif 'assist-exe' == opt['name']:
+ script = 'build-assist.py'
+ arg = '%s %s exe' % (ctx.target_path, opt['bits'])
+ elif 'assist-rdp' == opt['name']:
+ script = 'build-assist.py'
+ arg = '%s rdp' % (opt['bits'])
+ elif 'assist-installer' == opt['name']:
+ script = 'build-assist.py'
+ arg = '%s %s installer' % (ctx.dist, opt['bits'])
+
+ # elif 'server' == opt['name']:
+ # script = 'build-server.py'
+ # # arg = 'installer'
+ # # arg = '%s %s' % (ctx.dist, ctx.bits_path)
+ # arg = '%s' % (opt['bits'])
+
+ else:
+ cc.e('unknown option: ', opt['name'])
+ return
+
+ # cmd = '%s "%s" %s' % (PY_EXEC, arg, ex_arg)
+ cmd = '"%s" -B "%s/%s" %s' % (utils.cfg.py_exec, BUILDER_PATH, script, arg)
+ cc.i(cmd)
+ cc.v('')
+ os.system(cmd)
+
+
+def select_option_by_name(name):
+ global options
+
+ for o in range(len(options)):
+ if options[o] is None:
+ continue
+
+ if name == options[o]['name']:
+ return options[o]
+
+ return None
+
+
+def select_option_by_id(id):
+ global options
+
+ for o in range(len(options)):
+ if options[o] is None:
+ continue
+ if options[o]['id'] == id:
+ return options[o]
+ return None
+
+
+def add_option(bits, name, disp):
+ global options, options_idx
+ options_idx += 1
+ # if bits is not None:
+ # disp = '[%s] %s' % (bits, disp)
+ options.append({'id': options_idx, 'name': name, 'disp': disp, 'bits': bits})
+
+
+def add_split():
+ global options
+ options.append(None)
+
+
+def make_options():
+ global options, options_idx, cfg
+
+ # options = [{'name': 'config', 'disp': 'Configure'}]
+
+ options = list()
+ options_idx = 0
+ # add_option(None, 'config', 'Configure')
+
+ if ctx.host_os == 'windows':
+ add_option('x86', 'ver', 'Update version setting')
+ add_option('x86', 'pysrt', 'Make Python-Runtime for python%s-x86' % (utils.cfg.py_ver_str))
+ add_split()
+ add_option('x86', 'assist-exe', 'Assist Execute [%s]' % ctx.target_path)
+ # add_option('x86', 'assist-rdp', 'Teleport RDP [%s]' % ctx.target_path)
+ add_option('x86', 'assist-installer', 'Assist Installer')
+ add_split()
+ add_option('x86', 'server', 'Teleport Server [%s]' % ctx.target_path)
+ add_split()
+ add_option('x86', 'installer', 'Teleport Installer for %s' % ctx.host_os)
+ else:
+ add_option('x64', 'ver', 'Update version setting')
+ add_option('x64', 'pysrt', 'Make Python-Runtime for python%s-x64' % (utils.cfg.py_ver_str))
+ add_option('x64', 'external', 'Build external for Teleport-Server')
+ add_split()
+ add_option('x64', 'server', 'Teleport Server [%s]' % ctx.target_path)
+ add_split()
+ add_option('x64', 'installer', 'Teleport Installer for %s' % ctx.host_os)
+
+
+def get_input(msg, log_func=cc.w):
+ log_func(msg, end=' ')
+ try:
+ return _input()
+ except EOFError:
+ return ''
+
+
+def show_logo():
+ cc.v('[]=======================================================[]')
+ cc.o((cc.CR_VERBOSE, ' | '), (cc.CR_INFO, 'Teleport Projects Builder'), (cc.CR_VERBOSE, ' |'))
+ cc.v(' | auth: apexliu@eomsoft.net |')
+ cc.v('[]=======================================================[]')
+
+
+def show_menu():
+ # cc.v(cfg)
+ cc.v('')
+ cc.v('=========================================================')
+ for o in range(len(options)):
+ if options[o] is None:
+ cc.v(' -------------------------------------------------------')
+ continue
+ cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, '%2d' % options[o]['id']), (cc.CR_NORMAL, '] ', options[o]['disp']))
+
+ cc.v(' -------------------------------------------------------')
+ cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' C'), (cc.CR_NORMAL, '] clean build and dist env.'))
+
+ cc.v(' -------------------------------------------------------')
+ cc.o((cc.CR_NORMAL, ' ['), (cc.CR_INFO, ' Q'), (cc.CR_NORMAL, '] exit'))
+
+ cc.w('\nselect action: ', end='')
+ try:
+ x = _input()
+ except EOFError:
+ x = 'q'
+
+ cc.n('')
+ return x.lower()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+ except RuntimeError as e:
+ cc.e(e.__str__())
+ except:
+ cc.f('got exception.')
diff --git a/build/builder/build-assist.py b/build/builder/build-assist.py
new file mode 100644
index 0000000..8ed0968
--- /dev/null
+++ b/build/builder/build-assist.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from core import colorconsole as cc
+from core import utils
+from core.context import *
+from core.ver import *
+
+ctx = BuildContext()
+
+ROOT_PATH = utils.cfg.ROOT_PATH
+
+
+class BuilderBase:
+ def __init__(self):
+ self.out_dir = ''
+
+ def build_exe(self):
+ pass
+
+ def build_rdp(self):
+ pass
+
+ def build_installer(self):
+ pass
+
+
+class BuilderWin(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ def build_exe(self):
+ cc.n('build tp_assist...')
+ sln_file = os.path.join(ROOT_PATH, 'tp_assist', 'tp_assist.vs2015.sln')
+ out_file = os.path.join(ROOT_PATH, 'out', 'tp_assist', ctx.bits_path, ctx.target_path, 'tp_assist.exe')
+ if os.path.exists(out_file):
+ utils.remove(out_file)
+ utils.msvc_build(sln_file, 'tp_assist', ctx.target_path, ctx.bits_path, False)
+ utils.ensure_file_exists(out_file)
+
+ # def build_rdp(self):
+ # cc.n('build tp_rdp...')
+ # sln_file = os.path.join(ROOT_PATH, 'tp_rdp', 'tp_rdp.2015.sln')
+ # out_file = os.path.join(ROOT_PATH, 'out', 'tp_rdp', ctx.bits_path, ctx.target_path, 'tp_rdp.exe')
+ # if os.path.exists(out_file):
+ # utils.remove(out_file)
+ # utils.msvc_build(sln_file, 'tp_rdp', ctx.target_path, ctx.bits_path, False)
+ # utils.ensure_file_exists(out_file)
+
+ def build_installer(self):
+ cc.n('build assist package for website...')
+
+ name = 'teleport-assist-windows-{}-{}'.format(ctx.bits_path, VER_TELEPORT_ASSIST)
+ utils.remove(os.path.join(ROOT_PATH, 'dist', '{}.zip'.format(name)))
+ self._build_installer(name)
+
+ last_ver = 'teleport-assist-last-win.zip'
+ if os.path.exists(os.path.join(ROOT_PATH, 'dist', last_ver)):
+ utils.remove(os.path.join(ROOT_PATH, 'dist', last_ver))
+
+ utils.copy_file(os.path.join(ROOT_PATH, 'dist'), os.path.join(ROOT_PATH, 'dist'), ('{}.zip'.format(name), last_ver))
+
+ # cc.n('build assist package for backend...')
+ # name = 'teleport-assist-last-win'
+ # utils.remove(os.path.join(ROOT_PATH, 'dist', '{}.zip'.format(name)))
+ # self._build_installer(name)
+
+ # utils.copy_file(os.path.join(ROOT_PATH, 'dist'), os.path.join(ROOT_PATH, 'web', 'site', 'teleport', 'static', 'download'), 'teleport-assist-win.zip')
+
+ @staticmethod
+ def _build_installer(name):
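+        # stage everything under out/tp_assist/_tmp_/<name>: the built tp_assist.exe, the
+        # client .ini templates, the site/ folder and the bundled tools (tprdp, putty,
+        # winscp, securecrt script), then zip the staging dir into dist/<name>.zip.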
+ base_path = os.path.join(ROOT_PATH, 'out', 'tp_assist')
+ base_tmp = os.path.join(base_path, '_tmp_')
+ tmp_path = os.path.join(base_tmp, name)
+
+ if os.path.exists(base_tmp):
+ utils.remove(base_tmp)
+
+ utils.makedirs(tmp_path)
+
+ utils.copy_file(os.path.join(ROOT_PATH, 'out', 'tp_assist', ctx.bits_path, ctx.target_path), tmp_path, 'tp_assist.exe')
+ utils.copy_file(os.path.join(ROOT_PATH, 'tp_assist'), tmp_path, ('ssh_client.orig.ini', 'ssh_client.ini'))
+ utils.copy_file(os.path.join(ROOT_PATH, 'tp_assist'), tmp_path, ('scp_client.orig.ini', 'scp_client.ini'))
+ utils.copy_file(os.path.join(ROOT_PATH, 'tp_assist'), tmp_path, ('telnet_client.orig.ini', 'telnet_client.ini'))
+
+ utils.copy_ex(os.path.join(ROOT_PATH, 'tp_assist'), tmp_path, 'site')
+
+ utils.makedirs(os.path.join(tmp_path, 'tools', 'tprdp'))
+ utils.makedirs(os.path.join(tmp_path, 'tools', 'putty'))
+ utils.makedirs(os.path.join(tmp_path, 'tools', 'winscp'))
+ # utils.copy_file(os.path.join(ROOT_PATH, 'out', 'tp_rdp', ctx.bits_path, ctx.target_path), os.path.join(tmp_path, 'tools', 'tprdp'), 'tp_rdp.exe')
+ utils.copy_file(os.path.join(ROOT_PATH, 'tools', 'tprdp'), os.path.join(tmp_path, 'tools', 'tprdp'), 'tprdp-client.exe')
+ utils.copy_file(os.path.join(ROOT_PATH, 'tools', 'tprdp'), os.path.join(tmp_path, 'tools', 'tprdp'), 'tprdp-replay.exe')
+ utils.copy_file(os.path.join(ROOT_PATH, 'tools', 'putty'), os.path.join(tmp_path, 'tools', 'putty'), 'putty.exe')
+ utils.copy_file(os.path.join(ROOT_PATH, 'tools', 'winscp'), os.path.join(tmp_path, 'tools', 'winscp'), 'WinSCP.exe')
+ utils.copy_file(os.path.join(ROOT_PATH, 'tools', 'winscp'), os.path.join(tmp_path, 'tools', 'winscp'), 'license.txt')
+ utils.copy_file(os.path.join(ROOT_PATH, 'tools'), os.path.join(tmp_path, 'tools'), 'securecrt-telnet.vbs')
+
+ # utils.makedirs(os.path.join(tmp_path, 'data'))
+ # utils.copy_file(os.path.join(ROOT_PATH, 'tp_assist'), os.path.join(tmp_path, 'data'), 'ssl.cert')
+
+ out_file = os.path.join(ROOT_PATH, 'dist', '{}.zip'.format(name))
+ utils.make_zip(base_tmp, out_file)
+
+
+class BuilderLinux(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ def build_exe(self):
+        cc.e('not supported on Linux.')
+
+ # def build_rdp(self):
+ # cc.e('not support linux.')
+
+ def build_installer(self):
+        cc.e('not supported on Linux.')
+
+
+def gen_builder(dist):
+ if dist == 'windows':
+ builder = BuilderWin()
+ elif dist == 'linux':
+ builder = BuilderLinux()
+ else:
+ raise RuntimeError('unsupported platform.')
+
+ ctx.set_dist(dist)
+ return builder
+
+
+def main():
+ builder = None
+
+ argv = sys.argv[1:]
+
+ for i in range(len(argv)):
+ if 'debug' == argv[i]:
+ ctx.set_target(TARGET_DEBUG)
+ elif 'x86' == argv[i]:
+ ctx.set_bits(BITS_32)
+ elif 'x64' == argv[i]:
+ ctx.set_bits(BITS_64)
+ elif argv[i] in ctx.dist_all:
+ builder = gen_builder(argv[i])
+
+ if builder is None:
+ builder = gen_builder(ctx.host_os)
+
+ if 'exe' in argv:
+ builder.build_exe()
+ # elif 'rdp' in argv:
+ # builder.build_rdp()
+ elif 'installer' in argv:
+ builder.build_installer()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+ except RuntimeError as e:
+ cc.e(e.__str__())
+ except:
+ cc.f('got exception.')
diff --git a/build/builder/build-external.py b/build/builder/build-external.py
new file mode 100644
index 0000000..0ae7a4d
--- /dev/null
+++ b/build/builder/build-external.py
@@ -0,0 +1,382 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import codecs
+import shutil
+import time
+from core import colorconsole as cc
+from core import utils
+from core.context import *
+
+ctx = BuildContext()
+
+ROOT_PATH = utils.cfg.ROOT_PATH
+PATH_EXTERNAL = os.path.join(ROOT_PATH, 'external')
+PATH_DOWNLOAD = os.path.join(PATH_EXTERNAL, '_download_')
+
+OPENSSL_VER = utils.cfg.OPENSSL_VER
+LIBUV_VER = utils.cfg.LIBUV_VER
+MBEDTLS_VER = utils.cfg.MBEDTLS_VER
+SQLITE_VER = utils.cfg.SQLITE_VER
+
+
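+# each builder downloads a source tarball into external/_download_ (skipped if the file is
+# already there), then unpacks and builds it; the Linux builder installs the results into
+# external/linux/release.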
+class BuilderBase:
+ def __init__(self):
+ self.out_dir = ''
+ if not os.path.exists(PATH_DOWNLOAD):
+ utils.makedirs(PATH_DOWNLOAD)
+
+ self._init_path()
+
+ def _init_path(self):
+ cc.e("this is a pure-virtual function.")
+
+ def build_openssl(self):
+ file_name = 'openssl-{}.tar.gz'.format(OPENSSL_VER)
+ if not self._download_file('openssl source tarball', 'https://www.openssl.org/source/{}'.format(file_name), file_name):
+ return
+ self._build_openssl(file_name)
+
+ def _build_openssl(self, file_name):
+ cc.e("this is a pure-virtual function.")
+
+ def build_libuv(self):
+ file_name = 'libuv-{}.zip'.format(LIBUV_VER)
+ if not self._download_file('libuv source tarball', 'https://github.com/libuv/libuv/archive/v{}.zip'.format(LIBUV_VER), file_name):
+ return
+ self._build_libuv(file_name)
+
+ def _build_libuv(self, file_name):
+ cc.e("this is a pure-virtual function.")
+
+ def build_mbedtls(self):
+ file_name = 'mbedtls-mbedtls-{}.zip'.format(MBEDTLS_VER)
+ if not self._download_file('mbedtls source tarball', 'https://github.com/ARMmbed/mbedtls/archive/mbedtls-{}.zip'.format(MBEDTLS_VER), file_name):
+ return
+ self._build_mbedtls(file_name)
+
+ def _build_mbedtls(self, file_name):
+ cc.e("this is a pure-virtual function.")
+
+ def build_libssh(self):
+ file_name = 'libssh-master.zip'
+        if not self._download_file('libssh source tarball', 'https://git.libssh.org/projects/libssh.git/snapshot/master.zip', file_name):
+ return
+ self._build_libssh(file_name)
+
+ def _build_libssh(self, file_name):
+ cc.e("this is a pure-virtual function.")
+
+ def build_sqlite(self):
+ file_name = 'sqlite-autoconf-{}.tar.gz'.format(SQLITE_VER)
+        if not self._download_file('sqlite source tarball', 'http://sqlite.org/2016/{}'.format(file_name), file_name):
+ return
+ self._build_sqlite(file_name)
+
+ def _build_sqlite(self, file_name):
+ cc.e("this is a pure-virtual function.")
+
+ def _download_file(self, desc, url, file_name):
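+        # fetch with wget into external/_download_; an already-downloaded file is reused.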
+ cc.n('downloading {} ...'.format(desc))
+ if os.path.exists(os.path.join(PATH_DOWNLOAD, file_name)):
+ cc.w('already exists, skip.')
+ return True
+
+ os.system('wget --no-check-certificate {} -O "{}/{}"'.format(url, PATH_DOWNLOAD, file_name))
+
+ if not os.path.exists(os.path.join(PATH_DOWNLOAD, file_name)):
+ cc.e('downloading {} from {} failed.'.format(desc, url))
+            return False
+
+ return True
+
+ def fix_output(self):
+ pass
+
+class BuilderWin(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ def _init_path(self):
+ cc.e("build external not works for Windows yet.")
+
+ def _build_openssl(self, file_name):
+        cc.e('building static OpenSSL for Windows is not supported yet.')
+
+ def fix_output(self):
+ pass
+
+
+class BuilderLinux(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ def _init_path(self):
+ self.PATH_TMP = os.path.join(PATH_EXTERNAL, 'linux', 'tmp')
+ self.PATH_RELEASE = os.path.join(PATH_EXTERNAL, 'linux', 'release')
+ self.OPENSSL_PATH_SRC = os.path.join(self.PATH_TMP, 'openssl-{}'.format(OPENSSL_VER))
+ self.LIBUV_PATH_SRC = os.path.join(self.PATH_TMP, 'libuv-{}'.format(LIBUV_VER))
+ self.MBEDTLS_PATH_SRC = os.path.join(self.PATH_TMP, 'mbedtls-mbedtls-{}'.format(MBEDTLS_VER))
+ self.LIBSSH_PATH_SRC = os.path.join(self.PATH_TMP, 'libssh-master')
+ self.SQLITE_PATH_SRC = os.path.join(self.PATH_TMP, 'sqlite-autoconf-{}'.format(SQLITE_VER))
+
+ if not os.path.exists(self.PATH_TMP):
+ utils.makedirs(self.PATH_TMP)
+
+ def _build_openssl(self, file_name):
+ if not os.path.exists(self.OPENSSL_PATH_SRC):
+ os.system('tar -zxvf "{}/{}" -C "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP))
+
+ cc.n('build openssl static...')
+ if os.path.exists(os.path.join(self.PATH_RELEASE, 'lib', 'libssl.a')):
+ cc.w('already exists, skip.')
+ return
+
+ old_p = os.getcwd()
+ os.chdir(self.OPENSSL_PATH_SRC)
+ os.system('./config --prefix={} --openssldir={}/openssl no-zlib no-shared'.format(self.PATH_RELEASE, self.PATH_RELEASE))
+ os.system('make')
+ os.system('make install')
+ os.chdir(old_p)
+
+ def _build_libuv(self, file_name):
+ if not os.path.exists(self.LIBUV_PATH_SRC):
+ # os.system('tar -zxvf "{}/{}" -C "{}"'.format(PATH_DOWNLOAD, file_name, PATH_TMP))
+ os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP))
+
+ cc.n('build libuv...')
+ if os.path.exists(os.path.join(self.PATH_RELEASE, 'lib', 'libuv.a')):
+ cc.w('already exists, skip.')
+ return
+
+        # the following packages are needed first:
+ # apt-get install autoconf aptitude libtool gcc-c++
+
+ old_p = os.getcwd()
+ os.chdir(self.LIBUV_PATH_SRC)
+ os.system('sh autogen.sh')
+ os.system('./configure --prefix={}'.format(self.PATH_RELEASE))
+ os.system('make')
+ os.system('make install')
+ os.chdir(old_p)
+
+ def _build_mbedtls(self, file_name):
+ if not os.path.exists(self.MBEDTLS_PATH_SRC):
+ # os.system('tar -zxvf "{}/{}" -C "{}"'.format(PATH_DOWNLOAD, file_name, PATH_TMP))
+ os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP))
+
+ cc.n('build mbedtls...')
+ if os.path.exists(os.path.join(self.PATH_RELEASE, 'lib', 'libmbedtls.a')):
+ cc.w('already exists, skip.')
+ return
+
+ # fix the Makefile
+ mkfile = os.path.join(self.MBEDTLS_PATH_SRC, 'Makefile')
+ f = open(mkfile)
+ fl = f.readlines()
+ f.close()
+
+ fixed = False
+ for i in range(len(fl)):
+ x = fl[i].split('=')
+ if x[0] == 'DESTDIR':
+ fl[i] = 'DESTDIR={}\n'.format(self.PATH_RELEASE)
+ fixed = True
+ break
+
+ if not fixed:
+ cc.e('can not fix Makefile of mbedtls.')
+ return
+
+ f = open(mkfile, 'w')
+ f.writelines(fl)
+ f.close()
+
+ # fix config.h
+ mkfile = os.path.join(self.MBEDTLS_PATH_SRC, 'include', 'mbedtls', 'config.h')
+ f = open(mkfile)
+ fl = f.readlines()
+ f.close()
+
+ for i in range(len(fl)):
+ if fl[i].find('#define MBEDTLS_KEY_EXCHANGE_ECDHE_PSK_ENABLED') >= 0:
+ fl[i] = '//#define MBEDTLS_KEY_EXCHANGE_ECDHE_PSK_ENABLED\n'
+ elif fl[i].find('#define MBEDTLS_KEY_EXCHANGE_ECDHE_RSA_ENABLED') >= 0:
+ fl[i] = '//#define MBEDTLS_KEY_EXCHANGE_ECDHE_RSA_ENABLED\n'
+ elif fl[i].find('#define MBEDTLS_KEY_EXCHANGE_ECDHE_ECDSA_ENABLED') >= 0:
+ fl[i] = '//#define MBEDTLS_KEY_EXCHANGE_ECDHE_ECDSA_ENABLED\n'
+ elif fl[i].find('#define MBEDTLS_KEY_EXCHANGE_ECDH_ECDSA_ENABLED') >= 0:
+ fl[i] = '//#define MBEDTLS_KEY_EXCHANGE_ECDH_ECDSA_ENABLED\n'
+ elif fl[i].find('#define MBEDTLS_KEY_EXCHANGE_ECDH_RSA_ENABLED') >= 0:
+ fl[i] = '//#define MBEDTLS_KEY_EXCHANGE_ECDH_RSA_ENABLED\n'
+ elif fl[i].find('#define MBEDTLS_SELF_TEST') >= 0:
+ fl[i] = '//#define MBEDTLS_SELF_TEST\n'
+ elif fl[i].find('#define MBEDTLS_SSL_RENEGOTIATION') >= 0:
+ fl[i] = '//#define MBEDTLS_SSL_RENEGOTIATION\n'
+ elif fl[i].find('#define MBEDTLS_ECDH_C') >= 0:
+ fl[i] = '//#define MBEDTLS_ECDH_C\n'
+ elif fl[i].find('#define MBEDTLS_ECDSA_C') >= 0:
+ fl[i] = '//#define MBEDTLS_ECDSA_C\n'
+ elif fl[i].find('#define MBEDTLS_ECP_C') >= 0:
+ fl[i] = '//#define MBEDTLS_ECP_C\n'
+ elif fl[i].find('#define MBEDTLS_NET_C') >= 0:
+ fl[i] = '//#define MBEDTLS_NET_C\n'
+
+ elif fl[i].find('#define MBEDTLS_RSA_NO_CRT') >= 0:
+ fl[i] = '#define MBEDTLS_RSA_NO_CRT\n'
+ elif fl[i].find('#define MBEDTLS_SSL_PROTO_SSL3') >= 0:
+ fl[i] = '#define MBEDTLS_SSL_PROTO_SSL3\n'
+
+ f = open(mkfile, 'w')
+ f.writelines(fl)
+ f.close()
+
+ # fix source file
+ utils.ensure_file_exists(os.path.join(PATH_EXTERNAL, 'fix-external', 'mbedtls', 'library', 'rsa.c'))
+ utils.copy_file(os.path.join(PATH_EXTERNAL, 'fix-external', 'mbedtls', 'library'), os.path.join(self.MBEDTLS_PATH_SRC, 'library'), 'rsa.c')
+
+ old_p = os.getcwd()
+ os.chdir(self.MBEDTLS_PATH_SRC)
+ os.system('make lib')
+ os.system('make install')
+ os.chdir(old_p)
+
+ def _build_libssh(self, file_name):
+ if not os.path.exists(self.LIBSSH_PATH_SRC):
+ # os.system('tar -zxvf "{}/{}" -C "{}"'.format(PATH_DOWNLOAD, file_name, PATH_TMP))
+ os.system('unzip "{}/{}" -d "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP))
+ os.rename(os.path.join(self.PATH_TMP, 'master'), os.path.join(self.PATH_TMP, 'libssh-master'))
+
+ cc.n('build libssh...')
+ if os.path.exists(os.path.join(self.PATH_RELEASE, 'lib', 'libssh.a')):
+ cc.w('already exists, skip.')
+ return
+
+ build_path = os.path.join(self.LIBSSH_PATH_SRC, 'build')
+ # utils.makedirs(build_path)
+
+        # there is a bug in cmake v2.8.11 (the default on ubuntu14): FindOpenSSL.cmake
+        # parses opensslv.h with a regex like this:
+        #     REGEX "^#define[\t ]+OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])+.*")
+        # but in openssl-1.0.2h the version define line is:
+        #    # define OPENSSL_VERSION_NUMBER 0x1000208fL
+        # notice there is a space between # and define, so finding OpenSSL always fails.
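+        # passing -D_OPENSSL_VERSION and the OpenSSL include/lib paths explicitly (see the cmake defines below) works around this.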
+
+ # old_p = os.getcwd()
+ # os.chdir(build_path)
+ # cmd = 'cmake' \
+ # ' -DCMAKE_INSTALL_PREFIX={}' \
+ # ' -D_OPENSSL_VERSION={}' \
+ # ' -DOPENSSL_INCLUDE_DIR={}/include' \
+ # ' -DOPENSSL_LIBRARIES={}/lib' \
+ # ' -DCMAKE_BUILD_TYPE=Release' \
+ # ' -DWITH_GSSAPI=OFF' \
+ # ' -DWITH_ZLIB=OFF' \
+ # ' -DWITH_STATIC_LIB=ON' \
+ # ' -DWITH_PCAP=OFF' \
+ # ' -DWITH_EXAMPLES=OFF' \
+ # ' -DWITH_NACL=OFF' \
+ # ' ..'.format(self.PATH_RELEASE, OPENSSL_VER, self.PATH_RELEASE, self.PATH_RELEASE)
+ # cc.n(cmd)
+ # os.system(cmd)
+ # # os.system('make ssh_static ssh_threads_static')
+ # os.system('make ssh_static')
+ # # os.system('make install')
+ # os.chdir(old_p)
+
+ cmake_define = ' -DCMAKE_INSTALL_PREFIX={}' \
+ ' -D_OPENSSL_VERSION={}' \
+ ' -DOPENSSL_INCLUDE_DIR={}/include' \
+ ' -DOPENSSL_LIBRARIES={}/lib' \
+ ' -DWITH_GSSAPI=OFF' \
+ ' -DWITH_ZLIB=OFF' \
+ ' -DWITH_STATIC_LIB=ON' \
+ ' -DWITH_PCAP=OFF' \
+ ' -DWITH_TESTING=OFF' \
+ ' -DWITH_CLIENT_TESTING=OFF' \
+ ' -DWITH_EXAMPLES=OFF' \
+ ' -DWITH_BENCHMARKS=OFF' \
+ ' -DWITH_NACL=OFF' \
+ ' ..'.format(self.PATH_RELEASE, OPENSSL_VER, self.PATH_RELEASE, self.PATH_RELEASE)
+ utils.cmake(build_path, 'Release', False, cmake_define)
+
+        # `make install` would fail because we cannot disable the ssh_shared target,
+        # so we copy the necessary files ourselves.
+ utils.ensure_file_exists(os.path.join(self.LIBSSH_PATH_SRC, 'build', 'src', 'libssh.a'))
+ utils.copy_file(os.path.join(self.LIBSSH_PATH_SRC, 'build', 'src'), os.path.join(self.PATH_RELEASE, 'lib'), 'libssh.a')
+ utils.copy_ex(os.path.join(self.LIBSSH_PATH_SRC, 'include'), os.path.join(self.PATH_RELEASE, 'include'), 'libssh')
+
+
+ def _build_sqlite(self, file_name):
+ if not os.path.exists(self.SQLITE_PATH_SRC):
+ os.system('tar -zxvf "{}/{}" -C "{}"'.format(PATH_DOWNLOAD, file_name, self.PATH_TMP))
+
+ cc.n('build sqlite static...')
+ if os.path.exists(os.path.join(self.PATH_RELEASE, 'lib', 'libsqlite3.a')):
+ cc.w('already exists, skip.')
+ return
+
+ old_p = os.getcwd()
+ os.chdir(self.SQLITE_PATH_SRC)
+ os.system('./configure --prefix={}'.format(self.PATH_RELEASE))
+ os.system('make')
+ os.system('make install')
+ os.chdir(old_p)
+
+ def fix_output(self):
+        # remove the .so files, otherwise eom_ts would link against the shared libraries instead of the static .a ones by default.
+ rm = ['libsqlite3.la', 'libsqlite3.so.0', 'libuv.la', 'libuv.so.1', 'libsqlite3.so', 'libsqlite3.so.0.8.6', 'libuv.so', 'libuv.so.1.0.0']
+ for i in rm:
+ _path = os.path.join(self.PATH_RELEASE, 'lib', i)
+ if os.path.exists(_path):
+ utils.remove(_path)
+
+
+def gen_builder(dist):
+ if dist == 'windows':
+ builder = BuilderWin()
+ elif dist == 'linux':
+ builder = BuilderLinux()
+ else:
+ raise RuntimeError('unsupported platform.')
+
+ ctx.set_dist(dist)
+ return builder
+
+
+def main():
+ builder = None
+
+ argv = sys.argv[1:]
+
+ for i in range(len(argv)):
+ if 'debug' == argv[i]:
+ ctx.set_target(TARGET_DEBUG)
+ elif 'x86' == argv[i]:
+ ctx.set_bits(BITS_32)
+ elif 'x64' == argv[i]:
+ ctx.set_bits(BITS_64)
+ elif argv[i] in ctx.dist_all:
+ builder = gen_builder(argv[i])
+
+ if builder is None:
+ builder = gen_builder(ctx.host_os)
+
+ builder.build_openssl()
+ builder.build_libuv()
+ builder.build_mbedtls()
+ builder.build_libssh()
+ builder.build_sqlite()
+
+ builder.fix_output()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+ except RuntimeError as e:
+ cc.e(e.__str__())
+ except:
+ cc.f('got exception.')
diff --git a/build/builder/build-installer.py b/build/builder/build-installer.py
new file mode 100644
index 0000000..a751f3c
--- /dev/null
+++ b/build/builder/build-installer.py
@@ -0,0 +1,401 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import shutil
+
+from core import colorconsole as cc
+from core import makepyo
+from core import utils
+from core.context import *
+from core.ver import *
+
+ctx = BuildContext()
+
+ROOT_PATH = utils.cfg.ROOT_PATH
+
+
+# COMMON_MODULES = ['paste', 'pyasn1', 'pymemcache', 'pymysql', 'rsa', 'tornado', 'six.py']
+
+
+class BuilderBase:
+ def __init__(self):
+ self.out_dir = ''
+
+ def build_installer(self):
+ pass
+
+ def _build_web_backend(self, base_path, dist, target_path):
+ cc.n('make Teleport Backend package...')
+ src_path = os.path.join(ROOT_PATH, 'web', 'site', 'backend')
+ pkg_path = os.path.join(ROOT_PATH, 'web', 'packages')
+ tmp_path = os.path.join(base_path, '_tmp_backend_')
+ tmp_app_path = os.path.join(tmp_path, 'app')
+
+ if os.path.exists(tmp_path):
+ utils.remove(tmp_path)
+
+ cc.n(' - make pyo and pack to zip...')
+
+ shutil.copytree(os.path.join(src_path, 'app'), tmp_app_path)
+
+ comm_path = os.path.join(pkg_path, 'common')
+ comm_dir = os.listdir(comm_path)
+
+ for d in comm_dir:
+ s = os.path.join(comm_path, d)
+ t = os.path.join(tmp_app_path, d)
+ if os.path.isdir(s):
+ shutil.copytree(s, t)
+ else:
+ shutil.copy(s, t)
+
+ makepyo.make(tmp_app_path)
+ shutil.make_archive(os.path.join(tmp_path, 'app'), 'zip', tmp_app_path)
+ utils.remove(tmp_app_path)
+
+ cc.n(' - copy packages...')
+ pkgs = ['packages-common', 'packages-{}'.format(dist)]
+ for d in pkgs:
+ s = os.path.join(pkg_path, d)
+ t = os.path.join(tmp_path, 'packages', d)
+ if os.path.isdir(s):
+ shutil.copytree(s, t)
+ else:
+ shutil.copy(s, t)
+
+ makepyo.remove_cache(tmp_path)
+
+ cc.n(' - copy static and view...')
+ miscs = ['static', 'view', 'res', 'tools']
+ for d in miscs:
+ s = os.path.join(src_path, d)
+ t = os.path.join(tmp_path, d)
+ if os.path.isdir(s):
+ shutil.copytree(s, t)
+ else:
+ shutil.copy(s, t)
+
+ # self._create_start_file(os.path.join(tmp_path, 'eom_bootstrap.py'), 'ts-backend')
+
+ shutil.copytree(tmp_path, os.path.join(target_path, 'www', 'backend'))
+ utils.remove(tmp_path)
+
+ # def _create_start_file(self, fname, name):
+ # f = open(fname, 'w')
+ # f.write('# -*- coding: utf-8 -*-\n')
+ # f.write('import os\n')
+ # f.write('import sys\n')
+ # f.write('p = os.path.abspath(os.path.dirname(__file__))\n')
+ # f.write('_p = os.path.join(p, "app.zip")\n')
+ # f.write('sys.path.insert(0, _p)\n')
+ # # f.write('_p = os.path.join(p, "{}", "app", "common.zip")\n'.format(name))
+ # # f.write('sys.path.insert(0, _p)\n')
+ # f.write('def main():\n')
+ # f.write(' try:\n')
+ # f.write(' import eom_main\n')
+ # f.write(' return eom_main.main()\n')
+ # f.write(' except:\n')
+ # f.write(' print("can not start {}.")\n'.format(name))
+ # f.write(' raise\n')
+ # f.write('if __name__ == "__main__":\n')
+ # f.write(' sys.exit(main())\n')
+ #
+ # f.close()
+
+ # def _build_web_frontend(self, base_path, dist, target_path):
+ # cc.n('make Teleport Frontend package...')
+ # src_path = os.path.join(ROOT_PATH, 'web', 'site', 'frontend')
+ # pkg_path = os.path.join(ROOT_PATH, 'web', 'packages')
+ # tmp_path = os.path.join(base_path, '_tmp_frontend_')
+ #
+ # if os.path.exists(tmp_path):
+ # utils.remove(tmp_path)
+ #
+ # shutil.copytree(os.path.join(src_path, 'app'), os.path.join(tmp_path, 'app'))
+ #
+ # pkg_common = os.path.join(pkg_path, 'common')
+ # _s_path = os.listdir(pkg_common)
+ # for d in _s_path:
+ # s = os.path.join(pkg_common, d)
+ # t = os.path.join(tmp_path, 'app', d)
+ # if os.path.isdir(s):
+ # shutil.copytree(s, t)
+ # else:
+ # shutil.copy(s, t)
+ #
+ # cc.n(' - copy packages...')
+ # pkgs = ['packages-common', 'packages-{}'.format(dist)]
+ # for d in pkgs:
+ # s = os.path.join(pkg_path, d)
+ # t = os.path.join(tmp_path, 'packages', d)
+ # if os.path.isdir(s):
+ # shutil.copytree(s, t)
+ # else:
+ # shutil.copy(s, t)
+ #
+ # makepyo.remove_cache(tmp_path)
+ #
+ # cc.n(' - copy static and view...')
+ # miscs = ['static', 'view', 'res']
+ # for d in miscs:
+ # s = os.path.join(src_path, d)
+ # t = os.path.join(tmp_path, d)
+ # if os.path.isdir(s):
+ # shutil.copytree(s, t)
+ # else:
+ # shutil.copy(s, t)
+ #
+ # # if not os.path.exists(os.path.join(tmp_path, 'static', 'download')):
+ # # utils.makedirs(os.path.join(tmp_path, 'static', 'download'))
+ # # utils.copy_file(os.path.join(ROOT_PATH, 'dist'), os.path.join(tmp_path, 'static', 'download'), 'teleport-assist-win.zip')
+ #
+ # shutil.copytree(tmp_path, os.path.join(target_path, 'www', 'frontend'))
+ # utils.remove(tmp_path)
+
+
+ def _build_web(self, base_path, dist, target_path):
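+        # stage the web app into a temp dir: copy site/teleport/app plus web/packages/common,
+        # add packages-common / packages-<dist>, drop cached bytecode, copy static/view/res/tools,
+        # then copy the whole tree to <target_path>/www/teleport and delete the temp dir.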
+ cc.n('make Teleport Web package...')
+ src_path = os.path.join(ROOT_PATH, 'web', 'site', 'teleport')
+ pkg_path = os.path.join(ROOT_PATH, 'web', 'packages')
+ tmp_path = os.path.join(base_path, '_tmp_web_')
+
+ if os.path.exists(tmp_path):
+ utils.remove(tmp_path)
+
+ shutil.copytree(os.path.join(src_path, 'app'), os.path.join(tmp_path, 'app'))
+
+ pkg_common = os.path.join(pkg_path, 'common')
+ _s_path = os.listdir(pkg_common)
+ for d in _s_path:
+ s = os.path.join(pkg_common, d)
+ t = os.path.join(tmp_path, 'app', d)
+ if os.path.isdir(s):
+ shutil.copytree(s, t)
+ else:
+ shutil.copy(s, t)
+
+ cc.n(' - copy packages...')
+ pkgs = ['packages-common', 'packages-{}'.format(dist)]
+ for d in pkgs:
+ s = os.path.join(pkg_path, d)
+ t = os.path.join(tmp_path, 'packages', d)
+ if os.path.isdir(s):
+ shutil.copytree(s, t)
+ else:
+ shutil.copy(s, t)
+
+ makepyo.remove_cache(tmp_path)
+
+ cc.n(' - copy static and view...')
+ miscs = ['static', 'view', 'res', 'tools']
+ for d in miscs:
+ s = os.path.join(src_path, d)
+ t = os.path.join(tmp_path, d)
+ if os.path.isdir(s):
+ shutil.copytree(s, t)
+ else:
+ shutil.copy(s, t)
+
+ # if not os.path.exists(os.path.join(tmp_path, 'static', 'download')):
+ # utils.makedirs(os.path.join(tmp_path, 'static', 'download'))
+ # utils.copy_file(os.path.join(ROOT_PATH, 'dist'), os.path.join(tmp_path, 'static', 'download'), 'teleport-assist-win.zip')
+
+ shutil.copytree(tmp_path, os.path.join(target_path, 'www', 'teleport'))
+ utils.remove(tmp_path)
+
+
+class BuilderWin(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ # now = time.localtime(time.time())
+ # _ver = '1.0.{:2d}.{:d}{:02d}'.format(now.tm_year - 2000, now.tm_mon, now.tm_mday)
+ # self.name = 'teleport-server-windows-{}-{}'.format(ctx.bits_path, _ver)
+ self.name = 'teleport-server-windows-{}-{}'.format(ctx.bits_path, VER_TELEPORT_SERVER)
+
+ self.base_path = os.path.join(ROOT_PATH, 'dist', 'installer', ctx.dist, 'server')
+ self.base_tmp = os.path.join(self.base_path, '_tmp_')
+ self.tmp_path = os.path.join(self.base_tmp, self.name, 'data', 'teleport')
+
+ def build_installer(self):
+ cc.n('make teleport installer package...')
+
+ if os.path.exists(self.base_tmp):
+ utils.remove(self.base_tmp)
+
+ # self._build_web_backend(self.base_path, 'windows', self.tmp_path)
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'web-backend.conf')
+ #
+ # self._build_web_frontend(self.base_path, 'windows', self.tmp_path)
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'web-frontend.conf')
+
+ self._build_web(self.base_path, 'windows', self.tmp_path)
+ utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'web.conf')
+
+ # out_path = os.path.join(ROOT_PATH, 'out', 'eom_ts', ctx.target_path, ctx.dist_path)
+ # bin_path = os.path.join(self.tmp_path, 'bin')
+ # utils.copy_file(out_path, bin_path, 'eom_ts')
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'etc'), 'eom_ts.ini')
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'etc'), 'ts_ssh_server.key')
+
+ out_path = os.path.join(ROOT_PATH, 'out', 'eom_ts', ctx.bits_path, ctx.target_path)
+ bin_path = os.path.join(self.tmp_path, 'bin')
+ utils.copy_ex(out_path, bin_path, 'eom_ts.exe')
+ utils.copy_ex(out_path, bin_path, 'pysrt')
+
+ utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'eom_ts.ini')
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'license.key')
+ utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'ts_ssh_server.key')
+ # utils.copy_ex(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'ssl')
+
+ # utils.copy_ex(os.path.join(ROOT_PATH, 'share', 'data'), os.path.join(self.tmp_path, 'tmp', 'data'), ('ts_db_release.db', 'ts_db.db'))
+ utils.copy_ex(os.path.join(ROOT_PATH, 'share', 'data'), os.path.join(self.tmp_path, 'tmp', 'data'), 'main.sql')
+
+ # utils.copy_ex(os.path.join(ROOT_PATH, 'share', 'data'), os.path.join(self.tmp_path, 'data'), ('ts_db_release.db', 'ts_db.db'))
+
+ # utils.make_zip(os.path.join(self.tmp_path, '..'), os.path.join(self.tmp_path, '..', '..', 'teleport.zip'))
+ # utils.copy_file(os.path.join(self.tmp_path, '..', '..'), os.path.join(self.tmp_path, '..'), 'teleport.zip')
+ # utils.remove(os.path.join(self.tmp_path, '..', '..', 'teleport.zip'))
+ # utils.remove(self.tmp_path)
+
+ # make final installer.
+ cc.n('pack final server installer...')
+ out_file = os.path.join(ROOT_PATH, 'dist', '{}.zip'.format(self.name))
+
+ if os.path.exists(out_file):
+ utils.remove(out_file)
+
+ # # copy installer scripts.
+ # for i in ['daemon', 'install.sh', 'start.sh', 'stop.sh', 'status.sh']:
+ # shutil.copy(os.path.join(self.base_path, 'script', i), os.path.join(self.base_tmp, self.name, i))
+
+ for i in ['install.bat', 'uninst.bat']:
+ shutil.copy(os.path.join(self.base_path, 'script', i), os.path.abspath(os.path.join(self.tmp_path, '..', '..', i)))
+
+ # for i in ['7z.exe']:
+ # shutil.copy(os.path.join(self.base_path, 'script', i), os.path.abspath(os.path.join(self.tmp_path, '..', '..', 'data', i)))
+
+ utils.make_zip(os.path.join(self.base_tmp, self.name, '..'), out_file)
+
+
+class BuilderLinux(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ # now = time.localtime(time.time())
+ # _ver = '1.0.{:2d}.{:d}{:02d}'.format(now.tm_year - 2000, now.tm_mon, now.tm_mday)
+ # self.name = 'teleport-server-linux-{}-{}'.format(ctx.bits_path, _ver)
+ self.name = 'teleport-server-linux-{}-{}'.format(ctx.bits_path, VER_TELEPORT_SERVER)
+
+ self.base_path = os.path.join(ROOT_PATH, 'dist', 'installer', ctx.dist, 'server')
+ self.base_tmp = os.path.join(self.base_path, '_tmp_')
+ self.tmp_path = os.path.join(self.base_tmp, self.name, 'data', 'teleport')
+
+ # self.server_path = os.path.join(ROOT_PATH, 'dist', 'installer', ctx.dist, 'server')
+ # self.script_path = self.tmp_path = os.path.join(self.server_path, 'script')
+ # self.src_path = os.path.join(ROOT_PATH, 'source')
+ # self.out_tmp_path = os.path.join(self.tmp_path, self.name, 'server')
+
+ def build_installer(self):
+ cc.n('make teleport installer package...')
+
+ if os.path.exists(self.base_tmp):
+ utils.remove(self.base_tmp)
+
+ # self._build_web_backend(self.base_path, 'linux', self.tmp_path)
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'web-backend.conf')
+ #
+ # self._build_web_frontend(self.base_path, 'linux', self.tmp_path)
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'web-frontend.conf')
+
+ self._build_web(self.base_path, 'linux', self.tmp_path)
+ utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'web.conf')
+
+ # out_path = os.path.join(ROOT_PATH, 'out', 'eom_ts', ctx.target_path, ctx.dist_path)
+ # out_path = os.path.join(ROOT_PATH, 'out', 'eom_ts', ctx.bits_path, 'bin')
+ # bin_path = os.path.join(self.tmp_path, 'bin')
+ # utils.copy_file(out_path, bin_path, 'eom_ts')
+
+ out_path = os.path.join(ROOT_PATH, 'out', 'eom_ts', ctx.bits_path, 'bin')
+ bin_path = os.path.join(self.tmp_path, 'bin')
+ utils.copy_ex(out_path, bin_path, 'eom_ts')
+
+ # utils.copy_ex(out_path, bin_path, 'pysrt')
+ utils.copy_ex(os.path.join(ROOT_PATH, 'dist', 'pysrt'), bin_path, (ctx.dist_path, 'pysrt'))
+
+ utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'eom_ts.ini')
+ # utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'license.key')
+ utils.copy_file(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'ts_ssh_server.key')
+ utils.copy_ex(os.path.join(ROOT_PATH, 'share', 'etc'), os.path.join(self.tmp_path, 'tmp', 'etc'), 'ssl')
+
+ # utils.copy_ex(os.path.join(ROOT_PATH, 'share', 'data'), os.path.join(self.tmp_path, 'tmp', 'data'), ('ts_db_release.db', 'ts_db.db'))
+ utils.copy_ex(os.path.join(ROOT_PATH, 'share', 'data'), os.path.join(self.tmp_path, 'tmp', 'data'), 'main.sql')
+
+ # utils.make_zip(self.tmp_path, os.path.join(self.tmp_path, '..', 'eom_ts.zip'))
+ utils.make_targz(os.path.join(self.tmp_path, '..'), 'teleport', 'teleport.tar.gz')
+ utils.remove(self.tmp_path)
+
+ # make final installer.
+ cc.n('pack final server installer...')
+ # out_file = os.path.join(ROOT_PATH, 'dist', '{}.zip'.format(self.name))
+ out_file = os.path.join(ROOT_PATH, 'dist', '{}.tar.gz'.format(self.name))
+
+ if os.path.exists(out_file):
+ utils.remove(out_file)
+
+ # # copy installer scripts.
+ for i in ['daemon', 'start.sh', 'stop.sh', 'status.sh']:
+ shutil.copy(os.path.join(self.base_path, 'script', i), os.path.abspath(os.path.join(self.tmp_path, '..', i)))
+ for i in ['install.sh']:
+ shutil.copy(os.path.join(self.base_path, 'script', i), os.path.abspath(os.path.join(self.tmp_path, '..', '..', i)))
+
+ # utils.make_zip(os.path.join(self.base_tmp, self.name), out_file)
+ utils.make_targz(self.base_tmp, self.name, out_file)
+
+
+ # utils.remove(self.base_tmp)
+
+
+def gen_builder(dist):
+ if dist == 'windows':
+ builder = BuilderWin()
+ elif dist == 'linux':
+ builder = BuilderLinux()
+ else:
+ raise RuntimeError('unsupported platform.')
+
+ ctx.set_dist(dist)
+ return builder
+
+
+def main():
+ builder = None
+
+ argv = sys.argv[1:]
+
+ for i in range(len(argv)):
+ if 'debug' == argv[i]:
+ ctx.set_target(TARGET_DEBUG)
+ elif 'x86' == argv[i]:
+ ctx.set_bits(BITS_32)
+ elif 'x64' == argv[i]:
+ ctx.set_bits(BITS_64)
+ elif argv[i] in ctx.dist_all:
+ builder = gen_builder(argv[i])
+
+ if builder is None:
+ builder = gen_builder(ctx.host_os)
+
+ if 'installer' in argv:
+ builder.build_installer()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+ except RuntimeError as e:
+ cc.e(e.__str__())
+ except:
+ cc.f('got exception.')
diff --git a/build/builder/build-pysrt.py b/build/builder/build-pysrt.py
new file mode 100644
index 0000000..ef187e7
--- /dev/null
+++ b/build/builder/build-pysrt.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+
+import shutil
+import struct
+
+from core import colorconsole as cc
+from core import makepyo
+from core import utils
+from core.context import *
+
+ctx = BuildContext()
+
+#PY_VER = platform.python_version_tuple()
+
+ROOT_PATH = utils.cfg.ROOT_PATH
+PY_EXEC = utils.cfg.py_exec
+
+MODULES_WIN = ['_bz2', '_ctypes', '_hashlib', '_lzma', '_overlapped', '_socket', '_sqlite3', '_ssl', 'select', 'sqlite3', 'unicodedata']
+PY_LIB_REMOVE_WIN = ['ctypes/test', 'curses', 'dbm', 'distutils', 'email/test', 'ensurepip', 'idlelib', 'lib2to3',
+ 'lib-dynload', 'pydoc_data', 'site-packages', 'sqlite3/test', 'test', 'tkinter', 'turtledemo',
+ 'unittest', 'venv', 'wsgiref', 'dis.py', 'doctest.py', 'pdb.py', 'py_compile.py', 'pydoc.py',
+ 'this.py', 'wave.py', 'webbrowser.py', 'zipapp.py']
+PY_LIB_REMOVE_LINUX = ['ctypes/test', 'curses', 'config-3.4m-x86_64-linux-gnu', 'dbm', 'distutils', 'ensurepip', 'idlelib', 'lib2to3',
+ 'lib-dynload', 'pydoc_data', 'site-packages', 'sqlite3/test', 'test', 'tkinter', 'turtledemo', 'unittest', 'venv',
+ 'wsgiref', 'dis.py', 'doctest.py', 'pdb.py', 'py_compile.py', 'pydoc_data', 'pydoc.py', 'this.py', 'wave.py',
+ 'webbrowser.py', 'zipapp.py']
+
+
+class PYSBase:
+ def __init__(self):
+ self.base_path = os.path.join(ROOT_PATH, 'out', 'pysrt')#, ctx.dist_path)
+
+ self.py_dll_path = ''
+ self.py_lib_path = ''
+
+ self.modules = list()
+ self.py_lib_remove = list()
+ self.PY_STATIC_PATH = ''
+
+ def build(self):
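+        # locate the interpreter's extension modules and Lib directory, lay down the runtime
+        # base, pack Lib into python.zip, then write python.ver (a no-op on Linux).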
+ self.py_dll_path = self._locate_dll_path()
+ self.py_lib_path = self._locate_lib_path()
+
+ cc.v('python dll path :', self.py_dll_path)
+ cc.v('python lib path :', self.py_lib_path)
+
+ self._make_base()
+ self._make_python_zip()
+ self._make_py_ver_file()
+
+ def _locate_dev_inc_path(self):
+ return ''
+
+ def _locate_dll_path(self):
+ return ''
+
+ def _locate_lib_path(self):
+ return ''
+
+ def _make_base(self):
+ pass
+
+ def _copy_modules(self):
+ cc.n('copy python extension dll...')
+ mod_path = os.path.join(self.base_path, 'modules')
+ utils.makedirs(mod_path)
+
+ ext = utils.extension_suffixes()
+ cc.v('extension ext:', ext)
+ for m in self.modules:
+ for n in ext:
+ s = os.path.join(self.py_dll_path, m) + n
+ if os.path.exists(s):
+ cc.v('copy %s' % s)
+ cc.v(' -> %s' % os.path.join(mod_path, m) + n)
+ shutil.copy(s, os.path.join(mod_path, m) + n)
+
+ def _make_python_zip(self):
+ cc.n('make python.zip...')
+
+ out_file = os.path.join(self.base_path, 'python.zip')
+ if os.path.exists(out_file):
+ utils.remove(out_file)
+
+ _tmp_ = os.path.join(self.base_path, '_tmp_')
+ if os.path.exists(_tmp_):
+ cc.v('clear up temp folder...')
+ utils.remove(_tmp_)
+
+ cc.v('copying Python `Lib` folder...')
+ shutil.copytree(self.py_lib_path, _tmp_)
+
+ cc.v('remove useless folders and files...')
+ for i in self.py_lib_remove:
+ utils.remove(_tmp_, i)
+
+ cc.v('generate *.pyo...')
+ makepyo.make(_tmp_)
+
+ cc.v('compress into python.zip...')
+ utils.make_zip(_tmp_, out_file)
+ utils.ensure_file_exists(out_file)
+
+ cc.v('remove temp folder...')
+ utils.remove(_tmp_)
+
+ def _make_py_ver_file(self):
+ pass
+
+ def _get_py_dll_name(self):
+ return ''
+
+
+class PYSBaseWin(PYSBase):
+ def __init__(self):
+ super().__init__()
+ self.modules = MODULES_WIN
+ self.py_lib_remove = PY_LIB_REMOVE_WIN
+
+ def _locate_dev_inc_path(self):
+ for p in sys.path:
+ if os.path.exists(os.path.join(p, 'include', 'pyctype.h')):
+ return os.path.join(p, 'include')
+ cc.e('\ncan not locate python development include path in:')
+ for p in sys.path:
+ cc.e(' ', p)
+ raise RuntimeError()
+
+ def _locate_dll_path(self):
+ for p in sys.path:
+ if os.path.exists(os.path.join(p, 'DLLs', '_ctypes.pyd')):
+ return os.path.join(p, 'DLLs')
+ cc.e('\nCan not locate python DLLs path in:')
+ for p in sys.path:
+ cc.e(' ', p)
+ raise RuntimeError()
+
+ def _locate_lib_path(self):
+ for p in sys.path:
+ if os.path.exists(os.path.join(p, 'Lib', 'ctypes', 'wintypes.py')):
+ return os.path.join(p, 'Lib')
+ cc.e('\nCan not locate python lib path in:')
+ for p in sys.path:
+ cc.e(' ', p)
+ raise RuntimeError()
+
+ def _make_base(self):
+ if os.path.exists(self.base_path):
+            cc.v('an existing version was found, cleaning up...', self.base_path)
+ utils.remove(self.base_path)
+
+ cc.v('make pysbase folder...')
+ utils.makedirs(self.base_path)
+
+ cc.v('copy python core dll...')
+ _win_system_path = os.path.join(os.getenv('SystemRoot'), 'system32')
+ if ctx.bits == BITS_32 and ctx.host_os_is_win_x64:
+ _win_system_path = os.path.join(os.getenv('SystemRoot'), 'SysWOW64')
+
+ if not os.path.exists(_win_system_path):
+ raise RuntimeError('can not locate windows system folder at:', _win_system_path)
+
+ pydll = self._get_py_dll_name()
+ shutil.copy(os.path.join(_win_system_path, pydll), os.path.join(self.base_path, pydll))
+
+ if ctx.py_ver == '34':
+ msvcrdll = 'msvcr100.dll'
+ else:
+ raise RuntimeError('unknown msvc runtime for this python version.')
+ shutil.copy(os.path.join(_win_system_path, msvcrdll), os.path.join(self.base_path, msvcrdll))
+
+ super()._copy_modules()
+
+ def _make_py_ver_file(self):
+        # append a string (zero-padded to 64 bytes) after python.zip giving the file name of the Python dynamic library, so the loader shell knows which Python DLL to load.
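+        # ('=64s' yields a fixed 64-byte field, NUL-padded)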
+ out_file = os.path.join(self.base_path, 'python.ver')
+ _data = struct.pack('=64s', self._get_py_dll_name().encode())
+ f = open(out_file, 'wb')
+ f.write(_data)
+ f.close()
+
+ def _get_py_dll_name(self):
+ #return 'python{}{}.dll'.format(PY_VER[0], PY_VER[1])
+ return 'python{}.dll'.format(utils.cfg.py_ver_str)
+
+
+class PYSBaseLinux(PYSBase):
+ def __init__(self):
+ super().__init__()
+
+ self.PY_STATIC_PATH = os.path.join(os.path.join(ROOT_PATH, 'external', 'linux', 'release'))
+ if not os.path.exists(self.PY_STATIC_PATH):
+ raise RuntimeError('can not locate py-static release folder.')
+
+ self.py_lib_remove = PY_LIB_REMOVE_LINUX
+
+ def _locate_dll_path(self):
+ _path = os.path.join(self.PY_STATIC_PATH, 'lib', 'python3.4', 'lib-dynload')
+ if os.path.exists(_path):
+ return _path
+
+ cc.e('\ncan not locate python DLLs path at [{}]'.format(_path))
+ raise RuntimeError()
+
+ def _locate_lib_path(self):
+ _path = os.path.join(self.PY_STATIC_PATH, 'lib', 'python3.4')
+ if os.path.exists(os.path.join(_path, 'ctypes', 'wintypes.py')):
+ return _path
+
+ cc.e('\ncan not locate python lib path at [{}]'.format(_path))
+ raise RuntimeError()
+
+ def _make_base(self):
+ if os.path.exists(self.base_path):
+            cc.v('an existing version was found, cleaning up...', self.base_path)
+ utils.remove(self.base_path)
+
+ cc.v('make pysrt folder...')
+ utils.makedirs(self.base_path)
+
+ cc.n('copy python extension dll...')
+ utils.copy_ex(self.py_dll_path, os.path.join(self.base_path, 'modules'))
+
+ def _make_py_ver_file(self):
+ # do nothing.
+ pass
+
+
+def main():
+ if ctx.host_os == 'windows':
+ x = PYSBaseWin()
+ elif ctx.host_os == 'linux':
+ x = PYSBaseLinux()
+ else:
+ raise RuntimeError('unsupported platform:', ctx.host_os)
+
+ x.build()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+ except RuntimeError as e:
+ cc.e(e.__str__())
+ except:
+ cc.f('got exception.')
diff --git a/build/builder/build-server.py b/build/builder/build-server.py
new file mode 100644
index 0000000..780a05e
--- /dev/null
+++ b/build/builder/build-server.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import codecs
+import shutil
+import time
+from core import colorconsole as cc
+from core import makepyo
+from core import utils
+from core.context import *
+
+ctx = BuildContext()
+
+ROOT_PATH = utils.cfg.ROOT_PATH
+
+
+class BuilderBase:
+ def __init__(self):
+ self.out_dir = ''
+
+ def build_server(self):
+ pass
+
+
+class BuilderWin(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ def build_server(self):
+ cc.n('build eom_ts...')
+ sln_file = os.path.join(ROOT_PATH, 'teleport-server', 'src', 'eom_ts.vs2015.sln')
+ out_file = os.path.join(ROOT_PATH, 'out', 'eom_ts', ctx.bits_path, ctx.target_path, 'eom_ts.exe')
+ if os.path.exists(out_file):
+ utils.remove(out_file)
+ utils.msvc_build(sln_file, 'eom_ts', ctx.target_path, ctx.bits_path, False)
+ utils.ensure_file_exists(out_file)
+ #
+ # s = os.path.join(ROOT_PATH, 'out', 'console', ctx.bits_path, ctx.target_path, 'console.exe')
+ # t = os.path.join(ROOT_PATH, 'out', 'eom_agent', ctx.target_path, ctx.dist_path, 'eom_agent.com')
+ # shutil.copy(s, t)
+ # utils.ensure_file_exists(t)
+
+
+class BuilderLinux(BuilderBase):
+ def __init__(self):
+ super().__init__()
+
+ def build_server(self):
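+        # configure and build server/cmake-build with utils.cmake, then strip the tp_web
+        # binary expected at out/server/<bits>/bin/tp_web.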
+ cc.n('build tp_web...')
+
+ ###################
+ # out_path = os.path.join(ROOT_PATH, 'out', 'eom_ts', ctx.target_path, ctx.dist_path)
+ out_path = os.path.join(ROOT_PATH, 'out', 'server', ctx.bits_path, 'bin')
+ out_file = os.path.join(out_path, 'tp_web')
+
+ if os.path.exists(out_file):
+ utils.remove(out_file)
+
+ utils.makedirs(out_path)
+
+ utils.cmake(os.path.join(ROOT_PATH, 'server', 'cmake-build'), ctx.target_path, False)
+ utils.strip(out_file)
+
+
+ # wscript_file = os.path.join(ROOT_PATH, 'wscript')
+ # utils.waf_build(wscript_file, ctx.target_path, False)
+
+ # chk_file = os.path.join(ROOT_PATH, 'waf_build', ctx.target_path, 'eom_ts')
+ # utils.ensure_file_exists(chk_file)
+ # os.chmod(chk_file, 0o777)
+
+ # shutil.copy(chk_file, out_file)
+ utils.ensure_file_exists(out_file)
+
+
+
+def gen_builder(dist):
+ if dist == 'windows':
+ builder = BuilderWin()
+ elif dist == 'linux':
+ builder = BuilderLinux()
+ else:
+ raise RuntimeError('unsupported platform.')
+
+ ctx.set_dist(dist)
+ return builder
+
+
+def main():
+ builder = None
+
+ argv = sys.argv[1:]
+
+ for i in range(len(argv)):
+ if 'debug' == argv[i]:
+ ctx.set_target(TARGET_DEBUG)
+ elif 'x86' == argv[i]:
+ ctx.set_bits(BITS_32)
+ elif 'x64' == argv[i]:
+ ctx.set_bits(BITS_64)
+ elif argv[i] in ctx.dist_all:
+ builder = gen_builder(argv[i])
+
+ if builder is None:
+ builder = gen_builder(ctx.host_os)
+
+ if 'server' in argv:
+ builder.build_server()
+
+ # if 'app' in argv:
+ # builder.build_app()
+
+ # if 'installer' in argv:
+ # builder.build_installer()
+
+ # if 'runtime' in argv:
+ # builder.build_runtime()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+ except RuntimeError as e:
+ cc.e(e.__str__())
+ except:
+ cc.f('got exception.')
diff --git a/build/builder/build-version.py b/build/builder/build-version.py
new file mode 100644
index 0000000..fadfac4
--- /dev/null
+++ b/build/builder/build-version.py
@@ -0,0 +1,266 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import codecs
+
+from core import colorconsole as cc
+from core import utils
+from core.context import *
+
+ctx = BuildContext()
+
+ROOT_PATH = utils.cfg.ROOT_PATH
+
+
+class Builder:
+ def __init__(self):
+ self.ver_in = os.path.join(ROOT_PATH, 'version.in')
+
+ self.VER_TELEPORT_SERVER = ''
+ self.VER_TELEPORT_ASSIST = ''
+ self.VER_TELEPORT_ASSIST_REQUIRE = ''
+ self.VER_TELEPORT_MAKECERT = ''
+
+ def build(self):
+ cc.n('update version...')
+
+ if not os.path.exists(self.ver_in):
+            raise RuntimeError('file `version.in` does not exist.')
+ with codecs.open(self.ver_in, 'r', 'utf-8') as f:
+ lines = f.readlines()
+ for l in lines:
+ if l.startswith('TELEPORT_SERVER '):
+ x = l.split(' ')
+ self.VER_TELEPORT_SERVER = x[1].strip()
+ elif l.startswith('TELEPORT_ASSIST '):
+ x = l.split(' ')
+ self.VER_TELEPORT_ASSIST = x[1].strip()
+ elif l.startswith('TELEPORT_ASSIST_REQUIRE '):
+ x = l.split(' ')
+ self.VER_TELEPORT_ASSIST_REQUIRE = x[1].strip()
+ elif l.startswith('TELEPORT_MAKECERT '):
+ x = l.split(' ')
+ self.VER_TELEPORT_MAKECERT = x[1].strip()
+
+ #
+ cc.v('new version:')
+ cc.v(' TELEPORT-Server : ', self.VER_TELEPORT_SERVER)
+ cc.v(' TELEPORT-Assist : ', self.VER_TELEPORT_ASSIST)
+ cc.v(' TELEPORT-Assist-require : ', self.VER_TELEPORT_ASSIST_REQUIRE)
+ cc.v(' TELEPORT-MakeCert : ', self.VER_TELEPORT_MAKECERT)
+ cc.v('')
+
+ self.make_build_ver()
+ self.make_assist_ver()
+ self.make_eom_ts_ver()
+ self.make_web_ver()
+
+ def make_build_ver(self):
+ ver_file = os.path.join(ROOT_PATH, 'build', 'builder', 'core', 'ver.py')
+ ver_content = '# -*- coding: utf8 -*-\nVER_TELEPORT_SERVER = "{}"\nVER_TELEPORT_ASSIST = "{}"\nVER_TELEPORT_MAKECERT = "{}"\n'.format(self.VER_TELEPORT_SERVER, self.VER_TELEPORT_ASSIST, self.VER_TELEPORT_MAKECERT)
+
+ rewrite = False
+ if not os.path.exists(ver_file):
+ rewrite = True
+ else:
+ old_content = ''
+ with open(ver_file, 'r') as f:
+ old_content = f.read()
+ if old_content != ver_content:
+ rewrite = True
+
+ if rewrite:
+ cc.v(' update {}...'.format(ver_file))
+ with open(ver_file, 'w') as f:
+ f.write(ver_content)
+
+ def make_web_ver(self):
+ ver_file = os.path.join(ROOT_PATH, 'web', 'site', 'teleport', 'app', 'eom_ver.py')
+ # ver_content = '# -*- coding: utf8 -*-\n\nTS_VER = "{}"\n'.format(self.VER_TELEPORT_SERVER)
+ ver_content = '# -*- coding: utf8 -*-\nTS_VER = "{}"\nTP_ASSIST_LAST_VER = "{}"\nTP_ASSIST_REQUIRE = "{}"\n'.format(self.VER_TELEPORT_SERVER, self.VER_TELEPORT_ASSIST, self.VER_TELEPORT_ASSIST_REQUIRE)
+
+ rewrite = False
+ if not os.path.exists(ver_file):
+ rewrite = True
+ else:
+ old_content = ''
+ with open(ver_file, 'r') as f:
+ old_content = f.read()
+ if old_content != ver_content:
+ rewrite = True
+
+ if rewrite:
+ cc.v(' update {}...'.format(ver_file))
+ with open(ver_file, 'w') as f:
+ f.write(ver_content)
+
+ def make_assist_ver(self):
+ ver_file = os.path.join(ROOT_PATH, 'tp_assist', 'ts_ver.h')
+ ver_content = '#ifndef __TS_ASSIST_VER_H__\n#define __TS_ASSIST_VER_H__\n\n#define TP_ASSIST_VER\tL"{}"\n\n#endif // __TS_ASSIST_VER_H__\n'.format(self.VER_TELEPORT_ASSIST)
+
+ rewrite = False
+ if not os.path.exists(ver_file):
+ rewrite = True
+ else:
+ old_content = ''
+ with open(ver_file, 'r') as f:
+ old_content = f.read()
+ if old_content != ver_content:
+ rewrite = True
+
+ if rewrite:
+ cc.v(' update {}...'.format(ver_file))
+ with open(ver_file, 'w') as f:
+ f.write(ver_content)
+
+ rc_file = os.path.join(ROOT_PATH, 'tp_assist', 'tp_assist.rc')
+ self._update_vs_rc(rc_file, self.VER_TELEPORT_ASSIST)
+
+ def make_eom_ts_ver(self):
+ ver_file = os.path.join(ROOT_PATH, 'teleport-server', 'src', 'ts_ver.h')
+ ver_content = '#ifndef __TS_SERVER_VER_H__\n#define __TS_SERVER_VER_H__\n\n#define TP_SERVER_VER\tL"{}"\n\n#endif // __TS_SERVER_VER_H__\n'.format(self.VER_TELEPORT_SERVER)
+
+ rewrite = False
+ if not os.path.exists(ver_file):
+ rewrite = True
+ else:
+ old_content = ''
+ with open(ver_file, 'r') as f:
+ old_content = f.read()
+ if old_content != ver_content:
+ rewrite = True
+
+ if rewrite:
+ cc.v(' update {}...'.format(ver_file))
+ with open(ver_file, 'w') as f:
+ f.write(ver_content)
+
+ rc_file = os.path.join(ROOT_PATH, 'teleport-server', 'src', 'eom_ts.rc')
+ self._update_vs_rc(rc_file, self.VER_TELEPORT_SERVER)
+
+ def _update_vs_rc(self, rcFilePath, ver):
+ """ update rc file version info """
+
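+        # A Visual Studio .rc file carries the version in four places, e.g.
+        # (illustrative lines only):
+        #
+        #    FILEVERSION 1,5,46,5
+        #    PRODUCTVERSION 1,5,46,5
+        #        VALUE "FileVersion", "1.5.46.5\0"
+        #        VALUE "ProductVersion", "1.5.46.5\0"
+        #
+        # The loop below rewrites each of them in place with `ver`.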
+ t_ver = ver.split('.')
+ if len(t_ver) != 4:
+            raise RuntimeError('invalid version string `{}`, expected `a.b.c.d`.'.format(ver))
+
+ bOK = False
+ try:
+ # open rc file
+ rcFile = codecs.open(rcFilePath, 'r', 'utf16')
+ # read out all lines of rc file
+ rcLines = rcFile.readlines()
+ rcFile.close()
+
+ for x in range(len(rcLines)):
+ rcline = rcLines[x]
+
+ if rcline.find(" FILEVERSION") != -1: # find " FILEVERSION"
+ # cc.v('[ver] old ver: %s' % rcLines[x])
+ pos1 = rcline.find(' FILEVERSION ')
+ pos2 = rcline.rfind('\\0"')
+ _ver = rcline[pos1 + 13: pos2].strip()
+
+ rcSplitList = _ver.split(",")
+ if (len(rcSplitList) < 4):
+ rcSplitList = _ver.split(".")
+ if (len(rcSplitList) < 4):
+ raise RuntimeError('Invalid .rc file.')
+ if '.'.join(rcSplitList) == ver:
+ continue
+
+ rcline = '%s%s,%s,%s,%s\n' % (rcline[0:pos1 + 13], t_ver[0], t_ver[1], t_ver[2], t_ver[3])
+
+                    rcLines[x] = rcline
+ # cc.v('[ver] new ver: %s' % rcLines[x])
+ bOK = True
+
+ elif rcline.find("VALUE \"FileVersion\",") != -1: # find "VALUE FileVersion"
+ # cc.v('[ver] old ver: %s' % rcLines[x])
+ pos1 = rcline.find('\"FileVersion\", \"')
+ pos2 = rcline.rfind('\\0"')
+ _ver = rcline[pos1 + 16: pos2].strip()
+
+ rcSplitList = _ver.split(".")
+ if (len(rcSplitList) < 4):
+ rcSplitList = _ver.split(",")
+ if (len(rcSplitList) < 4):
+ raise RuntimeError('Invalid .rc file.')
+ if '.'.join(rcSplitList) == ver:
+ continue
+
+ rcline = '%s%s.%s.%s.%s\\0\"\n' % (rcline[0:pos1 + 16], t_ver[0], t_ver[1], t_ver[2], t_ver[3])
+
+                    rcLines[x] = rcline
+ # cc.v('[ver] new ver: %s' % rcLines[x])
+ bOK = True
+
+ elif rcline.find(" PRODUCTVERSION") != -1:
+ # cc.v('[ver] old ver: %s' % rcLines[x])
+ pos1 = rcline.find(' PRODUCTVERSION ')
+ pos2 = rcline.rfind('\\0"')
+ _ver = rcline[pos1 + 16: pos2].strip()
+
+ rcSplitList = _ver.split(",")
+ if (len(rcSplitList) < 4):
+ rcSplitList = _ver.split(".")
+ if (len(rcSplitList) < 4):
+ raise RuntimeError('Invalid .rc file.')
+ if '.'.join(rcSplitList) == ver:
+ continue
+
+ rcline = '%s%s,%s,%s,%s\n' % (rcline[0:pos1 + 16], t_ver[0], t_ver[1], t_ver[2], t_ver[3])
+
+                    rcLines[x] = rcline
+ # cc.v('[ver] new ver: %s' % rcLines[x])
+ bOK = True
+
+ elif rcline.find("VALUE \"ProductVersion\",") != -1:
+ # cc.v('[ver] old ver: %s' % rcLines[x])
+ pos1 = rcline.find('\"ProductVersion\", \"')
+ pos2 = rcline.rfind('\\0"')
+ _ver = rcline[pos1 + 19: pos2].strip()
+
+ rcSplitList = _ver.split(".")
+ if (len(rcSplitList) < 4):
+ rcSplitList = _ver.split(",")
+ if (len(rcSplitList) < 4):
+ raise RuntimeError('Invalid .rc file.')
+ if '.'.join(rcSplitList) == ver:
+ continue
+
+ rcline = '%s%s.%s.%s.%s\\0\"\n' % (rcline[0:pos1 + 19], t_ver[0], t_ver[1], t_ver[2], t_ver[3])
+
+                    rcLines[x] = rcline
+ # cc.v('[ver] new ver: %s' % rcLines[x])
+ bOK = True
+
+ if bOK:
+ cc.v(' update {}...'.format(rcFilePath))
+ wrcFile = codecs.open(rcFilePath, 'w', 'utf16')
+ wrcFile.writelines(rcLines)
+ wrcFile.close()
+
+ except IOError:
+            raise RuntimeError('can not open rc file: {}'.format(rcFilePath))
+
+
+def main():
+ builder = Builder()
+ builder.build()
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+ except RuntimeError as e:
+ cc.e(e.__str__())
+ except:
+ cc.f('got exception.')
diff --git a/build/builder/core/__init__.py b/build/builder/core/__init__.py
new file mode 100644
index 0000000..b8a2ace
--- /dev/null
+++ b/build/builder/core/__init__.py
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+
diff --git a/build/builder/core/colorconsole.py b/build/builder/core/colorconsole.py
new file mode 100644
index 0000000..31e9395
--- /dev/null
+++ b/build/builder/core/colorconsole.py
@@ -0,0 +1,479 @@
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+import platform
+import traceback
+
+__all__ = ['set_default', 'o', 'v', 'n', 'i', 'w', 'e', 'f']
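+
+# Typical usage (a minimal sketch of this module's API):
+#
+#   from core import colorconsole as cc
+#   cc.set_default(sep='', end='\n')
+#   cc.n('normal message')
+#   cc.w('warning message')
+#   cc.e('error message')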
+
+# ======================================
+# Colors
+# ======================================
+CR_RESTORE = 0  # restore to normal - light gray
+# BOLD = "[1m"  # highlight
+# UNDERSCORE = "[4m"  # underline
+# REVERSE = "[7m"  # reverse video
+CR_BLACK = 1  # black
+CR_LIGHT_GRAY = 2  # light gray - normal text
+CR_GRAY = 3  # dark gray - output captured from other commands
+CR_WHITE = 4  # white
+CR_RED = 5  # red
+CR_GREEN = 6  # green
+CR_YELLOW = 7  # yellow - Windows calls this color brown
+CR_BLUE = 8  # blue
+CR_MAGENTA = 9  # magenta
+CR_CYAN = 10  # cyan
+CR_LIGHT_RED = 11  # bright red - failure
+CR_LIGHT_GREEN = 12  # bright green - success
+CR_LIGHT_YELLOW = 13  # bright yellow - important
+CR_LIGHT_BLUE = 14  # bright blue - still fairly dark on a black background
+CR_LIGHT_MAGENTA = 15  # bright magenta - warning
+CR_LIGHT_CYAN = 16  # bright cyan
+
+CR_VERBOSE = CR_LIGHT_GRAY
+CR_NORMAL = CR_WHITE
+CR_INFO = CR_GREEN
+CR_WARN = CR_LIGHT_YELLOW
+CR_ERROR = CR_LIGHT_RED
+
+COLORS = {
+    # constant            Linux escape  Win console color
+    CR_RESTORE: ('[0m', 7),  # 7 = light gray - normal text
+    CR_BLACK: ('[0;30m', 0),  # 0 = black
+    CR_RED: ("[0;31m", 4),  # red
+    CR_GREEN: ("[0;32m", 2),  # green
+    CR_YELLOW: ("[0;33m", 6),  # yellow - Windows calls this color brown
+    CR_BLUE: ("[0;34m", 1),  # blue
+    CR_MAGENTA: ("[0;35m", 5),  # magenta
+    CR_CYAN: ("[0;36m", 3),  # cyan
+    CR_LIGHT_GRAY: ('[0;37m', 7),  # light gray - normal text
+    CR_GRAY: ("[1;30m", 8),  # dark gray - output captured from other commands
+    CR_LIGHT_RED: ("[1;31m", 12),  # bright red - failure
+    CR_LIGHT_GREEN: ("[1;32m", 10),  # bright green - success
+    CR_LIGHT_YELLOW: ("[1;33m", 14),  # bright yellow - important
+    CR_LIGHT_BLUE: ("[1;34m", 9),  # bright blue - still fairly dark on a black background
+    CR_LIGHT_MAGENTA: ("[1;35m", 13),  # bright magenta - warning
+    CR_LIGHT_CYAN: ("[1;36m", 11),  # bright cyan
+    CR_WHITE: ("[1;37m", 15)  # white
+}
+
+
+# env = eomcore.env.get_env()
+
+
+class ColorConsole:
+ """
+ :type _win_color : Win32ColorConsole
+ """
+
+ def __init__(self):
+
+        # self._log_console = self._console_default  # how to write to the console; None means console output is disabled
+ # self._console_set_color = self._console_set_color_default
+
+ self._sep = ' '
+ self._end = '\n'
+
+ self._win_color = None
+
+ self.o = self._func_output
+ self.v = self._func_verbose
+ self.n = self._func_normal
+ self.i = self._func_info
+ self.w = self._func_warn
+ self.e = self._func_error
+ self.f = self._func_fail
+
+ if sys.stdout is None:
+ self.o = self._func_pass
+ self.v = self._func_pass
+ self.n = self._func_pass
+ self.i = self._func_pass
+ self.w = self._func_pass
+ self.e = self._func_pass
+ self.f = self._func_pass
+ # self._log_console = self._func_pass
+ # self._console_set_color = self._console_set_color_default
+
+ else:
+ # python2.7 on Ubuntu, sys.platform is 'linux2', so we use platform.system() instead.
+
+ _platform = platform.system().lower()
+
+ if _platform == 'linux' or _platform == 'darwin':
+ self._console_set_color = self._console_set_color_linux
+ self._console_restore_color = self._console_restore_color_linux
+ elif _platform == 'windows':
+ if 'TERM' in os.environ and os.environ['TERM'] in ['xterm']:
+ self._console_set_color = self._console_set_color_linux
+ self._console_restore_color = self._console_restore_color_linux
+
+ else:
+ self._win_color = Win32ColorConsole()
+ if self._win_color.available():
+ self._console_set_color = self._console_set_color_win
+ self._console_restore_color = self._console_restore_color_win
+
+ else:
+ self._console_set_color = self._func_pass
+ self._console_restore_color = self._func_pass
+
+ def set_default(self, *args, **kwargs):
+ if 'sep' in kwargs:
+ self._sep = kwargs['sep']
+ if 'end' in kwargs:
+ self._end = kwargs['end']
+
+ def _func_pass(self, *args, **kwargs):
+ # do nothing.
+ pass
+
+ def _func_output(self, *args, **kwargs):
+ sep = kwargs['sep'] if 'sep' in kwargs else self._sep
+ end = kwargs['end'] if 'end' in kwargs else self._end
+
+        first = True
+        for x in args:
+            if not first:
+                sys.stdout.writelines(sep)
+            first = False
+ if isinstance(x, tuple):
+ cl = x[0]
+ z = x[1:]
+ self._console_set_color(cl)
+ self._console_output(*z, sep='', end='')
+ sys.stdout.flush()
+
+ elif isinstance(x, str):
+ self._console_output(x, sep='', end='')
+ sys.stdout.flush()
+
+ else:
+ raise RuntimeError('Invalid param.')
+
+ sys.stdout.writelines(end)
+ self._console_restore_color()
+ sys.stdout.flush()
+
+ def _func_verbose(self, *args, **kwargs):
+ self._console_set_color(CR_VERBOSE)
+ self._console_output(*args, **kwargs)
+ self._console_restore_color()
+ sys.stdout.flush()
+
+    # normal log message
+ def _func_normal(self, *args, **kwargs):
+ self._console_set_color(CR_NORMAL)
+ self._console_output(*args, **kwargs)
+ self._console_restore_color()
+ sys.stdout.flush()
+
+    # important information
+ def _func_info(self, *args, **kwargs):
+ self._console_set_color(CR_INFO)
+ self._console_output(*args, **kwargs)
+ self._console_restore_color()
+ sys.stdout.flush()
+
+    # warning
+ def _func_warn(self, *args, **kwargs):
+ self._console_set_color(CR_WARN)
+ self._console_output(*args, **kwargs)
+ self._console_restore_color()
+ sys.stdout.flush()
+
+ def _func_error(self, *args, **kwargs):
+ self._console_set_color(CR_ERROR)
+ self._console_output(*args, **kwargs)
+ self._console_restore_color()
+ sys.stdout.flush()
+
+ def _func_fail(self, *args, **kwargs):
+ self._console_set_color(CR_ERROR)
+ self._console_output('[FAIL] ', end='')
+ self._console_output(*args, **kwargs)
+
+ _type, _value, _tb = sys.exc_info()
+ if _type is not None:
+ x = traceback.format_exception_only(_type, _value)
+ self._console_output('[EXCEPTION] ', end='')
+ self._console_output(x[0], end='')
+
+ x = traceback.extract_tb(_tb)
+ c = len(x)
+ self._console_set_color(CR_RED)
+ for i in range(0, c):
+ self._console_output(os.path.abspath(x[i][0]), '(', x[i][1], '): ', x[i][3], sep='')
+ else:
+ s = traceback.extract_stack()
+ c = len(s)
+ self._console_set_color(CR_RED)
+ for i in range(2, c):
+ self._console_output(' ', os.path.abspath(s[c - i - 1][0]), '(', s[c - i - 1][1], '): ', s[c - i - 1][3], sep='')
+
+ self._console_restore_color()
+ sys.stdout.flush()
+
+ # def _do_log(self, msg, color=None):
+ # self._log_console(msg, color)
+ #
+ # def _console_default(self, msg, color=None):
+ # if msg is None:
+ # return
+ #
+ # sys.stdout.writelines(msg)
+ # sys.stdout.flush()
+ #
+ # def _console_win(self, msg, color=None):
+ # if msg is None:
+ # return
+ #
+    #     # The situation here is messy: day-to-day use is fine, but on the work machine part of the
+    #     # output is captured from another script and re-printed; if it contains Chinese it shows up
+    #     # garbled, and trying to convert the encoding raises an exception, so for now we just live
+    #     # with the garbled output.
+ #
+ # # if CONSOLE_WIN_CMD == self.console_type:
+ # # try:
+ # # _msg = unicode(msg, 'utf-8')
+ # # except:
+ # # _msg = msg
+ # # else:
+ # # _msg = msg
+ # # _msg = None
+ # # if isinstance(msg, unicode):
+ # # _msg = msg
+ # # else:
+ # # # _msg = unicode(msg, 'utf-8')
+ # # try:
+ # # _msg = unicode(msg, 'utf-8')
+ # # except:
+ # # _msg = unicode(msg, 'gb2312')
+ # # # _msg = msg
+ # #
+ # # # if CONSOLE_WIN_CMD == self.console_type:
+ # # # sys.stdout.writelines(msg.encode('gb2312'))
+ # # # else:
+ # # # sys.stdout.writelines(msg.encode('utf-8'))
+ # #
+ # #
+ # # # try:
+ # # # _msg = unicode(msg, 'utf-8')
+ # # # except:
+ # # _msg = msg
+ #
+ # if color is None:
+ # sys.stdout.writelines(msg)
+ # else:
+ # self._win_color.set_color(COLORS[color][1])
+ # sys.stdout.writelines(msg)
+ # sys.stdout.flush()
+ # self._win_color.set_color(COLORS[CR_RESTORE][1])
+ #
+ # sys.stdout.flush()
+ #
+ # def _console_linux(self, msg, cr=None):
+ # if msg is None:
+ # return
+ #
+ # if cr is None:
+ # sys.stdout.writelines(msg)
+ # else:
+ # sys.stdout.writelines('\x1B')
+ # sys.stdout.writelines(COLORS[cr][0])
+ # sys.stdout.writelines(msg)
+ # sys.stdout.writelines('\x1B[0m')
+ #
+ # sys.stdout.flush()
+
+ # def _console_set_color_default(self, cr=None):
+ # pass
+
+ def _console_set_color_win(self, cr=None):
+ if cr is None:
+ return
+ self._win_color.set_color(COLORS[cr][1])
+ sys.stdout.flush()
+
+ def _console_set_color_linux(self, cr=None):
+ if cr is None:
+ return
+ sys.stdout.writelines('\x1B')
+ sys.stdout.writelines(COLORS[cr][0])
+ sys.stdout.flush()
+
+ def _console_restore_color_win(self):
+ self._win_color.set_color(COLORS[CR_RESTORE][1])
+ sys.stdout.flush()
+
+ def _console_restore_color_linux(self):
+ sys.stdout.writelines('\x1B[0m')
+ sys.stdout.flush()
+
+ def _console_output(self, *args, **kwargs):
+ sep = kwargs['sep'] if 'sep' in kwargs else self._sep
+ end = kwargs['end'] if 'end' in kwargs else self._end
+ first = True
+ for x in args:
+ if not first:
+ sys.stdout.writelines(sep)
+
+            first = False
+            if isinstance(x, str):
+                sys.stdout.writelines(x)
+            else:
+                sys.stdout.writelines(str(x))
+
+ sys.stdout.writelines(end)
+ sys.stdout.flush()
+
+ # def _test(self):
+ # self.log('###################', CR_RESTORE)
+ # self.log(' CR_RESTORE\n')
+ # self.log('###################', CR_BLACK)
+ # self.log(' CR_BLACK\n')
+ # self.log('###################', CR_LIGHT_GRAY)
+ # self.log(' CR_LIGHT_GRAY\n')
+ # self.log('###################', CR_GRAY)
+ # self.log(' CR_GRAY\n')
+ # self.log('###################', CR_WHITE)
+ # self.log(' CR_WHITE\n')
+ # self.log('###################', CR_RED)
+ # self.log(' CR_RED\n')
+ # self.log('###################', CR_GREEN)
+ # self.log(' CR_GREEN\n')
+ # self.log('###################', CR_YELLOW)
+ # self.log(' CR_YELLOW\n')
+ # self.log('###################', CR_BLUE)
+ # self.log(' CR_BLUE\n')
+ # self.log('###################', CR_MAGENTA)
+ # self.log(' CR_MAGENTA\n')
+ # self.log('###################', CR_CYAN)
+ # self.log(' CR_CYAN\n')
+ # self.log('###################', CR_LIGHT_RED)
+ # self.log(' CR_LIGHT_RED\n')
+ # self.log('###################', CR_LIGHT_GREEN)
+ # self.log(' CR_LIGHT_GREEN\n')
+ # self.log('###################', CR_LIGHT_YELLOW)
+ # self.log(' CR_LIGHT_YELLOW\n')
+ # self.log('###################', CR_LIGHT_BLUE)
+ # self.log(' CR_LIGHT_BLUE\n')
+ # self.log('###################', CR_LIGHT_MAGENTA)
+ # self.log(' CR_LIGHT_MAGENTA\n')
+ # self.log('###################', CR_LIGHT_CYAN)
+ # self.log(' CR_LIGHT_CYAN\n')
+ # # data = b'This is a test string and you can see binary format data here.'
+ # # self.bin('Binary Data:\n', data)
+ # # data = b''
+ # # self.bin('Empty binary\n', data)
+ # # self.bin('This is string\n\n', 'data')
+ #
+ # # self.d('This is DEBUG message.\n')
+ # self.v('This is VERBOSE message.\n')
+ # self.i('This is INFORMATION message.\n')
+ # self.w('This is WARNING message.\n')
+ # self.e('This is ERROR message.\n')
+ # self.f('This is FAIL message.\n')
+ #
+ # self.v('test auto\nsplit lines.\nYou should see\nmulti-lines.\n')
+
+
+class Win32ColorConsole:
+ def __init__(self):
+ from ctypes import WINFUNCTYPE, windll
+ from ctypes.wintypes import BOOL, HANDLE, DWORD, WORD
+
+ self.__original_stderr = sys.stderr
+ self.__stdout = None
+ self.__SetConsoleTextAttribute = None
+
+        # Work around the cp65001 code page alias issue:
+        # codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)
+
+        # Make Unicode console output work independently of the current code page.
+        # Credit to Michael Kaplan and TZOmegaTZIOY.
+ try:
+ #
+ # HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
+ # returns INVALID_HANDLE_VALUE, NULL, or a valid handle
+ #
+ #
+ # DWORD WINAPI GetFileType(DWORD hFile);
+ #
+ #
+ # BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);
+
+ STD_OUTPUT_HANDLE = DWORD(-11)
+ INVALID_HANDLE_VALUE = DWORD(-1).value
+
+ GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(("GetStdHandle", windll.kernel32))
+
+ self.__SetConsoleTextAttribute = WINFUNCTYPE(BOOL, HANDLE, WORD)(("SetConsoleTextAttribute", windll.kernel32))
+
+ self.__stdout = GetStdHandle(STD_OUTPUT_HANDLE)
+ if self.__stdout == INVALID_HANDLE_VALUE:
+ self.__stdout = None
+
+ except Exception as e:
+ self.__stdout = None
+ self._complain("exception %r while fixing up sys.stdout and sys.stderr\n" % (str(e),))
+
+ # If any exception occurs in this code, we'll probably try to print it on stderr,
+ # which makes for frustrating debugging if stderr is directed to our wrapper.
+ # So be paranoid about catching errors and reporting them to original_stderr,
+ # so that we can at least see them.
+ @staticmethod
+ def _complain(message):
+ # print >> self.__original_stderr, message if isinstance(message, str) else repr(message)
+ sys.stderr.writelines(message)
+
+ def available(self):
+ if self.__stdout is None or self.__SetConsoleTextAttribute is None:
+ return False
+ else:
+ return True
+
+ def set_color(self, color):
+ # if not self.available():
+ # return
+ self.__SetConsoleTextAttribute(self.__stdout, color)
+
+
+_cc = ColorConsole()
+del ColorConsole
+
+
+def set_default(*args, **kwargs):
+ _cc.set_default(*args, **kwargs)
+
+
+def o(*args, **kwargs):
+ _cc.o(*args, **kwargs)
+
+
+def v(*args, **kwargs):
+ _cc.v(*args, **kwargs)
+
+
+def n(*args, **kwargs):
+ _cc.n(*args, **kwargs)
+
+
+def i(*args, **kwargs):
+ _cc.i(*args, **kwargs)
+
+
+def w(*args, **kwargs):
+ _cc.w(*args, **kwargs)
+
+
+def e(*args, **kwargs):
+ _cc.e(*args, **kwargs)
+
+
+def f(*args, **kwargs):
+ _cc.f(*args, **kwargs)
diff --git a/build/builder/core/configs.py b/build/builder/core/configs.py
new file mode 100644
index 0000000..20e6307
--- /dev/null
+++ b/build/builder/core/configs.py
@@ -0,0 +1,124 @@
+# -*- coding: utf8 -*-
+
+import os
+import sys
+import platform
+from . import colorconsole as cc
+
+__all__ = ['cfg']
+
+
+class TpDict(dict):
+ """
+    A dict whose keys can also be accessed as attributes: var.key is equivalent to var['key'].
+ """
+
+ def __getattr__(self, name):
+ try:
+ return self[name]
+ except KeyError:
+ # print(self.__class__.__name__)
+ raise
+
+ def __setattr__(self, name, val):
+ self[name] = val
+
+
+class ConfigFile(TpDict):
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ # self.__file_name = None
+ # self.__save_indent = 0
+ # self.__loaded = False
+
+ def init(self, cfg_file):
+ if not self.load(cfg_file, True):
+ return False
+
+ self['ROOT_PATH'] = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+
+ self['py_exec'] = sys.executable
+
+ _py_ver = platform.python_version_tuple()
+ self['py_ver'] = _py_ver
+ self['py_ver_str'] = '%s%s' % (_py_ver[0], _py_ver[1])
+ self['is_py2'] = sys.version_info[0] == 2
+ self['is_py3'] = sys.version_info[0] == 3
+
+ _bits = platform.architecture()[0]
+ if _bits == '64bit':
+ self['is_x64'] = True
+ self['is_x86'] = False
+ else:
+ self['is_x64'] = False
+ self['is_x86'] = True
+
+ _os = platform.system().lower()
+
+ self['dist'] = ''
+ if _os == 'windows':
+ self['dist'] = 'windows'
+ elif _os == 'linux':
+ self['dist'] = 'linux'
+ elif _os == 'darwin':
+ self['dist'] = 'macos'
+ else:
+            cc.e('unsupported OS: {}'.format(platform.system()))
+ return False
+
+ return True
+
+ def load_str(self, module, code):
+ m = type(sys)(module)
+ m.__module_class__ = type(sys)
+ m.__file__ = module
+
+ try:
+ exec(compile(code, module, 'exec'), m.__dict__)
+ except Exception as e:
+ cc.e('%s\n' % str(e))
+ # print(str(e))
+ # if eom_dev_conf.debug:
+ # raise
+ return False
+
+ for y in m.__dict__:
+ if '__' == y[:2]:
+ continue
+ if isinstance(m.__dict__[y], dict):
+ self[y] = TpDict()
+ self._assign_dict(m.__dict__[y], self[y])
+ else:
+ self[y] = m.__dict__[y]
+
+ return True
+
+ def load(self, full_path, must_exists=True):
+ try:
+ f = open(full_path, encoding='utf8')
+ code = f.read()
+ f.close()
+ self.__loaded = True
+ except IOError:
+ if must_exists:
+ cc.e('Can not load config file: %s\n' % full_path)
+ return False
+
+ module = os.path.basename(full_path)
+ if not self.load_str(module, code):
+ return False
+
+ self.__file_name = full_path
+ return True
+
+ def _assign_dict(self, _from, _to):
+ for y in _from:
+ if isinstance(_from[y], dict):
+ _to[y] = TpDict()
+ self._assign_dict(_from[y], _to[y])
+ else:
+ _to[y] = _from[y]
+
+
+cfg = ConfigFile()
+del ConfigFile
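+
+# Typical usage (a sketch; core/utils.py initializes `cfg` like this):
+#
+#   from .configs import cfg
+#   if not cfg.init('/path/to/config.py'):
+#       sys.exit(1)
+#   print(cfg.ROOT_PATH, cfg.dist, cfg.is_py3)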
diff --git a/build/builder/core/context.py b/build/builder/core/context.py
new file mode 100644
index 0000000..d313735
--- /dev/null
+++ b/build/builder/core/context.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# -*- coding: utf8 -*-
+
+import os
+import platform
+import sys
+
+# __all__ = ['BuildContext', 'BITS_32', 'BITS_64', 'TARGET_DEBUG', 'TARGET_RELEASE']
+
+BITS_UNKNOWN = 0
+BITS_32 = 32
+BITS_64 = 64
+
+TARGET_UNKNOWN = 0
+TARGET_DEBUG = 1
+TARGET_RELEASE = 2
+
+
+class BuildContext(object):
+ def __init__(self):
+ # self.dist_linux = ['ubuntu', 'centos', 'redhat']
+ # self.dist_all = self.dist_linux + ['windows', 'macos']
+ self.dist_all = ['windows', 'linux', 'macos']
+
+ self.is_py2 = sys.version_info[0] == 2
+ self.is_py3 = sys.version_info[0] == 3
+
+ self.target = TARGET_RELEASE
+ self.target_path = 'release'
+
+ _py_ver = platform.python_version_tuple()
+ self.py_ver = '%s%s' % (_py_ver[0], _py_ver[1])
+
+ self.bits = BITS_32
+ self.bits_path = 'x86'
+
+ _bits = platform.architecture()[0]
+ if _bits == '64bit':
+ self.bits = BITS_64
+ self.bits_path = 'x64'
+
+ _os = platform.system().lower()
+
+ self.dist = ''
+ if _os == 'windows':
+ self.dist = 'windows'
+ elif _os == 'linux':
+ self.dist = 'linux'
+ # (dist, ver, sys_id) = platform.dist()
+ # dist = dist.lower()
+ # if dist in self.dist_linux:
+ # self.dist = dist
+ # else:
+ # raise RuntimeError('unsupported linux dist: %s' % dist)
+ elif _os == 'darwin':
+ self.dist = 'macos'
+
+ self.host_os = self.dist
+ if self.host_os == 'windows':
+ self.host_os_is_win_x64 = 'PROGRAMFILES(X86)' in os.environ
+
+
+ self.make_dist_path()
+
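+    # dist_path is a short tag used to keep per-host build outputs apart,
+    # e.g. 'linux-py34-x64' or 'windows-py34-x86' (illustrative values).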
+ def make_dist_path(self):
+ self.dist_path = '%s-py%s-%s' % (self.dist, self.py_ver, self.bits_path)
+
+ def set_target(self, target):
+ self.target = target
+ if target == TARGET_DEBUG or target == 'debug':
+ self.target_path = 'debug'
+ elif target == TARGET_RELEASE or target == 'release':
+ self.target_path = 'release'
+ else:
+ raise RuntimeError('unknown target mode.')
+
+ def set_bits(self, bits):
+ if bits == BITS_32 or bits == 'x86':
+ self.bits = BITS_32
+ self.bits_path = 'x86'
+ elif bits == BITS_64 or bits == 'x64':
+ self.bits = BITS_64
+ self.bits_path = 'x64'
+ else:
+ raise RuntimeError('unknown bits.')
+
+ self.make_dist_path()
+
+ def set_dist(self, dist):
+ if dist in self.dist_all:
+ self.dist = dist
+ else:
+ raise RuntimeError('unsupported OS: %s' % dist)
+
+ self.make_dist_path()
+
+
+if __name__ == '__main__':
+ pass
diff --git a/build/builder/core/makepyo.py b/build/builder/core/makepyo.py
new file mode 100644
index 0000000..30900ca
--- /dev/null
+++ b/build/builder/core/makepyo.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+# -*- coding: utf8 -*-
+
+import os
+import py_compile
+import shutil
+import sys
+import time
+import platform
+
+from . import colorconsole as cc
+
+rm_file_every_level = ['.pyc', '.pyo']
+
+PY_VER = platform.python_version_tuple()
+cpython_mid_name = 'cpython-{}{}'.format(PY_VER[0], PY_VER[1])
+
+def make(tmp_path):
+ cc.v('Remove all old .pyc/.pyo files...')
+ clean_folder(tmp_path)
+ time.sleep(0.5)
+ cc.v('Compile all .py into .pyo...')
+ compile_files(tmp_path)
+ time.sleep(0.5)
+ cc.v('Remove all .py files...')
+ fix_pyo(tmp_path)
+ time.sleep(0.5)
+ cc.v('Remove all `__pycache__` folders...')
+ remove_cache(tmp_path)
+
+
+def clean_folder(path):
+ for parent, dir_list, file_list in os.walk(path):
+ for d in dir_list:
+ clean_folder(os.path.join(parent, d))
+
+ for filename in file_list:
+ _, ext = os.path.splitext(filename)
+ # fileNameSplitList = filename.split(".")
+ # ext = fileNameSplitList[len(fileNameSplitList) - 1].lower()
+ if ext in rm_file_every_level:
+ os.remove(os.path.join(parent, filename))
+
+
+def remove_cache(path):
+ for parent, dir_list, file_list in os.walk(path):
+ for d in dir_list:
+ d = d.lower()
+ if d == '__pycache__':
+ shutil.rmtree(os.path.join(parent, d))
+ continue
+ remove_cache(os.path.join(parent, d))
+
+
+def compile_files(path):
+ for parent, dir_list, file_list in os.walk(path):
+ for d in dir_list:
+ compile_files(os.path.join(parent, d))
+
+ for filename in file_list:
+ _, ext = os.path.splitext(filename)
+ # fileNameSplitList = filename.split(".")
+ # ext = fileNameSplitList[len(fileNameSplitList) - 1].lower()
+ if ext == '.py':
+ compile_py(os.path.join(parent, filename))
+
+
+def compile_py(filename):
+ py_compile.compile(filename, optimize=2)
+
+
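+# With optimize=2, py_compile writes files such as `__pycache__/foo.cpython-34.pyo`
+# (newer interpreters use a different suffix). fix_pyo() strips the interpreter tag
+# and copies the result one level up, next to where the .py file was, e.g.
+#
+#   pkg/__pycache__/foo.cpython-34.pyo  ->  pkg/foo.pyo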
+def fix_pyo(path):
+ for parent, dir_list, file_list in os.walk(path):
+ for d in dir_list:
+ fix_pyo(os.path.join(parent, d))
+
+ for filename in file_list:
+ fileNameSplitList = filename.split(".")
+ ext = fileNameSplitList[len(fileNameSplitList) - 1].lower()
+ if ext == 'py':
+ os.remove(os.path.join(parent, filename))
+ elif ext == 'pyo':
+ cpython = fileNameSplitList[len(fileNameSplitList) - 2].lower()
+ if cpython == cpython_mid_name:
+ del fileNameSplitList[len(fileNameSplitList) - 2]
+ else:
+ continue
+ t_name = os.path.abspath(os.path.join(parent, '..', '.'.join(fileNameSplitList)))
+ f_name = os.path.join(parent, filename)
+ shutil.copy(f_name, t_name)
+
+
+if __name__ == '__main__':
+ make(sys.argv[1])
diff --git a/build/builder/core/utils.py b/build/builder/core/utils.py
new file mode 100644
index 0000000..b7c9a1b
--- /dev/null
+++ b/build/builder/core/utils.py
@@ -0,0 +1,453 @@
+#!/usr/bin/env python
+# -*- coding: utf8 -*-
+
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import time
+
+from . import colorconsole as cc
+
+from .configs import cfg
+try:
+ CONFIG_FILE = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')), 'config.py')
+ if not cfg.init(CONFIG_FILE):
+ sys.exit(1)
+except:
+    cc.e('can not load configuration.\n\nplease copy `config.py.in` to `config.py`, adjust it for your environment, and try again.')
+ sys.exit(1)
+
+# PY_VER = platform.python_version_tuple()
+#IS_PY2 = sys.version_info[0] == 2
+#IS_PY3 = sys.version_info[0] == 3
+
+if cfg.is_py2:
+ import imp
+elif cfg.is_py3:
+ import importlib
+ import importlib.machinery
+
+ if sys.platform == 'win32':
+ import winreg
+
+
+def extension_suffixes():
+    # imp.get_suffixes() returns a list of 3-tuples (suffix, mode, type)
+    # describing the module kinds the interpreter can load:
+    #   suffix - the file extension;
+    #   mode   - the mode used to open such a file;
+    #   type   - the file type: 1 = PY_SOURCE, 2 = PY_COMPILED, 3 = C_EXTENSION
+
+ EXTENSION_SUFFIXES = list()
+ if cfg.is_py2:
+ suf = imp.get_suffixes()
+ for s in suf:
+ if s[2] == 3:
+ EXTENSION_SUFFIXES.append(s[0])
+ else:
+ EXTENSION_SUFFIXES = importlib.machinery.EXTENSION_SUFFIXES
+
+ if cfg.dist == 'windows':
+ if '.dll' not in EXTENSION_SUFFIXES:
+ EXTENSION_SUFFIXES.append('.dll')
+
+ elif cfg.dist == 'linux':
+ if '.so' not in EXTENSION_SUFFIXES:
+ EXTENSION_SUFFIXES.append('.so')
+
+ elif cfg.dist == 'macos':
+        raise RuntimeError('macOS is not supported yet.')
+
+ # cc.v(EXTENSION_SUFFIXES)
+ return EXTENSION_SUFFIXES
+
+
+def remove(*args):
+ path = os.path.join(*args)
+
+ cc.v('remove [%s] ...' % path, end='')
+ if not os.path.exists(path):
+ cc.v('not exists, skip.')
+ return
+
+ for i in range(5):
+ cc.v('.', end='')
+ try:
+ if os.path.isdir(path):
+ shutil.rmtree(path, ignore_errors=True)
+ time.sleep(0.5)
+ else:
+ os.unlink(path)
+ except:
+ pass
+
+ if os.path.exists(path):
+ time.sleep(1)
+ else:
+ break
+
+ if os.path.exists(path):
+ cc.e('failed')
+        raise RuntimeError('can not remove: %s' % path)
+ else:
+ cc.i('done')
+
+
+def makedirs(path, exist_ok=True):
+ if os.path.exists(path):
+ if not exist_ok:
+ raise RuntimeError('path already exists: %s' % path)
+ else:
+ return
+
+ for i in range(5):
+ try:
+ os.makedirs(path)
+ except:
+ time.sleep(1)
+ pass
+
+ if not os.path.exists(path):
+ time.sleep(1)
+ else:
+ break
+
+ if not os.path.exists(path):
+ raise RuntimeError('can not create: %s' % path)
+
+
+def copy_file(s_path, t_path, f_name, force=True):
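+    """Copy one file from s_path into t_path.
+
+    `f_name` is either a plain file name (used on both sides) or a
+    (from_name, to_name) tuple, e.g. copy_file(src, dst, ('a.cfg', 'b.cfg')).
+    """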
+ if isinstance(f_name, str):
+ f_from = f_name
+ f_to = f_name
+ elif isinstance(f_name, tuple):
+ f_from = f_name[0]
+ f_to = f_name[1]
+ else:
+ raise RuntimeError('utils.copy_file() got invalid param.')
+
+ s = os.path.join(s_path, f_from)
+ t = os.path.join(t_path, f_to)
+ if os.path.exists(t):
+ if force:
+            cc.v('  an existing version found, removing it...')
+            remove(t)
+        else:
+            cc.w('  an existing version found, skip copying.')
+ return
+
+ if not os.path.exists(t_path):
+ makedirs(t_path)
+ cc.v('copy [%s]\n -> [%s]' % (s, t))
+ shutil.copy(s, t)
+
+
+def copy_ex(s_path, t_path, item_name=None, force=True):
+ if item_name is None:
+ s = s_path
+ t = t_path
+ else:
+ if isinstance(item_name, str):
+ f_from = item_name
+ f_to = item_name
+ elif isinstance(item_name, tuple):
+ f_from = item_name[0]
+ f_to = item_name[1]
+ else:
+ raise RuntimeError('utils.copy_ex() got invalid param.')
+
+ s = os.path.join(s_path, f_from)
+ t = os.path.join(t_path, f_to)
+
+ if os.path.exists(t):
+ if force:
+ remove(t)
+ else:
+            cc.w(t, ' already exists, skip copying.')
+ return
+
+ if os.path.isdir(s):
+ cc.v('copy [%s]\n -> [%s]' % (s, t))
+ shutil.copytree(s, t)
+ else:
+ if not os.path.exists(t_path):
+ os.makedirs(t_path)
+ cc.v('copy [%s]\n -> [%s]' % (s, t))
+ shutil.copy(s, t)
+
+
+def update_file(s_path, t_path, f_name):
+ if isinstance(f_name, str):
+ f_from = f_name
+ f_to = f_name
+ elif isinstance(f_name, tuple):
+ f_from = f_name[0]
+ f_to = f_name[1]
+ else:
+ raise RuntimeError('utils.update_file() got invalid param.')
+
+ s = os.path.join(s_path, f_from)
+ t = os.path.join(t_path, f_to)
+ if not os.path.exists(s):
+        cc.w('try to update file `%s` but it does not exist, skip.' % f_from)
+ return
+
+ # TODO: check file MD5 and update time.
+
+ if os.path.exists(t):
+ remove(t)
+
+ if not os.path.exists(t_path):
+ makedirs(t_path)
+ cc.v('update [%s]\n -> [%s]' % (s, t))
+ shutil.copy(os.path.join(s_path, f_from), t)
+
+
+def ensure_file_exists(filename):
+ if not os.path.exists(filename):
+ raise RuntimeError('file not exists: {}'.format(filename))
+ if not os.path.isfile(filename):
+ raise RuntimeError('path exists but not a file: {}'.format(filename))
+
+
+# def root_path():
+# return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+
+
+def python_exec():
+ if not os.path.exists(sys.executable):
+ raise RuntimeError('Can not locate Python execute file.')
+ return sys.executable
+
+
+g_msbuild_path = None
+
+
+def msbuild_path():
+ global g_msbuild_path
+ if g_msbuild_path is not None:
+ return g_msbuild_path
+
+ # 14.0 = VS2015
+    # 12.0 = VS2013
+    #  4.0 = VS2010 (MSBuild from .NET Framework 4.x)
+ chk = ['14.0', '4.0', '12.0']
+
+ msp = None
+ for c in chk:
+ msp = winreg_read("SOFTWARE\\Microsoft\\MSBuild\\ToolsVersions\\{}".format(c), 'MSBuildToolsPath')
+ if msp is not None:
+ break
+
+ if msp is None:
+ raise RuntimeError('Can not locate MSBuild.')
+
+ msb = os.path.join(msp[0], 'MSBuild.exe')
+ if not os.path.exists(msb):
+ raise RuntimeError('Can not locate MSBuild at {}'.format(msp))
+
+ g_msbuild_path = msb
+ return msb
+
+
+g_nsis_path = None
+
+
+def nsis_path():
+ global g_nsis_path
+ if g_nsis_path is not None:
+ return g_nsis_path
+
+ if 'nsis' in cfg:
+ g_nsis_path = cfg['nsis']
+ return g_nsis_path
+
+ p = winreg_read_wow64_32(r'SOFTWARE\NSIS\Unicode', '')
+ if p is None:
+ raise RuntimeError('Can not locate unicode version of NSIS.')
+
+ p = os.path.join(p[0], 'makensis.exe')
+ if not os.path.exists(p):
+ raise RuntimeError('Can not locate NSIS at {}'.format(p))
+
+ g_nsis_path = p
+ return p
+
+
+def winreg_read(path, key):
+ try:
+ hkey = winreg.CreateKeyEx(winreg.HKEY_LOCAL_MACHINE, path, 0, winreg.KEY_READ)
+ value = winreg.QueryValueEx(hkey, key)
+ except OSError:
+ return None
+
+ return value
+
+
+def winreg_read_wow64_32(path, key):
+ try:
+ hkey = winreg.CreateKeyEx(winreg.HKEY_LOCAL_MACHINE, path, 0, winreg.KEY_READ | winreg.KEY_WOW64_32KEY)
+ value = winreg.QueryValueEx(hkey, key)
+ except OSError:
+ return None
+
+ return value
+
+
+def sys_exec(cmd, direct_output=False, output_codec=None):
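+    """Run `cmd` in a shell and return a (exit_code, combined_output) tuple.
+
+    Example (illustrative):
+        ret, out = sys_exec('cmake --version', direct_output=True)
+        if ret != 0:
+            raise RuntimeError('can not run cmake.')
+    """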
+    # Note: output_codec defaults to gb2312 on Windows and to utf8 on other platforms.
+ _os = platform.system().lower()
+ if output_codec is None:
+ if _os == 'windows':
+ output_codec = 'gb2312'
+ else:
+ output_codec = 'utf8'
+
+ p = None
+ if _os == 'windows':
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, shell=True)
+
+ else:
+ p = subprocess.Popen(cmd, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, shell=True)
+
+ output = ''
+ f = p.stdout
+ while True:
+ line = f.readline()
+ if 0 == len(line):
+ break
+
+ if direct_output:
+ # cc.v(line.decode(output_codec))
+ cc.v(line, end='')
+
+ output += line
+
+ ret = p.wait()
+
+ return (ret, output)
+
+
+def msvc_build(sln_file, proj_name, target, platform, force_rebuild):
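+    """Build one project of a Visual Studio solution with MSBuild.
+
+    Example (illustrative values only):
+        msvc_build('tp_assist.sln', 'tp_assist', 'Release', 'Win32', force_rebuild=False)
+    """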
+ msbuild = msbuild_path()
+
+ if force_rebuild:
+ cmd = '"{}" "{}" "/target:clean" "/property:Configuration={};Platform={}"'.format(msbuild, sln_file, target, platform)
+ ret, _ = sys_exec(cmd, direct_output=True)
+ cc.v('ret:', ret)
+
+ cmd = '"{}" "{}" "/target:{}" "/property:Configuration={};Platform={}"'.format(msbuild, sln_file, proj_name, target, platform)
+ ret, _ = sys_exec(cmd, direct_output=True)
+ if ret != 0:
+ raise RuntimeError('build MSVC project `{}` failed.'.format(proj_name))
+
+
+def nsis_build(nsi_file, _define=''):
+ nsis = nsis_path()
+ cmd = '"{}" /V2 {} /X"SetCompressor /SOLID /FINAL lzma" "{}"'.format(nsis, _define, nsi_file)
+ ret, _ = sys_exec(cmd, direct_output=True)
+ if ret != 0:
+ raise RuntimeError('make installer with nsis failed. [{}]'.format(nsi_file))
+
+
+def cmake(work_path, target, force_rebuild, cmake_define=''):
+    # Ubuntu 14.04 LTS ships cmake v2.8, but we need 3.5, so we copy a v3.5
+    # cmake (e.g. the one bundled with CLion) into $WORK/eomsoft/toolchain/cmake.
+ #CMAKE = os.path.abspath(os.path.join(root_path(), 'toolchain', 'cmake', 'bin', 'cmake'))
+ if 'cmake' not in cfg:
+        raise RuntimeError('please set the `cmake` path in config.py.')
+    if not os.path.exists(cfg['cmake']):
+        raise RuntimeError('`cmake` does not exist, please check your config.py and try again.')
+
+ CMAKE = cfg['cmake']
+
+ cc.n('make by cmake', target, sep=': ')
+ old_p = os.getcwd()
+ # new_p = os.path.dirname(wscript_file)
+
+ # work_path = os.path.join(root_path(), 'cmake-build')
+ if os.path.exists(work_path):
+ if force_rebuild:
+ remove(work_path)
+ if not os.path.exists(work_path):
+ makedirs(work_path)
+
+ os.chdir(work_path)
+ if target == 'debug':
+ target = 'Debug'
+ else:
+ target = 'Release'
+ cmd = '"{}" -DCMAKE_BUILD_TYPE={} {} ..;make'.format(CMAKE, target, cmake_define)
+ ret, _ = sys_exec(cmd, direct_output=True)
+ os.chdir(old_p)
+ if ret != 0:
+ raise RuntimeError('build with cmake failed, ret={}. [{}]'.format(ret, target))
+
+
+def strip(filename):
+ cc.n('strip binary file', filename)
+ if not os.path.exists(filename):
+ return False
+ cmd = 'strip {}'.format(filename)
+ ret, _ = sys_exec(cmd, direct_output=True)
+ if ret != 0:
+ raise RuntimeError('failed to strip binary file [{}], ret={}.'.format(filename, ret))
+ return True
+
+
+def make_zip(src_path, to_file):
+ cc.v('compress folder into .zip...')
+ n, _ = os.path.splitext(to_file)
+ # x = os.path.split(to_file)[1].split('.')
+ p = os.path.dirname(to_file)
+ shutil.make_archive(os.path.join(p, n), 'zip', src_path)
+ ensure_file_exists(to_file)
+
+
+def make_targz(work_path, folder, to_file):
+ cc.v('compress folder into .tar.gz...')
+ old_p = os.getcwd()
+
+ os.chdir(work_path)
+ cmd = 'tar zcf "{}" "{}"'.format(to_file, folder)
+ ret, _ = sys_exec(cmd, direct_output=True)
+ ensure_file_exists(to_file)
+ os.chdir(old_p)
+
+
+def fix_extension_files(s_path, t_path):
+ cc.n('\nfix extension files...')
+    # Walk s_path, find every Python extension file (shared library) and move it
+    # into t_path, renaming it on the way.
+    # e.g. s_path/abc/def.pyd -> t_path/abc.def.pyd
+
+ s_path = os.path.abspath(s_path)
+ t_path = os.path.abspath(t_path)
+
+ ext = extension_suffixes()
+ s_path_len = len(s_path)
+
+ def _fix_(s_path, t_path, sub_path):
+ for parent, dir_list, file_list in os.walk(sub_path):
+ for d in dir_list:
+ _fix_(s_path, t_path, os.path.join(parent, d))
+
+ for filename in file_list:
+ _, e = os.path.splitext(filename)
+ if e in ext:
+ f_from = os.path.join(parent, filename)
+ f_to = f_from[s_path_len + 1:]
+ f_to = f_to.replace('\\', '.')
+ f_to = f_to.replace('/', '.')
+ f_to = os.path.join(t_path, f_to)
+
+ cc.v('move: ', f_from, '\n -> ', f_to)
+ shutil.move(f_from, f_to)
+
+ _fix_(s_path, t_path, s_path)
+
+
+if __name__ == '__main__':
+ # test()
+ pass
diff --git a/build/builder/core/ver.py b/build/builder/core/ver.py
new file mode 100644
index 0000000..b7ed8dd
--- /dev/null
+++ b/build/builder/core/ver.py
@@ -0,0 +1,4 @@
+# -*- coding: utf8 -*-
+VER_TELEPORT_SERVER = "1.6.225.1"
+VER_TELEPORT_ASSIST = "1.5.46.5"
+VER_TELEPORT_MAKECERT = "1.0.1"
diff --git a/build/config.py.in b/build/config.py.in
new file mode 100644
index 0000000..4c33c98
--- /dev/null
+++ b/build/config.py.in
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+OPENSSL_VER = '1.0.2h'
+LIBUV_VER = '1.9.1'
+MBEDTLS_VER = '2.3.0'
+SQLITE_VER = '3160200'
+
+
+# ============================================
+# for windows
+# ============================================
+
+# If the nsis path is not set, the builder will try to find it in the Windows registry.
+nsis = 'C:\\Program Files (x86)\\NSIS\\Unicode\\makensis.exe'
+
+
+# ============================================
+# for linux
+# ============================================
+
+cmake = '/opt/cmake/bin/cmake'
+# pyexec = os.path.join(ROOT_PATH, 'external', 'linux', 'release', 'bin', 'python3.4')
diff --git a/common/libex/include/ex/ex_platform.h b/common/libex/include/ex/ex_platform.h
index 626049b..a30ff36 100644
--- a/common/libex/include/ex/ex_platform.h
+++ b/common/libex/include/ex/ex_platform.h
@@ -77,7 +77,9 @@
# endif
# define PATH_MAX 1024
#elif defined(EX_OS_LINUX)
-# define PATH_MAX 1024
+# ifndef PATH_MAX
+# define PATH_MAX 1024
+# endif
#elif defined(EX_OS_MACOS)
# define PATH_MAX 1024 /* Recommended value for OSX. */
#endif
diff --git a/common/libex/include/ex/ex_thread.h b/common/libex/include/ex/ex_thread.h
index 70e29dd..8c78c8c 100644
--- a/common/libex/include/ex/ex_thread.h
+++ b/common/libex/include/ex/ex_thread.h
@@ -1,7 +1,6 @@
#ifndef __EX_THREAD_H__
#define __EX_THREAD_H__
-//#include "ts_common.h"
#include
#include
@@ -43,9 +42,6 @@ protected:
static void* _thread_func(void * pParam);
#endif
- // ̹߳ms
- // void _sleep_ms(int ms);
-
protected:
ex_astr m_thread_name;
EX_THREAD_HANDLE m_handle;
diff --git a/common/libex/include/ex/ex_util.h b/common/libex/include/ex/ex_util.h
index 3f36cfc..eaaf3d0 100644
--- a/common/libex/include/ex/ex_util.h
+++ b/common/libex/include/ex/ex_util.h
@@ -11,6 +11,7 @@
// #include
#else
// #include
+# include
# include
#endif
diff --git a/common/libex/src/ex_path.cpp b/common/libex/src/ex_path.cpp
index ea4d206..434bbac 100644
--- a/common/libex/src/ex_path.cpp
+++ b/common/libex/src/ex_path.cpp
@@ -2,12 +2,6 @@
#include
#include
-//#ifdef EX_OS_WIN32
-//# include
-//#else
-//# include
-//#endif
-
static void _wstr_replace(ex_wstr& inout_str, const wchar_t* sfrom, const wchar_t* sto)
{
ex_wstr::size_type pos = 0;
diff --git a/common/libex/src/ex_thread.cpp b/common/libex/src/ex_thread.cpp
index 9bcb68c..e7f1b95 100644
--- a/common/libex/src/ex_thread.cpp
+++ b/common/libex/src/ex_thread.cpp
@@ -80,7 +80,6 @@ bool ExThreadBase::stop(void)
return false;
}
#endif
-// EXLOGV(" # thread [%s] end.\n", m_thread_name.c_str());
return true;
}
@@ -94,21 +93,6 @@ bool ExThreadBase::terminate(void)
#endif
}
-// void ExThreadBase::_thread_loop(void)
-// {
-// EXLOGE("--------thread-loop-not-impl-------\n");
-// }
-
-// void ExThreadBase::_sleep_ms(int ms)
-// {
-// #ifdef EX_OS_WIN32
-// Sleep(ms);
-// #else
-// usleep(ms * 1000);
-// #endif
-// }
-
-
//=========================================================
//
//=========================================================
@@ -133,7 +117,6 @@ void ExThreadManager::stop_all(void)
for (; it != m_threads.end(); ++it)
{
(*it)->stop();
- //delete (*it);
}
m_threads.clear();
}
diff --git a/common/libex/src/ex_util.cpp b/common/libex/src/ex_util.cpp
index cb85692..c8c879d 100644
--- a/common/libex/src/ex_util.cpp
+++ b/common/libex/src/ex_util.cpp
@@ -5,10 +5,6 @@
// #include
-#ifndef EX_OS_WIN32
-# include
-#endif
-
EX_BOOL ex_initialize(const char* lc_ctype)
{
#ifdef EX_OS_UNIX
diff --git a/server/.idea/encodings.xml b/server/.idea/encodings.xml
new file mode 100644
index 0000000..a752419
--- /dev/null
+++ b/server/.idea/encodings.xml
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/server/.idea/server.iml b/server/.idea/server.iml
new file mode 100644
index 0000000..16ab77f
--- /dev/null
+++ b/server/.idea/server.iml
@@ -0,0 +1,98 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/server/CMakeLists.txt b/server/CMakeLists.txt
index 90776e2..72a527c 100644
--- a/server/CMakeLists.txt
+++ b/server/CMakeLists.txt
@@ -8,5 +8,5 @@ cmake_minimum_required(VERSION 3.5)
#set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${teleport_SOURCE_DIR}/../out/server/x64/bin")
add_subdirectory(tp_web/src)
-#add_subdirectory(tp_core/core)
+add_subdirectory(tp_core/core)
#add_subdirectory(tp_core/protocol/ssh)
diff --git a/server/tp_core/core/CMakeLists.txt b/server/tp_core/core/CMakeLists.txt
new file mode 100644
index 0000000..c5b05dd
--- /dev/null
+++ b/server/tp_core/core/CMakeLists.txt
@@ -0,0 +1,37 @@
+cmake_minimum_required(VERSION 3.5)
+project(tpcore)
+
+#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${Project_SOURCE_DIR}/../out/server/x64/bin")
+
+
+ADD_DEFINITIONS(
+ -DMG_ENABLE_THREADS
+ -DMG_DISABLE_HTTP_DIGEST_AUTH
+ -DMG_DISABLE_MQTT
+ -DMG_DISABLE_SSI
+ -DHAVE_CONFIG_H
+ -DCS_COMMON_MD5_H_
+ -DDISABLE_MD5
+)
+
+aux_source_directory(. DIR_SRCS)
+aux_source_directory(../../../common/libex/src DIR_SRCS)
+aux_source_directory(../../../external/mongoose DIR_SRCS)
+aux_source_directory(../../../external/jsoncpp/src/lib_json DIR_SRCS)
+
+#list(REMOVE_ITEM DIR_SRCS "./src/ts_win_service_helper.cpp")
+
+include_directories(
+ ../../../common/libex/include
+ ../../../external/mongoose
+ ../../../external/jsoncpp/include
+ ../../../external/linux/release/include
+# ../../pyshell/pys/include
+# ../../py-static/release/include/python3.4m
+)
+
+link_directories(../../../external/linux/release/lib)
+
+add_executable(tp_core ${DIR_SRCS})
+target_link_libraries(tp_core ssl crypto mbedx509 mbedtls mbedcrypto sqlite3 dl pthread rt util)
diff --git a/server/tp_core/core/main.cpp b/server/tp_core/core/main.cpp
index a5db448..c43ad85 100644
--- a/server/tp_core/core/main.cpp
+++ b/server/tp_core/core/main.cpp
@@ -302,7 +302,7 @@ VOID WINAPI service_main(DWORD argc, wchar_t** argv)
#else
// not EX_OS_WIN32
-#include "ts_util.h"
+//#include "ts_util.h"
#include
#include
@@ -317,10 +317,10 @@ int main(int argc, char** argv)
act.sa_flags = SA_SIGINFO;
sigaction(SIGINT, &act, NULL);
- wchar_t** wargv = ts_make_argv(argc, argv);
+ wchar_t** wargv = ex_make_wargv(argc, argv);
int ret = _app_main(argc, wargv);
- ts_free_argv(argc, wargv);
+ ex_free_wargv(argc, wargv);
return ret;
}
diff --git a/server/tp_core/core/ts_db.cpp b/server/tp_core/core/ts_db.cpp
index b11e40e..6f4272b 100644
--- a/server/tp_core/core/ts_db.cpp
+++ b/server/tp_core/core/ts_db.cpp
@@ -274,7 +274,7 @@ bool TsDB::get_host_count(int& count)
if (sql_exec == NULL)
return false;
- char* szSQL = "select count(*) from ts_host_info;";
+ const char* szSQL = "select count(*) from ts_host_info;";
result = sqlite3_get_table(sql_exec, szSQL, &dbResult, &nRow, &nColumn, &errmsg);
if (result != 0)
@@ -308,7 +308,7 @@ bool TsDB::update_reset_log()
if (sql_exec == NULL)
return false;
- char* szSQL = "UPDATE ts_log SET ret_code=7 WHERE ret_code=0;";
+ const char* szSQL = "UPDATE ts_log SET ret_code=7 WHERE ret_code=0;";
result = sqlite3_exec(sql_exec, szSQL, NULL, NULL, &errmsg);
if (result != 0)
{
@@ -428,7 +428,7 @@ bool TsDB::get_auth_id_list_by_all(AuthInfo2Vec& auth_info_list)
return false;
}
- char* szSQL = "SELECT auth_id,a.host_id as host_id, \
+ const char* szSQL = "SELECT auth_id,a.host_id as host_id, \
host_ip,host_pro_type as pro_type,host_lock,host_auth_mode as auth_mode \
FROM ts_auth as a LEFT JOIN ts_host_info as b ON a.host_id = b.host_id";
@@ -578,7 +578,7 @@ bool TsDB::get_auth_info_list_by_all(AuthInfo3Vec& auth_info_list)
if (sql_exec == NULL)
return false;
- char* szSQL =
+ const char* szSQL =
"SELECT host_id ,host_ip,host_user_name, \
host_user_pwd, host_auth_mode as auth_mode,a.cert_id as cert_id, \
cert_pri,cert_name,cert_pub from ts_host_info as a LEFT JOIN ts_cert as b \
@@ -598,7 +598,7 @@ ON a.cert_id = b.cert_id;";
mapStringKey mapstringKey;
for (j = 0; j < nColumn; j++)
{
- ex_astr temp = dbResult[j];
+ //ex_astr temp = dbResult[j];
if (dbResult[index] == NULL)
mapstringKey[dbResult[j]] = "";
else
@@ -608,7 +608,7 @@ ON a.cert_id = b.cert_id;";
}
TS_DB_AUTH_INFO_3 info;
- mapStringKey::iterator it = mapstringKey.find("host_id");
+ mapStringKey::iterator it = mapstringKey.find("host_id");
if (it != mapstringKey.end())
info.host_id = atoi(it->second.c_str());
diff --git a/server/tp_core/core/ts_env.cpp b/server/tp_core/core/ts_env.cpp
index 2a04ab5..26194a7 100644
--- a/server/tp_core/core/ts_env.cpp
+++ b/server/tp_core/core/ts_env.cpp
@@ -19,9 +19,9 @@ bool TsEnv::init(void)
ex_dirname(m_exec_path);
- // λ log, etc ·
- // Ĭ£Ŀ¼λڱִг ../ λã
- // ڣǿģʽԴԴֿĿ¼µshareĿ¼вҡ
+	// Locate the log and etc paths.
+	// By default these directories live at ../ relative to the executable;
+	// if they do not exist, we are probably running in development mode, so look in the share directory of the source tree.
ex_wstr base_path = m_exec_path;
ex_path_join(base_path, true, L"..", NULL);
@@ -61,7 +61,7 @@ bool TsEnv::init(void)
if (!ps->GetStr(L"log-file", log_file))
{
ex_wstr log_path = base_path;
- ex_path_join(log_path, false, _T("log"), NULL);
+ ex_path_join(log_path, false, L"log", NULL);
EXLOG_FILE(L"tpcore.log", log_path.c_str());
}
else
diff --git a/server/tp_core/core/ts_main.cpp b/server/tp_core/core/ts_main.cpp
index f0e27c4..51060a6 100644
--- a/server/tp_core/core/ts_main.cpp
+++ b/server/tp_core/core/ts_main.cpp
@@ -3,50 +3,62 @@
#include "ts_http_rpc.h"
#include "ts_db.h"
#include "ts_env.h"
-#include "ts_http_client.h"
-#include "ts_ver.h"
-#include "ts_crypto.h"
+//#include "ts_http_client.h"
+//#include "ts_ver.h"
+//#include "ts_crypto.h"
-#include "../common/protocol_interface.h"
+//#include "../common/protocol_interface.h"
+//#if defined(MBEDTLS_PLATFORM_C)
+//#include "mbedtls/platform.h"
+////#else
+////#include
+////#include
+////#define mbedtls_time time
+////#define mbedtls_time_t time_t
+////#define mbedtls_fprintf fprintf
+////#define mbedtls_printf printf
+//#endif
+
+#include
#include
-#include
+//#include
bool g_exit_flag = false;
-static unsigned char ToHex(unsigned char x)
-{
- return x > 9 ? x + 55 : x + 48;
-}
-
-ex_astr UrlEncode(const ex_astr& str)
-{
- ex_astr strTemp = "";
- size_t length = str.length();
- for (size_t i = 0; i < length; i++)
- {
- if (isalnum((unsigned char)str[i]) ||
- (str[i] == '-') ||
- (str[i] == '_') ||
- (str[i] == '.') ||
- (str[i] == '~'))
- {
- strTemp += str[i];
- }
- else if (str[i] == ' ')
- {
- strTemp += "+";
- }
- else
- {
- strTemp += '%';
- strTemp += ToHex((unsigned char)str[i] >> 4);
- strTemp += ToHex((unsigned char)str[i] % 16);
- }
- }
-
- return strTemp;
-}
+//static unsigned char ToHex(unsigned char x)
+//{
+// return x > 9 ? x + 55 : x + 48;
+//}
+//
+//ex_astr UrlEncode(const ex_astr& str)
+//{
+// ex_astr strTemp = "";
+// size_t length = str.length();
+// for (size_t i = 0; i < length; i++)
+// {
+// if (isalnum((unsigned char)str[i]) ||
+// (str[i] == '-') ||
+// (str[i] == '_') ||
+// (str[i] == '.') ||
+// (str[i] == '~'))
+// {
+// strTemp += str[i];
+// }
+// else if (str[i] == ' ')
+// {
+// strTemp += "+";
+// }
+// else
+// {
+// strTemp += '%';
+// strTemp += ToHex((unsigned char)str[i] >> 4);
+// strTemp += ToHex((unsigned char)str[i] % 16);
+// }
+// }
+//
+// return strTemp;
+//}
bool tpp_take_session(const ex_astr& sid, TS_SESSION_INFO& info)
{
@@ -134,9 +146,15 @@ bool TppManager::load_tpp(const ex_wstr& libname)
return false;
}
+#ifdef EX_OS_WIN32
lib->init = (TPP_INIT_FUNC)GetProcAddress(lib->dylib, "tpp_init");
lib->start = (TPP_START_FUNC)GetProcAddress(lib->dylib, "tpp_start");
lib->stop = (TPP_STOP_FUNC)GetProcAddress(lib->dylib, "tpp_stop");
+#else
+ lib->init = (TPP_INIT_FUNC)dlsym(lib->dylib, "tpp_init");
+ lib->start = (TPP_START_FUNC)dlsym(lib->dylib, "tpp_start");
+ lib->stop = (TPP_STOP_FUNC)dlsym(lib->dylib, "tpp_stop");
+#endif
if (lib->init == NULL || lib->start == NULL || lib->stop == NULL)
{