diff --git a/server/www/packages/packages-common/mako/__init__.py b/server/www/packages/packages-common/mako/__init__.py index 74526ec..eaa24dc 100644 --- a/server/www/packages/packages-common/mako/__init__.py +++ b/server/www/packages/packages-common/mako/__init__.py @@ -1,8 +1,8 @@ # mako/__init__.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -__version__ = '1.0.3' +__version__ = '1.0.6' diff --git a/server/www/packages/packages-common/mako/_ast_util.py b/server/www/packages/packages-common/mako/_ast_util.py index cc298d5..8d19b0d 100644 --- a/server/www/packages/packages-common/mako/_ast_util.py +++ b/server/www/packages/packages-common/mako/_ast_util.py @@ -1,5 +1,5 @@ # mako/_ast_util.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/ast.py b/server/www/packages/packages-common/mako/ast.py index c55b29c..8d2d150 100644 --- a/server/www/packages/packages-common/mako/ast.py +++ b/server/www/packages/packages-common/mako/ast.py @@ -1,5 +1,5 @@ # mako/ast.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/cache.py b/server/www/packages/packages-common/mako/cache.py index c7aabd2..94f3870 100644 --- a/server/www/packages/packages-common/mako/cache.py +++ b/server/www/packages/packages-common/mako/cache.py @@ -1,5 +1,5 @@ # mako/cache.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/cmd.py b/server/www/packages/packages-common/mako/cmd.py index 50d47fc..dd1f833 100644 --- a/server/www/packages/packages-common/mako/cmd.py +++ b/server/www/packages/packages-common/mako/cmd.py @@ -1,5 +1,5 @@ # mako/cmd.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/codegen.py b/server/www/packages/packages-common/mako/codegen.py index bf86d79..d4ecbe8 100644 --- a/server/www/packages/packages-common/mako/codegen.py +++ b/server/www/packages/packages-common/mako/codegen.py @@ -1,5 +1,5 @@ # mako/codegen.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/compat.py b/server/www/packages/packages-common/mako/compat.py index db22b99..a2ab243 100644 --- a/server/www/packages/packages-common/mako/compat.py +++ b/server/www/packages/packages-common/mako/compat.py @@ -5,6 +5,7 
@@ py3k = sys.version_info >= (3, 0) py33 = sys.version_info >= (3, 3) py2k = sys.version_info < (3,) py26 = sys.version_info >= (2, 6) +py27 = sys.version_info >= (2, 7) jython = sys.platform.startswith('java') win32 = sys.platform.startswith('win') pypy = hasattr(sys, 'pypy_version_info') diff --git a/server/www/packages/packages-common/mako/exceptions.py b/server/www/packages/packages-common/mako/exceptions.py index 84d2297..cb6fb3f 100644 --- a/server/www/packages/packages-common/mako/exceptions.py +++ b/server/www/packages/packages-common/mako/exceptions.py @@ -1,5 +1,5 @@ # mako/exceptions.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/ext/autohandler.py b/server/www/packages/packages-common/mako/ext/autohandler.py index 9ee780a..9d1c911 100644 --- a/server/www/packages/packages-common/mako/ext/autohandler.py +++ b/server/www/packages/packages-common/mako/ext/autohandler.py @@ -1,5 +1,5 @@ # ext/autohandler.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/ext/babelplugin.py b/server/www/packages/packages-common/mako/ext/babelplugin.py index 53d62ba..0b5e84f 100644 --- a/server/www/packages/packages-common/mako/ext/babelplugin.py +++ b/server/www/packages/packages-common/mako/ext/babelplugin.py @@ -1,5 +1,5 @@ # ext/babelplugin.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/ext/preprocessors.py b/server/www/packages/packages-common/mako/ext/preprocessors.py index 5624f70..9b700d1 100644 --- a/server/www/packages/packages-common/mako/ext/preprocessors.py +++ b/server/www/packages/packages-common/mako/ext/preprocessors.py @@ -1,5 +1,5 @@ # ext/preprocessors.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/ext/pygmentplugin.py b/server/www/packages/packages-common/mako/ext/pygmentplugin.py index 1121c5d..4057caa 100644 --- a/server/www/packages/packages-common/mako/ext/pygmentplugin.py +++ b/server/www/packages/packages-common/mako/ext/pygmentplugin.py @@ -1,5 +1,5 @@ # ext/pygmentplugin.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/ext/turbogears.py b/server/www/packages/packages-common/mako/ext/turbogears.py index 2e7d039..eaa2d78 100644 --- a/server/www/packages/packages-common/mako/ext/turbogears.py +++ b/server/www/packages/packages-common/mako/ext/turbogears.py @@ -1,5 +1,5 @@ # ext/turbogears.py -# Copyright (C) 2006-2015 the 
Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/filters.py b/server/www/packages/packages-common/mako/filters.py index 525aeb8..c082690 100644 --- a/server/www/packages/packages-common/mako/filters.py +++ b/server/www/packages/packages-common/mako/filters.py @@ -1,5 +1,5 @@ # mako/filters.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/lexer.py b/server/www/packages/packages-common/mako/lexer.py index 2fa08e4..cf4187f 100644 --- a/server/www/packages/packages-common/mako/lexer.py +++ b/server/www/packages/packages-common/mako/lexer.py @@ -1,5 +1,5 @@ # mako/lexer.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -95,31 +95,37 @@ class Lexer(object): # (match and "TRUE" or "FALSE") return match - def parse_until_text(self, *text): + def parse_until_text(self, watch_nesting, *text): startpos = self.match_position text_re = r'|'.join(text) brace_level = 0 + paren_level = 0 + bracket_level = 0 while True: match = self.match(r'#.*\n') if match: continue - match = self.match(r'(\"\"\"|\'\'\'|\"|\')((? 0: - brace_level -= 1 - continue + if match and not (watch_nesting + and (brace_level > 0 or paren_level > 0 + or bracket_level > 0)): return \ self.text[startpos: self.match_position - len(match.group(1))],\ match.group(1) - match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) + elif not match: + match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) if match: brace_level += match.group(1).count('{') brace_level -= match.group(1).count('}') + paren_level += match.group(1).count('(') + paren_level -= match.group(1).count(')') + bracket_level += match.group(1).count('[') + bracket_level -= match.group(1).count(']') continue raise exceptions.SyntaxException( "Expected: %s" % @@ -368,7 +374,7 @@ class Lexer(object): match = self.match(r"<%(!)?") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(r'%>') + text, end = self.parse_until_text(False, r'%>') # the trailing newline helps # compiler.parse() not complain about indentation text = adjust_whitespace(text) + "\n" @@ -384,9 +390,9 @@ class Lexer(object): match = self.match(r"\${") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(r'\|', r'}') + text, end = self.parse_until_text(True, r'\|', r'}') if end == '|': - escapes, end = self.parse_until_text(r'}') + escapes, end = self.parse_until_text(True, r'}') else: escapes = "" text = text.replace('\r\n', '\n') diff --git a/server/www/packages/packages-common/mako/lookup.py b/server/www/packages/packages-common/mako/lookup.py index 5cfd16c..0d3f304 100644 --- a/server/www/packages/packages-common/mako/lookup.py +++ b/server/www/packages/packages-common/mako/lookup.py @@ -1,5 +1,5 @@ # mako/lookup.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This 
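Note on the lexer hunk above: the reworked parse_until_text() now tracks parenthesis and bracket nesting alongside braces, and only honors the terminating text when watch_nesting is set and all nesting levels are closed. In practice a "|" inside a call or subscript is no longer mistaken for the expression/filter separator. A minimal sketch of the behavior this enables (template text and variable names are made up, assuming Mako 1.0.4+ lexer semantics):

    # hypothetical usage sketch, not part of the diff
    from mako.template import Template

    # The "|" here is Python's bitwise-or inside a function call, not a Mako
    # filter; the nesting-aware lexer reads the whole expression up to "}".
    t = Template("${ max(a | b, 10) }")
    print(t.render(a=4, b=3))   # -> "10"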
module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -96,7 +96,7 @@ class TemplateLookup(TemplateCollection): .. sourcecode:: python lookup = TemplateLookup(["/path/to/templates"]) - some_template = lookup.get_template("/admin_index.mako") + some_template = lookup.get_template("/index.html") The :class:`.TemplateLookup` can also be given :class:`.Template` objects programatically using :meth:`.put_string` or :meth:`.put_template`: @@ -180,7 +180,8 @@ class TemplateLookup(TemplateCollection): enable_loop=True, input_encoding=None, preprocessor=None, - lexer_cls=None): + lexer_cls=None, + include_error_handler=None): self.directories = [posixpath.normpath(d) for d in util.to_list(directories, ()) @@ -203,6 +204,7 @@ class TemplateLookup(TemplateCollection): self.template_args = { 'format_exceptions': format_exceptions, 'error_handler': error_handler, + 'include_error_handler': include_error_handler, 'disable_unicode': disable_unicode, 'bytestring_passthrough': bytestring_passthrough, 'output_encoding': output_encoding, diff --git a/server/www/packages/packages-common/mako/parsetree.py b/server/www/packages/packages-common/mako/parsetree.py index e7af4bc..879882e 100644 --- a/server/www/packages/packages-common/mako/parsetree.py +++ b/server/www/packages/packages-common/mako/parsetree.py @@ -1,5 +1,5 @@ # mako/parsetree.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/pygen.py b/server/www/packages/packages-common/mako/pygen.py index 5d87bbd..8514e02 100644 --- a/server/www/packages/packages-common/mako/pygen.py +++ b/server/www/packages/packages-common/mako/pygen.py @@ -1,5 +1,5 @@ # mako/pygen.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/pyparser.py b/server/www/packages/packages-common/mako/pyparser.py index 96e5335..15d0da6 100644 --- a/server/www/packages/packages-common/mako/pyparser.py +++ b/server/www/packages/packages-common/mako/pyparser.py @@ -1,5 +1,5 @@ # mako/pyparser.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/mako/runtime.py b/server/www/packages/packages-common/mako/runtime.py index 8d2f4a9..769541c 100644 --- a/server/www/packages/packages-common/mako/runtime.py +++ b/server/www/packages/packages-common/mako/runtime.py @@ -1,5 +1,5 @@ # mako/runtime.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -749,7 +749,16 @@ def _include_file(context, uri, calling_uri, **kwargs): (callable_, ctx) = _populate_self_namespace( context._clean_inheritance_tokens(), template) - callable_(ctx, **_kwargs_for_include(callable_, context._data, **kwargs)) + kwargs = 
_kwargs_for_include(callable_, context._data, **kwargs) + if template.include_error_handler: + try: + callable_(ctx, **kwargs) + except Exception: + result = template.include_error_handler(ctx, compat.exception_as()) + if not result: + compat.reraise(*sys.exc_info()) + else: + callable_(ctx, **kwargs) def _inherit_from(context, uri, calling_uri): diff --git a/server/www/packages/packages-common/mako/template.py b/server/www/packages/packages-common/mako/template.py index facb7e5..c3e0c25 100644 --- a/server/www/packages/packages-common/mako/template.py +++ b/server/www/packages/packages-common/mako/template.py @@ -1,5 +1,5 @@ # mako/template.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -109,6 +109,11 @@ class Template(object): completes. Is used to provide custom error-rendering functions. + .. seealso:: + + :paramref:`.Template.include_error_handler` - include-specific + error handler function + :param format_exceptions: if ``True``, exceptions which occur during the render phase of this template will be caught and formatted into an HTML error page, which then becomes the @@ -129,6 +134,16 @@ class Template(object): import will not appear as the first executed statement in the generated code and will therefore not have the desired effect. + :param include_error_handler: An error handler that runs when this template + is included within another one via the ``<%include>`` tag, and raises an + error. Compare to the :paramref:`.Template.error_handler` option. + + .. versionadded:: 1.0.6 + + .. seealso:: + + :paramref:`.Template.error_handler` - top-level error handler function + :param input_encoding: Encoding of the template's source code. Can be used in lieu of the coding comment. See :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for @@ -171,7 +186,7 @@ class Template(object): from mako.template import Template mytemplate = Template( - filename="admin_index.mako", + filename="index.html", module_directory="/path/to/modules", module_writer=module_writer ) @@ -243,7 +258,8 @@ class Template(object): future_imports=None, enable_loop=True, preprocessor=None, - lexer_cls=None): + lexer_cls=None, + include_error_handler=None): if uri: self.module_id = re.sub(r'\W', "_", uri) self.uri = uri @@ -329,6 +345,7 @@ class Template(object): self.callable_ = self.module.render_body self.format_exceptions = format_exceptions self.error_handler = error_handler + self.include_error_handler = include_error_handler self.lookup = lookup self.module_directory = module_directory @@ -475,6 +492,14 @@ class Template(object): return DefTemplate(self, getattr(self.module, "render_%s" % name)) + def list_defs(self): + """return a list of defs in the template. + + .. 
versionadded:: 1.0.4 + + """ + return [i[7:] for i in dir(self.module) if i[:7] == 'render_'] + def _get_def_callable(self, name): return getattr(self.module, "render_%s" % name) @@ -520,6 +545,7 @@ class ModuleTemplate(Template): cache_type=None, cache_dir=None, cache_url=None, + include_error_handler=None, ): self.module_id = re.sub(r'\W', "_", module._template_uri) self.uri = module._template_uri @@ -551,6 +577,7 @@ class ModuleTemplate(Template): self.callable_ = self.module.render_body self.format_exceptions = format_exceptions self.error_handler = error_handler + self.include_error_handler = include_error_handler self.lookup = lookup self._setup_cache_args( cache_impl, cache_enabled, cache_args, @@ -571,6 +598,7 @@ class DefTemplate(Template): self.encoding_errors = parent.encoding_errors self.format_exceptions = parent.format_exceptions self.error_handler = parent.error_handler + self.include_error_handler = parent.include_error_handler self.enable_loop = parent.enable_loop self.lookup = parent.lookup self.bytestring_passthrough = parent.bytestring_passthrough diff --git a/server/www/packages/packages-common/mako/util.py b/server/www/packages/packages-common/mako/util.py index c7dad65..2f089ff 100644 --- a/server/www/packages/packages-common/mako/util.py +++ b/server/www/packages/packages-common/mako/util.py @@ -1,5 +1,5 @@ # mako/util.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/server/www/packages/packages-common/pymemcache/__init__.py b/server/www/packages/packages-common/pymemcache/__init__.py deleted file mode 100644 index 5b8f37a..0000000 --- a/server/www/packages/packages-common/pymemcache/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '1.3.5' diff --git a/server/www/packages/packages-common/pymemcache/client/__init__.py b/server/www/packages/packages-common/pymemcache/client/__init__.py deleted file mode 100644 index a6ff93b..0000000 --- a/server/www/packages/packages-common/pymemcache/client/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# API Backwards compatibility - -from pymemcache.client.base import Client # noqa -from pymemcache.client.base import PooledClient # noqa - -from pymemcache.exceptions import MemcacheError # noqa -from pymemcache.exceptions import MemcacheClientError # noqa -from pymemcache.exceptions import MemcacheUnknownCommandError # noqa -from pymemcache.exceptions import MemcacheIllegalInputError # noqa -from pymemcache.exceptions import MemcacheServerError # noqa -from pymemcache.exceptions import MemcacheUnknownError # noqa -from pymemcache.exceptions import MemcacheUnexpectedCloseError # noqa diff --git a/server/www/packages/packages-common/pymemcache/client/base.py b/server/www/packages/packages-common/pymemcache/client/base.py deleted file mode 100644 index 4eab540..0000000 --- a/server/www/packages/packages-common/pymemcache/client/base.py +++ /dev/null @@ -1,1095 +0,0 @@ -# Copyright 2012 Pinterest.com -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
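Note on the include_error_handler argument threaded through TemplateLookup, Template, ModuleTemplate and DefTemplate above: per the runtime.py hunk, _include_file() calls the handler with the render context and the exception raised inside a template pulled in via <%include>, and skips re-raising only when the handler returns a truthy value. A hedged sketch of how it might be wired up (the directory path and handler body are illustrative, not from the diff):

    # illustrative only; path and handler logic are placeholders
    from mako.lookup import TemplateLookup

    def on_include_error(context, error):
        # emit a marker into the rendered output instead of failing the page
        context.write("<!-- include failed: %s -->" % error)
        return True   # truthy: suppress re-raising inside _include_file()

    lookup = TemplateLookup(
        directories=["/path/to/templates"],
        include_error_handler=on_include_error,
    )

The same template.py hunk also adds Template.list_defs() (new in 1.0.4), which simply strips the "render_" prefix from the generated module's callables.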
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -__author__ = "Charles Gordon" - -import errno -import socket -import six - -from pymemcache import pool - -from pymemcache.exceptions import ( - MemcacheClientError, - MemcacheUnknownCommandError, - MemcacheIllegalInputError, - MemcacheServerError, - MemcacheUnknownError, - MemcacheUnexpectedCloseError -) - - -RECV_SIZE = 4096 -VALID_STORE_RESULTS = { - b'set': (b'STORED',), - b'add': (b'STORED', b'NOT_STORED'), - b'replace': (b'STORED', b'NOT_STORED'), - b'append': (b'STORED', b'NOT_STORED'), - b'prepend': (b'STORED', b'NOT_STORED'), - b'cas': (b'STORED', b'EXISTS', b'NOT_FOUND'), -} - - -# Some of the values returned by the "stats" command -# need mapping into native Python types -STAT_TYPES = { - # General stats - b'version': six.binary_type, - b'rusage_user': lambda value: float(value.replace(b':', b'.')), - b'rusage_system': lambda value: float(value.replace(b':', b'.')), - b'hash_is_expanding': lambda value: int(value) != 0, - b'slab_reassign_running': lambda value: int(value) != 0, - - # Settings stats - b'inter': six.binary_type, - b'evictions': lambda value: value == b'on', - b'growth_factor': float, - b'stat_key_prefix': six.binary_type, - b'umask': lambda value: int(value, 8), - b'detail_enabled': lambda value: int(value) != 0, - b'cas_enabled': lambda value: int(value) != 0, - b'auth_enabled_sasl': lambda value: value == b'yes', - b'maxconns_fast': lambda value: int(value) != 0, - b'slab_reassign': lambda value: int(value) != 0, - b'slab_automove': lambda value: int(value) != 0, -} - -# Common helper functions. - - -def _check_key(key, key_prefix=b''): - """Checks key and add key_prefix.""" - if isinstance(key, six.text_type): - try: - key = key.encode('ascii') - except UnicodeEncodeError: - raise MemcacheIllegalInputError("No ascii key: %r" % (key,)) - key = key_prefix + key - if b' ' in key: - raise MemcacheIllegalInputError("Key contains spaces: %r" % (key,)) - if len(key) > 250: - raise MemcacheIllegalInputError("Key is too long: %r" % (key,)) - return key - - -class Client(object): - """ - A client for a single memcached server. - - *Keys and Values* - - Keys must have a __str__() method which should return a str with no more - than 250 ASCII characters and no whitespace or control characters. Unicode - strings must be encoded (as UTF-8, for example) unless they consist only - of ASCII characters that are neither whitespace nor control characters. - - Values must have a __str__() method to convert themselves to a byte - string. Unicode objects can be a problem since str() on a Unicode object - will attempt to encode it as ASCII (which will fail if the value contains - code points larger than U+127). You can fix this with a serializer or by - just calling encode on the string (using UTF-8, for instance). - - If you intend to use anything but str as a value, it is a good idea to use - a serializer and deserializer. The pymemcache.serde library has some - already implemented serializers, including one that is compatible with - the python-memcache library. 
- - *Serialization and Deserialization* - - The constructor takes two optional functions, one for "serialization" of - values, and one for "deserialization". The serialization function takes - two arguments, a key and a value, and returns a tuple of two elements, the - serialized value, and an integer in the range 0-65535 (the "flags"). The - deserialization function takes three parameters, a key, value and flags - and returns the deserialized value. - - Here is an example using JSON for non-str values: - - .. code-block:: python - - def serialize_json(key, value): - if type(value) == str: - return value, 1 - return json.dumps(value), 2 - - def deserialize_json(key, value, flags): - if flags == 1: - return value - - if flags == 2: - return json.loads(value) - - raise Exception("Unknown flags for value: {1}".format(flags)) - - *Error Handling* - - All of the methods in this class that talk to memcached can throw one of - the following exceptions: - - * MemcacheUnknownCommandError - * MemcacheClientError - * MemcacheServerError - * MemcacheUnknownError - * MemcacheUnexpectedCloseError - * MemcacheIllegalInputError - * socket.timeout - * socket.error - - Instances of this class maintain a persistent connection to memcached - which is terminated when any of these exceptions are raised. The next - call to a method on the object will result in a new connection being made - to memcached. - """ - - def __init__(self, - server, - serializer=None, - deserializer=None, - connect_timeout=None, - timeout=None, - no_delay=False, - ignore_exc=False, - socket_module=socket, - key_prefix=b'', - default_noreply=True): - """ - Constructor. - - Args: - server: tuple(hostname, port) - serializer: optional function, see notes in the class docs. - deserializer: optional function, see notes in the class docs. - connect_timeout: optional float, seconds to wait for a connection to - the memcached server. Defaults to "forever" (uses the underlying - default socket timeout, which can be very long). - timeout: optional float, seconds to wait for send or recv calls on - the socket connected to memcached. Defaults to "forever" (uses the - underlying default socket timeout, which can be very long). - no_delay: optional bool, set the TCP_NODELAY flag, which may help - with performance in some cases. Defaults to False. - ignore_exc: optional bool, True to cause the "get", "gets", - "get_many" and "gets_many" calls to treat any errors as cache - misses. Defaults to False. - socket_module: socket module to use, e.g. gevent.socket. Defaults to - the standard library's socket module. - key_prefix: Prefix of key. You can use this as namespace. Defaults - to b''. - default_noreply: bool, the default value for 'noreply' as passed to - store commands (except from cas, incr, and decr, which default to - False). - - Notes: - The constructor does not make a connection to memcached. The first - call to a method on the object will do that. 
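For reference, this is roughly how the client being deleted here was used; the constructor does not connect, so the first command opens the socket. The address and key below are placeholders:

    # illustrative usage of the removed pymemcache Client; not part of this change
    from pymemcache.client.base import Client

    client = Client(("127.0.0.1", 11211))
    client.set("greeting", "hello", expire=60, noreply=False)  # True once STORED
    print(client.get("greeting"))  # b'hello' (bytes when no deserializer is set)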
- """ - self.server = server - self.serializer = serializer - self.deserializer = deserializer - self.connect_timeout = connect_timeout - self.timeout = timeout - self.no_delay = no_delay - self.ignore_exc = ignore_exc - self.socket_module = socket_module - self.sock = None - if isinstance(key_prefix, six.text_type): - key_prefix = key_prefix.encode('ascii') - if not isinstance(key_prefix, bytes): - raise TypeError("key_prefix should be bytes.") - self.key_prefix = key_prefix - self.default_noreply = default_noreply - - def check_key(self, key): - """Checks key and add key_prefix.""" - return _check_key(key, key_prefix=self.key_prefix) - - def _connect(self): - sock = self.socket_module.socket(self.socket_module.AF_INET, - self.socket_module.SOCK_STREAM) - sock.settimeout(self.connect_timeout) - sock.connect(self.server) - sock.settimeout(self.timeout) - if self.no_delay: - sock.setsockopt(self.socket_module.IPPROTO_TCP, - self.socket_module.TCP_NODELAY, 1) - self.sock = sock - - def close(self): - """Close the connection to memcached, if it is open. The next call to a - method that requires a connection will re-open it.""" - if self.sock is not None: - try: - self.sock.close() - except Exception: - pass - self.sock = None - - def set(self, key, value, expire=0, noreply=None): - """ - The memcached "set" command. - - Args: - key: str, see class docs for details. - value: str, see class docs for details. - expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - If no exception is raised, always returns True. If an exception is - raised, the set may or may not have occurred. If noreply is True, - then a successful return does not guarantee a successful set. - """ - if noreply is None: - noreply = self.default_noreply - return self._store_cmd(b'set', key, expire, noreply, value) - - def set_many(self, values, expire=0, noreply=None): - """ - A convenience function for setting multiple values. - - Args: - values: dict(str, str), a dict of keys and values, see class docs - for details. - expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - If no exception is raised, always returns True. Otherwise all, some - or none of the keys have been successfully set. If noreply is True - then a successful return does not guarantee that any keys were - successfully set (just that the keys were successfully sent). - """ - - # TODO: make this more performant by sending all the values first, then - # waiting for all the responses. - for key, value in six.iteritems(values): - self.set(key, value, expire, noreply) - return True - - set_multi = set_many - - def add(self, key, value, expire=0, noreply=None): - """ - The memcached "add" command. - - Args: - key: str, see class docs for details. - value: str, see class docs for details. - expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - If noreply is True, the return value is always True. Otherwise the - return value is True if the value was stgored, and False if it was - not (because the key already existed). 
- """ - if noreply is None: - noreply = self.default_noreply - return self._store_cmd(b'add', key, expire, noreply, value) - - def replace(self, key, value, expire=0, noreply=None): - """ - The memcached "replace" command. - - Args: - key: str, see class docs for details. - value: str, see class docs for details. - expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - If noreply is True, always returns True. Otherwise returns True if - the value was stored and False if it wasn't (because the key didn't - already exist). - """ - if noreply is None: - noreply = self.default_noreply - return self._store_cmd(b'replace', key, expire, noreply, value) - - def append(self, key, value, expire=0, noreply=None): - """ - The memcached "append" command. - - Args: - key: str, see class docs for details. - value: str, see class docs for details. - expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - True. - """ - if noreply is None: - noreply = self.default_noreply - return self._store_cmd(b'append', key, expire, noreply, value) - - def prepend(self, key, value, expire=0, noreply=None): - """ - The memcached "prepend" command. - - Args: - key: str, see class docs for details. - value: str, see class docs for details. - expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - True. - """ - if noreply is None: - noreply = self.default_noreply - return self._store_cmd(b'prepend', key, expire, noreply, value) - - def cas(self, key, value, cas, expire=0, noreply=False): - """ - The memcached "cas" command. - - Args: - key: str, see class docs for details. - value: str, see class docs for details. - cas: int or str that only contains the characters '0'-'9'. - expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, False to wait for the reply (the default). - - Returns: - If noreply is True, always returns True. Otherwise returns None if - the key didn't exist, False if it existed but had a different cas - value and True if it existed and was changed. - """ - return self._store_cmd(b'cas', key, expire, noreply, value, cas) - - def get(self, key): - """ - The memcached "get" command, but only for one key, as a convenience. - - Args: - key: str, see class docs for details. - - Returns: - The value for the key, or None if the key wasn't found. - """ - return self._fetch_cmd(b'get', [key], False).get(key, None) - - def get_many(self, keys): - """ - The memcached "get" command. - - Args: - keys: list(str), see class docs for details. - - Returns: - A dict in which the keys are elements of the "keys" argument list - and the values are values from the cache. The dict may contain all, - some or none of the given keys. - """ - if not keys: - return {} - - return self._fetch_cmd(b'get', keys, False) - - get_multi = get_many - - def gets(self, key): - """ - The memcached "gets" command for one key, as a convenience. - - Args: - key: str, see class docs for details. 
- - Returns: - A tuple of (key, cas), or (None, None) if the key was not found. - """ - return self._fetch_cmd(b'gets', [key], True).get(key, (None, None)) - - def gets_many(self, keys): - """ - The memcached "gets" command. - - Args: - keys: list(str), see class docs for details. - - Returns: - A dict in which the keys are elements of the "keys" argument list and - the values are tuples of (value, cas) from the cache. The dict may - contain all, some or none of the given keys. - """ - if not keys: - return {} - - return self._fetch_cmd(b'gets', keys, True) - - def delete(self, key, noreply=None): - """ - The memcached "delete" command. - - Args: - key: str, see class docs for details. - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - If noreply is True, always returns True. Otherwise returns True if - the key was deleted, and False if it wasn't found. - """ - if noreply is None: - noreply = self.default_noreply - cmd = b'delete ' + self.check_key(key) - if noreply: - cmd += b' noreply' - cmd += b'\r\n' - result = self._misc_cmd(cmd, b'delete', noreply) - if noreply: - return True - return result == b'DELETED' - - def delete_many(self, keys, noreply=None): - """ - A convenience function to delete multiple keys. - - Args: - keys: list(str), the list of keys to delete. - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - True. If an exception is raised then all, some or none of the keys - may have been deleted. Otherwise all the keys have been sent to - memcache for deletion and if noreply is False, they have been - acknowledged by memcache. - """ - if not keys: - return True - - if noreply is None: - noreply = self.default_noreply - - # TODO: make this more performant by sending all keys first, then - # waiting for all values. - for key in keys: - self.delete(key, noreply) - - return True - - delete_multi = delete_many - - def incr(self, key, value, noreply=False): - """ - The memcached "incr" command. - - Args: - key: str, see class docs for details. - value: int, the amount by which to increment the value. - noreply: optional bool, False to wait for the reply (the default). - - Returns: - If noreply is True, always returns None. Otherwise returns the new - value of the key, or None if the key wasn't found. - """ - key = self.check_key(key) - cmd = b'incr ' + key + b' ' + six.text_type(value).encode('ascii') - if noreply: - cmd += b' noreply' - cmd += b'\r\n' - result = self._misc_cmd(cmd, b'incr', noreply) - if noreply: - return None - if result == b'NOT_FOUND': - return None - return int(result) - - def decr(self, key, value, noreply=False): - """ - The memcached "decr" command. - - Args: - key: str, see class docs for details. - value: int, the amount by which to increment the value. - noreply: optional bool, False to wait for the reply (the default). - - Returns: - If noreply is True, always returns None. Otherwise returns the new - value of the key, or None if the key wasn't found. - """ - key = self.check_key(key) - cmd = b'decr ' + key + b' ' + six.text_type(value).encode('ascii') - if noreply: - cmd += b' noreply' - cmd += b'\r\n' - result = self._misc_cmd(cmd, b'decr', noreply) - if noreply: - return None - if result == b'NOT_FOUND': - return None - return int(result) - - def touch(self, key, expire=0, noreply=None): - """ - The memcached "touch" command. - - Args: - key: str, see class docs for details. 
- expire: optional int, number of seconds until the item is expired - from the cache, or zero for no expiry (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - True if the expiration time was updated, False if the key wasn't - found. - """ - if noreply is None: - noreply = self.default_noreply - key = self.check_key(key) - cmd = b'touch ' + key + b' ' + six.text_type(expire).encode('ascii') - if noreply: - cmd += b' noreply' - cmd += b'\r\n' - result = self._misc_cmd(cmd, b'touch', noreply) - if noreply: - return True - return result == b'TOUCHED' - - def stats(self, *args): - """ - The memcached "stats" command. - - The returned keys depend on what the "stats" command returns. - A best effort is made to convert values to appropriate Python - types, defaulting to strings when a conversion cannot be made. - - Args: - *arg: extra string arguments to the "stats" command. See the - memcached protocol documentation for more information. - - Returns: - A dict of the returned stats. - """ - result = self._fetch_cmd(b'stats', args, False) - - for key, value in six.iteritems(result): - converter = STAT_TYPES.get(key, int) - try: - result[key] = converter(value) - except Exception: - pass - - return result - - def version(self): - """ - The memcached "version" command. - - Returns: - A string of the memcached version. - """ - cmd = b"version\r\n" - result = self._misc_cmd(cmd, b'version', False) - - if not result.startswith(b'VERSION '): - raise MemcacheUnknownError("Received unexpected response: %s" % (result, )) - - return result[8:] - - def flush_all(self, delay=0, noreply=None): - """ - The memcached "flush_all" command. - - Args: - delay: optional int, the number of seconds to wait before flushing, - or zero to flush immediately (the default). - noreply: optional bool, True to not wait for the reply (defaults to - self.default_noreply). - - Returns: - True. - """ - if noreply is None: - noreply = self.default_noreply - cmd = b'flush_all ' + six.text_type(delay).encode('ascii') - if noreply: - cmd += b' noreply' - cmd += b'\r\n' - result = self._misc_cmd(cmd, b'flush_all', noreply) - if noreply: - return True - return result == b'OK' - - def quit(self): - """ - The memcached "quit" command. - - This will close the connection with memcached. Calling any other - method on this object will re-open the connection, so this object can - be re-used after quit. 
- """ - cmd = b"quit\r\n" - self._misc_cmd(cmd, b'quit', True) - self.close() - - def _raise_errors(self, line, name): - if line.startswith(b'ERROR'): - raise MemcacheUnknownCommandError(name) - - if line.startswith(b'CLIENT_ERROR'): - error = line[line.find(b' ') + 1:] - raise MemcacheClientError(error) - - if line.startswith(b'SERVER_ERROR'): - error = line[line.find(b' ') + 1:] - raise MemcacheServerError(error) - - def _fetch_cmd(self, name, keys, expect_cas): - checked_keys = dict((self.check_key(k), k) for k in keys) - cmd = name + b' ' + b' '.join(checked_keys) + b'\r\n' - - try: - if not self.sock: - self._connect() - - self.sock.sendall(cmd) - - buf = b'' - result = {} - while True: - buf, line = _readline(self.sock, buf) - self._raise_errors(line, name) - if line == b'END': - return result - elif line.startswith(b'VALUE'): - if expect_cas: - _, key, flags, size, cas = line.split() - else: - try: - _, key, flags, size = line.split() - except Exception as e: - raise ValueError("Unable to parse line %s: %s" - % (line, str(e))) - - buf, value = _readvalue(self.sock, buf, int(size)) - key = checked_keys[key] - - if self.deserializer: - value = self.deserializer(key, value, int(flags)) - - if expect_cas: - result[key] = (value, cas) - else: - result[key] = value - elif name == b'stats' and line.startswith(b'STAT'): - _, key, value = line.split() - result[key] = value - else: - raise MemcacheUnknownError(line[:32]) - except Exception: - self.close() - if self.ignore_exc: - return {} - raise - - def _store_cmd(self, name, key, expire, noreply, data, cas=None): - key = self.check_key(key) - if not self.sock: - self._connect() - - if self.serializer: - data, flags = self.serializer(key, data) - else: - flags = 0 - - if not isinstance(data, six.binary_type): - try: - data = six.text_type(data).encode('ascii') - except UnicodeEncodeError as e: - raise MemcacheIllegalInputError(str(e)) - - extra = b'' - if cas is not None: - extra += b' ' + cas - if noreply: - extra += b' noreply' - - cmd = (name + b' ' + key + b' ' + six.text_type(flags).encode('ascii') - + b' ' + six.text_type(expire).encode('ascii') - + b' ' + six.text_type(len(data)).encode('ascii') + extra - + b'\r\n' + data + b'\r\n') - - try: - self.sock.sendall(cmd) - - if noreply: - return True - - buf = b'' - buf, line = _readline(self.sock, buf) - self._raise_errors(line, name) - - if line in VALID_STORE_RESULTS[name]: - if line == b'STORED': - return True - if line == b'NOT_STORED': - return False - if line == b'NOT_FOUND': - return None - if line == b'EXISTS': - return False - else: - raise MemcacheUnknownError(line[:32]) - except Exception: - self.close() - raise - - def _misc_cmd(self, cmd, cmd_name, noreply): - if not self.sock: - self._connect() - - try: - self.sock.sendall(cmd) - - if noreply: - return - - _, line = _readline(self.sock, b'') - self._raise_errors(line, cmd_name) - - return line - except Exception: - self.close() - raise - - def __setitem__(self, key, value): - self.set(key, value, noreply=True) - - def __getitem__(self, key): - value = self.get(key) - if value is None: - raise KeyError - return value - - def __delitem__(self, key): - self.delete(key, noreply=True) - - -class PooledClient(object): - """A thread-safe pool of clients (with the same client api). - - Args: - max_pool_size: maximum pool size to use (going about this amount - triggers a runtime error), by default this is 2147483648L - when not provided (or none). 
- lock_generator: a callback/type that takes no arguments that will - be called to create a lock or sempahore that can - protect the pool from concurrent access (for example a - eventlet lock or semaphore could be used instead) - - Further arguments are interpreted as for :py:class:`.Client` constructor. - """ - - def __init__(self, - server, - serializer=None, - deserializer=None, - connect_timeout=None, - timeout=None, - no_delay=False, - ignore_exc=False, - socket_module=socket, - key_prefix=b'', - max_pool_size=None, - lock_generator=None): - self.server = server - self.serializer = serializer - self.deserializer = deserializer - self.connect_timeout = connect_timeout - self.timeout = timeout - self.no_delay = no_delay - self.ignore_exc = ignore_exc - self.socket_module = socket_module - if isinstance(key_prefix, six.text_type): - key_prefix = key_prefix.encode('ascii') - if not isinstance(key_prefix, bytes): - raise TypeError("key_prefix should be bytes.") - self.key_prefix = key_prefix - self.client_pool = pool.ObjectPool( - self._create_client, - after_remove=lambda client: client.close(), - max_size=max_pool_size, - lock_generator=lock_generator) - - def check_key(self, key): - """Checks key and add key_prefix.""" - return _check_key(key, key_prefix=self.key_prefix) - - def _create_client(self): - client = Client(self.server, - serializer=self.serializer, - deserializer=self.deserializer, - connect_timeout=self.connect_timeout, - timeout=self.timeout, - no_delay=self.no_delay, - # We need to know when it fails *always* so that we - # can remove/destroy it from the pool... - ignore_exc=False, - socket_module=self.socket_module, - key_prefix=self.key_prefix) - return client - - def close(self): - self.client_pool.clear() - - def set(self, key, value, expire=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.set(key, value, expire=expire, noreply=noreply) - - def set_many(self, values, expire=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.set_many(values, expire=expire, noreply=noreply) - - set_multi = set_many - - def replace(self, key, value, expire=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.replace(key, value, expire=expire, noreply=noreply) - - def append(self, key, value, expire=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.append(key, value, expire=expire, noreply=noreply) - - def prepend(self, key, value, expire=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.prepend(key, value, expire=expire, noreply=noreply) - - def cas(self, key, value, cas, expire=0, noreply=False): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.cas(key, value, cas, - expire=expire, noreply=noreply) - - def get(self, key): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - try: - return client.get(key) - except Exception: - if self.ignore_exc: - return None - else: - raise - - def get_many(self, keys): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - try: - return client.get_many(keys) - except Exception: - if self.ignore_exc: - return {} - else: - raise - - get_multi = get_many - - def gets(self, key): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - try: - return client.gets(key) - except 
Exception: - if self.ignore_exc: - return (None, None) - else: - raise - - def gets_many(self, keys): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - try: - return client.gets_many(keys) - except Exception: - if self.ignore_exc: - return {} - else: - raise - - def delete(self, key, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.delete(key, noreply=noreply) - - def delete_many(self, keys, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.delete_many(keys, noreply=noreply) - - delete_multi = delete_many - - def add(self, key, value, expire=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.add(key, value, expire=expire, noreply=noreply) - - def incr(self, key, value, noreply=False): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.incr(key, value, noreply=noreply) - - def decr(self, key, value, noreply=False): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.decr(key, value, noreply=noreply) - - def touch(self, key, expire=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.touch(key, expire=expire, noreply=noreply) - - def stats(self, *args): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - try: - return client.stats(*args) - except Exception: - if self.ignore_exc: - return {} - else: - raise - - def version(self): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.version() - - def flush_all(self, delay=0, noreply=True): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - return client.flush_all(delay=delay, noreply=noreply) - - def quit(self): - with self.client_pool.get_and_release(destroy_on_fail=True) as client: - try: - client.quit() - finally: - self.client_pool.destroy(client) - - def __setitem__(self, key, value): - self.set(key, value, noreply=True) - - def __getitem__(self, key): - value = self.get(key) - if value is None: - raise KeyError - return value - - def __delitem__(self, key): - self.delete(key, noreply=True) - - -def _readline(sock, buf): - """Read line of text from the socket. - - Read a line of text (delimited by "\r\n") from the socket, and - return that line along with any trailing characters read from the - socket. - - Args: - sock: Socket object, should be connected. - buf: String, zero or more characters, returned from an earlier - call to _readline or _readvalue (pass an empty string on the - first call). - - Returns: - A tuple of (buf, line) where line is the full line read from the - socket (minus the "\r\n" characters) and buf is any trailing - characters read after the "\r\n" was found (which may be an empty - string). - - """ - chunks = [] - last_char = b'' - - while True: - # We're reading in chunks, so "\r\n" could appear in one chunk, - # or across the boundary of two chunks, so we check for both - # cases. - - # This case must appear first, since the buffer could have - # later \r\n characters in it and we want to get the first \r\n. - if last_char == b'\r' and buf[0:1] == b'\n': - # Strip the last character from the last chunk. 
- chunks[-1] = chunks[-1][:-1] - return buf[1:], b''.join(chunks) - elif buf.find(b'\r\n') != -1: - before, sep, after = buf.partition(b"\r\n") - chunks.append(before) - return after, b''.join(chunks) - - if buf: - chunks.append(buf) - last_char = buf[-1:] - - buf = _recv(sock, RECV_SIZE) - if not buf: - raise MemcacheUnexpectedCloseError() - - -def _readvalue(sock, buf, size): - """Read specified amount of bytes from the socket. - - Read size bytes, followed by the "\r\n" characters, from the socket, - and return those bytes and any trailing bytes read after the "\r\n". - - Args: - sock: Socket object, should be connected. - buf: String, zero or more characters, returned from an earlier - call to _readline or _readvalue (pass an empty string on the - first call). - size: Integer, number of bytes to read from the socket. - - Returns: - A tuple of (buf, value) where value is the bytes read from the - socket (there will be exactly size bytes) and buf is trailing - characters read after the "\r\n" following the bytes (but not - including the \r\n). - - """ - chunks = [] - rlen = size + 2 - while rlen - len(buf) > 0: - if buf: - rlen -= len(buf) - chunks.append(buf) - buf = _recv(sock, RECV_SIZE) - if not buf: - raise MemcacheUnexpectedCloseError() - - # Now we need to remove the \r\n from the end. There are two cases we care - # about: the \r\n is all in the last buffer, or only the \n is in the last - # buffer, and we need to remove the \r from the penultimate buffer. - - if rlen == 1: - # replace the last chunk with the same string minus the last character, - # which is always '\r' in this case. - chunks[-1] = chunks[-1][:-1] - else: - # Just remove the "\r\n" from the latest chunk - chunks.append(buf[:rlen - 2]) - - return buf[rlen:], b''.join(chunks) - - -def _recv(sock, size): - """sock.recv() with retry on EINTR""" - while True: - try: - return sock.recv(size) - except IOError as e: - if e.errno != errno.EINTR: - raise diff --git a/server/www/packages/packages-common/pymemcache/client/hash.py b/server/www/packages/packages-common/pymemcache/client/hash.py deleted file mode 100644 index 55b6287..0000000 --- a/server/www/packages/packages-common/pymemcache/client/hash.py +++ /dev/null @@ -1,333 +0,0 @@ -import socket -import time -import logging - -from pymemcache.client.base import Client, PooledClient, _check_key -from pymemcache.client.rendezvous import RendezvousHash - -logger = logging.getLogger(__name__) - - -class HashClient(object): - """ - A client for communicating with a cluster of memcached servers - """ - def __init__( - self, - servers, - hasher=RendezvousHash, - serializer=None, - deserializer=None, - connect_timeout=None, - timeout=None, - no_delay=False, - socket_module=socket, - key_prefix=b'', - max_pool_size=None, - lock_generator=None, - retry_attempts=2, - retry_timeout=1, - dead_timeout=60, - use_pooling=False, - ignore_exc=False, - ): - """ - Constructor. - - Args: - servers: list(tuple(hostname, port)) - hasher: optional class three functions ``get_node``, ``add_node``, - and ``remove_node`` - defaults to Rendezvous (HRW) hash. - - use_pooling: use py:class:`.PooledClient` as the default underlying - class. ``max_pool_size`` and ``lock_generator`` can - be used with this. default: False - - retry_attempts: Amount of times a client should be tried before it - is marked dead and removed from the pool. - retry_timeout (float): Time in seconds that should pass between retry - attempts. 
- dead_timeout (float): Time in seconds before attempting to add a node - back in the pool. - - Further arguments are interpreted as for :py:class:`.Client` - constructor. - - The default ``hasher`` is using a pure python implementation that can - be significantly improved performance wise by switching to a C based - version. We recommend using ``python-clandestined`` if having a C - dependency is acceptable. - """ - self.clients = {} - self.retry_attempts = retry_attempts - self.retry_timeout = retry_timeout - self.dead_timeout = dead_timeout - self.use_pooling = use_pooling - self.key_prefix = key_prefix - self.ignore_exc = ignore_exc - self._failed_clients = {} - self._dead_clients = {} - self._last_dead_check_time = time.time() - - self.hasher = hasher() - - self.default_kwargs = { - 'connect_timeout': connect_timeout, - 'timeout': timeout, - 'no_delay': no_delay, - 'socket_module': socket_module, - 'key_prefix': key_prefix, - 'serializer': serializer, - 'deserializer': deserializer, - } - - if use_pooling is True: - self.default_kwargs.update({ - 'max_pool_size': max_pool_size, - 'lock_generator': lock_generator - }) - - for server, port in servers: - self.add_server(server, port) - - def add_server(self, server, port): - key = '%s:%s' % (server, port) - - if self.use_pooling: - client = PooledClient( - (server, port), - **self.default_kwargs - ) - else: - client = Client((server, port), **self.default_kwargs) - - self.clients[key] = client - self.hasher.add_node(key) - - def remove_server(self, server, port): - dead_time = time.time() - self._failed_clients.pop((server, port)) - self._dead_clients[(server, port)] = dead_time - key = '%s:%s' % (server, port) - self.hasher.remove_node(key) - - def _get_client(self, key): - _check_key(key, self.key_prefix) - if len(self._dead_clients) > 0: - current_time = time.time() - ldc = self._last_dead_check_time - # we have dead clients and we have reached the - # timeout retry - if current_time - ldc > self.dead_timeout: - for server, dead_time in self._dead_clients.items(): - if current_time - dead_time > self.dead_timeout: - logger.debug( - 'bringing server back into rotation %s', - server - ) - self.add_server(*server) - self._last_dead_check_time = current_time - - server = self.hasher.get_node(key) - # We've ran out of servers to try - if server is None: - if self.ignore_exc is True: - return - raise Exception('All servers seem to be down right now') - - client = self.clients[server] - return client - - def _safely_run_func(self, client, func, default_val, *args, **kwargs): - try: - if client.server in self._failed_clients: - # This server is currently failing, lets check if it is in - # retry or marked as dead - failed_metadata = self._failed_clients[client.server] - - # we haven't tried our max amount yet, if it has been enough - # time lets just retry using it - if failed_metadata['attempts'] < self.retry_attempts: - failed_time = failed_metadata['failed_time'] - if time.time() - failed_time > self.retry_timeout: - logger.debug( - 'retrying failed server: %s', client.server - ) - result = func(*args, **kwargs) - # we were successful, lets remove it from the failed - # clients - self._failed_clients.pop(client.server) - return result - return default_val - else: - # We've reached our max retry attempts, we need to mark - # the sever as dead - logger.debug('marking server as dead: %s', client.server) - self.remove_server(*client.server) - - result = func(*args, **kwargs) - return result - - # Connecting to the server fail, we should enter 
- # retry mode - except socket.error: - # This client has never failed, lets mark it for failure - if ( - client.server not in self._failed_clients and - self.retry_attempts > 0 - ): - self._failed_clients[client.server] = { - 'failed_time': time.time(), - 'attempts': 0, - } - # We aren't allowing any retries, we should mark the server as - # dead immediately - elif ( - client.server not in self._failed_clients and - self.retry_attempts <= 0 - ): - self._failed_clients[client.server] = { - 'failed_time': time.time(), - 'attempts': 0, - } - logger.debug("marking server as dead %s", client.server) - self.remove_server(*client.server) - # This client has failed previously, we need to update the metadata - # to reflect that we have attempted it again - else: - failed_metadata = self._failed_clients[client.server] - failed_metadata['attempts'] += 1 - failed_metadata['failed_time'] = time.time() - self._failed_clients[client.server] = failed_metadata - - # if we haven't enabled ignore_exc, don't move on gracefully, just - # raise the exception - if not self.ignore_exc: - raise - - return default_val - except: - # any exceptions that aren't socket.error we need to handle - # gracefully as well - if not self.ignore_exc: - raise - - return default_val - - def _run_cmd(self, cmd, key, default_val, *args, **kwargs): - client = self._get_client(key) - - if client is None: - return False - - func = getattr(client, cmd) - args = list(args) - args.insert(0, key) - return self._safely_run_func( - client, func, default_val, *args, **kwargs - ) - - def set(self, key, *args, **kwargs): - return self._run_cmd('set', key, False, *args, **kwargs) - - def get(self, key, *args, **kwargs): - return self._run_cmd('get', key, None, *args, **kwargs) - - def incr(self, key, *args, **kwargs): - return self._run_cmd('incr', key, False, *args, **kwargs) - - def decr(self, key, *args, **kwargs): - return self._run_cmd('decr', key, False, *args, **kwargs) - - def set_many(self, values, *args, **kwargs): - client_batches = {} - end = [] - - for key, value in values.items(): - client = self._get_client(key) - - if client is None: - end.append(False) - continue - - if client.server not in client_batches: - client_batches[client.server] = {} - - client_batches[client.server][key] = value - - for server, values in client_batches.items(): - client = self.clients['%s:%s' % server] - new_args = list(args) - new_args.insert(0, values) - result = self._safely_run_func( - client, - client.set_many, False, *new_args, **kwargs - ) - end.append(result) - - return all(end) - - set_multi = set_many - - def get_many(self, keys, *args, **kwargs): - client_batches = {} - end = {} - - for key in keys: - client = self._get_client(key) - - if client is None: - end[key] = False - continue - - if client.server not in client_batches: - client_batches[client.server] = [] - - client_batches[client.server].append(key) - - for server, keys in client_batches.items(): - client = self.clients['%s:%s' % server] - new_args = list(args) - new_args.insert(0, keys) - result = self._safely_run_func( - client, - client.get_many, {}, *new_args, **kwargs - ) - end.update(result) - - return end - - get_multi = get_many - - def gets(self, key, *args, **kwargs): - return self._run_cmd('gets', key, None, *args, **kwargs) - - def add(self, key, *args, **kwargs): - return self._run_cmd('add', key, False, *args, **kwargs) - - def prepend(self, key, *args, **kwargs): - return self._run_cmd('prepend', key, False, *args, **kwargs) - - def append(self, key, *args, 
**kwargs): - return self._run_cmd('append', key, False, *args, **kwargs) - - def delete(self, key, *args, **kwargs): - return self._run_cmd('delete', key, False, *args, **kwargs) - - def delete_many(self, keys, *args, **kwargs): - for key in keys: - self._run_cmd('delete', key, False, *args, **kwargs) - return True - - delete_multi = delete_many - - def cas(self, key, *args, **kwargs): - return self._run_cmd('cas', key, False, *args, **kwargs) - - def replace(self, key, *args, **kwargs): - return self._run_cmd('replace', key, False, *args, **kwargs) - - def flush_all(self): - for _, client in self.clients.items(): - self._safely_run_func(client, client.flush_all, False) diff --git a/server/www/packages/packages-common/pymemcache/client/murmur3.py b/server/www/packages/packages-common/pymemcache/client/murmur3.py deleted file mode 100644 index 787eeaf..0000000 --- a/server/www/packages/packages-common/pymemcache/client/murmur3.py +++ /dev/null @@ -1,51 +0,0 @@ -def murmur3_32(data, seed=0): - """MurmurHash3 was written by Austin Appleby, and is placed in the - public domain. The author hereby disclaims copyright to this source - code.""" - - c1 = 0xcc9e2d51 - c2 = 0x1b873593 - - length = len(data) - h1 = seed - roundedEnd = (length & 0xfffffffc) # round down to 4 byte block - for i in range(0, roundedEnd, 4): - # little endian load order - k1 = (ord(data[i]) & 0xff) | ((ord(data[i + 1]) & 0xff) << 8) | \ - ((ord(data[i + 2]) & 0xff) << 16) | (ord(data[i + 3]) << 24) - k1 *= c1 - k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17) # ROTL32(k1,15) - k1 *= c2 - - h1 ^= k1 - h1 = (h1 << 13) | ((h1 & 0xffffffff) >> 19) # ROTL32(h1,13) - h1 = h1 * 5 + 0xe6546b64 - - # tail - k1 = 0 - - val = length & 0x03 - if val == 3: - k1 = (ord(data[roundedEnd + 2]) & 0xff) << 16 - # fallthrough - if val in [2, 3]: - k1 |= (ord(data[roundedEnd + 1]) & 0xff) << 8 - # fallthrough - if val in [1, 2, 3]: - k1 |= ord(data[roundedEnd]) & 0xff - k1 *= c1 - k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17) # ROTL32(k1,15) - k1 *= c2 - h1 ^= k1 - - # finalization - h1 ^= length - - # fmix(h1) - h1 ^= ((h1 & 0xffffffff) >> 16) - h1 *= 0x85ebca6b - h1 ^= ((h1 & 0xffffffff) >> 13) - h1 *= 0xc2b2ae35 - h1 ^= ((h1 & 0xffffffff) >> 16) - - return h1 & 0xffffffff diff --git a/server/www/packages/packages-common/pymemcache/client/rendezvous.py b/server/www/packages/packages-common/pymemcache/client/rendezvous.py deleted file mode 100644 index 32ecc2b..0000000 --- a/server/www/packages/packages-common/pymemcache/client/rendezvous.py +++ /dev/null @@ -1,46 +0,0 @@ -from pymemcache.client.murmur3 import murmur3_32 - - -class RendezvousHash(object): - """ - Implements the Highest Random Weight (HRW) hashing algorithm most - commonly referred to as rendezvous hashing. - - Originally developed as part of python-clandestined. - - Copyright (c) 2014 Ernest W. Durbin III - """ - def __init__(self, nodes=None, seed=0, hash_function=murmur3_32): - """ - Constructor. 
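For context on the hash-based client whose methods are removed above, a minimal usage sketch. The import path `pymemcache.client.hash` and the host addresses are assumptions, not taken from this diff; the keyword arguments mirror the constructor shown in the deleted code.

```python
from pymemcache.client.hash import HashClient  # assumed upstream module path

# Keys are spread across nodes by the configured hasher; failed nodes are
# retried and eventually marked dead, as in the retry logic removed above.
client = HashClient(
    [("10.0.0.1", 11211), ("10.0.0.2", 11211)],  # placeholder servers
    use_pooling=True,     # one PooledClient per node
    retry_attempts=2,     # failures allowed before a node is marked dead
    dead_timeout=60,      # seconds before a dead node rejoins the rotation
    ignore_exc=True,      # treat node failures as cache misses
)
client.set("user:1", "alice")
print(client.get("user:1"))
```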
- """ - self.nodes = [] - self.seed = seed - if nodes is not None: - self.nodes = nodes - self.hash_function = lambda x: hash_function(x, seed) - - def add_node(self, node): - if node not in self.nodes: - self.nodes.append(node) - - def remove_node(self, node): - if node in self.nodes: - self.nodes.remove(node) - else: - raise ValueError("No such node %s to remove" % (node)) - - def get_node(self, key): - high_score = -1 - winner = None - - for node in self.nodes: - score = self.hash_function( - "%s-%s" % (str(node), str(key))) - - if score > high_score: - (high_score, winner) = (score, node) - elif score == high_score: - (high_score, winner) = (score, max(str(node), str(winner))) - - return winner diff --git a/server/www/packages/packages-common/pymemcache/exceptions.py b/server/www/packages/packages-common/pymemcache/exceptions.py deleted file mode 100644 index 416fa0a..0000000 --- a/server/www/packages/packages-common/pymemcache/exceptions.py +++ /dev/null @@ -1,40 +0,0 @@ -class MemcacheError(Exception): - "Base exception class" - pass - - -class MemcacheClientError(MemcacheError): - """Raised when memcached fails to parse the arguments to a request, likely - due to a malformed key and/or value, a bug in this library, or a version - mismatch with memcached.""" - pass - - -class MemcacheUnknownCommandError(MemcacheClientError): - """Raised when memcached fails to parse a request, likely due to a bug in - this library or a version mismatch with memcached.""" - pass - - -class MemcacheIllegalInputError(MemcacheClientError): - """Raised when a key or value is not legal for Memcache (see the class docs - for Client for more details).""" - pass - - -class MemcacheServerError(MemcacheError): - """Raised when memcached reports a failure while processing a request, - likely due to a bug or transient issue in memcached.""" - pass - - -class MemcacheUnknownError(MemcacheError): - """Raised when this library receives a response from memcached that it - cannot parse, likely due to a bug in this library or a version mismatch - with memcached.""" - pass - - -class MemcacheUnexpectedCloseError(MemcacheServerError): - "Raised when the connection with memcached closes unexpectedly." - pass diff --git a/server/www/packages/packages-common/pymemcache/fallback.py b/server/www/packages/packages-common/pymemcache/fallback.py deleted file mode 100644 index d70d83c..0000000 --- a/server/www/packages/packages-common/pymemcache/fallback.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2012 Pinterest.com -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -A client for falling back to older memcached servers when performing reads. - -It is sometimes necessary to deploy memcached on new servers, or with a -different configuration. In theses cases, it is undesirable to start up an -empty memcached server and point traffic to it, since the cache will be cold, -and the backing store will have a large increase in traffic. 
- -This class attempts to solve that problem by providing an interface identical -to the Client interface, but which can fall back to older memcached servers -when reads to the primary server fail. The approach for upgrading memcached -servers or configuration then becomes: - - 1. Deploy a new host (or fleet) with memcached, possibly with a new - configuration. - 2. From your application servers, use FallbackClient to write and read from - the new cluster, and to read from the old cluster when there is a miss in - the new cluster. - 3. Wait until the new cache is warm enough to support the load. - 4. Switch from FallbackClient to a regular Client library for doing all - reads and writes to the new cluster. - 5. Take down the old cluster. - -Best Practices: ---------------- - - Make sure that the old client has "ignore_exc" set to True, so that it - treats failures like cache misses. That will allow you to take down the - old cluster before you switch away from FallbackClient. -""" - - -class FallbackClient(object): - def __init__(self, caches): - assert len(caches) > 0 - self.caches = caches - - def close(self): - "Close each of the memcached clients" - for cache in self.caches: - cache.close() - - def set(self, key, value, expire=0, noreply=True): - self.caches[0].set(key, value, expire, noreply) - - def add(self, key, value, expire=0, noreply=True): - self.caches[0].add(key, value, expire, noreply) - - def replace(self, key, value, expire=0, noreply=True): - self.caches[0].replace(key, value, expire, noreply) - - def append(self, key, value, expire=0, noreply=True): - self.caches[0].append(key, value, expire, noreply) - - def prepend(self, key, value, expire=0, noreply=True): - self.caches[0].prepend(key, value, expire, noreply) - - def cas(self, key, value, cas, expire=0, noreply=True): - self.caches[0].cas(key, value, cas, expire, noreply) - - def get(self, key): - for cache in self.caches: - result = cache.get(key) - if result is not None: - return result - return None - - def get_many(self, keys): - for cache in self.caches: - result = cache.get_many(keys) - if result: - return result - return [] - - def gets(self, key): - for cache in self.caches: - result = cache.gets(key) - if result is not None: - return result - return None - - def gets_many(self, keys): - for cache in self.caches: - result = cache.gets_many(keys) - if result: - return result - return [] - - def delete(self, key, noreply=True): - self.caches[0].delete(key, noreply) - - def incr(self, key, value, noreply=True): - self.caches[0].incr(key, value, noreply) - - def decr(self, key, value, noreply=True): - self.caches[0].decr(key, value, noreply) - - def touch(self, key, expire=0, noreply=True): - self.caches[0].touch(key, expire, noreply) - - def stats(self): - # TODO: ?? - pass - - def flush_all(self, delay=0, noreply=True): - self.caches[0].flush_all(delay, noreply) - - def quit(self): - # TODO: ?? - pass diff --git a/server/www/packages/packages-common/pymemcache/pool.py b/server/www/packages/packages-common/pymemcache/pool.py deleted file mode 100644 index f800f90..0000000 --- a/server/www/packages/packages-common/pymemcache/pool.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2015 Yahoo.com -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
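A sketch of the migration workflow the docstring above describes, using the FallbackClient removed in this hunk. The hostnames are placeholders and the `pymemcache.client.base` / `pymemcache.fallback` import paths are assumed from upstream pymemcache.

```python
from pymemcache.client.base import Client        # assumed upstream path
from pymemcache.fallback import FallbackClient   # assumed upstream path

# ignore_exc=True on the old cache so taking it down later reads as a miss.
old = Client(("old-cache.internal", 11211), ignore_exc=True)   # placeholder host
new = Client(("new-cache.internal", 11211))                    # placeholder host

cache = FallbackClient((new, old))
cache.set("greeting", "hello")   # writes go to the new cluster only
print(cache.get("greeting"))     # reads fall back to the old cluster on a miss
```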
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import contextlib -import sys -import threading - -import six - - -class ObjectPool(object): - """A pool of objects that release/creates/destroys as needed.""" - - def __init__(self, obj_creator, - after_remove=None, max_size=None, - lock_generator=None): - self._used_objs = collections.deque() - self._free_objs = collections.deque() - self._obj_creator = obj_creator - if lock_generator is None: - self._lock = threading.Lock() - else: - self._lock = lock_generator() - self._after_remove = after_remove - max_size = max_size or 2 ** 31 - if not isinstance(max_size, six.integer_types) or max_size < 0: - raise ValueError('"max_size" must be a positive integer') - self.max_size = max_size - - @property - def used(self): - return tuple(self._used_objs) - - @property - def free(self): - return tuple(self._free_objs) - - @contextlib.contextmanager - def get_and_release(self, destroy_on_fail=False): - obj = self.get() - try: - yield obj - except Exception: - exc_info = sys.exc_info() - if not destroy_on_fail: - self.release(obj) - else: - self.destroy(obj) - six.reraise(exc_info[0], exc_info[1], exc_info[2]) - self.release(obj) - - def get(self): - with self._lock: - if not self._free_objs: - curr_count = len(self._used_objs) - if curr_count >= self.max_size: - raise RuntimeError("Too many objects," - " %s >= %s" % (curr_count, - self.max_size)) - obj = self._obj_creator() - self._used_objs.append(obj) - return obj - else: - obj = self._free_objs.pop() - self._used_objs.append(obj) - return obj - - def destroy(self, obj, silent=True): - was_dropped = False - with self._lock: - try: - self._used_objs.remove(obj) - was_dropped = True - except ValueError: - if not silent: - raise - if was_dropped and self._after_remove is not None: - self._after_remove(obj) - - def release(self, obj, silent=True): - with self._lock: - try: - self._used_objs.remove(obj) - self._free_objs.append(obj) - except ValueError: - if not silent: - raise - - def clear(self): - if self._after_remove is not None: - needs_destroy = [] - with self._lock: - needs_destroy.extend(self._used_objs) - needs_destroy.extend(self._free_objs) - self._free_objs.clear() - self._used_objs.clear() - for obj in needs_destroy: - self._after_remove(obj) - else: - with self._lock: - self._free_objs.clear() - self._used_objs.clear() diff --git a/server/www/packages/packages-common/pymemcache/serde.py b/server/www/packages/packages-common/pymemcache/serde.py deleted file mode 100644 index c7a00ee..0000000 --- a/server/www/packages/packages-common/pymemcache/serde.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2012 Pinterest.com -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
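A minimal usage sketch of the ObjectPool removed above, showing the `get_and_release` context manager. The `list` factory is a stand-in for whatever expensive object (typically a client connection) the pool would normally create.

```python
from pymemcache.pool import ObjectPool   # the class deleted in this hunk

pool = ObjectPool(obj_creator=list, max_size=4)   # trivial factory, illustration only

# On success the object returns to the free list; destroy_on_fail drops it
# from the pool entirely if the block raises.
with pool.get_and_release(destroy_on_fail=True) as buf:
    buf.append("work item")

print(len(pool.free), len(pool.used))   # 1 0
```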
-# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import pickle - -try: - from cStringIO import StringIO -except ImportError: - from StringIO import StringIO - - -FLAG_PICKLE = 1 << 0 -FLAG_INTEGER = 1 << 1 -FLAG_LONG = 1 << 2 - - -def python_memcache_serializer(key, value): - flags = 0 - - if isinstance(value, str): - pass - elif isinstance(value, int): - flags |= FLAG_INTEGER - value = "%d" % value - elif isinstance(value, long): - flags |= FLAG_LONG - value = "%d" % value - else: - flags |= FLAG_PICKLE - output = StringIO() - pickler = pickle.Pickler(output, 0) - pickler.dump(value) - value = output.getvalue() - - return value, flags - - -def python_memcache_deserializer(key, value, flags): - if flags == 0: - return value - - if flags & FLAG_INTEGER: - return int(value) - - if flags & FLAG_LONG: - return long(value) - - if flags & FLAG_PICKLE: - try: - buf = StringIO(value) - unpickler = pickle.Unpickler(buf) - return unpickler.load() - except Exception: - logging.info('Pickle error', exc_info=True) - return None - - return value diff --git a/server/www/packages/packages-common/pymysql/__init__.py b/server/www/packages/packages-common/pymysql/__init__.py index 2236ff1..43fb9a0 100644 --- a/server/www/packages/packages-common/pymysql/__init__.py +++ b/server/www/packages/packages-common/pymysql/__init__.py @@ -1,7 +1,7 @@ -''' +""" PyMySQL: A pure-Python MySQL client library. -Copyright (c) 2010, 2013 PyMySQL contributors +Copyright (c) 2010-2016 PyMySQL contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -20,30 +20,29 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
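The serializer being deleted here maps each value type to a memcached flag bit. Below is a Python 3 sketch of the same flag scheme (the original targets Python 2, hence `long` and `StringIO`); it is an illustration of the approach, not the deleted module itself.

```python
import pickle

FLAG_PICKLE, FLAG_INTEGER = 1 << 0, 1 << 1

def serialize(key, value):
    if isinstance(value, bytes):
        return value, 0                        # raw bytes, no flag
    if isinstance(value, int):
        return b"%d" % value, FLAG_INTEGER     # integers stored as ASCII digits
    return pickle.dumps(value, 0), FLAG_PICKLE # everything else is pickled

def deserialize(key, value, flags):
    if flags & FLAG_INTEGER:
        return int(value)
    if flags & FLAG_PICKLE:
        return pickle.loads(value)
    return value

blob, flags = serialize("k", {"a": 1})
assert deserialize("k", blob, flags) == {"a": 1}
```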
- -''' - -VERSION = (0, 6, 7, None) - -from ._compat import text_type, JYTHON, IRONPYTHON -from .constants import FIELD_TYPE -from .converters import escape_dict, escape_sequence, escape_string -from .err import Warning, Error, InterfaceError, DataError, \ - DatabaseError, OperationalError, IntegrityError, InternalError, \ - NotSupportedError, ProgrammingError, MySQLError -from .times import Date, Time, Timestamp, \ - DateFromTicks, TimeFromTicks, TimestampFromTicks - +""" import sys +from ._compat import PY2 +from .constants import FIELD_TYPE +from .converters import escape_dict, escape_sequence, escape_string +from .err import ( + Warning, Error, InterfaceError, DataError, + DatabaseError, OperationalError, IntegrityError, InternalError, + NotSupportedError, ProgrammingError, MySQLError) +from .times import ( + Date, Time, Timestamp, + DateFromTicks, TimeFromTicks, TimestampFromTicks) + +VERSION = (0, 7, 11, None) threadsafety = 1 apilevel = "2.0" -paramstyle = "format" +paramstyle = "pyformat" + class DBAPISet(frozenset): - def __ne__(self, other): if isinstance(other, set): return frozenset.__ne__(self, other) @@ -73,11 +72,14 @@ TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME]) DATETIME = TIMESTAMP ROWID = DBAPISet() + def Binary(x): """Return x as a binary type.""" - if isinstance(x, text_type) and not (JYTHON or IRONPYTHON): - return x.encode() - return bytes(x) + if PY2: + return bytearray(x) + else: + return bytes(x) + def Connect(*args, **kwargs): """ @@ -87,27 +89,26 @@ def Connect(*args, **kwargs): from .connections import Connection return Connection(*args, **kwargs) -from pymysql import connections as _orig_conn +from . import connections as _orig_conn if _orig_conn.Connection.__init__.__doc__ is not None: - Connect.__doc__ = _orig_conn.Connection.__init__.__doc__ + (""" -See connections.Connection.__init__() for information about defaults. 
-""") + Connect.__doc__ = _orig_conn.Connection.__init__.__doc__ del _orig_conn + def get_client_info(): # for MySQLdb compatibility return '.'.join(map(str, VERSION)) connect = Connection = Connect # we include a doctored version_info here for MySQLdb compatibility -version_info = (1,2,2,"final",0) +version_info = (1,2,6,"final",0) NULL = "NULL" __version__ = get_client_info() def thread_safe(): - return True # match MySQLdb.thread_safe() + return True # match MySQLdb.thread_safe() def install_as_MySQLdb(): """ @@ -116,6 +117,7 @@ def install_as_MySQLdb(): """ sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"] + __all__ = [ 'BINARY', 'Binary', 'Connect', 'Connection', 'DATE', 'Date', 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks', @@ -128,6 +130,5 @@ __all__ = [ 'paramstyle', 'threadsafety', 'version_info', "install_as_MySQLdb", - - "NULL","__version__", - ] + "NULL", "__version__", +] diff --git a/server/www/packages/packages-common/pymysql/_compat.py b/server/www/packages/packages-common/pymysql/_compat.py index 0c55346..252789e 100644 --- a/server/www/packages/packages-common/pymysql/_compat.py +++ b/server/www/packages/packages-common/pymysql/_compat.py @@ -7,12 +7,15 @@ IRONPYTHON = sys.platform == 'cli' CPYTHON = not PYPY and not JYTHON and not IRONPYTHON if PY2: + import __builtin__ range_type = xrange text_type = unicode long_type = long str_type = basestring + unichr = __builtin__.unichr else: range_type = range text_type = str long_type = int str_type = str + unichr = chr diff --git a/server/www/packages/packages-common/pymysql/charset.py b/server/www/packages/packages-common/pymysql/charset.py index 1cf7d91..968376c 100644 --- a/server/www/packages/packages-common/pymysql/charset.py +++ b/server/www/packages/packages-common/pymysql/charset.py @@ -11,6 +11,10 @@ class Charset(object): self.id, self.name, self.collation = id, name, collation self.is_default = is_default == 'Yes' + def __repr__(self): + return "Charset(id=%s, name=%r, collation=%r)" % ( + self.id, self.name, self.collation) + @property def encoding(self): name = self.name @@ -249,6 +253,10 @@ _charsets.add(Charset(240, 'utf8mb4', 'utf8mb4_persian_ci', '')) _charsets.add(Charset(241, 'utf8mb4', 'utf8mb4_esperanto_ci', '')) _charsets.add(Charset(242, 'utf8mb4', 'utf8mb4_hungarian_ci', '')) _charsets.add(Charset(243, 'utf8mb4', 'utf8mb4_sinhala_ci', '')) +_charsets.add(Charset(244, 'utf8mb4', 'utf8mb4_german2_ci', '')) +_charsets.add(Charset(245, 'utf8mb4', 'utf8mb4_croatian_ci', '')) +_charsets.add(Charset(246, 'utf8mb4', 'utf8mb4_unicode_520_ci', '')) +_charsets.add(Charset(247, 'utf8mb4', 'utf8mb4_vietnamese_ci', '')) charset_by_name = _charsets.by_name diff --git a/server/www/packages/packages-common/pymysql/connections.py b/server/www/packages/packages-common/pymysql/connections.py index f6c06ce..31dd85a 100644 --- a/server/www/packages/packages-common/pymysql/connections.py +++ b/server/www/packages/packages-common/pymysql/connections.py @@ -17,9 +17,8 @@ import traceback import warnings from .charset import MBLENGTH, charset_by_name, charset_by_id -from .constants import CLIENT, COMMAND, FIELD_TYPE, SERVER_STATUS -from .converters import ( - escape_item, encoders, decoders, escape_string, through) +from .constants import CLIENT, COMMAND, CR, FIELD_TYPE, SERVER_STATUS +from .converters import escape_item, escape_string, through, conversions as _conv from .cursors import Cursor from .optionfile import Parser from .util import byte2int, int2byte @@ -36,7 +35,8 @@ 
try: import getpass DEFAULT_USER = getpass.getuser() del getpass -except ImportError: +except (ImportError, KeyError): + # KeyError occurs when there's no entry in OS database for a current user. DEFAULT_USER = None @@ -117,26 +117,24 @@ def dump_packet(data): # pragma: no cover try: print("packet length:", len(data)) - print("method call[1]:", sys._getframe(1).f_code.co_name) - print("method call[2]:", sys._getframe(2).f_code.co_name) - print("method call[3]:", sys._getframe(3).f_code.co_name) - print("method call[4]:", sys._getframe(4).f_code.co_name) - print("method call[5]:", sys._getframe(5).f_code.co_name) - print("-" * 88) + for i in range(1, 6): + f = sys._getframe(i) + print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno)) + print("-" * 66) except ValueError: pass dump_data = [data[i:i+16] for i in range_type(0, min(len(data), 256), 16)] for d in dump_data: print(' '.join(map(lambda x: "{:02X}".format(byte2int(x)), d)) + ' ' * (16 - len(d)) + ' ' * 2 + - ' '.join(map(lambda x: "{}".format(is_ascii(x)), d))) - print("-" * 88) + ''.join(map(lambda x: "{}".format(is_ascii(x)), d))) + print("-" * 66) print() def _scramble(password, message): if not password: - return b'\0' + return b'' if DEBUG: print('password=' + str(password)) stage1 = sha_new(password).digest() stage2 = sha_new(stage1).digest() @@ -149,7 +147,7 @@ def _scramble(password, message): def _my_crypt(message1, message2): length = len(message1) - result = struct.pack('B', length) + result = b'' for i in range_type(length): x = (struct.unpack('B', message1[i:i+1])[0] ^ struct.unpack('B', message2[i:i+1])[0]) @@ -196,7 +194,8 @@ def _hash_password_323(password): add = 7 nr2 = 0x12345671 - for c in [byte2int(x) for x in password if x not in (' ', '\t')]: + # x in py3 is numbers, p27 is chars + for c in [byte2int(x) for x in password if x not in (' ', '\t', 32, 9)]: nr ^= (((nr & 63) + add) * c) + (nr << 8) & 0xFFFFFFFF nr2 = (nr2 + ((nr2 << 8) ^ nr)) & 0xFFFFFFFF add = (add + c) & 0xFFFFFFFF @@ -209,6 +208,20 @@ def _hash_password_323(password): def pack_int24(n): return struct.pack('= 7 def is_eof_packet(self): # http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet # Caution: \xFE may be LengthEncodedInteger. # If \xFE is LengthEncodedInteger header, 8bytes followed. - return len(self._data) < 9 and self._data[0:1] == b'\xfe' + return self._data[0:1] == b'\xfe' and len(self._data) < 9 + + def is_auth_switch_request(self): + # http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest + return self._data[0:1] == b'\xfe' def is_resultset_packet(self): field_count = ord(self._data[0:1]) @@ -379,9 +405,9 @@ class FieldDescriptorPacket(MysqlPacket): def __init__(self, data, encoding): MysqlPacket.__init__(self, data, encoding) - self.__parse_field_descriptor(encoding) + self._parse_field_descriptor(encoding) - def __parse_field_descriptor(self, encoding): + def _parse_field_descriptor(self, encoding): """Parse the 'Field Descriptor' (Metadata) packet. This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0). @@ -494,20 +520,23 @@ class Connection(object): The proper way to get an instance of this class is to call connect(). 
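The `_scramble` helper in the hunk above computes MySQL's `mysql_native_password` handshake token. A worked illustration of that computation only, not a drop-in for the library code:

```python
import hashlib

def native_password_scramble(password: bytes, salt: bytes) -> bytes:
    # Token sent during the handshake:
    #   SHA1(salt + SHA1(SHA1(password)))  XOR  SHA1(password)
    stage1 = hashlib.sha1(password).digest()
    stage2 = hashlib.sha1(stage1).digest()
    mixed = hashlib.sha1(salt + stage2).digest()
    return bytes(a ^ b for a, b in zip(mixed, stage1))

print(native_password_scramble(b"secret", b"12345678901234567890").hex())
```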
- """ - socket = None + _sock = None + _auth_plugin_name = '' + _closed = False - def __init__(self, host="localhost", user=None, password="", - database=None, port=3306, unix_socket=None, + def __init__(self, host=None, user=None, password="", + database=None, port=0, unix_socket=None, charset='', sql_mode=None, - read_default_file=None, conv=decoders, use_unicode=None, + read_default_file=None, conv=None, use_unicode=None, client_flag=0, cursorclass=Cursor, init_command=None, - connect_timeout=None, ssl=None, read_default_group=None, + connect_timeout=10, ssl=None, read_default_group=None, compress=None, named_pipe=None, no_delay=None, autocommit=False, db=None, passwd=None, local_infile=False, - max_allowed_packet=16*1024*1024, defer_connect=False): + max_allowed_packet=16*1024*1024, defer_connect=False, + auth_plugin_map={}, read_timeout=None, write_timeout=None, + bind_address=None): """ Establish a connection to the MySQL database. Accepts several arguments: @@ -516,15 +545,19 @@ class Connection(object): user: Username to log in as password: Password to use. database: Database to use, None to not use a particular one. - port: MySQL port to use, default is usually OK. + port: MySQL port to use, default is usually OK. (default: 3306) + bind_address: When the client has multiple network interfaces, specify + the interface from which to connect to the host. Argument can be + a hostname or an IP address. unix_socket: Optionally, you can use a unix socket rather than TCP/IP. charset: Charset you want to use. sql_mode: Default SQL_MODE to use. read_default_file: Specifies my.cnf file to read these parameters from under the [client] section. conv: - Decoders dictionary to use instead of the default one. - This is used to provide custom marshalling of types. See converters. + Conversion dictionary to use instead of the default one. + This is used to provide custom marshalling and unmarshaling of types. + See converters. use_unicode: Whether or not to default to unicode strings. This option defaults to true for Py3k. @@ -532,27 +565,29 @@ class Connection(object): cursorclass: Custom cursor class to use. init_command: Initial SQL statement to run when connection is established. connect_timeout: Timeout before throwing an exception when connecting. + (default: 10, min: 1, max: 31536000) ssl: A dict of arguments similar to mysql_ssl_set()'s parameters. For now the capath and cipher arguments are not supported. read_default_group: Group to read from in the configuration file. compress; Not supported named_pipe: Not supported - no_delay: Disable Nagle's algorithm on the socket. (deprecated, default: True) autocommit: Autocommit mode. None means use server default. (default: False) local_infile: Boolean to enable the use of LOAD DATA LOCAL command. (default: False) max_allowed_packet: Max size of packet sent to server in bytes. (default: 16MB) + Only used to limit size of "LOAD LOCAL INFILE" data packet smaller than default (16KB). defer_connect: Don't explicitly connect on contruction - wait for connect call. (default: False) - + auth_plugin_map: A dict of plugin names to a class that processes that plugin. + The class will take the Connection object as the argument to the constructor. + The class needs an authenticate method taking an authentication packet as + an argument. For the dialog plugin, a prompt(echo, prompt) method can be used + (if no authenticate method) for returning a string from the user. (experimental) db: Alias for database. 
(for compatibility to MySQLdb) passwd: Alias for password. (for compatibility to MySQLdb) """ if no_delay is not None: warnings.warn("no_delay option is deprecated", DeprecationWarning) - no_delay = bool(no_delay) - else: - no_delay = True if use_unicode is None and sys.version_info[0] > 2: use_unicode = True @@ -565,24 +600,10 @@ class Connection(object): if compress or named_pipe: raise NotImplementedError("compress and named_pipe arguments are not supported") - if local_infile: + self._local_infile = bool(local_infile) + if self._local_infile: client_flag |= CLIENT.LOCAL_FILES - if ssl and ('capath' in ssl or 'cipher' in ssl): - raise NotImplementedError('ssl options capath and cipher are not supported') - - self.ssl = False - if ssl: - if not SSL_ENABLED: - raise NotImplementedError("ssl module not found") - self.ssl = True - client_flag |= CLIENT.SSL - for k in ('key', 'cert', 'ca'): - v = None - if k in ssl: - v = ssl[k] - setattr(self, k, v) - if read_default_group and not read_default_file: if sys.platform.startswith("win"): read_default_file = "c:\\my.ini" @@ -610,15 +631,40 @@ class Connection(object): database = _config("database", database) unix_socket = _config("socket", unix_socket) port = int(_config("port", port)) + bind_address = _config("bind-address", bind_address) charset = _config("default-character-set", charset) + if not ssl: + ssl = {} + if isinstance(ssl, dict): + for key in ["ca", "capath", "cert", "key", "cipher"]: + value = _config("ssl-" + key, ssl.get(key)) + if value: + ssl[key] = value - self.host = host - self.port = port + self.ssl = False + if ssl: + if not SSL_ENABLED: + raise NotImplementedError("ssl module not found") + self.ssl = True + client_flag |= CLIENT.SSL + self.ctx = self._create_ssl_ctx(ssl) + + self.host = host or "localhost" + self.port = port or 3306 self.user = user or DEFAULT_USER self.password = password or "" self.db = database - self.no_delay = no_delay self.unix_socket = unix_socket + self.bind_address = bind_address + if not (0 < connect_timeout <= 31536000): + raise ValueError("connect_timeout should be >0 and <=31536000") + self.connect_timeout = connect_timeout or None + if read_timeout is not None and read_timeout <= 0: + raise ValueError("read_timeout should be >= 0") + self._read_timeout = read_timeout + if write_timeout is not None and write_timeout <= 0: + raise ValueError("write_timeout should be >= 0") + self._write_timeout = write_timeout if charset: self.charset = charset self.use_unicode = True @@ -631,13 +677,12 @@ class Connection(object): self.encoding = charset_by_name(self.charset).encoding - client_flag |= CLIENT.CAPABILITIES | CLIENT.MULTI_STATEMENTS + client_flag |= CLIENT.CAPABILITIES if self.db: client_flag |= CLIENT.CONNECT_WITH_DB self.client_flag = client_flag self.cursorclass = cursorclass - self.connect_timeout = connect_timeout self._result = None self._affected_rows = 0 @@ -646,44 +691,68 @@ class Connection(object): #: specified autocommit mode. None means use server default. self.autocommit_mode = autocommit - self.encoders = encoders # Need for MySQLdb compatibility. - self.decoders = conv + if conv is None: + conv = _conv + # Need for MySQLdb compatibility. 
+ self.encoders = dict([(k, v) for (k, v) in conv.items() if type(k) is not int]) + self.decoders = dict([(k, v) for (k, v) in conv.items() if type(k) is int]) self.sql_mode = sql_mode self.init_command = init_command self.max_allowed_packet = max_allowed_packet + self._auth_plugin_map = auth_plugin_map if defer_connect: - self.socket = None + self._sock = None else: self.connect() + def _create_ssl_ctx(self, sslp): + if isinstance(sslp, ssl.SSLContext): + return sslp + ca = sslp.get('ca') + capath = sslp.get('capath') + hasnoca = ca is None and capath is None + ctx = ssl.create_default_context(cafile=ca, capath=capath) + ctx.check_hostname = not hasnoca and sslp.get('check_hostname', True) + ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED + if 'cert' in sslp: + ctx.load_cert_chain(sslp['cert'], keyfile=sslp.get('key')) + if 'cipher' in sslp: + ctx.set_ciphers(sslp['cipher']) + ctx.options |= ssl.OP_NO_SSLv2 + ctx.options |= ssl.OP_NO_SSLv3 + return ctx + def close(self): """Send the quit message and close the socket""" - if self.socket is None: + if self._closed: raise err.Error("Already closed") + self._closed = True + if self._sock is None: + return send_data = struct.pack('= 5: self.client_flag |= CLIENT.MULTI_RESULTS @@ -1000,48 +1118,114 @@ class Connection(object): data_init = struct.pack('=5.0) + data += authresp + b'\0' - if self.db: + if self.db and self.server_capabilities & CLIENT.CONNECT_WITH_DB: if isinstance(self.db, text_type): self.db = self.db.encode(self.encoding) - data += self.db + int2byte(0) + data += self.db + b'\0' - data = pack_int24(len(data)) + int2byte(next_packet) + data - next_packet += 2 - - if DEBUG: dump_packet(data) - self._write_bytes(data) + if self.server_capabilities & CLIENT.PLUGIN_AUTH: + name = self._auth_plugin_name + if isinstance(name, text_type): + name = name.encode('ascii') + data += name + b'\0' + self.write_packet(data) auth_packet = self._read_packet() - # if old_passwords is enabled the packet will be 1 byte long and - # have the octet 254 + # if authentication method isn't accepted the first byte + # will have the octet 254 + if auth_packet.is_auth_switch_request(): + # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest + auth_packet.read_uint8() # 0xfe packet identifier + plugin_name = auth_packet.read_string() + if self.server_capabilities & CLIENT.PLUGIN_AUTH and plugin_name is not None: + auth_packet = self._process_auth(plugin_name, auth_packet) + else: + # send legacy handshake + data = _scramble_323(self.password.encode('latin1'), self.salt) + b'\0' + self.write_packet(data) + auth_packet = self._read_packet() - if auth_packet.is_eof_packet(): - # send legacy handshake - data = _scramble_323(self.password.encode('latin1'), self.salt) + b'\0' - data = pack_int24(len(data)) + int2byte(next_packet) + data - self._write_bytes(data) - auth_packet = self._read_packet() + def _process_auth(self, plugin_name, auth_packet): + plugin_class = self._auth_plugin_map.get(plugin_name) + if not plugin_class: + plugin_class = self._auth_plugin_map.get(plugin_name.decode('ascii')) + if plugin_class: + try: + handler = plugin_class(self) + return handler.authenticate(auth_packet) + except AttributeError: + if plugin_name != b'dialog': + raise err.OperationalError(2059, "Authentication plugin '%s'" \ + " not loaded: - %r missing authenticate method" % (plugin_name, plugin_class)) + except TypeError: + raise err.OperationalError(2059, "Authentication plugin '%s'" \ + " not loaded: 
- %r cannot be constructed with connection object" % (plugin_name, plugin_class)) + else: + handler = None + if plugin_name == b"mysql_native_password": + # https://dev.mysql.com/doc/internals/en/secure-password-authentication.html#packet-Authentication::Native41 + data = _scramble(self.password.encode('latin1'), auth_packet.read_all()) + b'\0' + elif plugin_name == b"mysql_old_password": + # https://dev.mysql.com/doc/internals/en/old-password-authentication.html + data = _scramble_323(self.password.encode('latin1'), auth_packet.read_all()) + b'\0' + elif plugin_name == b"mysql_clear_password": + # https://dev.mysql.com/doc/internals/en/clear-text-authentication.html + data = self.password.encode('latin1') + b'\0' + elif plugin_name == b"dialog": + pkt = auth_packet + while True: + flag = pkt.read_uint8() + echo = (flag & 0x06) == 0x02 + last = (flag & 0x01) == 0x01 + prompt = pkt.read_all() + + if prompt == b"Password: ": + self.write_packet(self.password.encode('latin1') + b'\0') + elif handler: + resp = 'no response - TypeError within plugin.prompt method' + try: + resp = handler.prompt(echo, prompt) + self.write_packet(resp + b'\0') + except AttributeError: + raise err.OperationalError(2059, "Authentication plugin '%s'" \ + " not loaded: - %r missing prompt method" % (plugin_name, handler)) + except TypeError: + raise err.OperationalError(2061, "Authentication plugin '%s'" \ + " %r didn't respond with string. Returned '%r' to prompt %r" % (plugin_name, handler, resp, prompt)) + else: + raise err.OperationalError(2059, "Authentication plugin '%s' (%r) not configured" % (plugin_name, handler)) + pkt = self._read_packet() + pkt.check_error() + if pkt.is_ok_packet() or last: + break + return pkt + else: + raise err.OperationalError(2059, "Authentication plugin '%s' not configured" % plugin_name) + + self.write_packet(data) + pkt = self._read_packet() + pkt.check_error() + return pkt # _mysql support def thread_id(self): @@ -1065,7 +1249,7 @@ class Connection(object): self.protocol_version = byte2int(data[i:i+1]) i += 1 - server_end = data.find(int2byte(0), i) + server_end = data.find(b'\0', i) self.server_version = data[i:server_end].decode('latin1') i = server_end + 1 @@ -1097,7 +1281,22 @@ class Connection(object): if len(data) >= i + salt_len: # salt_len includes auth_plugin_data_part_1 and filler self.salt += data[i:i+salt_len] - # TODO: AUTH PLUGIN NAME may appeare here. + i += salt_len + + i+=1 + # AUTH PLUGIN NAME may appear here. + if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i: + # Due to Bug#59453 the auth-plugin-name is missing the terminating + # NUL-char in versions prior to 5.5.10 and 5.6.2. + # ref: https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::Handshake + # didn't use version checks as mariadb is corrected and reports + # earlier than those two. 
+ server_end = data.find(b'\0', i) + if server_end < 0: # pragma: no cover - very specific upstream bug + # not found \0 and last field so take it all + self._auth_plugin_name = data[i:].decode('latin1') + else: + self._auth_plugin_name = data[i:server_end].decode('latin1') def get_server_info(self): return self.server_version @@ -1117,6 +1316,9 @@ class Connection(object): class MySQLResult(object): def __init__(self, connection): + """ + :type connection: Connection + """ self.connection = connection self.affected_rows = None self.insert_id = None @@ -1144,7 +1346,7 @@ class MySQLResult(object): else: self._read_result_packet(first_packet) finally: - self.connection = False + self.connection = None def init_unbuffered_query(self): self.unbuffered_active = True @@ -1154,6 +1356,10 @@ class MySQLResult(object): self._read_ok_packet(first_packet) self.unbuffered_active = False self.connection = None + elif first_packet.is_load_local_packet(): + self._read_load_local_packet(first_packet) + self.unbuffered_active = False + self.connection = None else: self.field_count = first_packet.read_length_encoded_integer() self._get_descriptions() @@ -1173,22 +1379,33 @@ class MySQLResult(object): self.has_next = ok_packet.has_next def _read_load_local_packet(self, first_packet): + if not self.connection._local_infile: + raise RuntimeError( + "**WARN**: Received LOAD_LOCAL packet but local_infile option is false.") load_packet = LoadLocalPacketWrapper(first_packet) sender = LoadLocalFile(load_packet.filename, self.connection) - sender.send_data() + try: + sender.send_data() + except: + self.connection._read_packet() # skip ok packet + raise ok_packet = self.connection._read_packet() - if not ok_packet.is_ok_packet(): + if not ok_packet.is_ok_packet(): # pragma: no cover - upstream induced protocol error raise err.OperationalError(2014, "Commands Out of Sync") self._read_ok_packet(ok_packet) def _check_packet_is_eof(self, packet): - if packet.is_eof_packet(): - eof_packet = EOFPacketWrapper(packet) - self.warning_count = eof_packet.warning_count - self.has_next = eof_packet.has_next - return True - return False + if not packet.is_eof_packet(): + return False + #TODO: Support CLIENT.DEPRECATE_EOF + # 1) Add DEPRECATE_EOF to CAPABILITIES + # 2) Mask CAPABILITIES with server_capabilities + # 3) if server_capabilities & CLIENT.DEPRECATE_EOF: use OKPacketWrapper instead of EOFPacketWrapper + wp = EOFPacketWrapper(packet) + self.warning_count = wp.warning_count + self.has_next = wp.has_next + return True def _read_result_packet(self, first_packet): self.field_count = first_packet.read_length_encoded_integer() @@ -1239,7 +1456,12 @@ class MySQLResult(object): def _read_row_from_packet(self, packet): row = [] for encoding, converter in self.converters: - data = packet.read_length_coded_string() + try: + data = packet.read_length_coded_string() + except IndexError: + # No more columns in this row + # See https://github.com/PyMySQL/PyMySQL/pull/434 + break if data is not None: if encoding is not None: data = data.decode(encoding) @@ -1254,21 +1476,30 @@ class MySQLResult(object): self.fields = [] self.converters = [] use_unicode = self.connection.use_unicode + conn_encoding = self.connection.encoding description = [] + for i in range_type(self.field_count): field = self.connection._read_packet(FieldDescriptorPacket) self.fields.append(field) description.append(field.description()) field_type = field.type_code if use_unicode: - if field_type in TEXT_TYPES: - charset = charset_by_id(field.charsetnr) - if 
charset.is_binary: + if field_type == FIELD_TYPE.JSON: + # When SELECT from JSON column: charset = binary + # When SELECT CAST(... AS JSON): charset = connection encoding + # This behavior is different from TEXT / BLOB. + # We should decode result by connection encoding regardless charsetnr. + # See https://github.com/PyMySQL/PyMySQL/issues/488 + encoding = conn_encoding # SELECT CAST(... AS JSON) + elif field_type in TEXT_TYPES: + if field.charsetnr == 63: # binary # TEXTs with charset=binary means BINARY types. encoding = None else: - encoding = charset.encoding + encoding = conn_encoding else: + # Integers, Dates and Times, and other basic data is encoded in ascii encoding = 'ascii' else: encoding = None @@ -1290,28 +1521,20 @@ class LoadLocalFile(object): def send_data(self): """Send data packets from the local file to the server""" - if not self.connection.socket: + if not self.connection._sock: raise err.InterfaceError("(0, '')") + conn = self.connection - # sequence id is 2 as we already sent a query packet - seq_id = 2 try: with open(self.filename, 'rb') as open_file: - chunk_size = self.connection.max_allowed_packet - packet = b"" - + packet_size = min(conn.max_allowed_packet, 16*1024) # 16KB is efficient enough while True: - chunk = open_file.read(chunk_size) + chunk = open_file.read(packet_size) if not chunk: break - packet = struct.pack('Q", b)[0] -# +# # the snippet above is right, but MySQLdb doesn't process bits, # so we shouldn't either convert_bit = through @@ -309,7 +371,9 @@ encoders = { tuple: escape_sequence, list: escape_sequence, set: escape_sequence, + frozenset: escape_sequence, dict: escape_dict, + bytearray: escape_bytes, type(None): escape_None, datetime.date: escape_date, datetime.datetime: escape_datetime, @@ -350,7 +414,6 @@ decoders = { # for MySQLdb compatibility -conversions = decoders - -def Thing2Literal(obj): - return escape_str(str(obj)) +conversions = encoders.copy() +conversions.update(decoders) +Thing2Literal = escape_str diff --git a/server/www/packages/packages-common/pymysql/cursors.py b/server/www/packages/packages-common/pymysql/cursors.py index 266e137..dc3ce1e 100644 --- a/server/www/packages/packages-common/pymysql/cursors.py +++ b/server/www/packages/packages-common/pymysql/cursors.py @@ -5,33 +5,37 @@ import re import warnings from ._compat import range_type, text_type, PY2 - from . import err #: Regular expression for :meth:`Cursor.executemany`. #: executemany only suports simple bulk insert. #: You can use it to load large dataset. -RE_INSERT_VALUES = re.compile(r"""(INSERT\s.+\sVALUES\s+)(\(\s*%s\s*(?:,\s*%s\s*)*\))(\s*(?:ON DUPLICATE.*)?)\Z""", - re.IGNORECASE | re.DOTALL) +RE_INSERT_VALUES = re.compile( + r"\s*((?:INSERT|REPLACE)\s.+\sVALUES?\s+)" + + r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))" + + r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z", + re.IGNORECASE | re.DOTALL) class Cursor(object): - ''' + """ This is the object you use to interact with the database. - ''' + """ - #: Max stetement size which :meth:`executemany` generates. + #: Max statement size which :meth:`executemany` generates. #: #: Max size of allowed statement is max_allowed_packet - packet_header_size. #: Default value of max_allowed_packet is 1048576. max_stmt_length = 1024000 + _defer_warnings = False + def __init__(self, connection): - ''' + """ Do not create an instance of a Cursor yourself. Call connections.Connection.cursor(). 
- ''' + """ self.connection = connection self.description = None self.rownumber = 0 @@ -40,11 +44,12 @@ class Cursor(object): self._executed = None self._result = None self._rows = None + self._warnings_handled = False def close(self): - ''' + """ Closing a cursor just exhausts all remaining data. - ''' + """ conn = self.connection if conn is None: return @@ -83,6 +88,9 @@ class Cursor(object): """Get the next query set""" conn = self._get_db() current_result = self._result + # for unbuffered queries warnings are only available once whole result has been read + if unbuffered: + self._show_warnings() if current_result is None or current_result is not conn._result: return None if not current_result.has_next: @@ -107,17 +115,17 @@ class Cursor(object): if isinstance(args, (tuple, list)): if PY2: args = tuple(map(ensure_bytes, args)) - return tuple(conn.escape(arg) for arg in args) + return tuple(conn.literal(arg) for arg in args) elif isinstance(args, dict): if PY2: args = dict((ensure_bytes(key), ensure_bytes(val)) for (key, val) in args.items()) - return dict((key, conn.escape(val)) for (key, val) in args.items()) + return dict((key, conn.literal(val)) for (key, val) in args.items()) else: # If it's not a dictionary let's try escaping it anyways. # Worst case it will throw a Value error if PY2: - ensure_bytes(args) + args = ensure_bytes(args) return conn.escape(args) def mogrify(self, query, args=None): @@ -137,7 +145,19 @@ class Cursor(object): return query def execute(self, query, args=None): - '''Execute a query''' + """Execute a query + + :param str query: Query to execute. + + :param args: parameters used with query. (optional) + :type args: tuple, list or dict + + :return: Number of affected rows + :rtype: int + + If args is a list or tuple, %s can be used as a placeholder in the query. + If args is a dict, %(name)s can be used as a placeholder in the query. + """ while self.nextset(): pass @@ -148,17 +168,23 @@ class Cursor(object): return result def executemany(self, query, args): + # type: (str, list) -> int """Run several data against one query - PyMySQL can execute bulkinsert for query like 'INSERT ... VALUES (%s)'. - In other form of queries, just run :meth:`execute` many times. + :param query: query to execute on server + :param args: Sequence of sequences or mappings. It is used as parameter. + :return: Number of rows affected, if any. + + This method improves performance on multiple-row INSERT and + REPLACE. Otherwise it is equivalent to looping over args with + execute(). 
""" if not args: return m = RE_INSERT_VALUES.match(query) if m: - q_prefix = m.group(1) + q_prefix = m.group(1) % () q_values = m.group(2).rstrip() q_postfix = m.group(3) or '' assert q_values[0] == '(' and q_values[-1] == ')' @@ -247,7 +273,7 @@ class Cursor(object): return args def fetchone(self): - ''' Fetch the next row ''' + """Fetch the next row""" self._check_executed() if self._rows is None or self.rownumber >= len(self._rows): return None @@ -256,7 +282,7 @@ class Cursor(object): return result def fetchmany(self, size=None): - ''' Fetch several rows ''' + """Fetch several rows""" self._check_executed() if self._rows is None: return () @@ -266,7 +292,7 @@ class Cursor(object): return result def fetchall(self): - ''' Fetch all the rows ''' + """Fetch all the rows""" self._check_executed() if self._rows is None: return () @@ -307,14 +333,18 @@ class Cursor(object): self.description = result.description self.lastrowid = result.insert_id self._rows = result.rows + self._warnings_handled = False - if result.warning_count > 0: - self._show_warnings(conn) + if not self._defer_warnings: + self._show_warnings() - def _show_warnings(self, conn): - if self._result and self._result.has_next: + def _show_warnings(self): + if self._warnings_handled: return - ws = conn.show_warnings() + self._warnings_handled = True + if self._result and (self._result.has_next or not self._result.warning_count): + return + ws = self._get_db().show_warnings() if ws is None: return for w in ws: @@ -322,7 +352,7 @@ class Cursor(object): if PY2: if isinstance(msg, unicode): msg = msg.encode('utf-8', 'replace') - warnings.warn(str(msg), err.Warning, 4) + warnings.warn(err.Warning(*w[1:3]), stacklevel=4) def __iter__(self): return iter(self.fetchone, None) @@ -373,8 +403,8 @@ class SSCursor(Cursor): or for connections to remote servers over a slow network. Instead of copying every row of data into a buffer, this will fetch - rows as needed. The upside of this, is the client uses much less memory, - and rows are returned much faster when traveling over a slow network, + rows as needed. The upside of this is the client uses much less memory, + and rows are returned much faster when traveling over a slow network or if the result set is very big. There are limitations, though. The MySQL protocol doesn't support @@ -383,6 +413,8 @@ class SSCursor(Cursor): possible to scroll backwards, as only the current row is held in memory. 
""" + _defer_warnings = True + def _conv_row(self, row): return row @@ -411,14 +443,15 @@ class SSCursor(Cursor): return self._nextset(unbuffered=True) def read_next(self): - """ Read next row """ + """Read next row""" return self._conv_row(self._result._read_rowdata_packet_unbuffered()) def fetchone(self): - """ Fetch next row """ + """Fetch next row""" self._check_executed() row = self.read_next() if row is None: + self._show_warnings() return None self.rownumber += 1 return row @@ -443,7 +476,7 @@ class SSCursor(Cursor): return self.fetchall_unbuffered() def fetchmany(self, size=None): - """ Fetch many """ + """Fetch many""" self._check_executed() if size is None: size = self.arraysize @@ -452,6 +485,7 @@ class SSCursor(Cursor): for i in range_type(size): row = self.read_next() if row is None: + self._show_warnings() break rows.append(row) self.rownumber += 1 @@ -482,4 +516,4 @@ class SSCursor(Cursor): class SSDictCursor(DictCursorMixin, SSCursor): - """ An unbuffered cursor, which returns results as a dictionary """ + """An unbuffered cursor, which returns results as a dictionary""" diff --git a/server/www/packages/packages-common/pymysql/err.py b/server/www/packages/packages-common/pymysql/err.py index 9b6f24e..2486263 100644 --- a/server/www/packages/packages-common/pymysql/err.py +++ b/server/www/packages/packages-common/pymysql/err.py @@ -68,10 +68,12 @@ class NotSupportedError(DatabaseError): error_map = {} + def _map_error(exc, *errors): for error in errors: error_map[error] = exc + _map_error(ProgrammingError, ER.DB_CREATE_EXISTS, ER.SYNTAX_ERROR, ER.PARSE_ERROR, ER.NO_SUCH_TABLE, ER.WRONG_DB_NAME, ER.WRONG_TABLE_NAME, ER.FIELD_SPECIFIED_TWICE, @@ -89,32 +91,17 @@ _map_error(OperationalError, ER.DBACCESS_DENIED_ERROR, ER.ACCESS_DENIED_ERROR, ER.CON_COUNT_ERROR, ER.TABLEACCESS_DENIED_ERROR, ER.COLUMNACCESS_DENIED_ERROR) + del _map_error, ER -def _get_error_info(data): +def raise_mysql_exception(data): errno = struct.unpack(' -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""RSA module - -Module for calculating large primes, and RSA encryption, decryption, signing -and verification. Includes generating public and private keys. - -WARNING: this implementation does not use random padding, compression of the -cleartext input to prevent repetitions, or other common security improvements. -Use with care. - -If you want to have a more secure implementation, use the functions from the -``rsa.pkcs1`` module. 
- -""" - -__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly" -__date__ = "2015-07-29" -__version__ = '3.2' - -from rsa.key import newkeys, PrivateKey, PublicKey -from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \ - VerificationError - -# Do doctest if we're run directly -if __name__ == "__main__": - import doctest - doctest.testmod() - -__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey', - 'PrivateKey', 'DecryptionError', 'VerificationError'] - diff --git a/server/www/packages/packages-common/rsa/_compat.py b/server/www/packages/packages-common/rsa/_compat.py deleted file mode 100644 index 3c4eb81..0000000 --- a/server/www/packages/packages-common/rsa/_compat.py +++ /dev/null @@ -1,160 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Python compatibility wrappers.""" - - -from __future__ import absolute_import - -import sys -from struct import pack - -try: - MAX_INT = sys.maxsize -except AttributeError: - MAX_INT = sys.maxint - -MAX_INT64 = (1 << 63) - 1 -MAX_INT32 = (1 << 31) - 1 -MAX_INT16 = (1 << 15) - 1 - -# Determine the word size of the processor. -if MAX_INT == MAX_INT64: - # 64-bit processor. - MACHINE_WORD_SIZE = 64 -elif MAX_INT == MAX_INT32: - # 32-bit processor. - MACHINE_WORD_SIZE = 32 -else: - # Else we just assume 64-bit processor keeping up with modern times. - MACHINE_WORD_SIZE = 64 - - -try: - # < Python3 - unicode_type = unicode - have_python3 = False -except NameError: - # Python3. - unicode_type = str - have_python3 = True - -# Fake byte literals. -if str is unicode_type: - def byte_literal(s): - return s.encode('latin1') -else: - def byte_literal(s): - return s - -# ``long`` is no more. Do type detection using this instead. -try: - integer_types = (int, long) -except NameError: - integer_types = (int,) - -b = byte_literal - -try: - # Python 2.6 or higher. - bytes_type = bytes -except NameError: - # Python 2.5 - bytes_type = str - - -# To avoid calling b() multiple times in tight loops. -ZERO_BYTE = b('\x00') -EMPTY_BYTE = b('') - - -def is_bytes(obj): - """ - Determines whether the given value is a byte string. - - :param obj: - The value to test. - :returns: - ``True`` if ``value`` is a byte string; ``False`` otherwise. - """ - return isinstance(obj, bytes_type) - - -def is_integer(obj): - """ - Determines whether the given value is an integer. - - :param obj: - The value to test. - :returns: - ``True`` if ``value`` is an integer; ``False`` otherwise. - """ - return isinstance(obj, integer_types) - - -def byte(num): - """ - Converts a number between 0 and 255 (both inclusive) to a base-256 (byte) - representation. - - Use it as a replacement for ``chr`` where you are expecting a byte - because this will work on all current versions of Python:: - - :param num: - An unsigned integer between 0 and 255 (both inclusive). - :returns: - A single byte. 
- """ - return pack("B", num) - - -def get_word_alignment(num, force_arch=64, - _machine_word_size=MACHINE_WORD_SIZE): - """ - Returns alignment details for the given number based on the platform - Python is running on. - - :param num: - Unsigned integral number. - :param force_arch: - If you don't want to use 64-bit unsigned chunks, set this to - anything other than 64. 32-bit chunks will be preferred then. - Default 64 will be used when on a 64-bit machine. - :param _machine_word_size: - (Internal) The machine word size used for alignment. - :returns: - 4-tuple:: - - (word_bits, word_bytes, - max_uint, packing_format_type) - """ - max_uint64 = 0xffffffffffffffff - max_uint32 = 0xffffffff - max_uint16 = 0xffff - max_uint8 = 0xff - - if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32: - # 64-bit unsigned integer. - return 64, 8, max_uint64, "Q" - elif num > max_uint16: - # 32-bit unsigned integer - return 32, 4, max_uint32, "L" - elif num > max_uint8: - # 16-bit unsigned integer. - return 16, 2, max_uint16, "H" - else: - # 8-bit unsigned integer. - return 8, 1, max_uint8, "B" diff --git a/server/www/packages/packages-common/rsa/_version133.py b/server/www/packages/packages-common/rsa/_version133.py deleted file mode 100644 index 230a03c..0000000 --- a/server/www/packages/packages-common/rsa/_version133.py +++ /dev/null @@ -1,442 +0,0 @@ -"""RSA module -pri = k[1] //Private part of keys d,p,q - -Module for calculating large primes, and RSA encryption, decryption, -signing and verification. Includes generating public and private keys. - -WARNING: this code implements the mathematics of RSA. It is not suitable for -real-world secure cryptography purposes. It has not been reviewed by a security -expert. It does not include padding of data. There are many ways in which the -output of this module, when used without any modification, can be sucessfully -attacked. -""" - -__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer" -__date__ = "2010-02-05" -__version__ = '1.3.3' - -# NOTE: Python's modulo can return negative numbers. We compensate for -# this behaviour using the abs() function - -from cPickle import dumps, loads -import base64 -import math -import os -import random -import sys -import types -import zlib - -from rsa._compat import byte - -# Display a warning that this insecure version is imported. 
-import warnings -warnings.warn('Insecure version of the RSA module is imported as %s, be careful' - % __name__) - -def gcd(p, q): - """Returns the greatest common divisor of p and q - - - >>> gcd(42, 6) - 6 - """ - if p>> (128*256 + 64)*256 + + 15 - 8405007 - >>> l = [128, 64, 15] - >>> bytes2int(l) - 8405007 - """ - - if not (type(bytes) is types.ListType or type(bytes) is types.StringType): - raise TypeError("You must pass a string or a list") - - # Convert byte stream to integer - integer = 0 - for byte in bytes: - integer *= 256 - if type(byte) is types.StringType: byte = ord(byte) - integer += byte - - return integer - -def int2bytes(number): - """Converts a number to a string of bytes - - >>> bytes2int(int2bytes(123456789)) - 123456789 - """ - - if not (type(number) is types.LongType or type(number) is types.IntType): - raise TypeError("You must pass a long or an int") - - string = "" - - while number > 0: - string = "%s%s" % (byte(number & 0xFF), string) - number /= 256 - - return string - -def fast_exponentiation(a, p, n): - """Calculates r = a^p mod n - """ - result = a % n - remainders = [] - while p != 1: - remainders.append(p & 1) - p = p >> 1 - while remainders: - rem = remainders.pop() - result = ((a ** rem) * result ** 2) % n - return result - -def read_random_int(nbits): - """Reads a random integer of approximately nbits bits rounded up - to whole bytes""" - - nbytes = ceil(nbits/8.) - randomdata = os.urandom(nbytes) - return bytes2int(randomdata) - -def ceil(x): - """ceil(x) -> int(math.ceil(x))""" - - return int(math.ceil(x)) - -def randint(minvalue, maxvalue): - """Returns a random integer x with minvalue <= x <= maxvalue""" - - # Safety - get a lot of random data even if the range is fairly - # small - min_nbits = 32 - - # The range of the random numbers we need to generate - range = maxvalue - minvalue - - # Which is this number of bytes - rangebytes = ceil(math.log(range, 2) / 8.) - - # Convert to bits, but make sure it's always at least min_nbits*2 - rangebits = max(rangebytes * 8, min_nbits * 2) - - # Take a random number of bits between min_nbits and rangebits - nbits = random.randint(min_nbits, rangebits) - - return (read_random_int(nbits) % range) + minvalue - -def fermat_little_theorem(p): - """Returns 1 if p may be prime, and something else if p definitely - is not prime""" - - a = randint(1, p-1) - return fast_exponentiation(a, p-1, p) - -def jacobi(a, b): - """Calculates the value of the Jacobi symbol (a/b) - """ - - if a % b == 0: - return 0 - result = 1 - while a > 1: - if a & 1: - if ((a-1)*(b-1) >> 2) & 1: - result = -result - b, a = a, b % a - else: - if ((b ** 2 - 1) >> 3) & 1: - result = -result - a = a >> 1 - return result - -def jacobi_witness(x, n): - """Returns False if n is an Euler pseudo-prime with base x, and - True otherwise. - """ - - j = jacobi(x, n) % n - f = fast_exponentiation(x, (n-1)/2, n) - - if j == f: return False - return True - -def randomized_primality_testing(n, k): - """Calculates whether n is composite (which is always correct) or - prime (which is incorrect with error probability 2**-k) - - Returns False if the number if composite, and True if it's - probably prime. - """ - - q = 0.5 # Property of the jacobi_witness function - - # t = int(math.ceil(k / math.log(1/q, 2))) - t = ceil(k / math.log(1/q, 2)) - for i in range(t+1): - x = randint(1, n-1) - if jacobi_witness(x, n): return False - - return True - -def is_prime(number): - """Returns True if the number is prime, and False otherwise. 
- - >>> is_prime(42) - 0 - >>> is_prime(41) - 1 - """ - - """ - if not fermat_little_theorem(number) == 1: - # Not prime, according to Fermat's little theorem - return False - """ - - if randomized_primality_testing(number, 5): - # Prime, according to Jacobi - return True - - # Not prime - return False - - -def getprime(nbits): - """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In - other words: nbits is rounded up to whole bytes. - - >>> p = getprime(8) - >>> is_prime(p-1) - 0 - >>> is_prime(p) - 1 - >>> is_prime(p+1) - 0 - """ - - nbytes = int(math.ceil(nbits/8.)) - - while True: - integer = read_random_int(nbits) - - # Make sure it's odd - integer |= 1 - - # Test for primeness - if is_prime(integer): break - - # Retry if not prime - - return integer - -def are_relatively_prime(a, b): - """Returns True if a and b are relatively prime, and False if they - are not. - - >>> are_relatively_prime(2, 3) - 1 - >>> are_relatively_prime(2, 4) - 0 - """ - - d = gcd(a, b) - return (d == 1) - -def find_p_q(nbits): - """Returns a tuple of two different primes of nbits bits""" - - p = getprime(nbits) - while True: - q = getprime(nbits) - if not q == p: break - - return (p, q) - -def extended_euclid_gcd(a, b): - """Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb - """ - - if b == 0: - return (a, 1, 0) - - q = abs(a % b) - r = long(a / b) - (d, k, l) = extended_euclid_gcd(b, q) - - return (d, l, k - l*r) - -# Main function: calculate encryption and decryption keys -def calculate_keys(p, q, nbits): - """Calculates an encryption and a decryption key for p and q, and - returns them as a tuple (e, d)""" - - n = p * q - phi_n = (p-1) * (q-1) - - while True: - # Make sure e has enough bits so we ensure "wrapping" through - # modulo n - e = getprime(max(8, nbits/2)) - if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break - - (d, i, j) = extended_euclid_gcd(e, phi_n) - - if not d == 1: - raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) - - if not (e * i) % phi_n == 1: - raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) - - return (e, i) - - -def gen_keys(nbits): - """Generate RSA keys of nbits bits. Returns (p, q, e, d). - - Note: this can take a long time, depending on the key size. - """ - - while True: - (p, q) = find_p_q(nbits) - (e, d) = calculate_keys(p, q, nbits) - - # For some reason, d is sometimes negative. We don't know how - # to fix it (yet), so we keep trying until everything is shiny - if d > 0: break - - return (p, q, e, d) - -def gen_pubpriv_keys(nbits): - """Generates public and private keys, and returns them as (pub, - priv). - - The public key consists of a dict {e: ..., , n: ....). The private - key consists of a dict {d: ...., p: ...., q: ....). 
- """ - - (p, q, e, d) = gen_keys(nbits) - - return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) - -def encrypt_int(message, ekey, n): - """Encrypts a message using encryption key 'ekey', working modulo - n""" - - if type(message) is types.IntType: - return encrypt_int(long(message), ekey, n) - - if not type(message) is types.LongType: - raise TypeError("You must pass a long or an int") - - if message > 0 and \ - math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)): - raise OverflowError("The message is too long") - - return fast_exponentiation(message, ekey, n) - -def decrypt_int(cyphertext, dkey, n): - """Decrypts a cypher text using the decryption key 'dkey', working - modulo n""" - - return encrypt_int(cyphertext, dkey, n) - -def sign_int(message, dkey, n): - """Signs 'message' using key 'dkey', working modulo n""" - - return decrypt_int(message, dkey, n) - -def verify_int(signed, ekey, n): - """verifies 'signed' using key 'ekey', working modulo n""" - - return encrypt_int(signed, ekey, n) - -def picklechops(chops): - """Pickles and base64encodes it's argument chops""" - - value = zlib.compress(dumps(chops)) - encoded = base64.encodestring(value) - return encoded.strip() - -def unpicklechops(string): - """base64decodes and unpickes it's argument string into chops""" - - return loads(zlib.decompress(base64.decodestring(string))) - -def chopstring(message, key, n, funcref): - """Splits 'message' into chops that are at most as long as n, - converts these into integers, and calls funcref(integer, key, n) - for each chop. - - Used by 'encrypt' and 'sign'. - """ - - msglen = len(message) - mbits = msglen * 8 - nbits = int(math.floor(math.log(n, 2))) - nbytes = nbits / 8 - blocks = msglen / nbytes - - if msglen % nbytes > 0: - blocks += 1 - - cypher = [] - - for bindex in range(blocks): - offset = bindex * nbytes - block = message[offset:offset+nbytes] - value = bytes2int(block) - cypher.append(funcref(value, key, n)) - - return picklechops(cypher) - -def gluechops(chops, key, n, funcref): - """Glues chops back together into a string. calls - funcref(integer, key, n) for each chop. - - Used by 'decrypt' and 'verify'. - """ - message = "" - - chops = unpicklechops(chops) - - for cpart in chops: - mpart = funcref(cpart, key, n) - message += int2bytes(mpart) - - return message - -def encrypt(message, key): - """Encrypts a string 'message' with the public key 'key'""" - - return chopstring(message, key['e'], key['n'], encrypt_int) - -def sign(message, key): - """Signs a string 'message' with the private key 'key'""" - - return chopstring(message, key['d'], key['p']*key['q'], decrypt_int) - -def decrypt(cypher, key): - """Decrypts a cypher with the private key 'key'""" - - return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) - -def verify(cypher, key): - """Verifies a cypher with the public key 'key'""" - - return gluechops(cypher, key['e'], key['n'], encrypt_int) - -# Do doctest if we're not imported -if __name__ == "__main__": - import doctest - doctest.testmod() - -__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"] - diff --git a/server/www/packages/packages-common/rsa/_version200.py b/server/www/packages/packages-common/rsa/_version200.py deleted file mode 100644 index f915653..0000000 --- a/server/www/packages/packages-common/rsa/_version200.py +++ /dev/null @@ -1,529 +0,0 @@ -"""RSA module - -Module for calculating large primes, and RSA encryption, decryption, -signing and verification. Includes generating public and private keys. 
- -WARNING: this implementation does not use random padding, compression of the -cleartext input to prevent repetitions, or other common security improvements. -Use with care. - -""" - -__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead" -__date__ = "2010-02-08" -__version__ = '2.0' - -import math -import os -import random -import sys -import types -from rsa._compat import byte - -# Display a warning that this insecure version is imported. -import warnings -warnings.warn('Insecure version of the RSA module is imported as %s' % __name__) - - -def bit_size(number): - """Returns the number of bits required to hold a specific long number""" - - return int(math.ceil(math.log(number,2))) - -def gcd(p, q): - """Returns the greatest common divisor of p and q - >>> gcd(48, 180) - 12 - """ - # Iterateive Version is faster and uses much less stack space - while q != 0: - if p < q: (p,q) = (q,p) - (p,q) = (q, p % q) - return p - - -def bytes2int(bytes): - """Converts a list of bytes or a string to an integer - - >>> (((128 * 256) + 64) * 256) + 15 - 8405007 - >>> l = [128, 64, 15] - >>> bytes2int(l) #same as bytes2int('\x80@\x0f') - 8405007 - """ - - if not (type(bytes) is types.ListType or type(bytes) is types.StringType): - raise TypeError("You must pass a string or a list") - - # Convert byte stream to integer - integer = 0 - for byte in bytes: - integer *= 256 - if type(byte) is types.StringType: byte = ord(byte) - integer += byte - - return integer - -def int2bytes(number): - """ - Converts a number to a string of bytes - """ - - if not (type(number) is types.LongType or type(number) is types.IntType): - raise TypeError("You must pass a long or an int") - - string = "" - - while number > 0: - string = "%s%s" % (byte(number & 0xFF), string) - number /= 256 - - return string - -def to64(number): - """Converts a number in the range of 0 to 63 into base 64 digit - character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'. - - >>> to64(10) - 'A' - """ - - if not (type(number) is types.LongType or type(number) is types.IntType): - raise TypeError("You must pass a long or an int") - - if 0 <= number <= 9: #00-09 translates to '0' - '9' - return byte(number + 48) - - if 10 <= number <= 35: - return byte(number + 55) #10-35 translates to 'A' - 'Z' - - if 36 <= number <= 61: - return byte(number + 61) #36-61 translates to 'a' - 'z' - - if number == 62: # 62 translates to '-' (minus) - return byte(45) - - if number == 63: # 63 translates to '_' (underscore) - return byte(95) - - raise ValueError('Invalid Base64 value: %i' % number) - - -def from64(number): - """Converts an ordinal character value in the range of - 0-9,A-Z,a-z,-,_ to a number in the range of 0-63. - - >>> from64(49) - 1 - """ - - if not (type(number) is types.LongType or type(number) is types.IntType): - raise TypeError("You must pass a long or an int") - - if 48 <= number <= 57: #ord('0') - ord('9') translates to 0-9 - return(number - 48) - - if 65 <= number <= 90: #ord('A') - ord('Z') translates to 10-35 - return(number - 55) - - if 97 <= number <= 122: #ord('a') - ord('z') translates to 36-61 - return(number - 61) - - if number == 45: #ord('-') translates to 62 - return(62) - - if number == 95: #ord('_') translates to 63 - return(63) - - raise ValueError('Invalid Base64 value: %i' % number) - - -def int2str64(number): - """Converts a number to a string of base64 encoded characters in - the range of '0'-'9','A'-'Z,'a'-'z','-','_'. 
- - >>> int2str64(123456789) - '7MyqL' - """ - - if not (type(number) is types.LongType or type(number) is types.IntType): - raise TypeError("You must pass a long or an int") - - string = "" - - while number > 0: - string = "%s%s" % (to64(number & 0x3F), string) - number /= 64 - - return string - - -def str642int(string): - """Converts a base64 encoded string into an integer. - The chars of this string in in the range '0'-'9','A'-'Z','a'-'z','-','_' - - >>> str642int('7MyqL') - 123456789 - """ - - if not (type(string) is types.ListType or type(string) is types.StringType): - raise TypeError("You must pass a string or a list") - - integer = 0 - for byte in string: - integer *= 64 - if type(byte) is types.StringType: byte = ord(byte) - integer += from64(byte) - - return integer - -def read_random_int(nbits): - """Reads a random integer of approximately nbits bits rounded up - to whole bytes""" - - nbytes = int(math.ceil(nbits/8.)) - randomdata = os.urandom(nbytes) - return bytes2int(randomdata) - -def randint(minvalue, maxvalue): - """Returns a random integer x with minvalue <= x <= maxvalue""" - - # Safety - get a lot of random data even if the range is fairly - # small - min_nbits = 32 - - # The range of the random numbers we need to generate - range = (maxvalue - minvalue) + 1 - - # Which is this number of bytes - rangebytes = ((bit_size(range) + 7) / 8) - - # Convert to bits, but make sure it's always at least min_nbits*2 - rangebits = max(rangebytes * 8, min_nbits * 2) - - # Take a random number of bits between min_nbits and rangebits - nbits = random.randint(min_nbits, rangebits) - - return (read_random_int(nbits) % range) + minvalue - -def jacobi(a, b): - """Calculates the value of the Jacobi symbol (a/b) - where both a and b are positive integers, and b is odd - """ - - if a == 0: return 0 - result = 1 - while a > 1: - if a & 1: - if ((a-1)*(b-1) >> 2) & 1: - result = -result - a, b = b % a, a - else: - if (((b * b) - 1) >> 3) & 1: - result = -result - a >>= 1 - if a == 0: return 0 - return result - -def jacobi_witness(x, n): - """Returns False if n is an Euler pseudo-prime with base x, and - True otherwise. - """ - - j = jacobi(x, n) % n - f = pow(x, (n-1)/2, n) - - if j == f: return False - return True - -def randomized_primality_testing(n, k): - """Calculates whether n is composite (which is always correct) or - prime (which is incorrect with error probability 2**-k) - - Returns False if the number is composite, and True if it's - probably prime. - """ - - # 50% of Jacobi-witnesses can report compositness of non-prime numbers - - for i in range(k): - x = randint(1, n-1) - if jacobi_witness(x, n): return False - - return True - -def is_prime(number): - """Returns True if the number is prime, and False otherwise. - - >>> is_prime(42) - 0 - >>> is_prime(41) - 1 - """ - - if randomized_primality_testing(number, 6): - # Prime, according to Jacobi - return True - - # Not prime - return False - - -def getprime(nbits): - """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In - other words: nbits is rounded up to whole bytes. - - >>> p = getprime(8) - >>> is_prime(p-1) - 0 - >>> is_prime(p) - 1 - >>> is_prime(p+1) - 0 - """ - - while True: - integer = read_random_int(nbits) - - # Make sure it's odd - integer |= 1 - - # Test for primeness - if is_prime(integer): break - - # Retry if not prime - - return integer - -def are_relatively_prime(a, b): - """Returns True if a and b are relatively prime, and False if they - are not. 
- - >>> are_relatively_prime(2, 3) - 1 - >>> are_relatively_prime(2, 4) - 0 - """ - - d = gcd(a, b) - return (d == 1) - -def find_p_q(nbits): - """Returns a tuple of two different primes of nbits bits""" - pbits = nbits + (nbits/16) #Make sure that p and q aren't too close - qbits = nbits - (nbits/16) #or the factoring programs can factor n - p = getprime(pbits) - while True: - q = getprime(qbits) - #Make sure p and q are different. - if not q == p: break - return (p, q) - -def extended_gcd(a, b): - """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb - """ - # r = gcd(a,b) i = multiplicitive inverse of a mod b - # or j = multiplicitive inverse of b mod a - # Neg return values for i or j are made positive mod b or a respectively - # Iterateive Version is faster and uses much less stack space - x = 0 - y = 1 - lx = 1 - ly = 0 - oa = a #Remember original a/b to remove - ob = b #negative values from return results - while b != 0: - q = long(a/b) - (a, b) = (b, a % b) - (x, lx) = ((lx - (q * x)),x) - (y, ly) = ((ly - (q * y)),y) - if (lx < 0): lx += ob #If neg wrap modulo orignal b - if (ly < 0): ly += oa #If neg wrap modulo orignal a - return (a, lx, ly) #Return only positive values - -# Main function: calculate encryption and decryption keys -def calculate_keys(p, q, nbits): - """Calculates an encryption and a decryption key for p and q, and - returns them as a tuple (e, d)""" - - n = p * q - phi_n = (p-1) * (q-1) - - while True: - # Make sure e has enough bits so we ensure "wrapping" through - # modulo n - e = max(65537,getprime(nbits/4)) - if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break - - (d, i, j) = extended_gcd(e, phi_n) - - if not d == 1: - raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) - if (i < 0): - raise Exception("New extended_gcd shouldn't return negative values") - if not (e * i) % phi_n == 1: - raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) - - return (e, i) - - -def gen_keys(nbits): - """Generate RSA keys of nbits bits. Returns (p, q, e, d). - - Note: this can take a long time, depending on the key size. - """ - - (p, q) = find_p_q(nbits) - (e, d) = calculate_keys(p, q, nbits) - - return (p, q, e, d) - -def newkeys(nbits): - """Generates public and private keys, and returns them as (pub, - priv). - - The public key consists of a dict {e: ..., , n: ....). The private - key consists of a dict {d: ...., p: ...., q: ....). 
- """ - nbits = max(9,nbits) # Don't let nbits go below 9 bits - (p, q, e, d) = gen_keys(nbits) - - return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) - -def encrypt_int(message, ekey, n): - """Encrypts a message using encryption key 'ekey', working modulo n""" - - if type(message) is types.IntType: - message = long(message) - - if not type(message) is types.LongType: - raise TypeError("You must pass a long or int") - - if message < 0 or message > n: - raise OverflowError("The message is too long") - - #Note: Bit exponents start at zero (bit counts start at 1) this is correct - safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) - message += (1 << safebit) #add safebit to ensure folding - - return pow(message, ekey, n) - -def decrypt_int(cyphertext, dkey, n): - """Decrypts a cypher text using the decryption key 'dkey', working - modulo n""" - - message = pow(cyphertext, dkey, n) - - safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) - message -= (1 << safebit) #remove safebit before decode - - return message - -def encode64chops(chops): - """base64encodes chops and combines them into a ',' delimited string""" - - chips = [] #chips are character chops - - for value in chops: - chips.append(int2str64(value)) - - #delimit chops with comma - encoded = ','.join(chips) - - return encoded - -def decode64chops(string): - """base64decodes and makes a ',' delimited string into chops""" - - chips = string.split(',') #split chops at commas - - chops = [] - - for string in chips: #make char chops (chips) into chops - chops.append(str642int(string)) - - return chops - -def chopstring(message, key, n, funcref): - """Chops the 'message' into integers that fit into n, - leaving room for a safebit to be added to ensure that all - messages fold during exponentiation. The MSB of the number n - is not independant modulo n (setting it could cause overflow), so - use the next lower bit for the safebit. Therefore reserve 2-bits - in the number n for non-data bits. Calls specified encryption - function for each chop. - - Used by 'encrypt' and 'sign'. - """ - - msglen = len(message) - mbits = msglen * 8 - #Set aside 2-bits so setting of safebit won't overflow modulo n. - nbits = bit_size(n) - 2 # leave room for safebit - nbytes = nbits / 8 - blocks = msglen / nbytes - - if msglen % nbytes > 0: - blocks += 1 - - cypher = [] - - for bindex in range(blocks): - offset = bindex * nbytes - block = message[offset:offset+nbytes] - value = bytes2int(block) - cypher.append(funcref(value, key, n)) - - return encode64chops(cypher) #Encode encrypted ints to base64 strings - -def gluechops(string, key, n, funcref): - """Glues chops back together into a string. calls - funcref(integer, key, n) for each chop. - - Used by 'decrypt' and 'verify'. 
- """ - message = "" - - chops = decode64chops(string) #Decode base64 strings into integer chops - - for cpart in chops: - mpart = funcref(cpart, key, n) #Decrypt each chop - message += int2bytes(mpart) #Combine decrypted strings into a msg - - return message - -def encrypt(message, key): - """Encrypts a string 'message' with the public key 'key'""" - if 'n' not in key: - raise Exception("You must use the public key with encrypt") - - return chopstring(message, key['e'], key['n'], encrypt_int) - -def sign(message, key): - """Signs a string 'message' with the private key 'key'""" - if 'p' not in key: - raise Exception("You must use the private key with sign") - - return chopstring(message, key['d'], key['p']*key['q'], encrypt_int) - -def decrypt(cypher, key): - """Decrypts a string 'cypher' with the private key 'key'""" - if 'p' not in key: - raise Exception("You must use the private key with decrypt") - - return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) - -def verify(cypher, key): - """Verifies a string 'cypher' with the public key 'key'""" - if 'n' not in key: - raise Exception("You must use the public key with verify") - - return gluechops(cypher, key['e'], key['n'], decrypt_int) - -# Do doctest if we're not imported -if __name__ == "__main__": - import doctest - doctest.testmod() - -__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"] - diff --git a/server/www/packages/packages-common/rsa/asn1.py b/server/www/packages/packages-common/rsa/asn1.py deleted file mode 100644 index 706e6cf..0000000 --- a/server/www/packages/packages-common/rsa/asn1.py +++ /dev/null @@ -1,35 +0,0 @@ -'''ASN.1 definitions. - -Not all ASN.1-handling code use these definitions, but when it does, they should be here. -''' - -from pyasn1.type import univ, namedtype, tag - -class PubKeyHeader(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('oid', univ.ObjectIdentifier()), - namedtype.NamedType('parameters', univ.Null()), - ) - -class OpenSSLPubKey(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('header', PubKeyHeader()), - - # This little hack (the implicit tag) allows us to get a Bit String as Octet String - namedtype.NamedType('key', univ.OctetString().subtype( - implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))), - ) - - -class AsnPubKey(univ.Sequence): - '''ASN.1 contents of DER encoded public key: - - RSAPublicKey ::= SEQUENCE { - modulus INTEGER, -- n - publicExponent INTEGER, -- e - ''' - - componentType = namedtype.NamedTypes( - namedtype.NamedType('modulus', univ.Integer()), - namedtype.NamedType('publicExponent', univ.Integer()), - ) diff --git a/server/www/packages/packages-common/rsa/bigfile.py b/server/www/packages/packages-common/rsa/bigfile.py deleted file mode 100644 index 516cf56..0000000 --- a/server/www/packages/packages-common/rsa/bigfile.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -'''Large file support - - - break a file into smaller blocks, and encrypt them, and store the - encrypted blocks in another file. - - - take such an encrypted files, decrypt its blocks, and reconstruct the - original file. - -The encrypted file format is as follows, where || denotes byte concatenation: - - FILE := VERSION || BLOCK || BLOCK ... - - BLOCK := LENGTH || DATA - - LENGTH := varint-encoded length of the subsequent data. Varint comes from - Google Protobuf, and encodes an integer into a variable number of bytes. - Each byte uses the 7 lowest bits to encode the value. The highest bit set - to 1 indicates the next byte is also part of the varint. The last byte will - have this bit set to 0. - -This file format is called the VARBLOCK format, in line with the varint format -used to denote the block sizes. - -''' - -from rsa import key, common, pkcs1, varblock -from rsa._compat import byte - -def encrypt_bigfile(infile, outfile, pub_key): - '''Encrypts a file, writing it to 'outfile' in VARBLOCK format. - - :param infile: file-like object to read the cleartext from - :param outfile: file-like object to write the crypto in VARBLOCK format to - :param pub_key: :py:class:`rsa.PublicKey` to encrypt with - - ''' - - if not isinstance(pub_key, key.PublicKey): - raise TypeError('Public key required, but got %r' % pub_key) - - key_bytes = common.bit_size(pub_key.n) // 8 - blocksize = key_bytes - 11 # keep space for PKCS#1 padding - - # Write the version number to the VARBLOCK file - outfile.write(byte(varblock.VARBLOCK_VERSION)) - - # Encrypt and write each block - for block in varblock.yield_fixedblocks(infile, blocksize): - crypto = pkcs1.encrypt(block, pub_key) - - varblock.write_varint(outfile, len(crypto)) - outfile.write(crypto) - -def decrypt_bigfile(infile, outfile, priv_key): - '''Decrypts an encrypted VARBLOCK file, writing it to 'outfile' - - :param infile: file-like object to read the crypto in VARBLOCK format from - :param outfile: file-like object to write the cleartext to - :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with - - ''' - - if not isinstance(priv_key, key.PrivateKey): - raise TypeError('Private key required, but got %r' % priv_key) - - for block in varblock.yield_varblocks(infile): - cleartext = pkcs1.decrypt(block, priv_key) - outfile.write(cleartext) - -__all__ = ['encrypt_bigfile', 'decrypt_bigfile'] - diff --git a/server/www/packages/packages-common/rsa/cli.py b/server/www/packages/packages-common/rsa/cli.py deleted file mode 100644 index 527cc49..0000000 --- a/server/www/packages/packages-common/rsa/cli.py +++ /dev/null @@ -1,379 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Commandline scripts. - -These scripts are called by the executables defined in setup.py. 
-''' - -from __future__ import with_statement, print_function - -import abc -import sys -from optparse import OptionParser - -import rsa -import rsa.bigfile -import rsa.pkcs1 - -HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys()) - -def keygen(): - '''Key generator.''' - - # Parse the CLI options - parser = OptionParser(usage='usage: %prog [options] keysize', - description='Generates a new RSA keypair of "keysize" bits.') - - parser.add_option('--pubout', type='string', - help='Output filename for the public key. The public key is ' - 'not saved if this option is not present. You can use ' - 'pyrsa-priv2pub to create the public key file later.') - - parser.add_option('-o', '--out', type='string', - help='Output filename for the private key. The key is ' - 'written to stdout if this option is not present.') - - parser.add_option('--form', - help='key format of the private and public keys - default PEM', - choices=('PEM', 'DER'), default='PEM') - - (cli, cli_args) = parser.parse_args(sys.argv[1:]) - - if len(cli_args) != 1: - parser.print_help() - raise SystemExit(1) - - try: - keysize = int(cli_args[0]) - except ValueError: - parser.print_help() - print('Not a valid number: %s' % cli_args[0], file=sys.stderr) - raise SystemExit(1) - - print('Generating %i-bit key' % keysize, file=sys.stderr) - (pub_key, priv_key) = rsa.newkeys(keysize) - - - # Save public key - if cli.pubout: - print('Writing public key to %s' % cli.pubout, file=sys.stderr) - data = pub_key.save_pkcs1(format=cli.form) - with open(cli.pubout, 'wb') as outfile: - outfile.write(data) - - # Save private key - data = priv_key.save_pkcs1(format=cli.form) - - if cli.out: - print('Writing private key to %s' % cli.out, file=sys.stderr) - with open(cli.out, 'wb') as outfile: - outfile.write(data) - else: - print('Writing private key to stdout', file=sys.stderr) - sys.stdout.write(data) - - -class CryptoOperation(object): - '''CLI callable that operates with input, output, and a key.''' - - __metaclass__ = abc.ABCMeta - - keyname = 'public' # or 'private' - usage = 'usage: %%prog [options] %(keyname)s_key' - description = None - operation = 'decrypt' - operation_past = 'decrypted' - operation_progressive = 'decrypting' - input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \ - 'not specified.' - output_help = 'Name of the file to write the %(operation_past)s file ' \ - 'to. Written to stdout if this option is not present.' - expected_cli_args = 1 - has_output = True - - key_class = rsa.PublicKey - - def __init__(self): - self.usage = self.usage % self.__class__.__dict__ - self.input_help = self.input_help % self.__class__.__dict__ - self.output_help = self.output_help % self.__class__.__dict__ - - @abc.abstractmethod - def perform_operation(self, indata, key, cli_args=None): - '''Performs the program's operation. - - Implement in a subclass. - - :returns: the data to write to the output. 
- ''' - - def __call__(self): - '''Runs the program.''' - - (cli, cli_args) = self.parse_cli() - - key = self.read_key(cli_args[0], cli.keyform) - - indata = self.read_infile(cli.input) - - print(self.operation_progressive.title(), file=sys.stderr) - outdata = self.perform_operation(indata, key, cli_args) - - if self.has_output: - self.write_outfile(outdata, cli.output) - - def parse_cli(self): - '''Parse the CLI options - - :returns: (cli_opts, cli_args) - ''' - - parser = OptionParser(usage=self.usage, description=self.description) - - parser.add_option('-i', '--input', type='string', help=self.input_help) - - if self.has_output: - parser.add_option('-o', '--output', type='string', help=self.output_help) - - parser.add_option('--keyform', - help='Key format of the %s key - default PEM' % self.keyname, - choices=('PEM', 'DER'), default='PEM') - - (cli, cli_args) = parser.parse_args(sys.argv[1:]) - - if len(cli_args) != self.expected_cli_args: - parser.print_help() - raise SystemExit(1) - - return (cli, cli_args) - - def read_key(self, filename, keyform): - '''Reads a public or private key.''' - - print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr) - with open(filename, 'rb') as keyfile: - keydata = keyfile.read() - - return self.key_class.load_pkcs1(keydata, keyform) - - def read_infile(self, inname): - '''Read the input file''' - - if inname: - print('Reading input from %s' % inname, file=sys.stderr) - with open(inname, 'rb') as infile: - return infile.read() - - print('Reading input from stdin', file=sys.stderr) - return sys.stdin.read() - - def write_outfile(self, outdata, outname): - '''Write the output file''' - - if outname: - print('Writing output to %s' % outname, file=sys.stderr) - with open(outname, 'wb') as outfile: - outfile.write(outdata) - else: - print('Writing output to stdout', file=sys.stderr) - sys.stdout.write(outdata) - -class EncryptOperation(CryptoOperation): - '''Encrypts a file.''' - - keyname = 'public' - description = ('Encrypts a file. The file must be shorter than the key ' - 'length in order to be encrypted. For larger files, use the ' - 'pyrsa-encrypt-bigfile command.') - operation = 'encrypt' - operation_past = 'encrypted' - operation_progressive = 'encrypting' - - - def perform_operation(self, indata, pub_key, cli_args=None): - '''Encrypts files.''' - - return rsa.encrypt(indata, pub_key) - -class DecryptOperation(CryptoOperation): - '''Decrypts a file.''' - - keyname = 'private' - description = ('Decrypts a file. The original file must be shorter than ' - 'the key length in order to have been encrypted. For larger ' - 'files, use the pyrsa-decrypt-bigfile command.') - operation = 'decrypt' - operation_past = 'decrypted' - operation_progressive = 'decrypting' - key_class = rsa.PrivateKey - - def perform_operation(self, indata, priv_key, cli_args=None): - '''Decrypts files.''' - - return rsa.decrypt(indata, priv_key) - -class SignOperation(CryptoOperation): - '''Signs a file.''' - - keyname = 'private' - usage = 'usage: %%prog [options] private_key hash_method' - description = ('Signs a file, outputs the signature. Choose the hash ' - 'method from %s' % ', '.join(HASH_METHODS)) - operation = 'sign' - operation_past = 'signature' - operation_progressive = 'Signing' - key_class = rsa.PrivateKey - expected_cli_args = 2 - - output_help = ('Name of the file to write the signature to. 
Written ' - 'to stdout if this option is not present.') - - def perform_operation(self, indata, priv_key, cli_args): - '''Decrypts files.''' - - hash_method = cli_args[1] - if hash_method not in HASH_METHODS: - raise SystemExit('Invalid hash method, choose one of %s' % - ', '.join(HASH_METHODS)) - - return rsa.sign(indata, priv_key, hash_method) - -class VerifyOperation(CryptoOperation): - '''Verify a signature.''' - - keyname = 'public' - usage = 'usage: %%prog [options] public_key signature_file' - description = ('Verifies a signature, exits with status 0 upon success, ' - 'prints an error message and exits with status 1 upon error.') - operation = 'verify' - operation_past = 'verified' - operation_progressive = 'Verifying' - key_class = rsa.PublicKey - expected_cli_args = 2 - has_output = False - - def perform_operation(self, indata, pub_key, cli_args): - '''Decrypts files.''' - - signature_file = cli_args[1] - - with open(signature_file, 'rb') as sigfile: - signature = sigfile.read() - - try: - rsa.verify(indata, signature, pub_key) - except rsa.VerificationError: - raise SystemExit('Verification failed.') - - print('Verification OK', file=sys.stderr) - - -class BigfileOperation(CryptoOperation): - '''CryptoOperation that doesn't read the entire file into memory.''' - - def __init__(self): - CryptoOperation.__init__(self) - - self.file_objects = [] - - def __del__(self): - '''Closes any open file handles.''' - - for fobj in self.file_objects: - fobj.close() - - def __call__(self): - '''Runs the program.''' - - (cli, cli_args) = self.parse_cli() - - key = self.read_key(cli_args[0], cli.keyform) - - # Get the file handles - infile = self.get_infile(cli.input) - outfile = self.get_outfile(cli.output) - - # Call the operation - print(self.operation_progressive.title(), file=sys.stderr) - self.perform_operation(infile, outfile, key, cli_args) - - def get_infile(self, inname): - '''Returns the input file object''' - - if inname: - print('Reading input from %s' % inname, file=sys.stderr) - fobj = open(inname, 'rb') - self.file_objects.append(fobj) - else: - print('Reading input from stdin', file=sys.stderr) - fobj = sys.stdin - - return fobj - - def get_outfile(self, outname): - '''Returns the output file object''' - - if outname: - print('Will write output to %s' % outname, file=sys.stderr) - fobj = open(outname, 'wb') - self.file_objects.append(fobj) - else: - print('Will write output to stdout', file=sys.stderr) - fobj = sys.stdout - - return fobj - -class EncryptBigfileOperation(BigfileOperation): - '''Encrypts a file to VARBLOCK format.''' - - keyname = 'public' - description = ('Encrypts a file to an encrypted VARBLOCK file. 
The file ' - 'can be larger than the key length, but the output file is only ' - 'compatible with Python-RSA.') - operation = 'encrypt' - operation_past = 'encrypted' - operation_progressive = 'encrypting' - - def perform_operation(self, infile, outfile, pub_key, cli_args=None): - '''Encrypts files to VARBLOCK.''' - - return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key) - -class DecryptBigfileOperation(BigfileOperation): - '''Decrypts a file in VARBLOCK format.''' - - keyname = 'private' - description = ('Decrypts an encrypted VARBLOCK file that was encrypted ' - 'with pyrsa-encrypt-bigfile') - operation = 'decrypt' - operation_past = 'decrypted' - operation_progressive = 'decrypting' - key_class = rsa.PrivateKey - - def perform_operation(self, infile, outfile, priv_key, cli_args=None): - '''Decrypts a VARBLOCK file.''' - - return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key) - - -encrypt = EncryptOperation() -decrypt = DecryptOperation() -sign = SignOperation() -verify = VerifyOperation() -encrypt_bigfile = EncryptBigfileOperation() -decrypt_bigfile = DecryptBigfileOperation() - diff --git a/server/www/packages/packages-common/rsa/common.py b/server/www/packages/packages-common/rsa/common.py deleted file mode 100644 index 39feb8c..0000000 --- a/server/www/packages/packages-common/rsa/common.py +++ /dev/null @@ -1,185 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Common functionality shared by several modules.''' - - -def bit_size(num): - ''' - Number of bits needed to represent a integer excluding any prefix - 0 bits. - - As per definition from http://wiki.python.org/moin/BitManipulation and - to match the behavior of the Python 3 API. - - Usage:: - - >>> bit_size(1023) - 10 - >>> bit_size(1024) - 11 - >>> bit_size(1025) - 11 - - :param num: - Integer value. If num is 0, returns 0. Only the absolute value of the - number is considered. Therefore, signed integers will be abs(num) - before the number's bit length is determined. - :returns: - Returns the number of bits in the integer. - ''' - if num == 0: - return 0 - if num < 0: - num = -num - - # Make sure this is an int and not a float. - num & 1 - - hex_num = "%x" % num - return ((len(hex_num) - 1) * 4) + { - '0':0, '1':1, '2':2, '3':2, - '4':3, '5':3, '6':3, '7':3, - '8':4, '9':4, 'a':4, 'b':4, - 'c':4, 'd':4, 'e':4, 'f':4, - }[hex_num[0]] - - -def _bit_size(number): - ''' - Returns the number of bits required to hold a specific long number. - ''' - if number < 0: - raise ValueError('Only nonnegative numbers possible: %s' % number) - - if number == 0: - return 0 - - # This works, even with very large numbers. When using math.log(number, 2), - # you'll get rounding errors and it'll fail. - bits = 0 - while number: - bits += 1 - number >>= 1 - - return bits - - -def byte_size(number): - ''' - Returns the number of bytes required to hold a specific long number. - - The number of bytes is rounded up. 
- - Usage:: - - >>> byte_size(1 << 1023) - 128 - >>> byte_size((1 << 1024) - 1) - 128 - >>> byte_size(1 << 1024) - 129 - - :param number: - An unsigned integer - :returns: - The number of bytes required to hold a specific long number. - ''' - quanta, mod = divmod(bit_size(number), 8) - if mod or number == 0: - quanta += 1 - return quanta - #return int(math.ceil(bit_size(number) / 8.0)) - - -def extended_gcd(a, b): - '''Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb - ''' - # r = gcd(a,b) i = multiplicitive inverse of a mod b - # or j = multiplicitive inverse of b mod a - # Neg return values for i or j are made positive mod b or a respectively - # Iterateive Version is faster and uses much less stack space - x = 0 - y = 1 - lx = 1 - ly = 0 - oa = a #Remember original a/b to remove - ob = b #negative values from return results - while b != 0: - q = a // b - (a, b) = (b, a % b) - (x, lx) = ((lx - (q * x)),x) - (y, ly) = ((ly - (q * y)),y) - if (lx < 0): lx += ob #If neg wrap modulo orignal b - if (ly < 0): ly += oa #If neg wrap modulo orignal a - return (a, lx, ly) #Return only positive values - - -def inverse(x, n): - '''Returns x^-1 (mod n) - - >>> inverse(7, 4) - 3 - >>> (inverse(143, 4) * 143) % 4 - 1 - ''' - - (divider, inv, _) = extended_gcd(x, n) - - if divider != 1: - raise ValueError("x (%d) and n (%d) are not relatively prime" % (x, n)) - - return inv - - -def crt(a_values, modulo_values): - '''Chinese Remainder Theorem. - - Calculates x such that x = a[i] (mod m[i]) for each i. - - :param a_values: the a-values of the above equation - :param modulo_values: the m-values of the above equation - :returns: x such that x = a[i] (mod m[i]) for each i - - - >>> crt([2, 3], [3, 5]) - 8 - - >>> crt([2, 3, 2], [3, 5, 7]) - 23 - - >>> crt([2, 3, 0], [7, 11, 15]) - 135 - ''' - - m = 1 - x = 0 - - for modulo in modulo_values: - m *= modulo - - for (m_i, a_i) in zip(modulo_values, a_values): - M_i = m // m_i - inv = inverse(M_i, m_i) - - x = (x + a_i * M_i * inv) % m - - return x - -if __name__ == '__main__': - import doctest - doctest.testmod() - diff --git a/server/www/packages/packages-common/rsa/core.py b/server/www/packages/packages-common/rsa/core.py deleted file mode 100644 index 90dfee8..0000000 --- a/server/www/packages/packages-common/rsa/core.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Core mathematical operations. - -This is the actual core RSA implementation, which is only defined -mathematically on integers. 
-''' - - -from rsa._compat import is_integer - -def assert_int(var, name): - - if is_integer(var): - return - - raise TypeError('%s should be an integer, not %s' % (name, var.__class__)) - -def encrypt_int(message, ekey, n): - '''Encrypts a message using encryption key 'ekey', working modulo n''' - - assert_int(message, 'message') - assert_int(ekey, 'ekey') - assert_int(n, 'n') - - if message < 0: - raise ValueError('Only non-negative numbers are supported') - - if message > n: - raise OverflowError("The message %i is too long for n=%i" % (message, n)) - - return pow(message, ekey, n) - -def decrypt_int(cyphertext, dkey, n): - '''Decrypts a cypher text using the decryption key 'dkey', working - modulo n''' - - assert_int(cyphertext, 'cyphertext') - assert_int(dkey, 'dkey') - assert_int(n, 'n') - - message = pow(cyphertext, dkey, n) - return message - diff --git a/server/www/packages/packages-common/rsa/key.py b/server/www/packages/packages-common/rsa/key.py deleted file mode 100644 index b6de7b3..0000000 --- a/server/www/packages/packages-common/rsa/key.py +++ /dev/null @@ -1,612 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''RSA key generation code. - -Create new keys with the newkeys() function. It will give you a PublicKey and a -PrivateKey object. - -Loading and saving keys requires the pyasn1 module. This module is imported as -late as possible, such that other functionality will remain working in absence -of pyasn1. - -''' - -import logging -from rsa._compat import b, bytes_type - -import rsa.prime -import rsa.pem -import rsa.common - -log = logging.getLogger(__name__) - - - -class AbstractKey(object): - '''Abstract superclass for private and public keys.''' - - @classmethod - def load_pkcs1(cls, keyfile, format='PEM'): - r'''Loads a key in PKCS#1 DER or PEM format. - - :param keyfile: contents of a DER- or PEM-encoded file that contains - the public key. - :param format: the format of the file to load; 'PEM' or 'DER' - - :return: a PublicKey object - - ''' - - methods = { - 'PEM': cls._load_pkcs1_pem, - 'DER': cls._load_pkcs1_der, - } - - if format not in methods: - formats = ', '.join(sorted(methods.keys())) - raise ValueError('Unsupported format: %r, try one of %s' % (format, - formats)) - - method = methods[format] - return method(keyfile) - - def save_pkcs1(self, format='PEM'): - '''Saves the public key in PKCS#1 DER or PEM format. - - :param format: the format to save; 'PEM' or 'DER' - :returns: the DER- or PEM-encoded public key. - - ''' - - methods = { - 'PEM': self._save_pkcs1_pem, - 'DER': self._save_pkcs1_der, - } - - if format not in methods: - formats = ', '.join(sorted(methods.keys())) - raise ValueError('Unsupported format: %r, try one of %s' % (format, - formats)) - - method = methods[format] - return method() - -class PublicKey(AbstractKey): - '''Represents a public RSA key. - - This key is also known as the 'encryption key'. It contains the 'n' and 'e' - values. 
- - Supports attributes as well as dictionary-like access. Attribute accesss is - faster, though. - - >>> PublicKey(5, 3) - PublicKey(5, 3) - - >>> key = PublicKey(5, 3) - >>> key.n - 5 - >>> key['n'] - 5 - >>> key.e - 3 - >>> key['e'] - 3 - - ''' - - __slots__ = ('n', 'e') - - def __init__(self, n, e): - self.n = n - self.e = e - - def __getitem__(self, key): - return getattr(self, key) - - def __repr__(self): - return 'PublicKey(%i, %i)' % (self.n, self.e) - - def __eq__(self, other): - if other is None: - return False - - if not isinstance(other, PublicKey): - return False - - return self.n == other.n and self.e == other.e - - def __ne__(self, other): - return not (self == other) - - @classmethod - def _load_pkcs1_der(cls, keyfile): - r'''Loads a key in PKCS#1 DER format. - - @param keyfile: contents of a DER-encoded file that contains the public - key. - @return: a PublicKey object - - First let's construct a DER encoded key: - - >>> import base64 - >>> b64der = 'MAwCBQCNGmYtAgMBAAE=' - >>> der = base64.decodestring(b64der) - - This loads the file: - - >>> PublicKey._load_pkcs1_der(der) - PublicKey(2367317549, 65537) - - ''' - - from pyasn1.codec.der import decoder - from rsa.asn1 import AsnPubKey - - (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey()) - return cls(n=int(priv['modulus']), e=int(priv['publicExponent'])) - - def _save_pkcs1_der(self): - '''Saves the public key in PKCS#1 DER format. - - @returns: the DER-encoded public key. - ''' - - from pyasn1.codec.der import encoder - from rsa.asn1 import AsnPubKey - - # Create the ASN object - asn_key = AsnPubKey() - asn_key.setComponentByName('modulus', self.n) - asn_key.setComponentByName('publicExponent', self.e) - - return encoder.encode(asn_key) - - @classmethod - def _load_pkcs1_pem(cls, keyfile): - '''Loads a PKCS#1 PEM-encoded public key file. - - The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and - after the "-----END RSA PUBLIC KEY-----" lines is ignored. - - @param keyfile: contents of a PEM-encoded file that contains the public - key. - @return: a PublicKey object - ''' - - der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY') - return cls._load_pkcs1_der(der) - - def _save_pkcs1_pem(self): - '''Saves a PKCS#1 PEM-encoded public key file. - - @return: contents of a PEM-encoded file that contains the public key. - ''' - - der = self._save_pkcs1_der() - return rsa.pem.save_pem(der, 'RSA PUBLIC KEY') - - @classmethod - def load_pkcs1_openssl_pem(cls, keyfile): - '''Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL. - - These files can be recognised in that they start with BEGIN PUBLIC KEY - rather than BEGIN RSA PUBLIC KEY. - - The contents of the file before the "-----BEGIN PUBLIC KEY-----" and - after the "-----END PUBLIC KEY-----" lines is ignored. - - @param keyfile: contents of a PEM-encoded file that contains the public - key, from OpenSSL. - @return: a PublicKey object - ''' - - der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY') - return cls.load_pkcs1_openssl_der(der) - - @classmethod - def load_pkcs1_openssl_der(cls, keyfile): - '''Loads a PKCS#1 DER-encoded public key file from OpenSSL. - - @param keyfile: contents of a DER-encoded file that contains the public - key, from OpenSSL. 
- @return: a PublicKey object - ''' - - from rsa.asn1 import OpenSSLPubKey - from pyasn1.codec.der import decoder - from pyasn1.type import univ - - (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey()) - - if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'): - raise TypeError("This is not a DER-encoded OpenSSL-compatible public key") - - return cls._load_pkcs1_der(keyinfo['key'][1:]) - - - - -class PrivateKey(AbstractKey): - '''Represents a private RSA key. - - This key is also known as the 'decryption key'. It contains the 'n', 'e', - 'd', 'p', 'q' and other values. - - Supports attributes as well as dictionary-like access. Attribute accesss is - faster, though. - - >>> PrivateKey(3247, 65537, 833, 191, 17) - PrivateKey(3247, 65537, 833, 191, 17) - - exp1, exp2 and coef don't have to be given, they will be calculated: - - >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) - >>> pk.exp1 - 55063 - >>> pk.exp2 - 10095 - >>> pk.coef - 50797 - - If you give exp1, exp2 or coef, they will be used as-is: - - >>> pk = PrivateKey(1, 2, 3, 4, 5, 6, 7, 8) - >>> pk.exp1 - 6 - >>> pk.exp2 - 7 - >>> pk.coef - 8 - - ''' - - __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef') - - def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None): - self.n = n - self.e = e - self.d = d - self.p = p - self.q = q - - # Calculate the other values if they aren't supplied - if exp1 is None: - self.exp1 = int(d % (p - 1)) - else: - self.exp1 = exp1 - - if exp1 is None: - self.exp2 = int(d % (q - 1)) - else: - self.exp2 = exp2 - - if coef is None: - self.coef = rsa.common.inverse(q, p) - else: - self.coef = coef - - def __getitem__(self, key): - return getattr(self, key) - - def __repr__(self): - return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self - - def __eq__(self, other): - if other is None: - return False - - if not isinstance(other, PrivateKey): - return False - - return (self.n == other.n and - self.e == other.e and - self.d == other.d and - self.p == other.p and - self.q == other.q and - self.exp1 == other.exp1 and - self.exp2 == other.exp2 and - self.coef == other.coef) - - def __ne__(self, other): - return not (self == other) - - @classmethod - def _load_pkcs1_der(cls, keyfile): - r'''Loads a key in PKCS#1 DER format. - - @param keyfile: contents of a DER-encoded file that contains the private - key. - @return: a PrivateKey object - - First let's construct a DER encoded key: - - >>> import base64 - >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt' - >>> der = base64.decodestring(b64der) - - This loads the file: - - >>> PrivateKey._load_pkcs1_der(der) - PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) - - ''' - - from pyasn1.codec.der import decoder - (priv, _) = decoder.decode(keyfile) - - # ASN.1 contents of DER encoded private key: - # - # RSAPrivateKey ::= SEQUENCE { - # version Version, - # modulus INTEGER, -- n - # publicExponent INTEGER, -- e - # privateExponent INTEGER, -- d - # prime1 INTEGER, -- p - # prime2 INTEGER, -- q - # exponent1 INTEGER, -- d mod (p-1) - # exponent2 INTEGER, -- d mod (q-1) - # coefficient INTEGER, -- (inverse of q) mod p - # otherPrimeInfos OtherPrimeInfos OPTIONAL - # } - - if priv[0] != 0: - raise ValueError('Unable to read this file, version %s != 0' % priv[0]) - - as_ints = tuple(int(x) for x in priv[1:9]) - return cls(*as_ints) - - def _save_pkcs1_der(self): - '''Saves the private key in PKCS#1 DER format. - - @returns: the DER-encoded private key. 
- ''' - - from pyasn1.type import univ, namedtype - from pyasn1.codec.der import encoder - - class AsnPrivKey(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('version', univ.Integer()), - namedtype.NamedType('modulus', univ.Integer()), - namedtype.NamedType('publicExponent', univ.Integer()), - namedtype.NamedType('privateExponent', univ.Integer()), - namedtype.NamedType('prime1', univ.Integer()), - namedtype.NamedType('prime2', univ.Integer()), - namedtype.NamedType('exponent1', univ.Integer()), - namedtype.NamedType('exponent2', univ.Integer()), - namedtype.NamedType('coefficient', univ.Integer()), - ) - - # Create the ASN object - asn_key = AsnPrivKey() - asn_key.setComponentByName('version', 0) - asn_key.setComponentByName('modulus', self.n) - asn_key.setComponentByName('publicExponent', self.e) - asn_key.setComponentByName('privateExponent', self.d) - asn_key.setComponentByName('prime1', self.p) - asn_key.setComponentByName('prime2', self.q) - asn_key.setComponentByName('exponent1', self.exp1) - asn_key.setComponentByName('exponent2', self.exp2) - asn_key.setComponentByName('coefficient', self.coef) - - return encoder.encode(asn_key) - - @classmethod - def _load_pkcs1_pem(cls, keyfile): - '''Loads a PKCS#1 PEM-encoded private key file. - - The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and - after the "-----END RSA PRIVATE KEY-----" lines is ignored. - - @param keyfile: contents of a PEM-encoded file that contains the private - key. - @return: a PrivateKey object - ''' - - der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY')) - return cls._load_pkcs1_der(der) - - def _save_pkcs1_pem(self): - '''Saves a PKCS#1 PEM-encoded private key file. - - @return: contents of a PEM-encoded file that contains the private key. - ''' - - der = self._save_pkcs1_der() - return rsa.pem.save_pem(der, b('RSA PRIVATE KEY')) - -def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): - ''''Returns a tuple of two different primes of nbits bits each. - - The resulting p * q has exacty 2 * nbits bits, and the returned p and q - will not be equal. - - :param nbits: the number of bits in each of p and q. - :param getprime_func: the getprime function, defaults to - :py:func:`rsa.prime.getprime`. - - *Introduced in Python-RSA 3.1* - - :param accurate: whether to enable accurate mode or not. - :returns: (p, q), where p > q - - >>> (p, q) = find_p_q(128) - >>> from rsa import common - >>> common.bit_size(p * q) - 256 - - When not in accurate mode, the number of bits can be slightly less - - >>> (p, q) = find_p_q(128, accurate=False) - >>> from rsa import common - >>> common.bit_size(p * q) <= 256 - True - >>> common.bit_size(p * q) > 240 - True - - ''' - - total_bits = nbits * 2 - - # Make sure that p and q aren't too close or the factoring programs can - # factor n. - shift = nbits // 16 - pbits = nbits + shift - qbits = nbits - shift - - # Choose the two initial primes - log.debug('find_p_q(%i): Finding p', nbits) - p = getprime_func(pbits) - log.debug('find_p_q(%i): Finding q', nbits) - q = getprime_func(qbits) - - def is_acceptable(p, q): - '''Returns True iff p and q are acceptable: - - - p and q differ - - (p * q) has the right nr of bits (when accurate=True) - ''' - - if p == q: - return False - - if not accurate: - return True - - # Make sure we have just the right amount of bits - found_size = rsa.common.bit_size(p * q) - return total_bits == found_size - - # Keep choosing other primes until they match our requirements. 
- change_p = False - while not is_acceptable(p, q): - # Change p on one iteration and q on the other - if change_p: - p = getprime_func(pbits) - else: - q = getprime_func(qbits) - - change_p = not change_p - - # We want p > q as described on - # http://www.di-mgt.com.au/rsa_alg.html#crt - return (max(p, q), min(p, q)) - -def calculate_keys(p, q, nbits): - '''Calculates an encryption and a decryption key given p and q, and - returns them as a tuple (e, d) - - ''' - - phi_n = (p - 1) * (q - 1) - - # A very common choice for e is 65537 - e = 65537 - - try: - d = rsa.common.inverse(e, phi_n) - except ValueError: - raise ValueError("e (%d) and phi_n (%d) are not relatively prime" % - (e, phi_n)) - - if (e * d) % phi_n != 1: - raise ValueError("e (%d) and d (%d) are not mult. inv. modulo " - "phi_n (%d)" % (e, d, phi_n)) - - return (e, d) - -def gen_keys(nbits, getprime_func, accurate=True): - '''Generate RSA keys of nbits bits. Returns (p, q, e, d). - - Note: this can take a long time, depending on the key size. - - :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and - ``q`` will use ``nbits/2`` bits. - :param getprime_func: either :py:func:`rsa.prime.getprime` or a function - with similar signature. - ''' - - (p, q) = find_p_q(nbits // 2, getprime_func, accurate) - (e, d) = calculate_keys(p, q, nbits // 2) - - return (p, q, e, d) - -def newkeys(nbits, accurate=True, poolsize=1): - '''Generates public and private keys, and returns them as (pub, priv). - - The public key is also known as the 'encryption key', and is a - :py:class:`rsa.PublicKey` object. The private key is also known as the - 'decryption key' and is a :py:class:`rsa.PrivateKey` object. - - :param nbits: the number of bits required to store ``n = p*q``. - :param accurate: when True, ``n`` will have exactly the number of bits you - asked for. However, this makes key generation much slower. When False, - `n`` may have slightly less bits. - :param poolsize: the number of processes to use to generate the prime - numbers. If set to a number > 1, a parallel algorithm will be used. - This requires Python 2.6 or newer. - - :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`) - - The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires - Python 2.6 or newer. - - ''' - - if nbits < 16: - raise ValueError('Key too small') - - if poolsize < 1: - raise ValueError('Pool size (%i) should be >= 1' % poolsize) - - # Determine which getprime function to use - if poolsize > 1: - from rsa import parallel - import functools - - getprime_func = functools.partial(parallel.getprime, poolsize=poolsize) - else: getprime_func = rsa.prime.getprime - - # Generate the key components - (p, q, e, d) = gen_keys(nbits, getprime_func) - - # Create the key objects - n = p * q - - return ( - PublicKey(n, e), - PrivateKey(n, e, d, p, q) - ) - -__all__ = ['PublicKey', 'PrivateKey', 'newkeys'] - -if __name__ == '__main__': - import doctest - - try: - for count in range(100): - (failures, tests) = doctest.testmod() - if failures: - break - - if (count and count % 10 == 0) or count == 1: - print('%i times' % count) - except KeyboardInterrupt: - print('Aborted') - else: - print('Doctests done') diff --git a/server/www/packages/packages-common/rsa/parallel.py b/server/www/packages/packages-common/rsa/parallel.py deleted file mode 100644 index e5034ac..0000000 --- a/server/www/packages/packages-common/rsa/parallel.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. 
Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Functions for parallel computation on multiple cores. - -Introduced in Python-RSA 3.1. - -.. note:: - - Requires Python 2.6 or newer. - -''' - -from __future__ import print_function - -import multiprocessing as mp - -import rsa.prime -import rsa.randnum - -def _find_prime(nbits, pipe): - while True: - integer = rsa.randnum.read_random_int(nbits) - - # Make sure it's odd - integer |= 1 - - # Test for primeness - if rsa.prime.is_prime(integer): - pipe.send(integer) - return - -def getprime(nbits, poolsize): - '''Returns a prime number that can be stored in 'nbits' bits. - - Works in multiple threads at the same time. - - >>> p = getprime(128, 3) - >>> rsa.prime.is_prime(p-1) - False - >>> rsa.prime.is_prime(p) - True - >>> rsa.prime.is_prime(p+1) - False - - >>> from rsa import common - >>> common.bit_size(p) == 128 - True - - ''' - - (pipe_recv, pipe_send) = mp.Pipe(duplex=False) - - # Create processes - procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send)) - for _ in range(poolsize)] - [p.start() for p in procs] - - result = pipe_recv.recv() - - [p.terminate() for p in procs] - - return result - -__all__ = ['getprime'] - - -if __name__ == '__main__': - print('Running doctests 1000x or until failure') - import doctest - - for count in range(100): - (failures, tests) = doctest.testmod() - if failures: - break - - if count and count % 10 == 0: - print('%i times' % count) - - print('Doctests done') - diff --git a/server/www/packages/packages-common/rsa/pem.py b/server/www/packages/packages-common/rsa/pem.py deleted file mode 100644 index b1c3a0e..0000000 --- a/server/www/packages/packages-common/rsa/pem.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Functions that load and write PEM-encoded files.''' - -import base64 -from rsa._compat import b, is_bytes - -def _markers(pem_marker): - ''' - Returns the start and end PEM markers - ''' - - if is_bytes(pem_marker): - pem_marker = pem_marker.decode('utf-8') - - return (b('-----BEGIN %s-----' % pem_marker), - b('-----END %s-----' % pem_marker)) - -def load_pem(contents, pem_marker): - '''Loads a PEM file. - - @param contents: the contents of the file to interpret - @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' - when your file has '-----BEGIN RSA PRIVATE KEY-----' and - '-----END RSA PRIVATE KEY-----' markers. 
- - @return the base64-decoded content between the start and end markers. - - @raise ValueError: when the content is invalid, for example when the start - marker cannot be found. - - ''' - - (pem_start, pem_end) = _markers(pem_marker) - - pem_lines = [] - in_pem_part = False - - for line in contents.splitlines(): - line = line.strip() - - # Skip empty lines - if not line: - continue - - # Handle start marker - if line == pem_start: - if in_pem_part: - raise ValueError('Seen start marker "%s" twice' % pem_start) - - in_pem_part = True - continue - - # Skip stuff before first marker - if not in_pem_part: - continue - - # Handle end marker - if in_pem_part and line == pem_end: - in_pem_part = False - break - - # Load fields - if b(':') in line: - continue - - pem_lines.append(line) - - # Do some sanity checks - if not pem_lines: - raise ValueError('No PEM start marker "%s" found' % pem_start) - - if in_pem_part: - raise ValueError('No PEM end marker "%s" found' % pem_end) - - # Base64-decode the contents - pem = b('').join(pem_lines) - return base64.decodestring(pem) - - -def save_pem(contents, pem_marker): - '''Saves a PEM file. - - @param contents: the contents to encode in PEM format - @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' - when your file has '-----BEGIN RSA PRIVATE KEY-----' and - '-----END RSA PRIVATE KEY-----' markers. - - @return the base64-encoded content between the start and end markers. - - ''' - - (pem_start, pem_end) = _markers(pem_marker) - - b64 = base64.encodestring(contents).replace(b('\n'), b('')) - pem_lines = [pem_start] - - for block_start in range(0, len(b64), 64): - block = b64[block_start:block_start + 64] - pem_lines.append(block) - - pem_lines.append(pem_end) - pem_lines.append(b('')) - - return b('\n').join(pem_lines) - diff --git a/server/www/packages/packages-common/rsa/pkcs1.py b/server/www/packages/packages-common/rsa/pkcs1.py deleted file mode 100644 index 15e4cf6..0000000 --- a/server/www/packages/packages-common/rsa/pkcs1.py +++ /dev/null @@ -1,391 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Functions for PKCS#1 version 1.5 encryption and signing - -This module implements certain functionality from PKCS#1 version 1.5. For a -very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes - -At least 8 bytes of random padding is used when encrypting a message. This makes -these methods much more secure than the ones in the ``rsa`` module. - -WARNING: this module leaks information when decryption or verification fails. -The exceptions that are raised contain the Python traceback information, which -can be used to deduce where in the process the failure occurred. DO NOT PASS -SUCH INFORMATION to your users. -''' - -import hashlib -import os - -from rsa._compat import b -from rsa import common, transform, core, varblock - -# ASN.1 codes that describe the hash algorithm used. 
-HASH_ASN1 = { - 'MD5': b('\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'), - 'SHA-1': b('\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'), - 'SHA-256': b('\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'), - 'SHA-384': b('\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'), - 'SHA-512': b('\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'), -} - -HASH_METHODS = { - 'MD5': hashlib.md5, - 'SHA-1': hashlib.sha1, - 'SHA-256': hashlib.sha256, - 'SHA-384': hashlib.sha384, - 'SHA-512': hashlib.sha512, -} - -class CryptoError(Exception): - '''Base class for all exceptions in this module.''' - -class DecryptionError(CryptoError): - '''Raised when decryption fails.''' - -class VerificationError(CryptoError): - '''Raised when verification fails.''' - -def _pad_for_encryption(message, target_length): - r'''Pads the message for encryption, returning the padded message. - - :return: 00 02 RANDOM_DATA 00 MESSAGE - - >>> block = _pad_for_encryption('hello', 16) - >>> len(block) - 16 - >>> block[0:2] - '\x00\x02' - >>> block[-6:] - '\x00hello' - - ''' - - max_msglength = target_length - 11 - msglength = len(message) - - if msglength > max_msglength: - raise OverflowError('%i bytes needed for message, but there is only' - ' space for %i' % (msglength, max_msglength)) - - # Get random padding - padding = b('') - padding_length = target_length - msglength - 3 - - # We remove 0-bytes, so we'll end up with less padding than we've asked for, - # so keep adding data until we're at the correct length. - while len(padding) < padding_length: - needed_bytes = padding_length - len(padding) - - # Always read at least 8 bytes more than we need, and trim off the rest - # after removing the 0-bytes. This increases the chance of getting - # enough bytes, especially when needed_bytes is small - new_padding = os.urandom(needed_bytes + 5) - new_padding = new_padding.replace(b('\x00'), b('')) - padding = padding + new_padding[:needed_bytes] - - assert len(padding) == padding_length - - return b('').join([b('\x00\x02'), - padding, - b('\x00'), - message]) - - -def _pad_for_signing(message, target_length): - r'''Pads the message for signing, returning the padded message. - - The padding is always a repetition of FF bytes. - - :return: 00 01 PADDING 00 MESSAGE - - >>> block = _pad_for_signing('hello', 16) - >>> len(block) - 16 - >>> block[0:2] - '\x00\x01' - >>> block[-6:] - '\x00hello' - >>> block[2:-6] - '\xff\xff\xff\xff\xff\xff\xff\xff' - - ''' - - max_msglength = target_length - 11 - msglength = len(message) - - if msglength > max_msglength: - raise OverflowError('%i bytes needed for message, but there is only' - ' space for %i' % (msglength, max_msglength)) - - padding_length = target_length - msglength - 3 - - return b('').join([b('\x00\x01'), - padding_length * b('\xff'), - b('\x00'), - message]) - - -def encrypt(message, pub_key): - '''Encrypts the given message using PKCS#1 v1.5 - - :param message: the message to encrypt. Must be a byte string no longer than - ``k-11`` bytes, where ``k`` is the number of bytes needed to encode - the ``n`` component of the public key. - :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with. - :raise OverflowError: when the message is too large to fit in the padded - block. 
- - >>> from rsa import key, common - >>> (pub_key, priv_key) = key.newkeys(256) - >>> message = 'hello' - >>> crypto = encrypt(message, pub_key) - - The crypto text should be just as long as the public key 'n' component: - - >>> len(crypto) == common.byte_size(pub_key.n) - True - - ''' - - keylength = common.byte_size(pub_key.n) - padded = _pad_for_encryption(message, keylength) - - payload = transform.bytes2int(padded) - encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n) - block = transform.int2bytes(encrypted, keylength) - - return block - -def decrypt(crypto, priv_key): - r'''Decrypts the given message using PKCS#1 v1.5 - - The decryption is considered 'failed' when the resulting cleartext doesn't - start with the bytes 00 02, or when the 00 byte between the padding and - the message cannot be found. - - :param crypto: the crypto text as returned by :py:func:`rsa.encrypt` - :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with. - :raise DecryptionError: when the decryption fails. No details are given as - to why the code thinks the decryption fails, as this would leak - information about the private key. - - - >>> import rsa - >>> (pub_key, priv_key) = rsa.newkeys(256) - - It works with strings: - - >>> crypto = encrypt('hello', pub_key) - >>> decrypt(crypto, priv_key) - 'hello' - - And with binary data: - - >>> crypto = encrypt('\x00\x00\x00\x00\x01', pub_key) - >>> decrypt(crypto, priv_key) - '\x00\x00\x00\x00\x01' - - Altering the encrypted information will *likely* cause a - :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use - :py:func:`rsa.sign`. - - - .. warning:: - - Never display the stack trace of a - :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the - code the exception occurred, and thus leaks information about the key. - It's only a tiny bit of information, but every bit makes cracking the - keys easier. - - >>> crypto = encrypt('hello', pub_key) - >>> crypto = crypto[0:5] + 'X' + crypto[6:] # change a byte - >>> decrypt(crypto, priv_key) - Traceback (most recent call last): - ... - DecryptionError: Decryption failed - - ''' - - blocksize = common.byte_size(priv_key.n) - encrypted = transform.bytes2int(crypto) - decrypted = core.decrypt_int(encrypted, priv_key.d, priv_key.n) - cleartext = transform.int2bytes(decrypted, blocksize) - - # If we can't find the cleartext marker, decryption failed. - if cleartext[0:2] != b('\x00\x02'): - raise DecryptionError('Decryption failed') - - # Find the 00 separator between the padding and the message - try: - sep_idx = cleartext.index(b('\x00'), 2) - except ValueError: - raise DecryptionError('Decryption failed') - - return cleartext[sep_idx+1:] - -def sign(message, priv_key, hash): - '''Signs the message with the private key. - - Hashes the message, then signs the hash with the given key. This is known - as a "detached signature", because the message itself isn't altered. - - :param message: the message to sign. Can be an 8-bit string or a file-like - object. If ``message`` has a ``read()`` method, it is assumed to be a - file-like object. - :param priv_key: the :py:class:`rsa.PrivateKey` to sign with - :param hash: the hash method used on the message. Use 'MD5', 'SHA-1', - 'SHA-256', 'SHA-384' or 'SHA-512'. - :return: a message signature block. - :raise OverflowError: if the private key is too small to contain the - requested hash. 
- - ''' - - # Get the ASN1 code for this hash method - if hash not in HASH_ASN1: - raise ValueError('Invalid hash method: %s' % hash) - asn1code = HASH_ASN1[hash] - - # Calculate the hash - hash = _hash(message, hash) - - # Encrypt the hash with the private key - cleartext = asn1code + hash - keylength = common.byte_size(priv_key.n) - padded = _pad_for_signing(cleartext, keylength) - - payload = transform.bytes2int(padded) - encrypted = core.encrypt_int(payload, priv_key.d, priv_key.n) - block = transform.int2bytes(encrypted, keylength) - - return block - -def verify(message, signature, pub_key): - '''Verifies that the signature matches the message. - - The hash method is detected automatically from the signature. - - :param message: the signed message. Can be an 8-bit string or a file-like - object. If ``message`` has a ``read()`` method, it is assumed to be a - file-like object. - :param signature: the signature block, as created with :py:func:`rsa.sign`. - :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. - :raise VerificationError: when the signature doesn't match the message. - - .. warning:: - - Never display the stack trace of a - :py:class:`rsa.pkcs1.VerificationError` exception. It shows where in - the code the exception occurred, and thus leaks information about the - key. It's only a tiny bit of information, but every bit makes cracking - the keys easier. - - ''' - - blocksize = common.byte_size(pub_key.n) - encrypted = transform.bytes2int(signature) - decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n) - clearsig = transform.int2bytes(decrypted, blocksize) - - # If we can't find the signature marker, verification failed. - if clearsig[0:2] != b('\x00\x01'): - raise VerificationError('Verification failed') - - # Find the 00 separator between the padding and the payload - try: - sep_idx = clearsig.index(b('\x00'), 2) - except ValueError: - raise VerificationError('Verification failed') - - # Get the hash and the hash method - (method_name, signature_hash) = _find_method_hash(clearsig[sep_idx+1:]) - message_hash = _hash(message, method_name) - - # Compare the real hash to the hash in the signature - if message_hash != signature_hash: - raise VerificationError('Verification failed') - - return True - -def _hash(message, method_name): - '''Returns the message digest. - - :param message: the signed message. Can be an 8-bit string or a file-like - object. If ``message`` has a ``read()`` method, it is assumed to be a - file-like object. - :param method_name: the hash method, must be a key of - :py:const:`HASH_METHODS`. - - ''' - - if method_name not in HASH_METHODS: - raise ValueError('Invalid hash method: %s' % method_name) - - method = HASH_METHODS[method_name] - hasher = method() - - if hasattr(message, 'read') and hasattr(message.read, '__call__'): - # read as 1K blocks - for block in varblock.yield_fixedblocks(message, 1024): - hasher.update(block) - else: - # hash the message object itself. - hasher.update(message) - - return hasher.digest() - - -def _find_method_hash(method_hash): - '''Finds the hash method and the hash itself. - - :param method_hash: ASN1 code for the hash method concatenated with the - hash itself. - - :return: tuple (method, hash) where ``method`` is the used hash method, and - ``hash`` is the hash itself. 
- - :raise VerificationFailed: when the hash method cannot be found - - ''' - - for (hashname, asn1code) in HASH_ASN1.items(): - if not method_hash.startswith(asn1code): - continue - - return (hashname, method_hash[len(asn1code):]) - - raise VerificationError('Verification failed') - - -__all__ = ['encrypt', 'decrypt', 'sign', 'verify', - 'DecryptionError', 'VerificationError', 'CryptoError'] - -if __name__ == '__main__': - print('Running doctests 1000x or until failure') - import doctest - - for count in range(1000): - (failures, tests) = doctest.testmod() - if failures: - break - - if count and count % 100 == 0: - print('%i times' % count) - - print('Doctests done') diff --git a/server/www/packages/packages-common/rsa/prime.py b/server/www/packages/packages-common/rsa/prime.py deleted file mode 100644 index 7422eb1..0000000 --- a/server/www/packages/packages-common/rsa/prime.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Numerical functions related to primes. - -Implementation based on the book Algorithm Design by Michael T. Goodrich and -Roberto Tamassia, 2002. -''' - -__all__ = [ 'getprime', 'are_relatively_prime'] - -import rsa.randnum - -def gcd(p, q): - '''Returns the greatest common divisor of p and q - - >>> gcd(48, 180) - 12 - ''' - - while q != 0: - if p < q: (p,q) = (q,p) - (p,q) = (q, p % q) - return p - - -def jacobi(a, b): - '''Calculates the value of the Jacobi symbol (a/b) where both a and b are - positive integers, and b is odd - - :returns: -1, 0 or 1 - ''' - - assert a > 0 - assert b > 0 - - if a == 0: return 0 - result = 1 - while a > 1: - if a & 1: - if ((a-1)*(b-1) >> 2) & 1: - result = -result - a, b = b % a, a - else: - if (((b * b) - 1) >> 3) & 1: - result = -result - a >>= 1 - if a == 0: return 0 - return result - -def jacobi_witness(x, n): - '''Returns False if n is an Euler pseudo-prime with base x, and - True otherwise. - ''' - - j = jacobi(x, n) % n - - f = pow(x, n >> 1, n) - - if j == f: return False - return True - -def randomized_primality_testing(n, k): - '''Calculates whether n is composite (which is always correct) or - prime (which is incorrect with error probability 2**-k) - - Returns False if the number is composite, and True if it's - probably prime. - ''' - - # 50% of Jacobi-witnesses can report compositness of non-prime numbers - - # The implemented algorithm using the Jacobi witness function has error - # probability q <= 0.5, according to Goodrich et. al - # - # q = 0.5 - # t = int(math.ceil(k / log(1 / q, 2))) - # So t = k / log(2, 2) = k / 1 = k - # this means we can use range(k) rather than range(t) - - for _ in range(k): - x = rsa.randnum.randint(n-1) - if jacobi_witness(x, n): return False - - return True - -def is_prime(number): - '''Returns True if the number is prime, and False otherwise. 
- - >>> is_prime(42) - False - >>> is_prime(41) - True - ''' - - return randomized_primality_testing(number, 6) - -def getprime(nbits): - '''Returns a prime number that can be stored in 'nbits' bits. - - >>> p = getprime(128) - >>> is_prime(p-1) - False - >>> is_prime(p) - True - >>> is_prime(p+1) - False - - >>> from rsa import common - >>> common.bit_size(p) == 128 - True - - ''' - - while True: - integer = rsa.randnum.read_random_int(nbits) - - # Make sure it's odd - integer |= 1 - - # Test for primeness - if is_prime(integer): - return integer - - # Retry if not prime - - -def are_relatively_prime(a, b): - '''Returns True if a and b are relatively prime, and False if they - are not. - - >>> are_relatively_prime(2, 3) - 1 - >>> are_relatively_prime(2, 4) - 0 - ''' - - d = gcd(a, b) - return (d == 1) - -if __name__ == '__main__': - print('Running doctests 1000x or until failure') - import doctest - - for count in range(1000): - (failures, tests) = doctest.testmod() - if failures: - break - - if count and count % 100 == 0: - print('%i times' % count) - - print('Doctests done') diff --git a/server/www/packages/packages-common/rsa/randnum.py b/server/www/packages/packages-common/rsa/randnum.py deleted file mode 100644 index 0e78274..0000000 --- a/server/www/packages/packages-common/rsa/randnum.py +++ /dev/null @@ -1,85 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Functions for generating random numbers.''' - -# Source inspired by code by Yesudeep Mangalapilly - -import os - -from rsa import common, transform -from rsa._compat import byte - -def read_random_bits(nbits): - '''Reads 'nbits' random bits. - - If nbits isn't a whole number of bytes, an extra byte will be appended with - only the lower bits set. - ''' - - nbytes, rbits = divmod(nbits, 8) - - # Get the random bytes - randomdata = os.urandom(nbytes) - - # Add the remaining random bits - if rbits > 0: - randomvalue = ord(os.urandom(1)) - randomvalue >>= (8 - rbits) - randomdata = byte(randomvalue) + randomdata - - return randomdata - - -def read_random_int(nbits): - '''Reads a random integer of approximately nbits bits. - ''' - - randomdata = read_random_bits(nbits) - value = transform.bytes2int(randomdata) - - # Ensure that the number is large enough to just fill out the required - # number of bits. - value |= 1 << (nbits - 1) - - return value - -def randint(maxvalue): - '''Returns a random integer x with 1 <= x <= maxvalue - - May take a very long time in specific situations. If maxvalue needs N bits - to store, the closer maxvalue is to (2 ** N) - 1, the faster this function - is. - ''' - - bit_size = common.bit_size(maxvalue) - - tries = 0 - while True: - value = read_random_int(bit_size) - if value <= maxvalue: - break - - if tries and tries % 10 == 0: - # After a lot of tries to get the right number of bits but still - # smaller than maxvalue, decrease the number of bits by 1. That'll - # dramatically increase the chances to get a large enough number. 
- bit_size -= 1 - tries += 1 - - return value - - diff --git a/server/www/packages/packages-common/rsa/transform.py b/server/www/packages/packages-common/rsa/transform.py deleted file mode 100644 index c740b2d..0000000 --- a/server/www/packages/packages-common/rsa/transform.py +++ /dev/null @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Data transformation functions. - -From bytes to a number, number to bytes, etc. -''' - -from __future__ import absolute_import - -try: - # We'll use psyco if available on 32-bit architectures to speed up code. - # Using psyco (if available) cuts down the execution time on Python 2.5 - # at least by half. - import psyco - psyco.full() -except ImportError: - pass - -import binascii -from struct import pack -from rsa import common -from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE - - -def bytes2int(raw_bytes): - r'''Converts a list of bytes or an 8-bit string to an integer. - - When using unicode strings, encode it to some encoding like UTF8 first. - - >>> (((128 * 256) + 64) * 256) + 15 - 8405007 - >>> bytes2int('\x80@\x0f') - 8405007 - - ''' - - return int(binascii.hexlify(raw_bytes), 16) - - -def _int2bytes(number, block_size=None): - r'''Converts a number to a string of bytes. - - Usage:: - - >>> _int2bytes(123456789) - '\x07[\xcd\x15' - >>> bytes2int(_int2bytes(123456789)) - 123456789 - - >>> _int2bytes(123456789, 6) - '\x00\x00\x07[\xcd\x15' - >>> bytes2int(_int2bytes(123456789, 128)) - 123456789 - - >>> _int2bytes(123456789, 3) - Traceback (most recent call last): - ... - OverflowError: Needed 4 bytes for number, but block size is 3 - - @param number: the number to convert - @param block_size: the number of bytes to output. If the number encoded to - bytes is less than this, the block will be zero-padded. When not given, - the returned block is not padded. - - @throws OverflowError when block_size is given and the number takes up more - bytes than fit into the block. - ''' - # Type checking - if not is_integer(number): - raise TypeError("You must pass an integer for 'number', not %s" % - number.__class__) - - if number < 0: - raise ValueError('Negative numbers cannot be used: %i' % number) - - # Do some bounds checking - if number == 0: - needed_bytes = 1 - raw_bytes = [ZERO_BYTE] - else: - needed_bytes = common.byte_size(number) - raw_bytes = [] - - # You cannot compare None > 0 in Python 3x. It will fail with a TypeError. - if block_size and block_size > 0: - if needed_bytes > block_size: - raise OverflowError('Needed %i bytes for number, but block size ' - 'is %i' % (needed_bytes, block_size)) - - # Convert the number to bytes. 
- while number > 0: - raw_bytes.insert(0, byte(number & 0xFF)) - number >>= 8 - - # Pad with zeroes to fill the block - if block_size and block_size > 0: - padding = (block_size - needed_bytes) * ZERO_BYTE - else: - padding = EMPTY_BYTE - - return padding + EMPTY_BYTE.join(raw_bytes) - - -def bytes_leading(raw_bytes, needle=ZERO_BYTE): - ''' - Finds the number of prefixed byte occurrences in the haystack. - - Useful when you want to deal with padding. - - :param raw_bytes: - Raw bytes. - :param needle: - The byte to count. Default \000. - :returns: - The number of leading needle bytes. - ''' - leading = 0 - # Indexing keeps compatibility between Python 2.x and Python 3.x - _byte = needle[0] - for x in raw_bytes: - if x == _byte: - leading += 1 - else: - break - return leading - - -def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): - ''' - Convert an unsigned integer to bytes (base-256 representation):: - - Does not preserve leading zeros if you don't specify a chunk size or - fill size. - - .. NOTE: - You must not specify both fill_size and chunk_size. Only one - of them is allowed. - - :param number: - Integer value - :param fill_size: - If the optional fill size is given the length of the resulting - byte string is expected to be the fill size and will be padded - with prefix zero bytes to satisfy that length. - :param chunk_size: - If optional chunk size is given and greater than zero, pad the front of - the byte string with binary zeros so that the length is a multiple of - ``chunk_size``. - :param overflow: - ``False`` (default). If this is ``True``, no ``OverflowError`` - will be raised when the fill_size is shorter than the length - of the generated byte sequence. Instead the byte sequence will - be returned as is. - :returns: - Raw bytes (base-256 representation). - :raises: - ``OverflowError`` when fill_size is given and the number takes up more - bytes than fit into the block. This requires the ``overflow`` - argument to this function to be set to ``False`` otherwise, no - error will be raised. - ''' - if number < 0: - raise ValueError("Number must be an unsigned integer: %d" % number) - - if fill_size and chunk_size: - raise ValueError("You can either fill or pad chunks, but not both") - - # Ensure these are integers. - number & 1 - - raw_bytes = b('') - - # Pack the integer one machine word at a time into bytes. - num = number - word_bits, _, max_uint, pack_type = get_word_alignment(num) - pack_format = ">%s" % pack_type - while num > 0: - raw_bytes = pack(pack_format, num & max_uint) + raw_bytes - num >>= word_bits - # Obtain the index of the first non-zero byte. - zero_leading = bytes_leading(raw_bytes) - if number == 0: - raw_bytes = ZERO_BYTE - # De-padding. 
- raw_bytes = raw_bytes[zero_leading:] - - length = len(raw_bytes) - if fill_size and fill_size > 0: - if not overflow and length > fill_size: - raise OverflowError( - "Need %d bytes for number, but fill size is %d" % - (length, fill_size) - ) - raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE) - elif chunk_size and chunk_size > 0: - remainder = length % chunk_size - if remainder: - padding_size = chunk_size - remainder - raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE) - return raw_bytes - - -if __name__ == '__main__': - import doctest - doctest.testmod() - diff --git a/server/www/packages/packages-common/rsa/util.py b/server/www/packages/packages-common/rsa/util.py deleted file mode 100644 index 5bbb70b..0000000 --- a/server/www/packages/packages-common/rsa/util.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''Utility functions.''' - -from __future__ import with_statement, print_function - -import sys -from optparse import OptionParser - -import rsa.key - -def private_to_public(): - '''Reads a private key and outputs the corresponding public key.''' - - # Parse the CLI options - parser = OptionParser(usage='usage: %prog [options]', - description='Reads a private key and outputs the ' - 'corresponding public key. Both private and public keys use ' - 'the format described in PKCS#1 v1.5') - - parser.add_option('-i', '--input', dest='infilename', type='string', - help='Input filename. Reads from stdin if not specified') - parser.add_option('-o', '--output', dest='outfilename', type='string', - help='Output filename. 
Writes to stdout of not specified') - - parser.add_option('--inform', dest='inform', - help='key format of input - default PEM', - choices=('PEM', 'DER'), default='PEM') - - parser.add_option('--outform', dest='outform', - help='key format of output - default PEM', - choices=('PEM', 'DER'), default='PEM') - - (cli, cli_args) = parser.parse_args(sys.argv) - - # Read the input data - if cli.infilename: - print('Reading private key from %s in %s format' % \ - (cli.infilename, cli.inform), file=sys.stderr) - with open(cli.infilename, 'rb') as infile: - in_data = infile.read() - else: - print('Reading private key from stdin in %s format' % cli.inform, - file=sys.stderr) - in_data = sys.stdin.read().encode('ascii') - - assert type(in_data) == bytes, type(in_data) - - - # Take the public fields and create a public key - priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform) - pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e) - - # Save to the output file - out_data = pub_key.save_pkcs1(cli.outform) - - if cli.outfilename: - print('Writing public key to %s in %s format' % \ - (cli.outfilename, cli.outform), file=sys.stderr) - with open(cli.outfilename, 'wb') as outfile: - outfile.write(out_data) - else: - print('Writing public key to stdout in %s format' % cli.outform, - file=sys.stderr) - sys.stdout.write(out_data.decode('ascii')) - - diff --git a/server/www/packages/packages-common/rsa/varblock.py b/server/www/packages/packages-common/rsa/varblock.py deleted file mode 100644 index c7d96ae..0000000 --- a/server/www/packages/packages-common/rsa/varblock.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2011 Sybren A. Stüvel -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -'''VARBLOCK file support - -The VARBLOCK file format is as follows, where || denotes byte concatenation: - - FILE := VERSION || BLOCK || BLOCK ... - - BLOCK := LENGTH || DATA - - LENGTH := varint-encoded length of the subsequent data. Varint comes from - Google Protobuf, and encodes an integer into a variable number of bytes. - Each byte uses the 7 lowest bits to encode the value. The highest bit set - to 1 indicates the next byte is also part of the varint. The last byte will - have this bit set to 0. - -This file format is called the VARBLOCK format, in line with the varint format -used to denote the block sizes. - -''' - -from rsa._compat import byte, b - - -ZERO_BYTE = b('\x00') -VARBLOCK_VERSION = 1 - -def read_varint(infile): - '''Reads a varint from the file. - - When the first byte to be read indicates EOF, (0, 0) is returned. When an - EOF occurs when at least one byte has been read, an EOFError exception is - raised. - - @param infile: the file-like object to read from. It should have a read() - method. - @returns (varint, length), the read varint and the number of read bytes. 
- ''' - - varint = 0 - read_bytes = 0 - - while True: - char = infile.read(1) - if len(char) == 0: - if read_bytes == 0: - return (0, 0) - raise EOFError('EOF while reading varint, value is %i so far' % - varint) - - byte = ord(char) - varint += (byte & 0x7F) << (7 * read_bytes) - - read_bytes += 1 - - if not byte & 0x80: - return (varint, read_bytes) - - -def write_varint(outfile, value): - '''Writes a varint to a file. - - @param outfile: the file-like object to write to. It should have a write() - method. - @returns the number of written bytes. - ''' - - # there is a big difference between 'write the value 0' (this case) and - # 'there is nothing left to write' (the false-case of the while loop) - - if value == 0: - outfile.write(ZERO_BYTE) - return 1 - - written_bytes = 0 - while value > 0: - to_write = value & 0x7f - value = value >> 7 - - if value > 0: - to_write |= 0x80 - - outfile.write(byte(to_write)) - written_bytes += 1 - - return written_bytes - - -def yield_varblocks(infile): - '''Generator, yields each block in the input file. - - @param infile: file to read, is expected to have the VARBLOCK format as - described in the module's docstring. - @yields the contents of each block. - ''' - - # Check the version number - first_char = infile.read(1) - if len(first_char) == 0: - raise EOFError('Unable to read VARBLOCK version number') - - version = ord(first_char) - if version != VARBLOCK_VERSION: - raise ValueError('VARBLOCK version %i not supported' % version) - - while True: - (block_size, read_bytes) = read_varint(infile) - - # EOF at block boundary, that's fine. - if read_bytes == 0 and block_size == 0: - break - - block = infile.read(block_size) - - read_size = len(block) - if read_size != block_size: - raise EOFError('Block size is %i, but could read only %i bytes' % - (block_size, read_size)) - - yield block - - -def yield_fixedblocks(infile, blocksize): - '''Generator, yields each block of ``blocksize`` bytes in the input file. - - :param infile: file to read and separate in blocks. - :returns: a generator that yields the contents of each block - ''' - - while True: - block = infile.read(blocksize) - - read_bytes = len(block) - if read_bytes == 0: - break - - yield block - - if read_bytes < blocksize: - break - diff --git a/server/www/packages/packages-common/tornado/__init__.py b/server/www/packages/packages-common/tornado/__init__.py index 85bacc7..f054e40 100644 --- a/server/www/packages/packages-common/tornado/__init__.py +++ b/server/www/packages/packages-common/tornado/__init__.py @@ -16,7 +16,7 @@ """The Tornado web server and tools.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function # version is a human-readable version number. 
@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement # is zero for an official release, positive for a development branch, # or negative for a release candidate or beta (after the base version # number has been incremented) -version = "4.3" -version_info = (4, 3, 0, 0) +version = "4.5.1" +version_info = (4, 5, 1, 0) diff --git a/server/www/packages/packages-common/tornado/_locale_data.py b/server/www/packages/packages-common/tornado/_locale_data.py index 47c1df6..6fa2c29 100644 --- a/server/www/packages/packages-common/tornado/_locale_data.py +++ b/server/www/packages/packages-common/tornado/_locale_data.py @@ -17,78 +17,69 @@ """Data used by the tornado.locale module.""" -from __future__ import absolute_import, division, print_function, with_statement - -# NOTE: This file is supposed to contain unicode strings, which is -# exactly what you'd get with e.g. u"Español" in most python versions. -# However, Python 3.2 doesn't support the u"" syntax, so we use a u() -# function instead. tornado.util.u cannot be used because it doesn't -# support non-ascii characters on python 2. -# When we drop support for Python 3.2, we can remove the parens -# and make these plain unicode strings. -from tornado.escape import to_unicode as u +from __future__ import absolute_import, division, print_function LOCALE_NAMES = { - "af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")}, - "am_ET": {"name_en": u("Amharic"), "name": u("አማርኛ")}, - "ar_AR": {"name_en": u("Arabic"), "name": u("العربية")}, - "bg_BG": {"name_en": u("Bulgarian"), "name": u("Български")}, - "bn_IN": {"name_en": u("Bengali"), "name": u("বাংলা")}, - "bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")}, - "ca_ES": {"name_en": u("Catalan"), "name": u("Català")}, - "cs_CZ": {"name_en": u("Czech"), "name": u("Čeština")}, - "cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")}, - "da_DK": {"name_en": u("Danish"), "name": u("Dansk")}, - "de_DE": {"name_en": u("German"), "name": u("Deutsch")}, - "el_GR": {"name_en": u("Greek"), "name": u("Ελληνικά")}, - "en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")}, - "en_US": {"name_en": u("English (US)"), "name": u("English (US)")}, - "es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Español (España)")}, - "es_LA": {"name_en": u("Spanish"), "name": u("Español")}, - "et_EE": {"name_en": u("Estonian"), "name": u("Eesti")}, - "eu_ES": {"name_en": u("Basque"), "name": u("Euskara")}, - "fa_IR": {"name_en": u("Persian"), "name": u("فارسی")}, - "fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")}, - "fr_CA": {"name_en": u("French (Canada)"), "name": u("Français (Canada)")}, - "fr_FR": {"name_en": u("French"), "name": u("Français")}, - "ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")}, - "gl_ES": {"name_en": u("Galician"), "name": u("Galego")}, - "he_IL": {"name_en": u("Hebrew"), "name": u("עברית")}, - "hi_IN": {"name_en": u("Hindi"), "name": u("हिन्दी")}, - "hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")}, - "hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")}, - "id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")}, - "is_IS": {"name_en": u("Icelandic"), "name": u("Íslenska")}, - "it_IT": {"name_en": u("Italian"), "name": u("Italiano")}, - "ja_JP": {"name_en": u("Japanese"), "name": u("日本語")}, - "ko_KR": {"name_en": u("Korean"), "name": u("한국어")}, - "lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvių")}, - "lv_LV": {"name_en": u("Latvian"), "name": u("Latviešu")}, - "mk_MK": {"name_en": 
u("Macedonian"), "name": u("Македонски")}, - "ml_IN": {"name_en": u("Malayalam"), "name": u("മലയാളം")}, - "ms_MY": {"name_en": u("Malay"), "name": u("Bahasa Melayu")}, - "nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokmål)")}, - "nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")}, - "nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")}, - "pa_IN": {"name_en": u("Punjabi"), "name": u("ਪੰਜਾਬੀ")}, - "pl_PL": {"name_en": u("Polish"), "name": u("Polski")}, - "pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Português (Brasil)")}, - "pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Português (Portugal)")}, - "ro_RO": {"name_en": u("Romanian"), "name": u("Română")}, - "ru_RU": {"name_en": u("Russian"), "name": u("Русский")}, - "sk_SK": {"name_en": u("Slovak"), "name": u("Slovenčina")}, - "sl_SI": {"name_en": u("Slovenian"), "name": u("Slovenščina")}, - "sq_AL": {"name_en": u("Albanian"), "name": u("Shqip")}, - "sr_RS": {"name_en": u("Serbian"), "name": u("Српски")}, - "sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")}, - "sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")}, - "ta_IN": {"name_en": u("Tamil"), "name": u("தமிழ்")}, - "te_IN": {"name_en": u("Telugu"), "name": u("తెలుగు")}, - "th_TH": {"name_en": u("Thai"), "name": u("ภาษาไทย")}, - "tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")}, - "tr_TR": {"name_en": u("Turkish"), "name": u("Türkçe")}, - "uk_UA": {"name_en": u("Ukraini "), "name": u("Українська")}, - "vi_VN": {"name_en": u("Vietnamese"), "name": u("Tiếng Việt")}, - "zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("中文(简体)")}, - "zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("中文(繁體)")}, + "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"}, + "am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"}, + "ar_AR": {"name_en": u"Arabic", "name": u"العربية"}, + "bg_BG": {"name_en": u"Bulgarian", "name": u"Български"}, + "bn_IN": {"name_en": u"Bengali", "name": u"বাংলা"}, + "bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"}, + "ca_ES": {"name_en": u"Catalan", "name": u"Català"}, + "cs_CZ": {"name_en": u"Czech", "name": u"Čeština"}, + "cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"}, + "da_DK": {"name_en": u"Danish", "name": u"Dansk"}, + "de_DE": {"name_en": u"German", "name": u"Deutsch"}, + "el_GR": {"name_en": u"Greek", "name": u"Ελληνικά"}, + "en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"}, + "en_US": {"name_en": u"English (US)", "name": u"English (US)"}, + "es_ES": {"name_en": u"Spanish (Spain)", "name": u"Español (España)"}, + "es_LA": {"name_en": u"Spanish", "name": u"Español"}, + "et_EE": {"name_en": u"Estonian", "name": u"Eesti"}, + "eu_ES": {"name_en": u"Basque", "name": u"Euskara"}, + "fa_IR": {"name_en": u"Persian", "name": u"فارسی"}, + "fi_FI": {"name_en": u"Finnish", "name": u"Suomi"}, + "fr_CA": {"name_en": u"French (Canada)", "name": u"Français (Canada)"}, + "fr_FR": {"name_en": u"French", "name": u"Français"}, + "ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"}, + "gl_ES": {"name_en": u"Galician", "name": u"Galego"}, + "he_IL": {"name_en": u"Hebrew", "name": u"עברית"}, + "hi_IN": {"name_en": u"Hindi", "name": u"हिन्दी"}, + "hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"}, + "hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"}, + "id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"}, + "is_IS": {"name_en": u"Icelandic", "name": u"Íslenska"}, + "it_IT": {"name_en": u"Italian", "name": u"Italiano"}, + "ja_JP": {"name_en": u"Japanese", 
"name": u"日本語"}, + "ko_KR": {"name_en": u"Korean", "name": u"한국어"}, + "lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvių"}, + "lv_LV": {"name_en": u"Latvian", "name": u"Latviešu"}, + "mk_MK": {"name_en": u"Macedonian", "name": u"Македонски"}, + "ml_IN": {"name_en": u"Malayalam", "name": u"മലയാളം"}, + "ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"}, + "nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokmål)"}, + "nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"}, + "nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"}, + "pa_IN": {"name_en": u"Punjabi", "name": u"ਪੰਜਾਬੀ"}, + "pl_PL": {"name_en": u"Polish", "name": u"Polski"}, + "pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Português (Brasil)"}, + "pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Português (Portugal)"}, + "ro_RO": {"name_en": u"Romanian", "name": u"Română"}, + "ru_RU": {"name_en": u"Russian", "name": u"Русский"}, + "sk_SK": {"name_en": u"Slovak", "name": u"Slovenčina"}, + "sl_SI": {"name_en": u"Slovenian", "name": u"Slovenščina"}, + "sq_AL": {"name_en": u"Albanian", "name": u"Shqip"}, + "sr_RS": {"name_en": u"Serbian", "name": u"Српски"}, + "sv_SE": {"name_en": u"Swedish", "name": u"Svenska"}, + "sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"}, + "ta_IN": {"name_en": u"Tamil", "name": u"தமிழ்"}, + "te_IN": {"name_en": u"Telugu", "name": u"తెలుగు"}, + "th_TH": {"name_en": u"Thai", "name": u"ภาษาไทย"}, + "tl_PH": {"name_en": u"Filipino", "name": u"Filipino"}, + "tr_TR": {"name_en": u"Turkish", "name": u"Türkçe"}, + "uk_UA": {"name_en": u"Ukraini ", "name": u"Українська"}, + "vi_VN": {"name_en": u"Vietnamese", "name": u"Tiếng Việt"}, + "zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"中文(简体)"}, + "zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"中文(繁體)"}, } diff --git a/server/www/packages/packages-common/tornado/auth.py b/server/www/packages/packages-common/tornado/auth.py index ff7172a..f02d289 100644 --- a/server/www/packages/packages-common/tornado/auth.py +++ b/server/www/packages/packages-common/tornado/auth.py @@ -65,7 +65,7 @@ Example usage for Google OAuth: errors are more consistently reported through the ``Future`` interfaces. 
""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import base64 import binascii @@ -82,22 +82,15 @@ from tornado import escape from tornado.httputil import url_concat from tornado.log import gen_log from tornado.stack_context import ExceptionStackContext -from tornado.util import u, unicode_type, ArgReplacer +from tornado.util import unicode_type, ArgReplacer, PY3 -try: - import urlparse # py2 -except ImportError: - import urllib.parse as urlparse # py3 - -try: - import urllib.parse as urllib_parse # py3 -except ImportError: - import urllib as urllib_parse # py2 - -try: - long # py2 -except NameError: - long = int # py3 +if PY3: + import urllib.parse as urlparse + import urllib.parse as urllib_parse + long = int +else: + import urlparse + import urllib as urllib_parse class AuthError(Exception): @@ -188,7 +181,7 @@ class OpenIdMixin(object): """ # Verify the OpenID response via direct request to the OP args = dict((k, v[-1]) for k, v in self.request.arguments.items()) - args["openid.mode"] = u("check_authentication") + args["openid.mode"] = u"check_authentication" url = self._OPENID_ENDPOINT if http_client is None: http_client = self.get_auth_http_client() @@ -255,13 +248,13 @@ class OpenIdMixin(object): ax_ns = None for name in self.request.arguments: if name.startswith("openid.ns.") and \ - self.get_argument(name) == u("http://openid.net/srv/ax/1.0"): + self.get_argument(name) == u"http://openid.net/srv/ax/1.0": ax_ns = name[10:] break def get_ax_arg(uri): if not ax_ns: - return u("") + return u"" prefix = "openid." + ax_ns + ".type." ax_name = None for name in self.request.arguments.keys(): @@ -270,8 +263,8 @@ class OpenIdMixin(object): ax_name = "openid." + ax_ns + ".value." + part break if not ax_name: - return u("") - return self.get_argument(ax_name, u("")) + return u"" + return self.get_argument(ax_name, u"") email = get_ax_arg("http://axschema.org/contact/email") name = get_ax_arg("http://axschema.org/namePerson") @@ -290,7 +283,7 @@ class OpenIdMixin(object): if name: user["name"] = name elif name_parts: - user["name"] = u(" ").join(name_parts) + user["name"] = u" ".join(name_parts) elif email: user["name"] = email.split("@")[0] if email: @@ -961,6 +954,20 @@ class FacebookGraphMixin(OAuth2Mixin): .. testoutput:: :hide: + This method returns a dictionary which may contain the following fields: + + * ``access_token``, a string which may be passed to `facebook_request` + * ``session_expires``, an integer encoded as a string representing + the time until the access token expires in seconds. This field should + be used like ``int(user['session_expires'])``; in a future version of + Tornado it will change from a string to an integer. + * ``id``, ``name``, ``first_name``, ``last_name``, ``locale``, ``picture``, + ``link``, plus any fields named in the ``extra_fields`` argument. These + fields are copied from the Facebook graph API `user object `_ + + .. versionchanged:: 4.5 + The ``session_expires`` field was updated to support changes made to the + Facebook API in March 2017. 
""" http = self.get_auth_http_client() args = { @@ -985,10 +992,10 @@ class FacebookGraphMixin(OAuth2Mixin): future.set_exception(AuthError('Facebook auth error: %s' % str(response))) return - args = urlparse.parse_qs(escape.native_str(response.body)) + args = escape.json_decode(response.body) session = { - "access_token": args["access_token"][-1], - "expires": args.get("expires") + "access_token": args.get("access_token"), + "expires_in": args.get("expires_in") } self.facebook_request( @@ -996,6 +1003,9 @@ class FacebookGraphMixin(OAuth2Mixin): callback=functools.partial( self._on_get_user_info, future, session, fields), access_token=session["access_token"], + appsecret_proof=hmac.new(key=client_secret.encode('utf8'), + msg=session["access_token"].encode('utf8'), + digestmod=hashlib.sha256).hexdigest(), fields=",".join(fields) ) @@ -1008,7 +1018,12 @@ class FacebookGraphMixin(OAuth2Mixin): for field in fields: fieldmap[field] = user.get(field) - fieldmap.update({"access_token": session["access_token"], "session_expires": session.get("expires")}) + # session_expires is converted to str for compatibility with + # older versions in which the server used url-encoding and + # this code simply returned the string verbatim. + # This should change in Tornado 5.0. + fieldmap.update({"access_token": session["access_token"], + "session_expires": str(session.get("expires_in"))}) future.set_result(fieldmap) @_auth_return_future diff --git a/server/www/packages/packages-common/tornado/autoreload.py b/server/www/packages/packages-common/tornado/autoreload.py index 1cbf26c..60571ef 100644 --- a/server/www/packages/packages-common/tornado/autoreload.py +++ b/server/www/packages/packages-common/tornado/autoreload.py @@ -45,7 +45,7 @@ incorrectly. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import os import sys @@ -83,7 +83,7 @@ if __name__ == "__main__": import functools import logging import os -import pkgutil +import pkgutil # type: ignore import sys import traceback import types @@ -103,16 +103,12 @@ except ImportError: # os.execv is broken on Windows and can't properly parse command line # arguments and executable name if they contain whitespaces. subprocess # fixes that behavior. -# This distinction is also important because when we use execv, we want to -# close the IOLoop and all its file descriptors, to guard against any -# file descriptors that were not set CLOEXEC. When execv is not available, -# we must not close the IOLoop because we want the process to exit cleanly. _has_execv = sys.platform != 'win32' _watched_files = set() _reload_hooks = [] _reload_attempted = False -_io_loops = weakref.WeakKeyDictionary() +_io_loops = weakref.WeakKeyDictionary() # type: ignore def start(io_loop=None, check_time=500): @@ -127,8 +123,6 @@ def start(io_loop=None, check_time=500): _io_loops[io_loop] = True if len(_io_loops) > 1: gen_log.warning("tornado.autoreload started more than once in the same process") - if _has_execv: - add_reload_hook(functools.partial(io_loop.close, all_fds=True)) modify_times = {} callback = functools.partial(_reload_on_update, modify_times) scheduler = ioloop.PeriodicCallback(callback, check_time, io_loop=io_loop) @@ -249,6 +243,7 @@ def _reload(): # unwind, so just exit uncleanly. os._exit(0) + _USAGE = """\ Usage: python -m tornado.autoreload -m module.to.run [args...] 
diff --git a/server/www/packages/packages-common/tornado/concurrent.py b/server/www/packages/packages-common/tornado/concurrent.py index 5f8cdc4..667e6b1 100644 --- a/server/www/packages/packages-common/tornado/concurrent.py +++ b/server/www/packages/packages-common/tornado/concurrent.py @@ -21,7 +21,7 @@ a mostly-compatible `Future` class designed for use from coroutines, as well as some utility functions for interacting with the `concurrent.futures` package. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import functools import platform @@ -31,13 +31,18 @@ import sys from tornado.log import app_log from tornado.stack_context import ExceptionStackContext, wrap -from tornado.util import raise_exc_info, ArgReplacer +from tornado.util import raise_exc_info, ArgReplacer, is_finalizing try: from concurrent import futures except ImportError: futures = None +try: + import typing +except ImportError: + typing = None + # Can the garbage collector handle cycles that include __del__ methods? # This is true in cpython beginning with version 3.4 (PEP 442). @@ -118,8 +123,8 @@ class _TracebackLogger(object): self.exc_info = None self.formatted_tb = None - def __del__(self): - if self.formatted_tb: + def __del__(self, is_finalizing=is_finalizing): + if not is_finalizing() and self.formatted_tb: app_log.error('Future exception was never retrieved: %s', ''.join(self.formatted_tb).rstrip()) @@ -229,7 +234,10 @@ class Future(object): if self._result is not None: return self._result if self._exc_info is not None: - raise_exc_info(self._exc_info) + try: + raise_exc_info(self._exc_info) + finally: + self = None self._check_done() return self._result @@ -324,8 +332,8 @@ class Future(object): # cycle are never destroyed. It's no longer the case on Python 3.4 thanks to # the PEP 442. 
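# The ``__del__(self, is_finalizing=is_finalizing)`` signature above is a
# shutdown guard: binding the helper as a default argument means the
# destructor can still call it while the interpreter is tearing down module
# globals. A minimal sketch of the same pattern, assuming ``sys.is_finalizing``
# where available (tornado.util supplies its own helper).
import sys

_is_finalizing = getattr(sys, "is_finalizing", lambda: False)


class _Holder(object):
    def __del__(self, is_finalizing=_is_finalizing):
        if is_finalizing():
            return  # interpreter shutdown; stay quiet
        print("collected a _Holder instance")


_Holder()  # the temporary is collected immediately on CPython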
if _GC_CYCLE_FINALIZERS: - def __del__(self): - if not self._log_traceback: + def __del__(self, is_finalizing=is_finalizing): + if is_finalizing() or not self._log_traceback: # set_exception() was not called, or result() or exception() # has consumed the exception return @@ -335,10 +343,11 @@ class Future(object): app_log.error('Future %r exception was never retrieved: %s', self, ''.join(tb).rstrip()) + TracebackFuture = Future if futures is None: - FUTURES = Future + FUTURES = Future # type: typing.Union[type, typing.Tuple[type, ...]] else: FUTURES = (futures.Future, Future) @@ -359,6 +368,7 @@ class DummyExecutor(object): def shutdown(self, wait=True): pass + dummy_executor = DummyExecutor() @@ -500,8 +510,9 @@ def chain_future(a, b): assert future is a if b.done(): return - if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture) - and a.exc_info() is not None): + if (isinstance(a, TracebackFuture) and + isinstance(b, TracebackFuture) and + a.exc_info() is not None): b.set_exc_info(a.exc_info()) elif a.exception() is not None: b.set_exception(a.exception()) diff --git a/server/www/packages/packages-common/tornado/curl_httpclient.py b/server/www/packages/packages-common/tornado/curl_httpclient.py index 22f2502..eef4a17 100644 --- a/server/www/packages/packages-common/tornado/curl_httpclient.py +++ b/server/www/packages/packages-common/tornado/curl_httpclient.py @@ -16,12 +16,12 @@ """Non-blocking HTTP client implementation using pycurl.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import collections import functools import logging -import pycurl +import pycurl # type: ignore import threading import time from io import BytesIO @@ -221,6 +221,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): # _process_queue() is called from # _finish_pending_requests the exceptions have # nowhere to go. + self._free_list.append(curl) callback(HTTPResponse( request=request, code=599, @@ -277,6 +278,9 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): if curl_log.isEnabledFor(logging.DEBUG): curl.setopt(pycurl.VERBOSE, 1) curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug) + if hasattr(pycurl, 'PROTOCOLS'): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12) + curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS) + curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS) return curl def _curl_setup_request(self, curl, request, buffer, headers): @@ -341,6 +345,15 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): credentials = '%s:%s' % (request.proxy_username, request.proxy_password) curl.setopt(pycurl.PROXYUSERPWD, credentials) + + if (request.proxy_auth_mode is None or + request.proxy_auth_mode == "basic"): + curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC) + elif request.proxy_auth_mode == "digest": + curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST) + else: + raise ValueError( + "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode) else: curl.setopt(pycurl.PROXY, '') curl.unsetopt(pycurl.PROXYUSERPWD) @@ -461,7 +474,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): request.prepare_curl_callback(curl) def _curl_header_callback(self, headers, header_callback, header_line): - header_line = native_str(header_line) + header_line = native_str(header_line.decode('latin1')) if header_callback is not None: self.io_loop.add_callback(header_callback, header_line) # header_line as returned by curl includes the end-of-line characters. 
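# Usage sketch for the ``proxy_auth_mode`` handling added above; the proxy
# host and credentials are hypothetical, and the curl-based client has to be
# selected because ``simple_httpclient`` does not support proxies.
from tornado import httpclient, ioloop

httpclient.AsyncHTTPClient.configure(
    "tornado.curl_httpclient.CurlAsyncHTTPClient")

request = httpclient.HTTPRequest(
    "http://example.com/",
    proxy_host="proxy.example.com",   # hypothetical proxy
    proxy_port=3128,
    proxy_username="user",
    proxy_password="secret",
    proxy_auth_mode="digest",         # "basic" remains the default
)


def handle_response(response):
    print(response.code)
    ioloop.IOLoop.current().stop()


httpclient.AsyncHTTPClient().fetch(request, handle_response)
ioloop.IOLoop.current().start()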
diff --git a/server/www/packages/packages-common/tornado/escape.py b/server/www/packages/packages-common/tornado/escape.py index 2f04b46..2ca3fe3 100644 --- a/server/www/packages/packages-common/tornado/escape.py +++ b/server/www/packages/packages-common/tornado/escape.py @@ -20,34 +20,28 @@ Also includes a few other miscellaneous string manipulation functions that have crept in over time. """ -from __future__ import absolute_import, division, print_function, with_statement - -import re -import sys - -from tornado.util import unicode_type, basestring_type, u - -try: - from urllib.parse import parse_qs as _parse_qs # py3 -except ImportError: - from urlparse import parse_qs as _parse_qs # Python 2.6+ - -try: - import htmlentitydefs # py2 -except ImportError: - import html.entities as htmlentitydefs # py3 - -try: - import urllib.parse as urllib_parse # py3 -except ImportError: - import urllib as urllib_parse # py2 +from __future__ import absolute_import, division, print_function import json +import re + +from tornado.util import PY3, unicode_type, basestring_type + +if PY3: + from urllib.parse import parse_qs as _parse_qs + import html.entities as htmlentitydefs + import urllib.parse as urllib_parse + unichr = chr +else: + from urlparse import parse_qs as _parse_qs + import htmlentitydefs + import urllib as urllib_parse try: - unichr -except NameError: - unichr = chr + import typing # noqa +except ImportError: + pass + _XHTML_ESCAPE_RE = re.compile('[&<>"\']') _XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', @@ -116,7 +110,7 @@ def url_escape(value, plus=True): # python 3 changed things around enough that we need two separate # implementations of url_unescape. We also need our own implementation # of parse_qs since python 3's version insists on decoding everything. -if sys.version_info[0] < 3: +if not PY3: def url_unescape(value, encoding='utf-8', plus=True): """Decodes the given value from a URL. @@ -191,6 +185,7 @@ _UTF8_TYPES = (bytes, type(None)) def utf8(value): + # type: (typing.Union[bytes,unicode_type,None])->typing.Union[bytes,None] """Converts a string argument to a byte string. If the argument is already a byte string or None, it is returned unchanged. @@ -204,6 +199,7 @@ ) return value.encode("utf-8") + _TO_UNICODE_TYPES = (unicode_type, type(None)) @@ -221,6 +217,7 @@ ) return value.decode("utf-8") + # to_unicode was previously named _unicode not because it was private, # but to avoid conflicts with the built-in unicode() function/type _unicode = to_unicode @@ -269,6 +266,7 @@ recursive_unicode(obj): else: return obj + # I originally used the regex from # http://daringfireball.net/2010/07/improved_regex_for_matching_urls # but it gets all exponential on certain patterns (such as too many trailing @@ -366,7 +364,7 @@ # have a status bar, such as Safari by default) params += ' title="%s"' % href - return u('<a href="%s"%s>%s</a>') % (href, params, url) + return u'<a href="%s"%s>%s</a>' % (href, params, url) # First HTML-escape so that our strings are all safe.
# The regex is modified to avoid character entites other than & so @@ -396,4 +394,5 @@ def _build_unicode_map(): unicode_map[name] = unichr(value) return unicode_map + _HTML_UNICODE_MAP = _build_unicode_map() diff --git a/server/www/packages/packages-common/tornado/gen.py b/server/www/packages/packages-common/tornado/gen.py index bf184e5..99f9106 100644 --- a/server/www/packages/packages-common/tornado/gen.py +++ b/server/www/packages/packages-common/tornado/gen.py @@ -74,7 +74,7 @@ See the `convert_yielded` function to extend this mechanism. via ``singledispatch``. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import collections import functools @@ -83,16 +83,18 @@ import os import sys import textwrap import types +import weakref from tornado.concurrent import Future, TracebackFuture, is_future, chain_future from tornado.ioloop import IOLoop from tornado.log import app_log from tornado import stack_context -from tornado.util import raise_exc_info +from tornado.util import PY3, raise_exc_info try: try: - from functools import singledispatch # py34+ + # py34+ + from functools import singledispatch # type: ignore except ImportError: from singledispatch import singledispatch # backport except ImportError: @@ -108,12 +110,14 @@ except ImportError: try: try: - from collections.abc import Generator as GeneratorType # py35+ + # py35+ + from collections.abc import Generator as GeneratorType # type: ignore except ImportError: - from backports_abc import Generator as GeneratorType + from backports_abc import Generator as GeneratorType # type: ignore try: - from inspect import isawaitable # py35+ + # py35+ + from inspect import isawaitable # type: ignore except ImportError: from backports_abc import isawaitable except ImportError: @@ -121,12 +125,12 @@ except ImportError: raise from types import GeneratorType - def isawaitable(x): + def isawaitable(x): # type: ignore return False -try: - import builtins # py3 -except ImportError: +if PY3: + import builtins +else: import __builtin__ as builtins @@ -242,6 +246,26 @@ def coroutine(func, replace_callback=True): return _make_coroutine_wrapper(func, replace_callback=True) +# Ties lifetime of runners to their result futures. Github Issue #1769 +# Generators, like any object in Python, must be strong referenced +# in order to not be cleaned up by the garbage collector. When using +# coroutines, the Runner object is what strong-refs the inner +# generator. However, the only item that strong-reffed the Runner +# was the last Future that the inner generator yielded (via the +# Future's internal done_callback list). Usually this is enough, but +# it is also possible for this Future to not have any strong references +# other than other objects referenced by the Runner object (usually +# when using other callback patterns and/or weakrefs). In this +# situation, if a garbage collection ran, a cycle would be detected and +# Runner objects could be destroyed along with their inner generators +# and everything in their local scope. +# This map provides strong references to Runner objects as long as +# their result future objects also have strong references (typically +# from the parent coroutine's Runner). This keeps the coroutine's +# Runner alive. +_futures_to_runners = weakref.WeakKeyDictionary() + + def _make_coroutine_wrapper(func, replace_callback): """The inner workings of ``@gen.coroutine`` and ``@gen.engine``. 
@@ -251,10 +275,11 @@ def _make_coroutine_wrapper(func, replace_callback): """ # On Python 3.5, set the coroutine flag on our generator, to allow it # to be used with 'await'. + wrapped = func if hasattr(types, 'coroutine'): func = types.coroutine(func) - @functools.wraps(func) + @functools.wraps(wrapped) def wrapper(*args, **kwargs): future = TracebackFuture() @@ -291,7 +316,8 @@ def _make_coroutine_wrapper(func, replace_callback): except Exception: future.set_exc_info(sys.exc_info()) else: - Runner(result, future, yielded) + _futures_to_runners[future] = Runner(result, future, yielded) + yielded = None try: return future finally: @@ -306,9 +332,21 @@ def _make_coroutine_wrapper(func, replace_callback): future = None future.set_result(result) return future + + wrapper.__wrapped__ = wrapped + wrapper.__tornado_coroutine__ = True return wrapper +def is_coroutine_function(func): + """Return whether *func* is a coroutine function, i.e. a function + wrapped with `~.gen.coroutine`. + + .. versionadded:: 4.5 + """ + return getattr(func, '__tornado_coroutine__', False) + + class Return(Exception): """Special exception to return a value from a `coroutine`. @@ -682,6 +720,7 @@ def multi(children, quiet_exceptions=()): else: return multi_future(children, quiet_exceptions=quiet_exceptions) + Multi = multi @@ -830,7 +869,7 @@ def maybe_future(x): def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()): - """Wraps a `.Future` in a timeout. + """Wraps a `.Future` (or other yieldable object) in a timeout. Raises `TimeoutError` if the input future does not complete before ``timeout``, which may be specified in any form allowed by @@ -841,15 +880,18 @@ def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()): will be logged unless it is of a type contained in ``quiet_exceptions`` (which may be an exception type or a sequence of types). - Currently only supports Futures, not other `YieldPoint` classes. + Does not support `YieldPoint` subclasses. .. versionadded:: 4.0 .. versionchanged:: 4.1 Added the ``quiet_exceptions`` argument and the logging of unhandled exceptions. + + .. versionchanged:: 4.4 + Added support for yieldable objects other than `.Future`. """ - # TODO: allow yield points in addition to futures? + # TODO: allow YieldPoints in addition to other yieldables? # Tricky to do with stack_context semantics. # # It's tempting to optimize this by cancelling the input future on timeout @@ -857,6 +899,7 @@ def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()): # one waiting on the input future, so cancelling it might disrupt other # callers and B) concurrent futures can only be cancelled while they are # in the queue, so cancellation cannot reliably bound our waiting time. + future = convert_yielded(future) result = Future() chain_future(future, result) if io_loop is None: @@ -923,6 +966,9 @@ coroutines that are likely to yield Futures that are ready instantly. Usage: ``yield gen.moment`` .. versionadded:: 4.0 + +.. deprecated:: 4.5 + ``yield None`` is now equivalent to ``yield gen.moment``. """ moment.set_result(None) @@ -953,6 +999,7 @@ class Runner(object): # of the coroutine. 
self.stack_context_deactivate = None if self.handle_yield(first_yielded): + gen = result_future = first_yielded = None self.run() def register_callback(self, key): @@ -1009,10 +1056,15 @@ class Runner(object): except Exception: self.had_exception = True exc_info = sys.exc_info() + future = None if exc_info is not None: - yielded = self.gen.throw(*exc_info) - exc_info = None + try: + yielded = self.gen.throw(*exc_info) + finally: + # Break up a reference to itself + # for faster GC on CPython. + exc_info = None else: yielded = self.gen.send(value) @@ -1045,6 +1097,7 @@ class Runner(object): return if not self.handle_yield(yielded): return + yielded = None finally: self.running = False @@ -1093,8 +1146,12 @@ class Runner(object): self.future.set_exc_info(sys.exc_info()) if not self.future.done() or self.future is moment: + def inner(f): + # Break a reference cycle to speed GC. + f = None # noqa + self.run() self.io_loop.add_future( - self.future, lambda f: self.run()) + self.future, inner) return False return True @@ -1116,6 +1173,7 @@ class Runner(object): self.stack_context_deactivate() self.stack_context_deactivate = None + Arguments = collections.namedtuple('Arguments', ['args', 'kwargs']) @@ -1135,6 +1193,7 @@ def _argument_adapter(callback): callback(None) return wrapper + # Convert Awaitables into Futures. It is unfortunately possible # to have infinite recursion here if those Awaitables assume that # we're using a different coroutine runner and yield objects @@ -1212,7 +1271,9 @@ def convert_yielded(yielded): .. versionadded:: 4.1 """ # Lists and dicts containing YieldPoints were handled earlier. - if isinstance(yielded, (list, dict)): + if yielded is None: + return moment + elif isinstance(yielded, (list, dict)): return multi(yielded) elif is_future(yielded): return yielded @@ -1221,6 +1282,7 @@ def convert_yielded(yielded): else: raise BadYieldError("yielded unknown object %r" % (yielded,)) + if singledispatch is not None: convert_yielded = singledispatch(convert_yielded) diff --git a/server/www/packages/packages-common/tornado/http1connection.py b/server/www/packages/packages-common/tornado/http1connection.py index 1c57706..53744ec 100644 --- a/server/www/packages/packages-common/tornado/http1connection.py +++ b/server/www/packages/packages-common/tornado/http1connection.py @@ -19,7 +19,7 @@ .. 
versionadded:: 4.0 """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import re @@ -30,7 +30,7 @@ from tornado import httputil from tornado import iostream from tornado.log import gen_log, app_log from tornado import stack_context -from tornado.util import GzipDecompressor +from tornado.util import GzipDecompressor, PY3 class _QuietException(Exception): @@ -257,6 +257,7 @@ class HTTP1Connection(httputil.HTTPConnection): if need_delegate_close: with _ExceptionLoggingContext(app_log): delegate.on_connection_close() + header_future = None self._clear_callbacks() raise gen.Return(True) @@ -342,7 +343,7 @@ class HTTP1Connection(httputil.HTTPConnection): 'Transfer-Encoding' not in headers) else: self._response_start_line = start_line - lines.append(utf8('HTTP/1.1 %s %s' % (start_line[1], start_line[2]))) + lines.append(utf8('HTTP/1.1 %d %s' % (start_line[1], start_line[2]))) self._chunking_output = ( # TODO: should this use # self._request_start_line.version or @@ -351,7 +352,7 @@ class HTTP1Connection(httputil.HTTPConnection): # 304 responses have no body (not even a zero-length body), and so # should not have either Content-Length or Transfer-Encoding. # headers. - start_line.code != 304 and + start_line.code not in (204, 304) and # No need to chunk the output if a Content-Length is specified. 'Content-Length' not in headers and # Applications are discouraged from touching Transfer-Encoding, @@ -359,8 +360,8 @@ class HTTP1Connection(httputil.HTTPConnection): 'Transfer-Encoding' not in headers) # If a 1.0 client asked for keep-alive, add the header. if (self._request_start_line.version == 'HTTP/1.0' and - (self._request_headers.get('Connection', '').lower() - == 'keep-alive')): + (self._request_headers.get('Connection', '').lower() == + 'keep-alive')): headers['Connection'] = 'Keep-Alive' if self._chunking_output: headers['Transfer-Encoding'] = 'chunked' @@ -372,7 +373,14 @@ class HTTP1Connection(httputil.HTTPConnection): self._expected_content_remaining = int(headers['Content-Length']) else: self._expected_content_remaining = None - lines.extend([utf8(n) + b": " + utf8(v) for n, v in headers.get_all()]) + # TODO: headers are supposed to be of type str, but we still have some + # cases that let bytes slip through. Remove these native_str calls when those + # are fixed. + header_lines = (native_str(n) + ": " + native_str(v) for n, v in headers.get_all()) + if PY3: + lines.extend(l.encode('latin1') for l in header_lines) + else: + lines.extend(header_lines) for line in lines: if b'\n' in line: raise ValueError('Newline in header: ' + repr(line)) @@ -479,9 +487,11 @@ class HTTP1Connection(httputil.HTTPConnection): connection_header = connection_header.lower() if start_line.version == "HTTP/1.1": return connection_header != "close" - elif ("Content-Length" in headers - or headers.get("Transfer-Encoding", "").lower() == "chunked" - or start_line.method in ("HEAD", "GET")): + elif ("Content-Length" in headers or + headers.get("Transfer-Encoding", "").lower() == "chunked" or + getattr(start_line, 'method', None) in ("HEAD", "GET")): + # start_line may be a request or response start line; only + # the former has a method attribute. 
return connection_header == "keep-alive" return False @@ -531,7 +541,13 @@ class HTTP1Connection(httputil.HTTPConnection): "Multiple unequal Content-Lengths: %r" % headers["Content-Length"]) headers["Content-Length"] = pieces[0] - content_length = int(headers["Content-Length"]) + + try: + content_length = int(headers["Content-Length"]) + except ValueError: + # Handles non-integer Content-Length value. + raise httputil.HTTPInputError( + "Only integer Content-Length is allowed: %s" % headers["Content-Length"]) if content_length > self._max_body_size: raise httputil.HTTPInputError("Content-Length too long") @@ -550,7 +566,7 @@ class HTTP1Connection(httputil.HTTPConnection): if content_length is not None: return self._read_fixed_body(content_length, delegate) - if headers.get("Transfer-Encoding") == "chunked": + if headers.get("Transfer-Encoding", "").lower() == "chunked": return self._read_chunked_body(delegate) if self.is_client: return self._read_body_until_close(delegate) diff --git a/server/www/packages/packages-common/tornado/httpclient.py b/server/www/packages/packages-common/tornado/httpclient.py index 9179227..8436ece 100644 --- a/server/www/packages/packages-common/tornado/httpclient.py +++ b/server/www/packages/packages-common/tornado/httpclient.py @@ -25,7 +25,7 @@ to switch to ``curl_httpclient`` for reasons such as the following: Note that if you are using ``curl_httpclient``, it is highly recommended that you use a recent version of ``libcurl`` and ``pycurl``. Currently the minimum supported version of libcurl is -7.21.1, and the minimum version of pycurl is 7.18.2. It is highly +7.22.0, and the minimum version of pycurl is 7.18.2. It is highly recommended that your ``libcurl`` installation is built with asynchronous DNS resolver (threaded or c-ares), otherwise you may encounter various problems with request timeouts (for more @@ -38,7 +38,7 @@ To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup:: AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import functools import time @@ -61,7 +61,7 @@ class HTTPClient(object): http_client = httpclient.HTTPClient() try: response = http_client.fetch("http://www.google.com/") - print response.body + print(response.body) except httpclient.HTTPError as e: # HTTPError is raised for non-200 responses; the response # can be found in e.response. @@ -108,14 +108,14 @@ class AsyncHTTPClient(Configurable): Example usage:: - def handle_request(response): + def handle_response(response): if response.error: - print "Error:", response.error + print("Error: %s" % response.error) else: - print response.body + print(response.body) http_client = AsyncHTTPClient() - http_client.fetch("http://www.google.com/", handle_request) + http_client.fetch("http://www.google.com/", handle_response) The constructor for this class is magic in several respects: It actually creates an instance of an implementation-specific @@ -211,10 +211,12 @@ class AsyncHTTPClient(Configurable): kwargs: ``HTTPRequest(request, **kwargs)`` This method returns a `.Future` whose result is an - `HTTPResponse`. By default, the ``Future`` will raise an `HTTPError` - if the request returned a non-200 response code. Instead, if - ``raise_error`` is set to False, the response will always be - returned regardless of the response code. + `HTTPResponse`. 
By default, the ``Future`` will raise an + `HTTPError` if the request returned a non-200 response code + (other errors may also be raised if the server could not be + contacted). Instead, if ``raise_error`` is set to False, the + response will always be returned regardless of the response + code. If a ``callback`` is given, it will be invoked with the `HTTPResponse`. In the callback interface, `HTTPError` is not automatically raised. @@ -225,6 +227,9 @@ class AsyncHTTPClient(Configurable): raise RuntimeError("fetch() called on closed AsyncHTTPClient") if not isinstance(request, HTTPRequest): request = HTTPRequest(url=request, **kwargs) + else: + if kwargs: + raise ValueError("kwargs can't be used if request is an HTTPRequest object") # We may modify this (to add Host, Accept-Encoding, etc), # so make sure we don't modify the caller's object. This is also # where normal dicts get converted to HTTPHeaders objects. @@ -305,10 +310,10 @@ class HTTPRequest(object): network_interface=None, streaming_callback=None, header_callback=None, prepare_curl_callback=None, proxy_host=None, proxy_port=None, proxy_username=None, - proxy_password=None, allow_nonstandard_methods=None, - validate_cert=None, ca_certs=None, - allow_ipv6=None, - client_key=None, client_cert=None, body_producer=None, + proxy_password=None, proxy_auth_mode=None, + allow_nonstandard_methods=None, validate_cert=None, + ca_certs=None, allow_ipv6=None, client_key=None, + client_cert=None, body_producer=None, expect_100_continue=False, decompress_response=None, ssl_options=None): r"""All parameters except ``url`` are optional. @@ -336,13 +341,15 @@ class HTTPRequest(object): Allowed values are implementation-defined; ``curl_httpclient`` supports "basic" and "digest"; ``simple_httpclient`` only supports "basic" - :arg float connect_timeout: Timeout for initial connection in seconds - :arg float request_timeout: Timeout for entire request in seconds + :arg float connect_timeout: Timeout for initial connection in seconds, + default 20 seconds + :arg float request_timeout: Timeout for entire request in seconds, + default 20 seconds :arg if_modified_since: Timestamp for ``If-Modified-Since`` header :type if_modified_since: `datetime` or `float` :arg bool follow_redirects: Should redirects be followed automatically - or return the 3xx response? - :arg int max_redirects: Limit for ``follow_redirects`` + or return the 3xx response? Default True. + :arg int max_redirects: Limit for ``follow_redirects``, default 5. :arg string user_agent: String to send as ``User-Agent`` header :arg bool decompress_response: Request a compressed response from the server and decompress it after downloading. Default is True. @@ -367,16 +374,18 @@ class HTTPRequest(object): a ``pycurl.Curl`` object to allow the application to make additional ``setopt`` calls. :arg string proxy_host: HTTP proxy hostname. To use proxies, - ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username`` and - ``proxy_pass`` are optional. Proxies are currently only supported - with ``curl_httpclient``. + ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username``, + ``proxy_pass`` and ``proxy_auth_mode`` are optional. Proxies are + currently only supported with ``curl_httpclient``. :arg int proxy_port: HTTP proxy port :arg string proxy_username: HTTP proxy username :arg string proxy_password: HTTP proxy password + :arg string proxy_auth_mode: HTTP proxy Authentication mode; + default is "basic". 
supports "basic" and "digest" :arg bool allow_nonstandard_methods: Allow unknown values for ``method`` - argument? + argument? Default is False. :arg bool validate_cert: For HTTPS requests, validate the server's - certificate? + certificate? Default is True. :arg string ca_certs: filename of CA certificates in PEM format, or None to use defaults. See note below when used with ``curl_httpclient``. @@ -414,6 +423,9 @@ class HTTPRequest(object): .. versionadded:: 4.2 The ``ssl_options`` argument. + + .. versionadded:: 4.5 + The ``proxy_auth_mode`` argument. """ # Note that some of these attributes go through property setters # defined below. @@ -425,6 +437,7 @@ class HTTPRequest(object): self.proxy_port = proxy_port self.proxy_username = proxy_username self.proxy_password = proxy_password + self.proxy_auth_mode = proxy_auth_mode self.url = url self.method = method self.body = body @@ -525,7 +538,7 @@ class HTTPResponse(object): * buffer: ``cStringIO`` object for response body - * body: response body as string (created on demand from ``self.buffer``) + * body: response body as bytes (created on demand from ``self.buffer``) * error: Exception object, if any @@ -567,7 +580,8 @@ class HTTPResponse(object): self.request_time = request_time self.time_info = time_info or {} - def _get_body(self): + @property + def body(self): if self.buffer is None: return None elif self._body is None: @@ -575,8 +589,6 @@ class HTTPResponse(object): return self._body - body = property(_get_body) - def rethrow(self): """If there was an error on the request, raise an `HTTPError`.""" if self.error: @@ -610,6 +622,12 @@ class HTTPError(Exception): def __str__(self): return "HTTP %d: %s" % (self.code, self.message) + # There is a cyclic reference between self and self.response, + # which breaks the default __repr__ implementation. + # (especially on pypy, which doesn't have the same recursion + # detection as cpython). + __repr__ = __str__ + class _RequestProxy(object): """Combines an object with a dictionary of defaults. @@ -655,5 +673,6 @@ def main(): print(native_str(response.body)) client.close() + if __name__ == "__main__": main() diff --git a/server/www/packages/packages-common/tornado/httpserver.py b/server/www/packages/packages-common/tornado/httpserver.py index ff235fe..d757be1 100644 --- a/server/www/packages/packages-common/tornado/httpserver.py +++ b/server/www/packages/packages-common/tornado/httpserver.py @@ -26,7 +26,7 @@ class except to start a server at the beginning of the process to `tornado.httputil.HTTPServerRequest`. The old name remains as an alias. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import socket @@ -62,6 +62,13 @@ class HTTPServer(TCPServer, Configurable, if Tornado is run behind an SSL-decoding proxy that does not set one of the supported ``xheaders``. + By default, when parsing the ``X-Forwarded-For`` header, Tornado will + select the last (i.e., the closest) address on the list of hosts as the + remote host IP address. To select the next server in the chain, a list of + trusted downstream hosts may be passed as the ``trusted_downstream`` + argument. These hosts will be skipped when parsing the ``X-Forwarded-For`` + header. + To make this server serve SSL traffic, send the ``ssl_options`` keyword argument with an `ssl.SSLContext` object. 
For compatibility with older versions of Python ``ssl_options`` may also be a dictionary of keyword @@ -124,6 +131,9 @@ class HTTPServer(TCPServer, Configurable, .. versionchanged:: 4.2 `HTTPServer` is now a subclass of `tornado.util.Configurable`. + + .. versionchanged:: 4.5 + Added the ``trusted_downstream`` argument. """ def __init__(self, *args, **kwargs): # Ignore args to __init__; real initialization belongs in @@ -138,7 +148,8 @@ class HTTPServer(TCPServer, Configurable, decompress_request=False, chunk_size=None, max_header_size=None, idle_connection_timeout=None, body_timeout=None, - max_body_size=None, max_buffer_size=None): + max_body_size=None, max_buffer_size=None, + trusted_downstream=None): self.request_callback = request_callback self.no_keep_alive = no_keep_alive self.xheaders = xheaders @@ -149,11 +160,13 @@ class HTTPServer(TCPServer, Configurable, max_header_size=max_header_size, header_timeout=idle_connection_timeout or 3600, max_body_size=max_body_size, - body_timeout=body_timeout) + body_timeout=body_timeout, + no_keep_alive=no_keep_alive) TCPServer.__init__(self, io_loop=io_loop, ssl_options=ssl_options, max_buffer_size=max_buffer_size, read_chunk_size=chunk_size) self._connections = set() + self.trusted_downstream = trusted_downstream @classmethod def configurable_base(cls): @@ -172,21 +185,55 @@ class HTTPServer(TCPServer, Configurable, def handle_stream(self, stream, address): context = _HTTPRequestContext(stream, address, - self.protocol) + self.protocol, + self.trusted_downstream) conn = HTTP1ServerConnection( stream, self.conn_params, context) self._connections.add(conn) conn.start_serving(self) def start_request(self, server_conn, request_conn): - return _ServerRequestAdapter(self, server_conn, request_conn) + if isinstance(self.request_callback, httputil.HTTPServerConnectionDelegate): + delegate = self.request_callback.start_request(server_conn, request_conn) + else: + delegate = _CallableAdapter(self.request_callback, request_conn) + + if self.xheaders: + delegate = _ProxyAdapter(delegate, request_conn) + + return delegate def on_close(self, server_conn): self._connections.remove(server_conn) +class _CallableAdapter(httputil.HTTPMessageDelegate): + def __init__(self, request_callback, request_conn): + self.connection = request_conn + self.request_callback = request_callback + self.request = None + self.delegate = None + self._chunks = [] + + def headers_received(self, start_line, headers): + self.request = httputil.HTTPServerRequest( + connection=self.connection, start_line=start_line, + headers=headers) + + def data_received(self, chunk): + self._chunks.append(chunk) + + def finish(self): + self.request.body = b''.join(self._chunks) + self.request._parse_body() + self.request_callback(self.request) + + def on_connection_close(self): + self._chunks = None + + class _HTTPRequestContext(object): - def __init__(self, stream, address, protocol): + def __init__(self, stream, address, protocol, trusted_downstream=None): self.address = address # Save the socket's address family now so we know how to # interpret self.address even after the stream is closed @@ -210,6 +257,7 @@ class _HTTPRequestContext(object): self.protocol = "http" self._orig_remote_ip = self.remote_ip self._orig_protocol = self.protocol + self.trusted_downstream = set(trusted_downstream or []) def __str__(self): if self.address_family in (socket.AF_INET, socket.AF_INET6): @@ -226,7 +274,10 @@ class _HTTPRequestContext(object): """Rewrite the ``remote_ip`` and ``protocol`` fields.""" # Squid uses 
X-Forwarded-For, others use X-Real-Ip ip = headers.get("X-Forwarded-For", self.remote_ip) - ip = ip.split(',')[-1].strip() + # Skip trusted downstream hosts in X-Forwarded-For list + for ip in (cand.strip() for cand in reversed(ip.split(','))): + if ip not in self.trusted_downstream: + break ip = headers.get("X-Real-Ip", ip) if netutil.is_valid_ip(ip): self.remote_ip = ip @@ -247,58 +298,28 @@ class _HTTPRequestContext(object): self.protocol = self._orig_protocol -class _ServerRequestAdapter(httputil.HTTPMessageDelegate): - """Adapts the `HTTPMessageDelegate` interface to the interface expected - by our clients. - """ - def __init__(self, server, server_conn, request_conn): - self.server = server +class _ProxyAdapter(httputil.HTTPMessageDelegate): + def __init__(self, delegate, request_conn): self.connection = request_conn - self.request = None - if isinstance(server.request_callback, - httputil.HTTPServerConnectionDelegate): - self.delegate = server.request_callback.start_request( - server_conn, request_conn) - self._chunks = None - else: - self.delegate = None - self._chunks = [] + self.delegate = delegate def headers_received(self, start_line, headers): - if self.server.xheaders: - self.connection.context._apply_xheaders(headers) - if self.delegate is None: - self.request = httputil.HTTPServerRequest( - connection=self.connection, start_line=start_line, - headers=headers) - else: - return self.delegate.headers_received(start_line, headers) + self.connection.context._apply_xheaders(headers) + return self.delegate.headers_received(start_line, headers) def data_received(self, chunk): - if self.delegate is None: - self._chunks.append(chunk) - else: - return self.delegate.data_received(chunk) + return self.delegate.data_received(chunk) def finish(self): - if self.delegate is None: - self.request.body = b''.join(self._chunks) - self.request._parse_body() - self.server.request_callback(self.request) - else: - self.delegate.finish() + self.delegate.finish() self._cleanup() def on_connection_close(self): - if self.delegate is None: - self._chunks = None - else: - self.delegate.on_connection_close() + self.delegate.on_connection_close() self._cleanup() def _cleanup(self): - if self.server.xheaders: - self.connection.context._unapply_xheaders() + self.connection.context._unapply_xheaders() HTTPRequest = httputil.HTTPServerRequest diff --git a/server/www/packages/packages-common/tornado/httputil.py b/server/www/packages/packages-common/tornado/httputil.py index 471df54..818ea91 100644 --- a/server/www/packages/packages-common/tornado/httputil.py +++ b/server/www/packages/packages-common/tornado/httputil.py @@ -20,7 +20,7 @@ This module also defines the `HTTPServerRequest` class which is exposed via `tornado.web.RequestHandler.request`. 
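# Sketch of the ``trusted_downstream`` selection added above: walk the
# ``X-Forwarded-For`` chain from right to left and keep the first address
# that is not a trusted downstream proxy. Header value and host set are
# hypothetical.
def pick_remote_ip(x_forwarded_for, trusted_downstream):
    for ip in (cand.strip() for cand in reversed(x_forwarded_for.split(','))):
        if ip not in trusted_downstream:
            break
    return ip


print(pick_remote_ip("203.0.113.7, 10.0.0.5, 10.0.0.6",
                     trusted_downstream={"10.0.0.5", "10.0.0.6"}))
# -> 203.0.113.7; passing trusted_downstream=["10.0.0.5", "10.0.0.6"]
# (together with xheaders=True) to HTTPServer gives the same behaviour.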
""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import calendar import collections @@ -33,33 +33,37 @@ import time from tornado.escape import native_str, parse_qs_bytes, utf8 from tornado.log import gen_log -from tornado.util import ObjectDict +from tornado.util import ObjectDict, PY3 -try: - import Cookie # py2 -except ImportError: - import http.cookies as Cookie # py3 +if PY3: + import http.cookies as Cookie + from http.client import responses + from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl +else: + import Cookie + from httplib import responses + from urllib import urlencode + from urlparse import urlparse, urlunparse, parse_qsl -try: - from httplib import responses # py2 -except ImportError: - from http.client import responses # py3 # responses is unused in this file, but we re-export it to other files. # Reference it so pyflakes doesn't complain. responses -try: - from urllib import urlencode # py2 -except ImportError: - from urllib.parse import urlencode # py3 - try: from ssl import SSLError except ImportError: # ssl is unavailable on app engine. - class SSLError(Exception): + class _SSLError(Exception): pass + # Hack around a mypy limitation. We can't simply put "type: ignore" + # on the class definition itself; must go through an assignment. + SSLError = _SSLError # type: ignore + +try: + import typing +except ImportError: + pass # RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line @@ -95,6 +99,7 @@ class _NormalizedHeaderCache(dict): del self[old_key] return normalized + _normalized_headers = _NormalizedHeaderCache(1000) @@ -127,8 +132,8 @@ class HTTPHeaders(collections.MutableMapping): Set-Cookie: C=D """ def __init__(self, *args, **kwargs): - self._dict = {} - self._as_list = {} + self._dict = {} # type: typing.Dict[str, str] + self._as_list = {} # type: typing.Dict[str, typing.List[str]] self._last_key = None if (len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], HTTPHeaders)): @@ -142,6 +147,7 @@ class HTTPHeaders(collections.MutableMapping): # new public methods def add(self, name, value): + # type: (str, str) -> None """Adds a new value for the given key.""" norm_name = _normalized_headers[name] self._last_key = norm_name @@ -158,6 +164,7 @@ class HTTPHeaders(collections.MutableMapping): return self._as_list.get(norm_name, []) def get_all(self): + # type: () -> typing.Iterable[typing.Tuple[str, str]] """Returns an iterable of all (name, value) pairs. If a header has multiple values, multiple pairs will be @@ -206,6 +213,7 @@ class HTTPHeaders(collections.MutableMapping): self._as_list[norm_name] = [value] def __getitem__(self, name): + # type: (str) -> str return self._dict[_normalized_headers[name]] def __delitem__(self, name): @@ -228,6 +236,14 @@ class HTTPHeaders(collections.MutableMapping): # the appearance that HTTPHeaders is a single container. __copy__ = copy + def __str__(self): + lines = [] + for name, value in self.get_all(): + lines.append("%s: %s\n" % (name, value)) + return "".join(lines) + + __unicode__ = __str__ + class HTTPServerRequest(object): """A single HTTP request. 
@@ -323,7 +339,7 @@ class HTTPServerRequest(object): """ def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None, body=None, host=None, files=None, connection=None, - start_line=None): + start_line=None, server_connection=None): if start_line is not None: method, uri, version = start_line self.method = method @@ -338,8 +354,10 @@ class HTTPServerRequest(object): self.protocol = getattr(context, 'protocol', "http") self.host = host or self.headers.get("Host") or "127.0.0.1" + self.host_name = split_host_and_port(self.host.lower())[0] self.files = files or {} self.connection = connection + self.server_connection = server_connection self._start_time = time.time() self._finish_time = None @@ -365,10 +383,18 @@ class HTTPServerRequest(object): self._cookies = Cookie.SimpleCookie() if "Cookie" in self.headers: try: - self._cookies.load( - native_str(self.headers["Cookie"])) + parsed = parse_cookie(self.headers["Cookie"]) except Exception: - self._cookies = {} + pass + else: + for k, v in parsed.items(): + try: + self._cookies[k] = v + except Exception: + # SimpleCookie imposes some restrictions on keys; + # parse_cookie does not. Discard any cookies + # with disallowed keys. + pass return self._cookies def write(self, chunk, callback=None): @@ -577,11 +603,28 @@ def url_concat(url, args): >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")]) 'http://example.com/foo?a=b&c=d&c=d2' """ - if not args: + if args is None: return url - if url[-1] not in ('?', '&'): - url += '&' if ('?' in url) else '?' - return url + urlencode(args) + parsed_url = urlparse(url) + if isinstance(args, dict): + parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True) + parsed_query.extend(args.items()) + elif isinstance(args, list) or isinstance(args, tuple): + parsed_query = parse_qsl(parsed_url.query, keep_blank_values=True) + parsed_query.extend(args) + else: + err = "'args' parameter should be dict, list or tuple. Not {0}".format( + type(args)) + raise TypeError(err) + final_query = urlencode(parsed_query) + url = urlunparse(( + parsed_url[0], + parsed_url[1], + parsed_url[2], + parsed_url[3], + final_query, + parsed_url[5])) + return url class HTTPFile(ObjectDict): @@ -743,7 +786,7 @@ def parse_multipart_form_data(boundary, data, arguments, files): name = disp_params["name"] if disp_params.get("filename"): ctype = headers.get("Content-Type", "application/unknown") - files.setdefault(name, []).append(HTTPFile( + files.setdefault(name, []).append(HTTPFile( # type: ignore filename=disp_params["filename"], body=value, content_type=ctype)) else: @@ -895,3 +938,84 @@ def split_host_and_port(netloc): host = netloc port = None return (host, port) + + +_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") +_QuotePatt = re.compile(r"[\\].") +_nulljoin = ''.join + + +def _unquote_cookie(str): + """Handle double quotes and escaping in cookie values. + + This method is copied verbatim from the Python 3.5 standard + library (http.cookies._unquote) so we don't have to depend on + non-public interfaces. + """ + # If there aren't any doublequotes, + # then there can't be any special characters. See RFC 2109. + if str is None or len(str) < 2: + return str + if str[0] != '"' or str[-1] != '"': + return str + + # We have to assume that we must decode this string. + # Down to work. + + # Remove the "s + str = str[1:-1] + + # Check for special sequences. 
Examples: + # \012 --> \n + # \" --> " + # + i = 0 + n = len(str) + res = [] + while 0 <= i < n: + o_match = _OctalPatt.search(str, i) + q_match = _QuotePatt.search(str, i) + if not o_match and not q_match: # Neither matched + res.append(str[i:]) + break + # else: + j = k = -1 + if o_match: + j = o_match.start(0) + if q_match: + k = q_match.start(0) + if q_match and (not o_match or k < j): # QuotePatt matched + res.append(str[i:k]) + res.append(str[k + 1]) + i = k + 2 + else: # OctalPatt matched + res.append(str[i:j]) + res.append(chr(int(str[j + 1:j + 4], 8))) + i = j + 4 + return _nulljoin(res) + + +def parse_cookie(cookie): + """Parse a ``Cookie`` HTTP header into a dict of name/value pairs. + + This function attempts to mimic browser cookie parsing behavior; + it specifically does not follow any of the cookie-related RFCs + (because browsers don't either). + + The algorithm used is identical to that used by Django version 1.9.10. + + .. versionadded:: 4.4.2 + """ + cookiedict = {} + for chunk in cookie.split(str(';')): + if str('=') in chunk: + key, val = chunk.split(str('='), 1) + else: + # Assume an empty name per + # https://bugzilla.mozilla.org/show_bug.cgi?id=169091 + key, val = str(''), chunk + key, val = key.strip(), val.strip() + if key or val: + # unquote using Python's algorithm. + cookiedict[key] = _unquote_cookie(val) + return cookiedict diff --git a/server/www/packages/packages-common/tornado/ioloop.py b/server/www/packages/packages-common/tornado/ioloop.py index c23cb33..ad35787 100644 --- a/server/www/packages/packages-common/tornado/ioloop.py +++ b/server/www/packages/packages-common/tornado/ioloop.py @@ -26,8 +26,9 @@ In addition to I/O events, the `IOLoop` can also schedule time-based events. `IOLoop.add_timeout` is a non-blocking alternative to `time.sleep`. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function +import collections import datetime import errno import functools @@ -45,20 +46,20 @@ import math from tornado.concurrent import TracebackFuture, is_future from tornado.log import app_log, gen_log +from tornado.platform.auto import set_close_exec, Waker from tornado import stack_context -from tornado.util import Configurable, errno_from_exception, timedelta_to_seconds +from tornado.util import PY3, Configurable, errno_from_exception, timedelta_to_seconds try: import signal except ImportError: signal = None -try: - import thread # py2 -except ImportError: - import _thread as thread # py3 -from tornado.platform.auto import set_close_exec, Waker +if PY3: + import _thread as thread +else: + import thread _POLL_TIMEOUT = 3600.0 @@ -172,6 +173,10 @@ class IOLoop(Configurable): This is normally not necessary as `instance()` will create an `IOLoop` on demand, but you may want to call `install` to use a custom subclass of `IOLoop`. + + When using an `IOLoop` subclass, `install` must be called prior + to creating any objects that implicitly create their own + `IOLoop` (e.g., :class:`tornado.httpclient.AsyncHTTPClient`). """ assert not IOLoop.initialized() IOLoop._instance = self @@ -612,10 +617,14 @@ class IOLoop(Configurable): # result, which should just be ignored. 
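# Usage sketch for the rewritten ``url_concat`` and the new ``parse_cookie``
# helper shown above, assuming a Tornado build that already contains these
# changes; the URL and cookie string are hypothetical.
from tornado.httputil import parse_cookie, url_concat

print(url_concat("http://example.com/foo?a=b", dict(c="d")))
# http://example.com/foo?a=b&c=d  (the existing query string is preserved)

print(parse_cookie('name="John Doe"; theme=dark'))
# {'name': 'John Doe', 'theme': 'dark'}  (quotes handled, RFCs ignored)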
pass else: - self.add_future(ret, lambda f: f.result()) + self.add_future(ret, self._discard_future_result) except Exception: self.handle_callback_exception(callback) + def _discard_future_result(self, future): + """Avoid unhandled-exception warnings from spawned coroutines.""" + future.result() + def handle_callback_exception(self, callback): """This method is called whenever a callback run by the `IOLoop` throws an exception. @@ -685,8 +694,7 @@ class PollIOLoop(IOLoop): self.time_func = time_func or time.time self._handlers = {} self._events = {} - self._callbacks = [] - self._callback_lock = threading.Lock() + self._callbacks = collections.deque() self._timeouts = [] self._cancellations = 0 self._running = False @@ -704,11 +712,10 @@ class PollIOLoop(IOLoop): self.READ) def close(self, all_fds=False): - with self._callback_lock: - self._closing = True + self._closing = True self.remove_handler(self._waker.fileno()) if all_fds: - for fd, handler in self._handlers.values(): + for fd, handler in list(self._handlers.values()): self.close_fd(fd) self._waker.close() self._impl.close() @@ -792,9 +799,7 @@ class PollIOLoop(IOLoop): while True: # Prevent IO event starvation by delaying new callbacks # to the next iteration of the event loop. - with self._callback_lock: - callbacks = self._callbacks - self._callbacks = [] + ncallbacks = len(self._callbacks) # Add any timeouts that have come due to the callback list. # Do not run anything until we have determined which ones @@ -814,8 +819,8 @@ class PollIOLoop(IOLoop): due_timeouts.append(heapq.heappop(self._timeouts)) else: break - if (self._cancellations > 512 - and self._cancellations > (len(self._timeouts) >> 1)): + if (self._cancellations > 512 and + self._cancellations > (len(self._timeouts) >> 1)): # Clean up the timeout queue when it gets large and it's # more than half cancellations. self._cancellations = 0 @@ -823,14 +828,14 @@ class PollIOLoop(IOLoop): if x.callback is not None] heapq.heapify(self._timeouts) - for callback in callbacks: - self._run_callback(callback) + for i in range(ncallbacks): + self._run_callback(self._callbacks.popleft()) for timeout in due_timeouts: if timeout.callback is not None: self._run_callback(timeout.callback) # Closures may be holding on to a lot of memory, so allow # them to be freed before we go into our poll wait. - callbacks = callback = due_timeouts = timeout = None + due_timeouts = timeout = None if self._callbacks: # If any callbacks or timeouts called add_callback, @@ -874,7 +879,7 @@ class PollIOLoop(IOLoop): # Pop one fd at a time from the set of pending fds and run # its handler. Since that handler may perform actions on # other file descriptors, there may be reentrant calls to - # this IOLoop that update self._events + # this IOLoop that modify self._events self._events.update(event_pairs) while self._events: fd, events = self._events.popitem() @@ -926,36 +931,20 @@ class PollIOLoop(IOLoop): self._cancellations += 1 def add_callback(self, callback, *args, **kwargs): + if self._closing: + return + # Blindly insert into self._callbacks. This is safe even + # from signal handlers because deque.append is atomic. + self._callbacks.append(functools.partial( + stack_context.wrap(callback), *args, **kwargs)) if thread.get_ident() != self._thread_ident: - # If we're not on the IOLoop's thread, we need to synchronize - # with other threads, or waking logic will induce a race. 
- with self._callback_lock: - if self._closing: - return - list_empty = not self._callbacks - self._callbacks.append(functools.partial( - stack_context.wrap(callback), *args, **kwargs)) - if list_empty: - # If we're not in the IOLoop's thread, and we added the - # first callback to an empty list, we may need to wake it - # up (it may wake up on its own, but an occasional extra - # wake is harmless). Waking up a polling IOLoop is - # relatively expensive, so we try to avoid it when we can. - self._waker.wake() + # This will write one byte but Waker.consume() reads many + # at once, so it's ok to write even when not strictly + # necessary. + self._waker.wake() else: - if self._closing: - return - # If we're on the IOLoop's thread, we don't need the lock, - # since we don't need to wake anyone, just add the - # callback. Blindly insert into self._callbacks. This is - # safe even from signal handlers because the GIL makes - # list.append atomic. One subtlety is that if the signal - # is interrupting another thread holding the - # _callback_lock block in IOLoop.start, we may modify - # either the old or new version of self._callbacks, but - # either way will work. - self._callbacks.append(functools.partial( - stack_context.wrap(callback), *args, **kwargs)) + # If we're on the IOLoop's thread, we don't need to wake anyone. + pass def add_callback_from_signal(self, callback, *args, **kwargs): with stack_context.NullContext(): @@ -966,26 +955,24 @@ class _Timeout(object): """An IOLoop timeout, a UNIX timestamp and a callback""" # Reduce memory overhead when there are lots of pending callbacks - __slots__ = ['deadline', 'callback', 'tiebreaker'] + __slots__ = ['deadline', 'callback', 'tdeadline'] def __init__(self, deadline, callback, io_loop): if not isinstance(deadline, numbers.Real): raise TypeError("Unsupported deadline %r" % deadline) self.deadline = deadline self.callback = callback - self.tiebreaker = next(io_loop._timeout_counter) + self.tdeadline = (deadline, next(io_loop._timeout_counter)) # Comparison methods to sort by deadline, with object id as a tiebreaker # to guarantee a consistent ordering. The heapq module uses __le__ # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons # use __lt__). def __lt__(self, other): - return ((self.deadline, self.tiebreaker) < - (other.deadline, other.tiebreaker)) + return self.tdeadline < other.tdeadline def __le__(self, other): - return ((self.deadline, self.tiebreaker) <= - (other.deadline, other.tiebreaker)) + return self.tdeadline <= other.tdeadline class PeriodicCallback(object): @@ -1048,6 +1035,7 @@ class PeriodicCallback(object): if self._next_timeout <= current_time: callback_time_sec = self.callback_time / 1000.0 - self._next_timeout += (math.floor((current_time - self._next_timeout) / callback_time_sec) + 1) * callback_time_sec + self._next_timeout += (math.floor((current_time - self._next_timeout) / + callback_time_sec) + 1) * callback_time_sec self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run) diff --git a/server/www/packages/packages-common/tornado/iostream.py b/server/www/packages/packages-common/tornado/iostream.py index 4e304f8..a1619c4 100644 --- a/server/www/packages/packages-common/tornado/iostream.py +++ b/server/www/packages/packages-common/tornado/iostream.py @@ -24,7 +24,7 @@ Contents: * `PipeIOStream`: Pipe-based IOStream implementation. 
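# Worked example of the ``PeriodicCallback`` scheduling line above: when the
# loop falls behind, the next deadline is advanced by whole multiples of the
# period so missed ticks are skipped rather than replayed. Timestamps are
# hypothetical.
import math

callback_time_sec = 0.5   # 500ms period
next_timeout = 100.0      # deadline that was missed
current_time = 101.3      # the callback fires 1.3s late

next_timeout += (math.floor((current_time - next_timeout) /
                            callback_time_sec) + 1) * callback_time_sec
print(next_timeout)  # 101.5, the first multiple of the period after "now"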
""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import collections import errno @@ -58,7 +58,7 @@ except ImportError: _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) if hasattr(errno, "WSAEWOULDBLOCK"): - _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore # These errnos indicate that a connection has been abruptly terminated. # They should be caught and handled less noisily than other errors. @@ -66,7 +66,7 @@ _ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, errno.ETIMEDOUT) if hasattr(errno, "WSAECONNRESET"): - _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT) + _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT) # type: ignore if sys.platform == 'darwin': # OSX appears to have a race condition that causes send(2) to return @@ -74,13 +74,15 @@ if sys.platform == 'darwin': # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ # Since the socket is being closed anyway, treat this as an ECONNRESET # instead of an unexpected error. - _ERRNO_CONNRESET += (errno.EPROTOTYPE,) + _ERRNO_CONNRESET += (errno.EPROTOTYPE,) # type: ignore # More non-portable errnos: _ERRNO_INPROGRESS = (errno.EINPROGRESS,) if hasattr(errno, "WSAEINPROGRESS"): - _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) + _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) # type: ignore + +_WINDOWS = sys.platform.startswith('win') class StreamClosedError(IOError): @@ -158,11 +160,16 @@ class BaseIOStream(object): self.max_buffer_size // 2) self.max_write_buffer_size = max_write_buffer_size self.error = None - self._read_buffer = collections.deque() - self._write_buffer = collections.deque() + self._read_buffer = bytearray() + self._read_buffer_pos = 0 self._read_buffer_size = 0 + self._write_buffer = bytearray() + self._write_buffer_pos = 0 self._write_buffer_size = 0 self._write_buffer_frozen = False + self._total_write_index = 0 + self._total_write_done_index = 0 + self._pending_writes_while_frozen = [] self._read_delimiter = None self._read_regex = None self._read_max_bytes = None @@ -173,7 +180,7 @@ class BaseIOStream(object): self._read_future = None self._streaming_callback = None self._write_callback = None - self._write_future = None + self._write_futures = collections.deque() self._close_callback = None self._connect_callback = None self._connect_future = None @@ -367,36 +374,37 @@ class BaseIOStream(object): If no ``callback`` is given, this method returns a `.Future` that resolves (with a result of ``None``) when the write has been - completed. If `write` is called again before that `.Future` has - resolved, the previous future will be orphaned and will never resolve. + completed. + + The ``data`` argument may be of type `bytes` or `memoryview`. .. versionchanged:: 4.0 Now returns a `.Future` if no callback is given. + + .. versionchanged:: 4.5 + Added support for `memoryview` arguments. """ - assert isinstance(data, bytes) self._check_closed() - # We use bool(_write_buffer) as a proxy for write_buffer_size>0, - # so never put empty strings in the buffer. 
if data: if (self.max_write_buffer_size is not None and self._write_buffer_size + len(data) > self.max_write_buffer_size): raise StreamBufferFullError("Reached maximum write buffer size") - # Break up large contiguous strings before inserting them in the - # write buffer, so we don't have to recopy the entire thing - # as we slice off pieces to send to the socket. - WRITE_BUFFER_CHUNK_SIZE = 128 * 1024 - for i in range(0, len(data), WRITE_BUFFER_CHUNK_SIZE): - self._write_buffer.append(data[i:i + WRITE_BUFFER_CHUNK_SIZE]) - self._write_buffer_size += len(data) + if self._write_buffer_frozen: + self._pending_writes_while_frozen.append(data) + else: + self._write_buffer += data + self._write_buffer_size += len(data) + self._total_write_index += len(data) if callback is not None: self._write_callback = stack_context.wrap(callback) future = None else: - future = self._write_future = TracebackFuture() + future = TracebackFuture() future.add_done_callback(lambda f: f.exception()) + self._write_futures.append((self._total_write_index, future)) if not self._connecting: self._handle_write() - if self._write_buffer: + if self._write_buffer_size: self._add_io_state(self.io_loop.WRITE) self._maybe_add_error_listener() return future @@ -445,9 +453,8 @@ class BaseIOStream(object): if self._read_future is not None: futures.append(self._read_future) self._read_future = None - if self._write_future is not None: - futures.append(self._write_future) - self._write_future = None + futures += [future for _, future in self._write_futures] + self._write_futures.clear() if self._connect_future is not None: futures.append(self._connect_future) self._connect_future = None @@ -466,6 +473,7 @@ class BaseIOStream(object): # if the IOStream object is kept alive by a reference cycle. # TODO: Clear the read buffer too; it currently breaks some tests. self._write_buffer = None + self._write_buffer_size = 0 def reading(self): """Returns true if we are currently reading from the stream.""" @@ -473,7 +481,7 @@ class BaseIOStream(object): def writing(self): """Returns true if we are currently writing to the stream.""" - return bool(self._write_buffer) + return self._write_buffer_size > 0 def closed(self): """Returns true if the stream has been closed.""" @@ -743,7 +751,7 @@ class BaseIOStream(object): break if chunk is None: return 0 - self._read_buffer.append(chunk) + self._read_buffer += chunk self._read_buffer_size += len(chunk) if self._read_buffer_size > self.max_buffer_size: gen_log.error("Reached maximum read buffer size") @@ -791,30 +799,25 @@ class BaseIOStream(object): # since large merges are relatively expensive and get undone in # _consume(). 
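The `write()` changes above replace the single `_write_future` (which the old docstring admitted could be orphaned by a second `write()`) with a deque of `(byte index, Future)` pairs: each write records the running total of bytes queued, and its future resolves once that many bytes have been flushed. A simplified sketch of that bookkeeping with a hypothetical `WriteTracker`, not the real `IOStream`:

import collections

class WriteTracker(object):
    def __init__(self):
        self.futures = collections.deque()   # (cumulative byte index, future)
        self.total_queued = 0
        self.total_flushed = 0

    def queue_write(self, data, future):
        self.total_queued += len(data)
        self.futures.append((self.total_queued, future))

    def bytes_flushed(self, n):
        # Returns the futures whose writes are now fully on the wire;
        # the caller resolves each of them with None.
        self.total_flushed += n
        done = []
        while self.futures and self.futures[0][0] <= self.total_flushed:
            done.append(self.futures.popleft()[1])
        return done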
if self._read_buffer: - while True: - loc = self._read_buffer[0].find(self._read_delimiter) - if loc != -1: - delimiter_len = len(self._read_delimiter) - self._check_max_bytes(self._read_delimiter, - loc + delimiter_len) - return loc + delimiter_len - if len(self._read_buffer) == 1: - break - _double_prefix(self._read_buffer) + loc = self._read_buffer.find(self._read_delimiter, + self._read_buffer_pos) + if loc != -1: + loc -= self._read_buffer_pos + delimiter_len = len(self._read_delimiter) + self._check_max_bytes(self._read_delimiter, + loc + delimiter_len) + return loc + delimiter_len self._check_max_bytes(self._read_delimiter, - len(self._read_buffer[0])) + self._read_buffer_size) elif self._read_regex is not None: if self._read_buffer: - while True: - m = self._read_regex.search(self._read_buffer[0]) - if m is not None: - self._check_max_bytes(self._read_regex, m.end()) - return m.end() - if len(self._read_buffer) == 1: - break - _double_prefix(self._read_buffer) - self._check_max_bytes(self._read_regex, - len(self._read_buffer[0])) + m = self._read_regex.search(self._read_buffer, + self._read_buffer_pos) + if m is not None: + loc = m.end() - self._read_buffer_pos + self._check_max_bytes(self._read_regex, loc) + return loc + self._check_max_bytes(self._read_regex, self._read_buffer_size) return None def _check_max_bytes(self, delimiter, size): @@ -824,35 +827,56 @@ class BaseIOStream(object): "delimiter %r not found within %d bytes" % ( delimiter, self._read_max_bytes)) + def _freeze_write_buffer(self, size): + self._write_buffer_frozen = size + + def _unfreeze_write_buffer(self): + self._write_buffer_frozen = False + self._write_buffer += b''.join(self._pending_writes_while_frozen) + self._write_buffer_size += sum(map(len, self._pending_writes_while_frozen)) + self._pending_writes_while_frozen[:] = [] + + def _got_empty_write(self, size): + """ + Called when a non-blocking write() failed writing anything. + Can be overridden in subclasses. + """ + def _handle_write(self): - while self._write_buffer: + while self._write_buffer_size: + assert self._write_buffer_size >= 0 try: - if not self._write_buffer_frozen: + start = self._write_buffer_pos + if self._write_buffer_frozen: + size = self._write_buffer_frozen + elif _WINDOWS: # On windows, socket.send blows up if given a # write buffer that's too large, instead of just # returning the number of bytes it was able to # process. Therefore we must not call socket.send # with more than 128KB at a time. - _merge_prefix(self._write_buffer, 128 * 1024) - num_bytes = self.write_to_fd(self._write_buffer[0]) + size = 128 * 1024 + else: + size = self._write_buffer_size + num_bytes = self.write_to_fd( + memoryview(self._write_buffer)[start:start + size]) if num_bytes == 0: - # With OpenSSL, if we couldn't write the entire buffer, - # the very same string object must be used on the - # next call to send. Therefore we suppress - # merging the write buffer after an incomplete send. 
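With the read buffer now a single bytearray plus a start offset, the `_find_read_pos` rewrite above replaces the old chunk-merging loop with one `find()` (or `re.search()`) call starting at the offset, adjusting the result back to a length relative to the unread data. A standalone illustration of the delimiter case:

def find_delimiter(buf, pos, delimiter):
    loc = buf.find(delimiter, pos)
    if loc == -1:
        return None
    # Number of bytes to consume, including the delimiter itself.
    return loc - pos + len(delimiter)

buf = bytearray(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nOK")
assert find_delimiter(buf, 0, b"\r\n") == 17   # status line plus CRLF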
- # A cleaner solution would be to set - # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER, but this is - # not yet accessible from python - # (http://bugs.python.org/issue8240) - self._write_buffer_frozen = True + self._got_empty_write(size) break - self._write_buffer_frozen = False - _merge_prefix(self._write_buffer, num_bytes) - self._write_buffer.popleft() + self._write_buffer_pos += num_bytes self._write_buffer_size -= num_bytes + # Amortized O(1) shrink + # (this heuristic is implemented natively in Python 3.4+ + # but is replicated here for Python 2) + if self._write_buffer_pos > self._write_buffer_size: + del self._write_buffer[:self._write_buffer_pos] + self._write_buffer_pos = 0 + if self._write_buffer_frozen: + self._unfreeze_write_buffer() + self._total_write_done_index += num_bytes except (socket.error, IOError, OSError) as e: if e.args[0] in _ERRNO_WOULDBLOCK: - self._write_buffer_frozen = True + self._got_empty_write(size) break else: if not self._is_connreset(e): @@ -863,22 +887,38 @@ class BaseIOStream(object): self.fileno(), e) self.close(exc_info=True) return - if not self._write_buffer: + + while self._write_futures: + index, future = self._write_futures[0] + if index > self._total_write_done_index: + break + self._write_futures.popleft() + future.set_result(None) + + if not self._write_buffer_size: if self._write_callback: callback = self._write_callback self._write_callback = None self._run_callback(callback) - if self._write_future: - future = self._write_future - self._write_future = None - future.set_result(None) def _consume(self, loc): + # Consume loc bytes from the read buffer and return them if loc == 0: return b"" - _merge_prefix(self._read_buffer, loc) + assert loc <= self._read_buffer_size + # Slice the bytearray buffer into bytes, without intermediate copying + b = (memoryview(self._read_buffer) + [self._read_buffer_pos:self._read_buffer_pos + loc] + ).tobytes() + self._read_buffer_pos += loc self._read_buffer_size -= loc - return self._read_buffer.popleft() + # Amortized O(1) shrink + # (this heuristic is implemented natively in Python 3.4+ + # but is replicated here for Python 2) + if self._read_buffer_pos > self._read_buffer_size: + del self._read_buffer[:self._read_buffer_pos] + self._read_buffer_pos = 0 + return b def _check_closed(self): if self.closed(): @@ -1124,7 +1164,7 @@ class IOStream(BaseIOStream): suitably-configured `ssl.SSLContext` to disable. """ if (self._read_callback or self._read_future or - self._write_callback or self._write_future or + self._write_callback or self._write_futures or self._connect_callback or self._connect_future or self._pending_callbacks or self._closed or self._read_buffer or self._write_buffer): @@ -1251,6 +1291,17 @@ class SSLIOStream(IOStream): def writing(self): return self._handshake_writing or super(SSLIOStream, self).writing() + def _got_empty_write(self, size): + # With OpenSSL, if we couldn't write the entire buffer, + # the very same string object must be used on the + # next call to send. Therefore we suppress + # merging the write buffer after an incomplete send. 
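Both `_handle_write` and `_consume` above use the same two tricks: slice pending bytes through a `memoryview` so no intermediate copy of the whole buffer is made, and shrink the bytearray only once the consumed prefix outgrows the unread remainder (the amortized O(1) heuristic the comments mention). A standalone sketch of the consume side:

def consume(buf, pos, loc):
    # Copy out exactly `loc` bytes starting at `pos` without slicing the
    # whole bytearray first.
    data = memoryview(buf)[pos:pos + loc].tobytes()
    pos += loc
    remaining = len(buf) - pos
    if pos > remaining:          # amortized O(1) shrink heuristic
        del buf[:pos]
        pos = 0
    return data, pos

buf = bytearray(b"hello world")
data, pos = consume(buf, 0, 6)
assert data == b"hello " and pos == 0 and bytes(buf) == b"world"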
+ # A cleaner solution would be to set + # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER, but this is + # not yet accessible from python + # (http://bugs.python.org/issue8240) + self._freeze_write_buffer(size) + def _do_ssl_handshake(self): # Based on code from test_ssl.py in the python stdlib try: @@ -1498,53 +1549,6 @@ class PipeIOStream(BaseIOStream): return chunk -def _double_prefix(deque): - """Grow by doubling, but don't split the second chunk just because the - first one is small. - """ - new_len = max(len(deque[0]) * 2, - (len(deque[0]) + len(deque[1]))) - _merge_prefix(deque, new_len) - - -def _merge_prefix(deque, size): - """Replace the first entries in a deque of strings with a single - string of up to size bytes. - - >>> d = collections.deque(['abc', 'de', 'fghi', 'j']) - >>> _merge_prefix(d, 5); print(d) - deque(['abcde', 'fghi', 'j']) - - Strings will be split as necessary to reach the desired size. - >>> _merge_prefix(d, 7); print(d) - deque(['abcdefg', 'hi', 'j']) - - >>> _merge_prefix(d, 3); print(d) - deque(['abc', 'defg', 'hi', 'j']) - - >>> _merge_prefix(d, 100); print(d) - deque(['abcdefghij']) - """ - if len(deque) == 1 and len(deque[0]) <= size: - return - prefix = [] - remaining = size - while deque and remaining > 0: - chunk = deque.popleft() - if len(chunk) > remaining: - deque.appendleft(chunk[remaining:]) - chunk = chunk[:remaining] - prefix.append(chunk) - remaining -= len(chunk) - # This data structure normally just contains byte strings, but - # the unittest gets messy if it doesn't use the default str() type, - # so do the merge based on the type of data that's actually present. - if prefix: - deque.appendleft(type(prefix[0])().join(prefix)) - if not deque: - deque.appendleft(b"") - - def doctests(): import doctest return doctest.DocTestSuite() diff --git a/server/www/packages/packages-common/tornado/locale.py b/server/www/packages/packages-common/tornado/locale.py index 8310c4d..7dba10d 100644 --- a/server/www/packages/packages-common/tornado/locale.py +++ b/server/www/packages/packages-common/tornado/locale.py @@ -19,7 +19,7 @@ To load a locale and generate a translated string:: user_locale = tornado.locale.get("es_LA") - print user_locale.translate("Sign out") + print(user_locale.translate("Sign out")) `tornado.locale.get()` returns the closest matching locale, not necessarily the specific locale you requested. You can support pluralization with @@ -28,7 +28,7 @@ additional arguments to `~Locale.translate()`, e.g.:: people = [...] message = user_locale.translate( "%(list)s is online", "%(list)s are online", len(people)) - print message % {"list": user_locale.list(people)} + print(message % {"list": user_locale.list(people)}) The first string is chosen if ``len(people) == 1``, otherwise the second string is chosen. @@ -39,7 +39,7 @@ supported by `gettext` and related tools). If neither method is called, the `Locale.translate` method will simply return the original string. 
""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import codecs import csv @@ -51,12 +51,12 @@ import re from tornado import escape from tornado.log import gen_log -from tornado.util import u +from tornado.util import PY3 from tornado._locale_data import LOCALE_NAMES _default_locale = "en_US" -_translations = {} +_translations = {} # type: dict _supported_locales = frozenset([_default_locale]) _use_gettext = False CONTEXT_SEPARATOR = "\x04" @@ -148,11 +148,11 @@ def load_translations(directory, encoding=None): # in most cases but is common with CSV files because Excel # cannot read utf-8 files without a BOM. encoding = 'utf-8-sig' - try: + if PY3: # python 3: csv.reader requires a file open in text mode. # Force utf8 to avoid dependence on $LANG environment variable. f = open(full_path, "r", encoding=encoding) - except TypeError: + else: # python 2: csv can only handle byte strings (in ascii-compatible # encodings), which we decode below. Transcode everything into # utf8 before passing it to csv.reader. @@ -187,7 +187,7 @@ def load_gettext_translations(directory, domain): {directory}/{lang}/LC_MESSAGES/{domain}.mo - Three steps are required to have you app translated: + Three steps are required to have your app translated: 1. Generate POT translation file:: @@ -274,7 +274,7 @@ class Locale(object): def __init__(self, code, translations): self.code = code - self.name = LOCALE_NAMES.get(code, {}).get("name", u("Unknown")) + self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown") self.rtl = False for prefix in ["fa", "ar", "he"]: if self.code.startswith(prefix): @@ -376,7 +376,7 @@ class Locale(object): str_time = "%d:%02d" % (local_date.hour, local_date.minute) elif self.code == "zh_CN": str_time = "%s%d:%02d" % ( - (u('\u4e0a\u5348'), u('\u4e0b\u5348'))[local_date.hour >= 12], + (u'\u4e0a\u5348', u'\u4e0b\u5348')[local_date.hour >= 12], local_date.hour % 12 or 12, local_date.minute) else: str_time = "%d:%02d %s" % ( @@ -422,7 +422,7 @@ class Locale(object): return "" if len(parts) == 1: return parts[0] - comma = u(' \u0648 ') if self.code.startswith("fa") else u(", ") + comma = u' \u0648 ' if self.code.startswith("fa") else u", " return _("%(commas)s and %(last)s") % { "commas": comma.join(parts[:-1]), "last": parts[len(parts) - 1], diff --git a/server/www/packages/packages-common/tornado/locks.py b/server/www/packages/packages-common/tornado/locks.py index a181772..4f9ecf6 100644 --- a/server/www/packages/packages-common/tornado/locks.py +++ b/server/www/packages/packages-common/tornado/locks.py @@ -12,15 +12,15 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, with_statement - -__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] +from __future__ import absolute_import, division, print_function import collections from tornado import gen, ioloop from tornado.concurrent import Future +__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] + class _TimeoutGarbageCollector(object): """Base class for objects that periodically clean up timed-out waiters. @@ -465,7 +465,7 @@ class Lock(object): ... ... # Now the lock is released. - .. versionchanged:: 3.5 + .. versionchanged:: 4.3 Added ``async with`` support in Python 3.5. 
""" diff --git a/server/www/packages/packages-common/tornado/log.py b/server/www/packages/packages-common/tornado/log.py index 040889a..654afc0 100644 --- a/server/www/packages/packages-common/tornado/log.py +++ b/server/www/packages/packages-common/tornado/log.py @@ -28,7 +28,7 @@ These streams may be configured independently using the standard library's `logging` module. For example, you may wish to send ``tornado.access`` logs to a separate file for analysis. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import logging import logging.handlers @@ -38,7 +38,12 @@ from tornado.escape import _unicode from tornado.util import unicode_type, basestring_type try: - import curses + import colorama +except ImportError: + colorama = None + +try: + import curses # type: ignore except ImportError: curses = None @@ -49,15 +54,21 @@ gen_log = logging.getLogger("tornado.general") def _stderr_supports_color(): - color = False - if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): - try: - curses.setupterm() - if curses.tigetnum("colors") > 0: - color = True - except Exception: - pass - return color + try: + if hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): + if curses: + curses.setupterm() + if curses.tigetnum("colors") > 0: + return True + elif colorama: + if sys.stderr is getattr(colorama.initialise, 'wrapped_stderr', + object()): + return True + except Exception: + # Very broad exception handling because it's always better to + # fall back to non-colored logs than to break at startup. + pass + return False def _safe_unicode(s): @@ -77,8 +88,19 @@ class LogFormatter(logging.Formatter): * Robust against str/bytes encoding problems. This formatter is enabled automatically by - `tornado.options.parse_command_line` (unless ``--logging=none`` is - used). + `tornado.options.parse_command_line` or `tornado.options.parse_config_file` + (unless ``--logging=none`` is used). + + Color support on Windows versions that do not support ANSI color codes is + enabled by use of the colorama__ library. Applications that wish to use + this must first initialize colorama with a call to ``colorama.init``. + See the colorama documentation for details. + + __ https://pypi.python.org/pypi/colorama + + .. versionchanged:: 4.5 + Added support for ``colorama``. Changed the constructor + signature to be compatible with `logging.config.dictConfig`. """ DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s' DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S' @@ -89,8 +111,8 @@ class LogFormatter(logging.Formatter): logging.ERROR: 1, # Red } - def __init__(self, color=True, fmt=DEFAULT_FORMAT, - datefmt=DEFAULT_DATE_FORMAT, colors=DEFAULT_COLORS): + def __init__(self, fmt=DEFAULT_FORMAT, datefmt=DEFAULT_DATE_FORMAT, + style='%', color=True, colors=DEFAULT_COLORS): r""" :arg bool color: Enables color support. :arg string fmt: Log message format. @@ -111,21 +133,28 @@ class LogFormatter(logging.Formatter): self._colors = {} if color and _stderr_supports_color(): - # The curses module has some str/bytes confusion in - # python3. Until version 3.2.3, most methods return - # bytes, but only accept strings. In addition, we want to - # output these strings with the logging module, which - # works with unicode strings. The explicit calls to - # unicode() below are harmless in python2 but will do the - # right conversion in python 3. 
- fg_color = (curses.tigetstr("setaf") or - curses.tigetstr("setf") or "") - if (3, 0) < sys.version_info < (3, 2, 3): - fg_color = unicode_type(fg_color, "ascii") + if curses is not None: + # The curses module has some str/bytes confusion in + # python3. Until version 3.2.3, most methods return + # bytes, but only accept strings. In addition, we want to + # output these strings with the logging module, which + # works with unicode strings. The explicit calls to + # unicode() below are harmless in python2 but will do the + # right conversion in python 3. + fg_color = (curses.tigetstr("setaf") or + curses.tigetstr("setf") or "") + if (3, 0) < sys.version_info < (3, 2, 3): + fg_color = unicode_type(fg_color, "ascii") - for levelno, code in colors.items(): - self._colors[levelno] = unicode_type(curses.tparm(fg_color, code), "ascii") - self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii") + for levelno, code in colors.items(): + self._colors[levelno] = unicode_type(curses.tparm(fg_color, code), "ascii") + self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii") + else: + # If curses is not present (currently we'll only get here for + # colorama on windows), assume hard-coded ANSI color codes. + for levelno, code in colors.items(): + self._colors[levelno] = '\033[2;3%dm' % code + self._normal = '\033[0m' else: self._normal = '' @@ -183,7 +212,8 @@ def enable_pretty_logging(options=None, logger=None): and `tornado.options.parse_config_file`. """ if options is None: - from tornado.options import options + import tornado.options + options = tornado.options.options if options.logging is None or options.logging.lower() == 'none': return if logger is None: @@ -228,7 +258,8 @@ def define_logging_options(options=None): """ if options is None: # late import to prevent cycle - from tornado.options import options + import tornado.options + options = tornado.options.options options.define("logging", default="info", help=("Set the Python log level. 
If 'none', tornado won't touch the " "logging configuration."), diff --git a/server/www/packages/packages-common/tornado/netutil.py b/server/www/packages/packages-common/tornado/netutil.py index 4fc8d04..c34c8c8 100644 --- a/server/www/packages/packages-common/tornado/netutil.py +++ b/server/www/packages/packages-common/tornado/netutil.py @@ -16,7 +16,7 @@ """Miscellaneous network utility code.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import errno import os @@ -27,7 +27,7 @@ import stat from tornado.concurrent import dummy_executor, run_on_executor from tornado.ioloop import IOLoop from tornado.platform.auto import set_close_exec -from tornado.util import u, Configurable, errno_from_exception +from tornado.util import PY3, Configurable, errno_from_exception try: import ssl @@ -44,20 +44,18 @@ except ImportError: else: raise -try: - xrange # py2 -except NameError: - xrange = range # py3 +if PY3: + xrange = range if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+ ssl_match_hostname = ssl.match_hostname SSLCertificateError = ssl.CertificateError elif ssl is None: - ssl_match_hostname = SSLCertificateError = None + ssl_match_hostname = SSLCertificateError = None # type: ignore else: import backports.ssl_match_hostname ssl_match_hostname = backports.ssl_match_hostname.match_hostname - SSLCertificateError = backports.ssl_match_hostname.CertificateError + SSLCertificateError = backports.ssl_match_hostname.CertificateError # type: ignore if hasattr(ssl, 'SSLContext'): if hasattr(ssl, 'create_default_context'): @@ -96,7 +94,10 @@ else: # module-import time, the import lock is already held by the main thread, # leading to deadlock. Avoid it by caching the idna encoder on the main # thread now. -u('foo').encode('idna') +u'foo'.encode('idna') + +# For undiagnosed reasons, 'latin1' codec may also need to be preloaded. +u'foo'.encode('latin1') # These errnos indicate that a non-blocking operation must be retried # at a later time. On most platforms they're the same value, but on @@ -104,7 +105,7 @@ u('foo').encode('idna') _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) if hasattr(errno, "WSAEWOULDBLOCK"): - _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore # Default backlog used when calling sock.listen() _DEFAULT_BACKLOG = 128 @@ -131,7 +132,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, ``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like ``socket.AI_PASSIVE | socket.AI_NUMERICHOST``. - ``resuse_port`` option sets ``SO_REUSEPORT`` option for every socket + ``reuse_port`` option sets ``SO_REUSEPORT`` option for every socket in the list. If your platform doesn't support this option ValueError will be raised. """ @@ -199,6 +200,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, sockets.append(sock) return sockets + if hasattr(socket, 'AF_UNIX'): def bind_unix_socket(file, mode=0o600, backlog=_DEFAULT_BACKLOG): """Creates a listening unix socket. @@ -334,6 +336,11 @@ class Resolver(Configurable): port)`` pair for IPv4; additional fields may be present for IPv6). If a ``callback`` is passed, it will be run with the result as an argument when it is complete. + + :raises IOError: if the address cannot be resolved. + + .. versionchanged:: 4.4 + Standardized all implementations to raise `IOError`. 
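Per the 4.4 note above, every `Resolver` implementation now signals a failed lookup with `IOError`, so callers can handle one exception type regardless of which backend is configured. A hedged usage sketch:

import socket
from tornado import gen
from tornado.netutil import Resolver

@gen.coroutine
def lookup(host, port):
    resolver = Resolver()
    try:
        addrinfo = yield resolver.resolve(host, port, socket.AF_UNSPEC)
    except IOError:
        addrinfo = []   # unresolvable host: treat as "no addresses"
    raise gen.Return(addrinfo)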
""" raise NotImplementedError() @@ -413,8 +420,8 @@ class ThreadedResolver(ExecutorResolver): All ``ThreadedResolvers`` share a single thread pool, whose size is set by the first one to be created. """ - _threadpool = None - _threadpool_pid = None + _threadpool = None # type: ignore + _threadpool_pid = None # type: int def initialize(self, io_loop=None, num_threads=10): threadpool = ThreadedResolver._create_threadpool(num_threads) @@ -518,4 +525,4 @@ def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs): else: return context.wrap_socket(socket, **kwargs) else: - return ssl.wrap_socket(socket, **dict(context, **kwargs)) + return ssl.wrap_socket(socket, **dict(context, **kwargs)) # type: ignore diff --git a/server/www/packages/packages-common/tornado/options.py b/server/www/packages/packages-common/tornado/options.py index ba16b1a..0a72cc6 100644 --- a/server/www/packages/packages-common/tornado/options.py +++ b/server/www/packages/packages-common/tornado/options.py @@ -41,6 +41,12 @@ either:: # or tornado.options.parse_config_file("/etc/server.conf") +.. note: + + When using tornado.options.parse_command_line or + tornado.options.parse_config_file, the only options that are set are + ones that were previously defined with tornado.options.define. + Command line formats are what you would expect (``--myoption=myvalue``). Config files are just Python files. Global names become options, e.g.:: @@ -76,7 +82,7 @@ instances to define isolated sets of options, such as for subcommands. underscores. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import datetime import numbers @@ -132,8 +138,10 @@ class OptionParser(object): return name in self._options def __getitem__(self, name): - name = self._normalize_name(name) - return self._options[name].value() + return self.__getattr__(name) + + def __setitem__(self, name, value): + return self.__setattr__(name, value) def items(self): """A sequence of (name, value) pairs. @@ -300,8 +308,12 @@ class OptionParser(object): .. versionchanged:: 4.1 Config files are now always interpreted as utf-8 instead of the system default encoding. + + .. versionchanged:: 4.4 + The special variable ``__file__`` is available inside config + files, specifying the absolute path to the config file itself. """ - config = {} + config = {'__file__': os.path.abspath(path)} with open(path, 'rb') as f: exec_in(native_str(f.read()), config, config) for name in config: diff --git a/server/www/packages/packages-common/tornado/platform/asyncio.py b/server/www/packages/packages-common/tornado/platform/asyncio.py index bf0428e..830ee1f 100644 --- a/server/www/packages/packages-common/tornado/platform/asyncio.py +++ b/server/www/packages/packages-common/tornado/platform/asyncio.py @@ -14,12 +14,12 @@ loops. .. note:: - Tornado requires the `~asyncio.BaseEventLoop.add_reader` family of methods, - so it is not compatible with the `~asyncio.ProactorEventLoop` on Windows. - Use the `~asyncio.SelectorEventLoop` instead. + Tornado requires the `~asyncio.AbstractEventLoop.add_reader` family of + methods, so it is not compatible with the `~asyncio.ProactorEventLoop` on + Windows. Use the `~asyncio.SelectorEventLoop` instead. 
""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import functools import tornado.concurrent @@ -30,11 +30,11 @@ from tornado import stack_context try: # Import the real asyncio module for py33+ first. Older versions of the # trollius backport also use this name. - import asyncio + import asyncio # type: ignore except ImportError as e: # Asyncio itself isn't available; see if trollius is (backport to py26+). try: - import trollius as asyncio + import trollius as asyncio # type: ignore except ImportError: # Re-raise the original asyncio error, not the trollius one. raise e @@ -141,6 +141,8 @@ class BaseAsyncIOLoop(IOLoop): def add_callback(self, callback, *args, **kwargs): if self.closing: + # TODO: this is racy; we need a lock to ensure that the + # loop isn't closed during call_soon_threadsafe. raise RuntimeError("IOLoop is closing") self.asyncio_loop.call_soon_threadsafe( self._run_callback, @@ -158,6 +160,9 @@ class AsyncIOMainLoop(BaseAsyncIOLoop): import asyncio AsyncIOMainLoop().install() asyncio.get_event_loop().run_forever() + + See also :meth:`tornado.ioloop.IOLoop.install` for general notes on + installing alternative IOLoops. """ def initialize(self, **kwargs): super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), @@ -212,5 +217,6 @@ def to_asyncio_future(tornado_future): tornado.concurrent.chain_future(tornado_future, af) return af + if hasattr(convert_yielded, 'register'): - convert_yielded.register(asyncio.Future, to_tornado_future) + convert_yielded.register(asyncio.Future, to_tornado_future) # type: ignore diff --git a/server/www/packages/packages-common/tornado/platform/auto.py b/server/www/packages/packages-common/tornado/platform/auto.py index fc40c9d..1f4d700 100644 --- a/server/www/packages/packages-common/tornado/platform/auto.py +++ b/server/www/packages/packages-common/tornado/platform/auto.py @@ -23,7 +23,7 @@ Most code that needs access to this functionality should do e.g.:: from tornado.platform.auto import set_close_exec """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import os @@ -47,8 +47,13 @@ try: except ImportError: pass try: - from time import monotonic as monotonic_time + # monotonic can provide a monotonic function in versions of python before + # 3.3, too. 
+ from monotonic import monotonic as monotonic_time except ImportError: - monotonic_time = None + try: + from time import monotonic as monotonic_time + except ImportError: + monotonic_time = None __all__ = ['Waker', 'set_close_exec', 'monotonic_time'] diff --git a/server/www/packages/packages-common/tornado/platform/caresresolver.py b/server/www/packages/packages-common/tornado/platform/caresresolver.py index 5559614..fd6e9d2 100644 --- a/server/www/packages/packages-common/tornado/platform/caresresolver.py +++ b/server/www/packages/packages-common/tornado/platform/caresresolver.py @@ -1,5 +1,5 @@ -from __future__ import absolute_import, division, print_function, with_statement -import pycares +from __future__ import absolute_import, division, print_function +import pycares # type: ignore import socket from tornado import gen @@ -61,8 +61,8 @@ class CaresResolver(Resolver): assert not callback_args.kwargs result, error = callback_args.args if error: - raise Exception('C-Ares returned error %s: %s while resolving %s' % - (error, pycares.errno.strerror(error), host)) + raise IOError('C-Ares returned error %s: %s while resolving %s' % + (error, pycares.errno.strerror(error), host)) addresses = result.addresses addrinfo = [] for address in addresses: @@ -73,7 +73,7 @@ class CaresResolver(Resolver): else: address_family = socket.AF_UNSPEC if family != socket.AF_UNSPEC and family != address_family: - raise Exception('Requested socket family %d but got %d' % - (family, address_family)) + raise IOError('Requested socket family %d but got %d' % + (family, address_family)) addrinfo.append((address_family, (address, port))) raise gen.Return(addrinfo) diff --git a/server/www/packages/packages-common/tornado/platform/common.py b/server/www/packages/packages-common/tornado/platform/common.py index b409a90..a73f8db 100644 --- a/server/www/packages/packages-common/tornado/platform/common.py +++ b/server/www/packages/packages-common/tornado/platform/common.py @@ -1,10 +1,27 @@ """Lowest-common-denominator implementations of platform functionality.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import errno import socket +import time from tornado.platform import interface +from tornado.util import errno_from_exception + + +def try_close(f): + # Avoid issue #875 (race condition when using the file in another + # thread). + for i in range(10): + try: + f.close() + except IOError: + # Yield to another thread + time.sleep(1e-3) + else: + break + # Try a last time and let raise + f.close() class Waker(interface.Waker): @@ -45,7 +62,7 @@ class Waker(interface.Waker): break # success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or - detail[0] != errno.WSAEADDRINUSE): + errno_from_exception(detail) != errno.WSAEADDRINUSE): # "Address already in use" is the only error # I've seen on two WinXP Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1. 
@@ -75,7 +92,7 @@ class Waker(interface.Waker): def wake(self): try: self.writer.send(b"x") - except (IOError, socket.error): + except (IOError, socket.error, ValueError): pass def consume(self): @@ -89,4 +106,4 @@ class Waker(interface.Waker): def close(self): self.reader.close() - self.writer.close() + try_close(self.writer) diff --git a/server/www/packages/packages-common/tornado/platform/epoll.py b/server/www/packages/packages-common/tornado/platform/epoll.py index b08cc62..80bfd8a 100644 --- a/server/www/packages/packages-common/tornado/platform/epoll.py +++ b/server/www/packages/packages-common/tornado/platform/epoll.py @@ -14,7 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. """EPoll-based IOLoop implementation for Linux systems.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import select diff --git a/server/www/packages/packages-common/tornado/platform/interface.py b/server/www/packages/packages-common/tornado/platform/interface.py index 07da6ba..c0ef290 100644 --- a/server/www/packages/packages-common/tornado/platform/interface.py +++ b/server/www/packages/packages-common/tornado/platform/interface.py @@ -21,7 +21,7 @@ for other tornado.platform modules. Most code should import the appropriate implementation from `tornado.platform.auto`. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function def set_close_exec(fd): @@ -61,3 +61,7 @@ class Waker(object): def close(self): """Closes the waker's file descriptor(s).""" raise NotImplementedError() + + +def monotonic_time(): + raise NotImplementedError() diff --git a/server/www/packages/packages-common/tornado/platform/kqueue.py b/server/www/packages/packages-common/tornado/platform/kqueue.py index f8f3e4a..3a5d417 100644 --- a/server/www/packages/packages-common/tornado/platform/kqueue.py +++ b/server/www/packages/packages-common/tornado/platform/kqueue.py @@ -14,7 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. 
"""KQueue-based IOLoop implementation for BSD/Mac systems.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import select diff --git a/server/www/packages/packages-common/tornado/platform/posix.py b/server/www/packages/packages-common/tornado/platform/posix.py index 41a5794..9bf1f18 100644 --- a/server/www/packages/packages-common/tornado/platform/posix.py +++ b/server/www/packages/packages-common/tornado/platform/posix.py @@ -16,12 +16,12 @@ """Posix implementations of platform-specific functionality.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import fcntl import os -from tornado.platform import interface +from tornado.platform import common, interface def set_close_exec(fd): @@ -53,7 +53,7 @@ class Waker(interface.Waker): def wake(self): try: self.writer.write(b"x") - except IOError: + except (IOError, ValueError): pass def consume(self): @@ -67,4 +67,4 @@ class Waker(interface.Waker): def close(self): self.reader.close() - self.writer.close() + common.try_close(self.writer) diff --git a/server/www/packages/packages-common/tornado/platform/select.py b/server/www/packages/packages-common/tornado/platform/select.py index db52ef9..a18049f 100644 --- a/server/www/packages/packages-common/tornado/platform/select.py +++ b/server/www/packages/packages-common/tornado/platform/select.py @@ -17,7 +17,7 @@ Used as a fallback for systems that don't support epoll or kqueue. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import select diff --git a/server/www/packages/packages-common/tornado/platform/twisted.py b/server/www/packages/packages-common/tornado/platform/twisted.py index d3a4e75..0f9787e 100644 --- a/server/www/packages/packages-common/tornado/platform/twisted.py +++ b/server/www/packages/packages-common/tornado/platform/twisted.py @@ -21,7 +21,7 @@ depending on which library's underlying event loop you want to use. This module has been tested with Twisted versions 11.0.0 and newer. 
""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import datetime import functools @@ -29,19 +29,18 @@ import numbers import socket import sys -import twisted.internet.abstract -from twisted.internet.defer import Deferred -from twisted.internet.posixbase import PosixReactorBase -from twisted.internet.interfaces import \ - IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor -from twisted.python import failure, log -from twisted.internet import error -import twisted.names.cache -import twisted.names.client -import twisted.names.hosts -import twisted.names.resolve +import twisted.internet.abstract # type: ignore +from twisted.internet.defer import Deferred # type: ignore +from twisted.internet.posixbase import PosixReactorBase # type: ignore +from twisted.internet.interfaces import IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor # type: ignore +from twisted.python import failure, log # type: ignore +from twisted.internet import error # type: ignore +import twisted.names.cache # type: ignore +import twisted.names.client # type: ignore +import twisted.names.hosts # type: ignore +import twisted.names.resolve # type: ignore -from zope.interface import implementer +from zope.interface import implementer # type: ignore from tornado.concurrent import Future from tornado.escape import utf8 @@ -354,7 +353,7 @@ def install(io_loop=None): if not io_loop: io_loop = tornado.ioloop.IOLoop.current() reactor = TornadoReactor(io_loop) - from twisted.internet.main import installReactor + from twisted.internet.main import installReactor # type: ignore installReactor(reactor) return reactor @@ -408,11 +407,14 @@ class TwistedIOLoop(tornado.ioloop.IOLoop): Not compatible with `tornado.process.Subprocess.set_exit_callback` because the ``SIGCHLD`` handlers used by Tornado and Twisted conflict with each other. + + See also :meth:`tornado.ioloop.IOLoop.install` for general notes on + installing alternative IOLoops. """ def initialize(self, reactor=None, **kwargs): super(TwistedIOLoop, self).initialize(**kwargs) if reactor is None: - import twisted.internet.reactor + import twisted.internet.reactor # type: ignore reactor = twisted.internet.reactor self.reactor = reactor self.fds = {} @@ -554,7 +556,10 @@ class TwistedResolver(Resolver): deferred = self.resolver.getHostByName(utf8(host)) resolved = yield gen.Task(deferred.addBoth) if isinstance(resolved, failure.Failure): - resolved.raiseException() + try: + resolved.raiseException() + except twisted.names.error.DomainError as e: + raise IOError(e) elif twisted.internet.abstract.isIPAddress(resolved): resolved_family = socket.AF_INET elif twisted.internet.abstract.isIPv6Address(resolved): @@ -569,8 +574,9 @@ class TwistedResolver(Resolver): ] raise gen.Return(result) + if hasattr(gen.convert_yielded, 'register'): - @gen.convert_yielded.register(Deferred) + @gen.convert_yielded.register(Deferred) # type: ignore def _(d): f = Future() diff --git a/server/www/packages/packages-common/tornado/platform/windows.py b/server/www/packages/packages-common/tornado/platform/windows.py index 817bdca..e94a0cf 100644 --- a/server/www/packages/packages-common/tornado/platform/windows.py +++ b/server/www/packages/packages-common/tornado/platform/windows.py @@ -2,9 +2,9 @@ # for production use. 
-from __future__ import absolute_import, division, print_function, with_statement -import ctypes -import ctypes.wintypes +from __future__ import absolute_import, division, print_function +import ctypes # type: ignore +import ctypes.wintypes # type: ignore # See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation @@ -17,4 +17,4 @@ HANDLE_FLAG_INHERIT = 0x00000001 def set_close_exec(fd): success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0) if not success: - raise ctypes.GetLastError() + raise ctypes.WinError() diff --git a/server/www/packages/packages-common/tornado/process.py b/server/www/packages/packages-common/tornado/process.py index daa9677..fae94f3 100644 --- a/server/www/packages/packages-common/tornado/process.py +++ b/server/www/packages/packages-common/tornado/process.py @@ -18,7 +18,7 @@ the server into multiple processes and managing subprocesses. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import errno import os @@ -35,7 +35,7 @@ from tornado.iostream import PipeIOStream from tornado.log import gen_log from tornado.platform.auto import set_close_exec from tornado import stack_context -from tornado.util import errno_from_exception +from tornado.util import errno_from_exception, PY3 try: import multiprocessing @@ -43,11 +43,8 @@ except ImportError: # Multiprocessing is not available on Google App Engine. multiprocessing = None -try: - long # py2 -except NameError: - long = int # py3 - +if PY3: + long = int # Re-export this exception for convenience. try: @@ -70,7 +67,7 @@ def cpu_count(): pass try: return os.sysconf("SC_NPROCESSORS_CONF") - except ValueError: + except (AttributeError, ValueError): pass gen_log.error("Could not detect number of processors; assuming 1") return 1 @@ -147,6 +144,7 @@ def fork_processes(num_processes, max_restarts=100): else: children[pid] = i return None + for i in range(num_processes): id = start_child(i) if id is not None: @@ -204,13 +202,19 @@ class Subprocess(object): attribute of the resulting Subprocess a `.PipeIOStream`. * A new keyword argument ``io_loop`` may be used to pass in an IOLoop. + The ``Subprocess.STREAM`` option and the ``set_exit_callback`` and + ``wait_for_exit`` methods do not work on Windows. There is + therefore no reason to use this class instead of + ``subprocess.Popen`` on that platform. + .. versionchanged:: 4.1 The ``io_loop`` argument is deprecated. + """ STREAM = object() _initialized = False - _waiting = {} + _waiting = {} # type: ignore def __init__(self, *args, **kwargs): self.io_loop = kwargs.pop('io_loop', None) or ioloop.IOLoop.current() @@ -351,6 +355,10 @@ class Subprocess(object): else: assert os.WIFEXITED(status) self.returncode = os.WEXITSTATUS(status) + # We've taken over wait() duty from the subprocess.Popen + # object. If we don't inform it of the process's return code, + # it will log a warning at destruction in python 3.6+. 
+ self.proc.returncode = self.returncode if self._exit_callback: callback = self._exit_callback self._exit_callback = None diff --git a/server/www/packages/packages-common/tornado/queues.py b/server/www/packages/packages-common/tornado/queues.py index 129b204..0041a80 100644 --- a/server/www/packages/packages-common/tornado/queues.py +++ b/server/www/packages/packages-common/tornado/queues.py @@ -12,9 +12,17 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, with_statement +"""Asynchronous queues for coroutines. -__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] +.. warning:: + + Unlike the standard library's `queue` module, the classes defined here + are *not* thread-safe. To use these queues from another thread, + use `.IOLoop.add_callback` to transfer control to the `.IOLoop` thread + before calling any queue methods. +""" + +from __future__ import absolute_import, division, print_function import collections import heapq @@ -23,6 +31,8 @@ from tornado import gen, ioloop from tornado.concurrent import Future from tornado.locks import Event +__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] + class QueueEmpty(Exception): """Raised by `.Queue.get_nowait` when the queue has no items.""" diff --git a/server/www/packages/packages-common/tornado/routing.py b/server/www/packages/packages-common/tornado/routing.py new file mode 100644 index 0000000..6762dc0 --- /dev/null +++ b/server/www/packages/packages-common/tornado/routing.py @@ -0,0 +1,625 @@ +# Copyright 2015 The Tornado Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Flexible routing implementation. + +Tornado routes HTTP requests to appropriate handlers using `Router` +class implementations. The `tornado.web.Application` class is a +`Router` implementation and may be used directly, or the classes in +this module may be used for additional flexibility. The `RuleRouter` +class can match on more criteria than `.Application`, or the `Router` +interface can be subclassed for maximum customization. + +`Router` interface extends `~.httputil.HTTPServerConnectionDelegate` +to provide additional routing capabilities. This also means that any +`Router` implementation can be used directly as a ``request_callback`` +for `~.httpserver.HTTPServer` constructor. + +`Router` subclass must implement a ``find_handler`` method to provide +a suitable `~.httputil.HTTPMessageDelegate` instance to handle the +request: + +.. 
code-block:: python + + class CustomRouter(Router): + def find_handler(self, request, **kwargs): + # some routing logic providing a suitable HTTPMessageDelegate instance + return MessageDelegate(request.connection) + + class MessageDelegate(HTTPMessageDelegate): + def __init__(self, connection): + self.connection = connection + + def finish(self): + self.connection.write_headers( + ResponseStartLine("HTTP/1.1", 200, "OK"), + HTTPHeaders({"Content-Length": "2"}), + b"OK") + self.connection.finish() + + router = CustomRouter() + server = HTTPServer(router) + +The main responsibility of `Router` implementation is to provide a +mapping from a request to `~.httputil.HTTPMessageDelegate` instance +that will handle this request. In the example above we can see that +routing is possible even without instantiating an `~.web.Application`. + +For routing to `~.web.RequestHandler` implementations we need an +`~.web.Application` instance. `~.web.Application.get_handler_delegate` +provides a convenient way to create `~.httputil.HTTPMessageDelegate` +for a given request and `~.web.RequestHandler`. + +Here is a simple example of how we can we route to +`~.web.RequestHandler` subclasses by HTTP method: + +.. code-block:: python + + resources = {} + + class GetResource(RequestHandler): + def get(self, path): + if path not in resources: + raise HTTPError(404) + + self.finish(resources[path]) + + class PostResource(RequestHandler): + def post(self, path): + resources[path] = self.request.body + + class HTTPMethodRouter(Router): + def __init__(self, app): + self.app = app + + def find_handler(self, request, **kwargs): + handler = GetResource if request.method == "GET" else PostResource + return self.app.get_handler_delegate(request, handler, path_args=[request.path]) + + router = HTTPMethodRouter(Application()) + server = HTTPServer(router) + +`ReversibleRouter` interface adds the ability to distinguish between +the routes and reverse them to the original urls using route's name +and additional arguments. `~.web.Application` is itself an +implementation of `ReversibleRouter` class. + +`RuleRouter` and `ReversibleRuleRouter` are implementations of +`Router` and `ReversibleRouter` interfaces and can be used for +creating rule-based routing configurations. + +Rules are instances of `Rule` class. They contain a `Matcher`, which +provides the logic for determining whether the rule is a match for a +particular request and a target, which can be one of the following. + +1) An instance of `~.httputil.HTTPServerConnectionDelegate`: + +.. code-block:: python + + router = RuleRouter([ + Rule(PathMatches("/handler"), ConnectionDelegate()), + # ... more rules + ]) + + class ConnectionDelegate(HTTPServerConnectionDelegate): + def start_request(self, server_conn, request_conn): + return MessageDelegate(request_conn) + +2) A callable accepting a single argument of `~.httputil.HTTPServerRequest` type: + +.. code-block:: python + + router = RuleRouter([ + Rule(PathMatches("/callable"), request_callable) + ]) + + def request_callable(request): + request.write(b"HTTP/1.1 200 OK\\r\\nContent-Length: 2\\r\\n\\r\\nOK") + request.finish() + +3) Another `Router` instance: + +.. code-block:: python + + router = RuleRouter([ + Rule(PathMatches("/router.*"), CustomRouter()) + ]) + +Of course a nested `RuleRouter` or a `~.web.Application` is allowed: + +.. 
code-block:: python + + router = RuleRouter([ + Rule(HostMatches("example.com"), RuleRouter([ + Rule(PathMatches("/app1/.*"), Application([(r"/app1/handler", Handler)]))), + ])) + ]) + + server = HTTPServer(router) + +In the example below `RuleRouter` is used to route between applications: + +.. code-block:: python + + app1 = Application([ + (r"/app1/handler", Handler1), + # other handlers ... + ]) + + app2 = Application([ + (r"/app2/handler", Handler2), + # other handlers ... + ]) + + router = RuleRouter([ + Rule(PathMatches("/app1.*"), app1), + Rule(PathMatches("/app2.*"), app2) + ]) + + server = HTTPServer(router) + +For more information on application-level routing see docs for `~.web.Application`. + +.. versionadded:: 4.5 + +""" + +from __future__ import absolute_import, division, print_function + +import re +from functools import partial + +from tornado import httputil +from tornado.httpserver import _CallableAdapter +from tornado.escape import url_escape, url_unescape, utf8 +from tornado.log import app_log +from tornado.util import basestring_type, import_object, re_unescape, unicode_type + +try: + import typing # noqa +except ImportError: + pass + + +class Router(httputil.HTTPServerConnectionDelegate): + """Abstract router interface.""" + + def find_handler(self, request, **kwargs): + # type: (httputil.HTTPServerRequest, typing.Any)->httputil.HTTPMessageDelegate + """Must be implemented to return an appropriate instance of `~.httputil.HTTPMessageDelegate` + that can serve the request. + Routing implementations may pass additional kwargs to extend the routing logic. + + :arg httputil.HTTPServerRequest request: current HTTP request. + :arg kwargs: additional keyword arguments passed by routing implementation. + :returns: an instance of `~.httputil.HTTPMessageDelegate` that will be used to + process the request. + """ + raise NotImplementedError() + + def start_request(self, server_conn, request_conn): + return _RoutingDelegate(self, server_conn, request_conn) + + +class ReversibleRouter(Router): + """Abstract router interface for routers that can handle named routes + and support reversing them to original urls. + """ + + def reverse_url(self, name, *args): + """Returns url string for a given route name and arguments + or ``None`` if no match is found. + + :arg str name: route name. + :arg args: url parameters. + :returns: parametrized url string for a given route name (or ``None``). + """ + raise NotImplementedError() + + +class _RoutingDelegate(httputil.HTTPMessageDelegate): + def __init__(self, router, server_conn, request_conn): + self.server_conn = server_conn + self.request_conn = request_conn + self.delegate = None + self.router = router # type: Router + + def headers_received(self, start_line, headers): + request = httputil.HTTPServerRequest( + connection=self.request_conn, + server_connection=self.server_conn, + start_line=start_line, headers=headers) + + self.delegate = self.router.find_handler(request) + return self.delegate.headers_received(start_line, headers) + + def data_received(self, chunk): + return self.delegate.data_received(chunk) + + def finish(self): + self.delegate.finish() + + def on_connection_close(self): + self.delegate.on_connection_close() + + +class RuleRouter(Router): + """Rule-based router implementation.""" + + def __init__(self, rules=None): + """Constructs a router from an ordered list of rules:: + + RuleRouter([ + Rule(PathMatches("/handler"), Target), + # ... 
more rules + ]) + + You can also omit explicit `Rule` constructor and use tuples of arguments:: + + RuleRouter([ + (PathMatches("/handler"), Target), + ]) + + `PathMatches` is a default matcher, so the example above can be simplified:: + + RuleRouter([ + ("/handler", Target), + ]) + + In the examples above, ``Target`` can be a nested `Router` instance, an instance of + `~.httputil.HTTPServerConnectionDelegate` or an old-style callable, accepting a request argument. + + :arg rules: a list of `Rule` instances or tuples of `Rule` + constructor arguments. + """ + self.rules = [] # type: typing.List[Rule] + if rules: + self.add_rules(rules) + + def add_rules(self, rules): + """Appends new rules to the router. + + :arg rules: a list of Rule instances (or tuples of arguments, which are + passed to Rule constructor). + """ + for rule in rules: + if isinstance(rule, (tuple, list)): + assert len(rule) in (2, 3, 4) + if isinstance(rule[0], basestring_type): + rule = Rule(PathMatches(rule[0]), *rule[1:]) + else: + rule = Rule(*rule) + + self.rules.append(self.process_rule(rule)) + + def process_rule(self, rule): + """Override this method for additional preprocessing of each rule. + + :arg Rule rule: a rule to be processed. + :returns: the same or modified Rule instance. + """ + return rule + + def find_handler(self, request, **kwargs): + for rule in self.rules: + target_params = rule.matcher.match(request) + if target_params is not None: + if rule.target_kwargs: + target_params['target_kwargs'] = rule.target_kwargs + + delegate = self.get_target_delegate( + rule.target, request, **target_params) + + if delegate is not None: + return delegate + + return None + + def get_target_delegate(self, target, request, **target_params): + """Returns an instance of `~.httputil.HTTPMessageDelegate` for a + Rule's target. This method is called by `~.find_handler` and can be + extended to provide additional target types. + + :arg target: a Rule's target. + :arg httputil.HTTPServerRequest request: current request. + :arg target_params: additional parameters that can be useful + for `~.httputil.HTTPMessageDelegate` creation. + """ + if isinstance(target, Router): + return target.find_handler(request, **target_params) + + elif isinstance(target, httputil.HTTPServerConnectionDelegate): + return target.start_request(request.server_connection, request.connection) + + elif callable(target): + return _CallableAdapter( + partial(target, **target_params), request.connection + ) + + return None + + +class ReversibleRuleRouter(ReversibleRouter, RuleRouter): + """A rule-based router that implements ``reverse_url`` method. + + Each rule added to this router may have a ``name`` attribute that can be + used to reconstruct an original uri. The actual reconstruction takes place + in a rule's matcher (see `Matcher.reverse`). 
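    For illustration, a named rule can then be reversed to a concrete url
    (``PostDelegate`` is a placeholder for any valid rule target)::

        router = ReversibleRuleRouter([
            Rule(PathMatches(r"/posts/([0-9]+)"), PostDelegate, name="post"),
        ])
        router.reverse_url("post", 42)  # "/posts/42"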
+ """ + + def __init__(self, rules=None): + self.named_rules = {} # type: typing.Dict[str] + super(ReversibleRuleRouter, self).__init__(rules) + + def process_rule(self, rule): + rule = super(ReversibleRuleRouter, self).process_rule(rule) + + if rule.name: + if rule.name in self.named_rules: + app_log.warning( + "Multiple handlers named %s; replacing previous value", + rule.name) + self.named_rules[rule.name] = rule + + return rule + + def reverse_url(self, name, *args): + if name in self.named_rules: + return self.named_rules[name].matcher.reverse(*args) + + for rule in self.rules: + if isinstance(rule.target, ReversibleRouter): + reversed_url = rule.target.reverse_url(name, *args) + if reversed_url is not None: + return reversed_url + + return None + + +class Rule(object): + """A routing rule.""" + + def __init__(self, matcher, target, target_kwargs=None, name=None): + """Constructs a Rule instance. + + :arg Matcher matcher: a `Matcher` instance used for determining + whether the rule should be considered a match for a specific + request. + :arg target: a Rule's target (typically a ``RequestHandler`` or + `~.httputil.HTTPServerConnectionDelegate` subclass or even a nested `Router`, + depending on routing implementation). + :arg dict target_kwargs: a dict of parameters that can be useful + at the moment of target instantiation (for example, ``status_code`` + for a ``RequestHandler`` subclass). They end up in + ``target_params['target_kwargs']`` of `RuleRouter.get_target_delegate` + method. + :arg str name: the name of the rule that can be used to find it + in `ReversibleRouter.reverse_url` implementation. + """ + if isinstance(target, str): + # import the Module and instantiate the class + # Must be a fully qualified name (module.ClassName) + target = import_object(target) + + self.matcher = matcher # type: Matcher + self.target = target + self.target_kwargs = target_kwargs if target_kwargs else {} + self.name = name + + def reverse(self, *args): + return self.matcher.reverse(*args) + + def __repr__(self): + return '%s(%r, %s, kwargs=%r, name=%r)' % \ + (self.__class__.__name__, self.matcher, + self.target, self.target_kwargs, self.name) + + +class Matcher(object): + """Represents a matcher for request features.""" + + def match(self, request): + """Matches current instance against the request. + + :arg httputil.HTTPServerRequest request: current HTTP request + :returns: a dict of parameters to be passed to the target handler + (for example, ``handler_kwargs``, ``path_args``, ``path_kwargs`` + can be passed for proper `~.web.RequestHandler` instantiation). + An empty dict is a valid (and common) return value to indicate a match + when the argument-passing features are not used. 
+ ``None`` must be returned to indicate that there is no match.""" + raise NotImplementedError() + + def reverse(self, *args): + """Reconstructs full url from matcher instance and additional arguments.""" + return None + + +class AnyMatches(Matcher): + """Matches any request.""" + + def match(self, request): + return {} + + +class HostMatches(Matcher): + """Matches requests from hosts specified by ``host_pattern`` regex.""" + + def __init__(self, host_pattern): + if isinstance(host_pattern, basestring_type): + if not host_pattern.endswith("$"): + host_pattern += "$" + self.host_pattern = re.compile(host_pattern) + else: + self.host_pattern = host_pattern + + def match(self, request): + if self.host_pattern.match(request.host_name): + return {} + + return None + + +class DefaultHostMatches(Matcher): + """Matches requests from host that is equal to application's default_host. + Always returns no match if ``X-Real-Ip`` header is present. + """ + + def __init__(self, application, host_pattern): + self.application = application + self.host_pattern = host_pattern + + def match(self, request): + # Look for default host if not behind load balancer (for debugging) + if "X-Real-Ip" not in request.headers: + if self.host_pattern.match(self.application.default_host): + return {} + return None + + +class PathMatches(Matcher): + """Matches requests with paths specified by ``path_pattern`` regex.""" + + def __init__(self, path_pattern): + if isinstance(path_pattern, basestring_type): + if not path_pattern.endswith('$'): + path_pattern += '$' + self.regex = re.compile(path_pattern) + else: + self.regex = path_pattern + + assert len(self.regex.groupindex) in (0, self.regex.groups), \ + ("groups in url regexes must either be all named or all " + "positional: %r" % self.regex.pattern) + + self._path, self._group_count = self._find_groups() + + def match(self, request): + match = self.regex.match(request.path) + if match is None: + return None + if not self.regex.groups: + return {} + + path_args, path_kwargs = [], {} + + # Pass matched groups to the handler. Since + # match.groups() includes both named and + # unnamed groups, we want to use either groups + # or groupdict but not both. + if self.regex.groupindex: + path_kwargs = dict( + (str(k), _unquote_or_none(v)) + for (k, v) in match.groupdict().items()) + else: + path_args = [_unquote_or_none(s) for s in match.groups()] + + return dict(path_args=path_args, path_kwargs=path_kwargs) + + def reverse(self, *args): + if self._path is None: + raise ValueError("Cannot reverse url regex " + self.regex.pattern) + assert len(args) == self._group_count, "required number of arguments " \ + "not found" + if not len(args): + return self._path + converted_args = [] + for a in args: + if not isinstance(a, (unicode_type, bytes)): + a = str(a) + converted_args.append(url_escape(utf8(a), plus=False)) + return self._path % tuple(converted_args) + + def _find_groups(self): + """Returns a tuple (reverse string, group count) for a url. + + For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method + would return ('/%s/%s/', 2). + """ + pattern = self.regex.pattern + if pattern.startswith('^'): + pattern = pattern[1:] + if pattern.endswith('$'): + pattern = pattern[:-1] + + if self.regex.groups != pattern.count('('): + # The pattern is too complicated for our simplistic matching, + # so we can't support reversing it. 
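            # (Illustrative case: r"/(?:a|b)/(\w+)" contains one capturing group
            # but two '(' characters, so such a pattern cannot be reversed.)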
+ return None, None + + pieces = [] + for fragment in pattern.split('('): + if ')' in fragment: + paren_loc = fragment.index(')') + if paren_loc >= 0: + pieces.append('%s' + fragment[paren_loc + 1:]) + else: + try: + unescaped_fragment = re_unescape(fragment) + except ValueError as exc: + # If we can't unescape part of it, we can't + # reverse this url. + return (None, None) + pieces.append(unescaped_fragment) + + return ''.join(pieces), self.regex.groups + + +class URLSpec(Rule): + """Specifies mappings between URLs and handlers. + + .. versionchanged: 4.5 + `URLSpec` is now a subclass of a `Rule` with `PathMatches` matcher and is preserved for + backwards compatibility. + """ + def __init__(self, pattern, handler, kwargs=None, name=None): + """Parameters: + + * ``pattern``: Regular expression to be matched. Any capturing + groups in the regex will be passed in to the handler's + get/post/etc methods as arguments (by keyword if named, by + position if unnamed. Named and unnamed capturing groups may + may not be mixed in the same rule). + + * ``handler``: `~.web.RequestHandler` subclass to be invoked. + + * ``kwargs`` (optional): A dictionary of additional arguments + to be passed to the handler's constructor. + + * ``name`` (optional): A name for this handler. Used by + `~.web.Application.reverse_url`. + + """ + super(URLSpec, self).__init__(PathMatches(pattern), handler, kwargs, name) + + self.regex = self.matcher.regex + self.handler_class = self.target + self.kwargs = kwargs + + def __repr__(self): + return '%s(%r, %s, kwargs=%r, name=%r)' % \ + (self.__class__.__name__, self.regex.pattern, + self.handler_class, self.kwargs, self.name) + + +def _unquote_or_none(s): + """None-safe wrapper around url_unescape to handle unmatched optional + groups correctly. + + Note that args are passed as bytes so the handler can decide what + encoding to use. 
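    A quick sketch of the behaviour (values are illustrative)::

        _unquote_or_none(None)      # -> None
        _unquote_or_none("a%2Fb")   # -> b"a/b"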
+ """ + if s is None: + return s + return url_unescape(s, encoding=None, plus=False) diff --git a/server/www/packages/packages-common/tornado/simple_httpclient.py b/server/www/packages/packages-common/tornado/simple_httpclient.py index 37b0bc2..8fb7070 100644 --- a/server/www/packages/packages-common/tornado/simple_httpclient.py +++ b/server/www/packages/packages-common/tornado/simple_httpclient.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function from tornado.escape import utf8, _unicode from tornado import gen @@ -11,6 +11,7 @@ from tornado.netutil import Resolver, OverrideResolver, _client_ssl_defaults from tornado.log import gen_log from tornado import stack_context from tornado.tcpclient import TCPClient +from tornado.util import PY3 import base64 import collections @@ -22,10 +23,10 @@ import sys from io import BytesIO -try: - import urlparse # py2 -except ImportError: - import urllib.parse as urlparse # py3 +if PY3: + import urllib.parse as urlparse +else: + import urlparse try: import ssl @@ -126,7 +127,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): timeout_handle = self.io_loop.add_timeout( self.io_loop.time() + min(request.connect_timeout, request.request_timeout), - functools.partial(self._on_timeout, key)) + functools.partial(self._on_timeout, key, "in request queue")) else: timeout_handle = None self.waiting[key] = (request, callback, timeout_handle) @@ -167,11 +168,20 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): self.io_loop.remove_timeout(timeout_handle) del self.waiting[key] - def _on_timeout(self, key): + def _on_timeout(self, key, info=None): + """Timeout callback of request. + + Construct a timeout HTTPResponse when a timeout occurs. + + :arg object key: A simple object to mark the request. + :info string key: More detailed timeout information. + """ request, callback, timeout_handle = self.waiting[key] self.queue.remove((key, request, callback)) + + error_message = "Timeout {0}".format(info) if info else "Timeout" timeout_response = HTTPResponse( - request, 599, error=HTTPError(599, "Timeout"), + request, 599, error=HTTPError(599, error_message), request_time=self.io_loop.time() - request.start_time) self.io_loop.add_callback(callback, timeout_response) del self.waiting[key] @@ -229,7 +239,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): if timeout: self._timeout = self.io_loop.add_timeout( self.start_time + timeout, - stack_context.wrap(self._on_timeout)) + stack_context.wrap(functools.partial(self._on_timeout, "while connecting"))) self.tcp_client.connect(host, port, af=af, ssl_options=ssl_options, max_buffer_size=self.max_buffer_size, @@ -284,10 +294,17 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): return ssl_options return None - def _on_timeout(self): + def _on_timeout(self, info=None): + """Timeout callback of _HTTPConnection instance. + + Raise a timeout HTTPError when a timeout occurs. + + :info string key: More detailed timeout information. 
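        For example, the timeout scheduled while connecting passes
        ``info="while connecting"``, which surfaces as
        ``HTTPError(599, "Timeout while connecting")``.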
+ """ self._timeout = None + error_message = "Timeout {0}".format(info) if info else "Timeout" if self.final_callback is not None: - raise HTTPError(599, "Timeout") + raise HTTPError(599, error_message) def _remove_timeout(self): if self._timeout is not None: @@ -307,13 +324,14 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): if self.request.request_timeout: self._timeout = self.io_loop.add_timeout( self.start_time + self.request.request_timeout, - stack_context.wrap(self._on_timeout)) + stack_context.wrap(functools.partial(self._on_timeout, "during request"))) if (self.request.method not in self._SUPPORTED_METHODS and not self.request.allow_nonstandard_methods): raise KeyError("unknown method %s" % self.request.method) for key in ('network_interface', 'proxy_host', 'proxy_port', - 'proxy_username', 'proxy_password'): + 'proxy_username', 'proxy_password', + 'proxy_auth_mode'): if getattr(self.request, key, None): raise NotImplementedError('%s not supported' % key) if "Connection" not in self.request.headers: @@ -481,7 +499,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): def _should_follow_redirect(self): return (self.request.follow_redirects and self.request.max_redirects > 0 and - self.code in (301, 302, 303, 307)) + self.code in (301, 302, 303, 307, 308)) def finish(self): data = b''.join(self.chunks) diff --git a/server/www/packages/packages-common/tornado/stack_context.py b/server/www/packages/packages-common/tornado/stack_context.py index 2c0d9ee..61ae51f 100644 --- a/server/www/packages/packages-common/tornado/stack_context.py +++ b/server/www/packages/packages-common/tornado/stack_context.py @@ -67,7 +67,7 @@ Here are a few rules of thumb for when it's necessary: block that references your `StackContext`. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import sys import threading @@ -82,6 +82,8 @@ class StackContextInconsistentError(Exception): class _State(threading.local): def __init__(self): self.contexts = (tuple(), None) + + _state = _State() diff --git a/server/www/packages/packages-common/tornado/tcpclient.py b/server/www/packages/packages-common/tornado/tcpclient.py index f594d91..33074bd 100644 --- a/server/www/packages/packages-common/tornado/tcpclient.py +++ b/server/www/packages/packages-common/tornado/tcpclient.py @@ -16,7 +16,7 @@ """A non-blocking TCP connection factory. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import functools import socket @@ -155,16 +155,30 @@ class TCPClient(object): @gen.coroutine def connect(self, host, port, af=socket.AF_UNSPEC, ssl_options=None, - max_buffer_size=None): + max_buffer_size=None, source_ip=None, source_port=None): """Connect to the given host and port. Asynchronously returns an `.IOStream` (or `.SSLIOStream` if ``ssl_options`` is not None). + + Using the ``source_ip`` kwarg, one can specify the source + IP address to use when establishing the connection. + In case the user needs to resolve and + use a specific interface, it has to be handled outside + of Tornado as this depends very much on the platform. + + Similarly, when the user requires a certain source port, it can + be specified using the ``source_port`` arg. + + .. versionchanged:: 4.5 + Added the ``source_ip`` and ``source_port`` arguments. 
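        A usage sketch inside a coroutine (host and addresses are placeholders)::

            stream = yield TCPClient().connect("example.com", 80,
                                               source_ip="192.168.1.42",
                                               source_port=0)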
""" addrinfo = yield self.resolver.resolve(host, port, af) connector = _Connector( addrinfo, self.io_loop, - functools.partial(self._create_stream, max_buffer_size)) + functools.partial(self._create_stream, max_buffer_size, + source_ip=source_ip, source_port=source_port) + ) af, addr, stream = yield connector.start() # TODO: For better performance we could cache the (af, addr) # information here and re-use it on subsequent connections to @@ -174,10 +188,35 @@ class TCPClient(object): server_hostname=host) raise gen.Return(stream) - def _create_stream(self, max_buffer_size, af, addr): + def _create_stream(self, max_buffer_size, af, addr, source_ip=None, + source_port=None): # Always connect in plaintext; we'll convert to ssl if necessary # after one connection has completed. - stream = IOStream(socket.socket(af), - io_loop=self.io_loop, - max_buffer_size=max_buffer_size) - return stream.connect(addr) + source_port_bind = source_port if isinstance(source_port, int) else 0 + source_ip_bind = source_ip + if source_port_bind and not source_ip: + # User required a specific port, but did not specify + # a certain source IP, will bind to the default loopback. + source_ip_bind = '::1' if af == socket.AF_INET6 else '127.0.0.1' + # Trying to use the same address family as the requested af socket: + # - 127.0.0.1 for IPv4 + # - ::1 for IPv6 + socket_obj = socket.socket(af) + if source_port_bind or source_ip_bind: + # If the user requires binding also to a specific IP/port. + try: + socket_obj.bind((source_ip_bind, source_port_bind)) + except socket.error: + socket_obj.close() + # Fail loudly if unable to use the IP/port. + raise + try: + stream = IOStream(socket_obj, + io_loop=self.io_loop, + max_buffer_size=max_buffer_size) + except socket.error as e: + fu = Future() + fu.set_exception(e) + return fu + else: + return stream.connect(addr) diff --git a/server/www/packages/packages-common/tornado/tcpserver.py b/server/www/packages/packages-common/tornado/tcpserver.py index c9d148a..f47ec89 100644 --- a/server/www/packages/packages-common/tornado/tcpserver.py +++ b/server/www/packages/packages-common/tornado/tcpserver.py @@ -15,12 +15,13 @@ # under the License. """A non-blocking, single-threaded TCP server.""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import errno import os import socket +from tornado import gen from tornado.log import app_log from tornado.ioloop import IOLoop from tornado.iostream import IOStream, SSLIOStream @@ -39,7 +40,21 @@ class TCPServer(object): r"""A non-blocking, single-threaded TCP server. To use `TCPServer`, define a subclass which overrides the `handle_stream` - method. + method. For example, a simple echo server could be defined like this:: + + from tornado.tcpserver import TCPServer + from tornado.iostream import StreamClosedError + from tornado import gen + + class EchoServer(TCPServer): + @gen.coroutine + def handle_stream(self, stream, address): + while True: + try: + data = yield stream.read_until(b"\n") + yield stream.write(data) + except StreamClosedError: + break To make this server serve SSL traffic, send the ``ssl_options`` keyword argument with an `ssl.SSLContext` object. 
For compatibility with older @@ -95,6 +110,7 @@ class TCPServer(object): self._sockets = {} # fd -> socket object self._pending_sockets = [] self._started = False + self._stopped = False self.max_buffer_size = max_buffer_size self.read_chunk_size = read_chunk_size @@ -147,7 +163,8 @@ class TCPServer(object): """Singular version of `add_sockets`. Takes a single socket object.""" self.add_sockets([socket]) - def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128): + def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128, + reuse_port=False): """Binds this server to the given port on the given address. To start the server, call `start`. If you want to run this server @@ -162,13 +179,17 @@ class TCPServer(object): both will be used if available. The ``backlog`` argument has the same meaning as for - `socket.listen `. + `socket.listen `. The ``reuse_port`` argument + has the same meaning as for `.bind_sockets`. This method may be called multiple times prior to `start` to listen on multiple ports or interfaces. + + .. versionchanged:: 4.4 + Added the ``reuse_port`` argument. """ sockets = bind_sockets(port, address=address, family=family, - backlog=backlog) + backlog=backlog, reuse_port=reuse_port) if self._started: self.add_sockets(sockets) else: @@ -208,7 +229,11 @@ class TCPServer(object): Requests currently in progress may still continue after the server is stopped. """ + if self._stopped: + return + self._stopped = True for fd, sock in self._sockets.items(): + assert sock.fileno() == fd self.io_loop.remove_handler(fd) sock.close() @@ -266,8 +291,10 @@ class TCPServer(object): stream = IOStream(connection, io_loop=self.io_loop, max_buffer_size=self.max_buffer_size, read_chunk_size=self.read_chunk_size) + future = self.handle_stream(stream, address) if future is not None: - self.io_loop.add_future(future, lambda f: f.result()) + self.io_loop.add_future(gen.convert_yielded(future), + lambda f: f.result()) except Exception: app_log.error("Error in connection callback", exc_info=True) diff --git a/server/www/packages/packages-common/tornado/template.py b/server/www/packages/packages-common/tornado/template.py index fa58899..3b2fa3f 100644 --- a/server/www/packages/packages-common/tornado/template.py +++ b/server/www/packages/packages-common/tornado/template.py @@ -19,13 +19,13 @@ Basic usage looks like:: t = template.Template("{{ myvalue }}") - print t.generate(myvalue="XXX") + print(t.generate(myvalue="XXX")) `Loader` is a class that loads templates from a root directory and caches the compiled templates:: loader = template.Loader("/home/btaylor") - print loader.load("test.html").generate(myvalue="XXX") + print(loader.load("test.html").generate(myvalue="XXX")) We compile all templates to raw Python. Error-reporting is currently... uh, interesting. Syntax for the templates:: @@ -94,12 +94,15 @@ Syntax Reference Template expressions are surrounded by double curly braces: ``{{ ... }}``. The contents may be any python expression, which will be escaped according to the current autoescape setting and inserted into the output. Other -template directives use ``{% %}``. These tags may be escaped as ``{{!`` -and ``{%!`` if you need to include a literal ``{{`` or ``{%`` in the output. +template directives use ``{% %}``. To comment out a section so that it is omitted from the output, surround it with ``{# ... #}``. +These tags may be escaped as ``{{!``, ``{%!``, and ``{#!`` +if you need to include a literal ``{{``, ``{%``, or ``{#`` in the output. 
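For instance, in this fragment (``name`` is a made-up variable) the first line is
omitted from the output, the second has ``name`` substituted, and the third is
emitted with its braces intact::

    {# a comment, dropped from the output #}
    Hello {{ name }}!
    {{! not an expression }}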
+ + ``{% apply *function* %}...{% end %}`` Applies a function to the output of all template code between ``apply`` and ``end``:: @@ -193,7 +196,7 @@ with ``{# ... #}``. `filter_whitespace` for available options. New in Tornado 4.3. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import datetime import linecache @@ -204,12 +207,12 @@ import threading from tornado import escape from tornado.log import app_log -from tornado.util import ObjectDict, exec_in, unicode_type +from tornado.util import ObjectDict, exec_in, unicode_type, PY3 -try: - from cStringIO import StringIO # py2 -except ImportError: - from io import StringIO # py3 +if PY3: + from io import StringIO +else: + from cStringIO import StringIO _DEFAULT_AUTOESCAPE = "xhtml_escape" _UNSET = object() @@ -665,7 +668,7 @@ class ParseError(Exception): .. versionchanged:: 4.3 Added ``filename`` and ``lineno`` attributes. """ - def __init__(self, message, filename, lineno): + def __init__(self, message, filename=None, lineno=0): self.message = message # The names "filename" and "lineno" are chosen for consistency # with python SyntaxError. diff --git a/server/www/packages/packages-common/tornado/testing.py b/server/www/packages/packages-common/tornado/testing.py index 54d76fe..74d04b6 100644 --- a/server/www/packages/packages-common/tornado/testing.py +++ b/server/www/packages/packages-common/tornado/testing.py @@ -2,7 +2,7 @@ """Support classes for automated testing. * `AsyncTestCase` and `AsyncHTTPTestCase`: Subclasses of unittest.TestCase - with additional support for testing asynchronous (`.IOLoop` based) code. + with additional support for testing asynchronous (`.IOLoop`-based) code. * `ExpectLog` and `LogTrapTestCase`: Make test logs less spammy. @@ -10,7 +10,7 @@ for the tornado.autoreload module to rerun the tests when code changes. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function try: from tornado import gen @@ -23,16 +23,16 @@ try: except ImportError: # These modules are not importable on app engine. Parts of this module # won't work, but e.g. LogTrapTestCase and main() will. 
- AsyncHTTPClient = None - gen = None - HTTPServer = None - IOLoop = None - netutil = None - SimpleAsyncHTTPClient = None - Subprocess = None + AsyncHTTPClient = None # type: ignore + gen = None # type: ignore + HTTPServer = None # type: ignore + IOLoop = None # type: ignore + netutil = None # type: ignore + SimpleAsyncHTTPClient = None # type: ignore + Subprocess = None # type: ignore from tornado.log import gen_log, app_log from tornado.stack_context import ExceptionStackContext -from tornado.util import raise_exc_info, basestring_type +from tornado.util import raise_exc_info, basestring_type, PY3 import functools import inspect import logging @@ -42,19 +42,19 @@ import signal import socket import sys -try: - from cStringIO import StringIO # py2 -except ImportError: - from io import StringIO # py3 +if PY3: + from io import StringIO +else: + from cStringIO import StringIO try: - from collections.abc import Generator as GeneratorType # py35+ + from collections.abc import Generator as GeneratorType # type: ignore except ImportError: - from types import GeneratorType + from types import GeneratorType # type: ignore if sys.version_info >= (3, 5): - iscoroutine = inspect.iscoroutine - iscoroutinefunction = inspect.iscoroutinefunction + iscoroutine = inspect.iscoroutine # type: ignore + iscoroutinefunction = inspect.iscoroutinefunction # type: ignore else: iscoroutine = iscoroutinefunction = lambda f: False @@ -62,16 +62,16 @@ else: # (either py27+ or unittest2) so tornado.test.util enforces # this requirement, but for other users of tornado.testing we want # to allow the older version if unitest2 is not available. -if sys.version_info >= (3,): +if PY3: # On python 3, mixing unittest2 and unittest (including doctest) # doesn't seem to work, so always use unittest. import unittest else: # On python 2, prefer unittest2 when available. try: - import unittest2 as unittest + import unittest2 as unittest # type: ignore except ImportError: - import unittest + import unittest # type: ignore _next_port = 10000 @@ -96,9 +96,13 @@ def bind_unused_port(reuse_port=False): """Binds a server socket to an available port on localhost. Returns a tuple (socket, port). + + .. versionchanged:: 4.4 + Always binds to ``127.0.0.1`` without resolving the name + ``localhost``. """ - [sock] = netutil.bind_sockets(None, 'localhost', family=socket.AF_INET, - reuse_port=reuse_port) + sock = netutil.bind_sockets(None, '127.0.0.1', family=socket.AF_INET, + reuse_port=reuse_port)[0] port = sock.getsockname()[1] return sock, port @@ -123,7 +127,7 @@ class _TestMethodWrapper(object): method yields it must use a decorator to consume the generator), but will also detect other kinds of return values (these are not necessarily errors, but we alert anyway since there is no good - reason to return a value from a test. + reason to return a value from a test). """ def __init__(self, orig_method): self.orig_method = orig_method @@ -208,8 +212,8 @@ class AsyncTestCase(unittest.TestCase): self.assertIn("FriendFeed", response.body) self.stop() """ - def __init__(self, methodName='runTest', **kwargs): - super(AsyncTestCase, self).__init__(methodName, **kwargs) + def __init__(self, methodName='runTest'): + super(AsyncTestCase, self).__init__(methodName) self.__stopped = False self.__running = False self.__failure = None @@ -547,7 +551,7 @@ def gen_test(func=None, timeout=None): # Without this attribute, nosetests will try to run gen_test as a test # anywhere it is imported. 
-gen_test.__test__ = False +gen_test.__test__ = False # type: ignore class LogTrapTestCase(unittest.TestCase): @@ -617,7 +621,7 @@ class ExpectLog(logging.Filter): an empty string to watch the root logger. :param regex: Regular expression to match. Any log entries on the specified logger that match this regex will be suppressed. - :param required: If true, an exeption will be raised if the end of + :param required: If true, an exception will be raised if the end of the ``with`` statement is reached without matching any log entries. """ if isinstance(logger, basestring_type): @@ -652,7 +656,9 @@ def main(**kwargs): This test runner is essentially equivalent to `unittest.main` from the standard library, but adds support for tornado-style option - parsing and log formatting. + parsing and log formatting. It is *not* necessary to use this + `main` function to run tests using `AsyncTestCase`; these tests + are self-contained and can run with any test runner. The easiest way to run a test is via the command line:: @@ -731,5 +737,6 @@ def main(**kwargs): gen_log.error('FAIL') raise + if __name__ == '__main__': main() diff --git a/server/www/packages/packages-common/tornado/util.py b/server/www/packages/packages-common/tornado/util.py index a67ddf5..981b94c 100644 --- a/server/www/packages/packages-common/tornado/util.py +++ b/server/www/packages/packages-common/tornado/util.py @@ -10,37 +10,92 @@ interface of its subclasses, including `.AsyncHTTPClient`, `.IOLoop`, and `.Resolver`. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import array +import atexit import os +import re import sys import zlib +PY3 = sys.version_info >= (3,) -try: - xrange # py2 -except NameError: - xrange = range # py3 +if PY3: + xrange = range # inspect.getargspec() raises DeprecationWarnings in Python 3.5. # The two functions have compatible interfaces for the parts we need. +if PY3: + from inspect import getfullargspec as getargspec +else: + from inspect import getargspec + +# Aliases for types that are spelled differently in different Python +# versions. bytes_type is deprecated and no longer used in Tornado +# itself but is left in case anyone outside Tornado is using it. +bytes_type = bytes +if PY3: + unicode_type = str + basestring_type = str +else: + # The names unicode and basestring don't exist in py3 so silence flake8. + unicode_type = unicode # noqa + basestring_type = basestring # noqa + + try: - from inspect import getfullargspec as getargspec # py3 + import typing # noqa + from typing import cast + + _ObjectDictBase = typing.Dict[str, typing.Any] except ImportError: - from inspect import getargspec # py2 + _ObjectDictBase = dict + + def cast(typ, x): + return x +else: + # More imports that are only needed in type comments. 
+ import datetime # noqa + import types # noqa + from typing import Any, AnyStr, Union, Optional, Dict, Mapping # noqa + from typing import Tuple, Match, Callable # noqa + + if PY3: + _BaseString = str + else: + _BaseString = Union[bytes, unicode_type] -class ObjectDict(dict): +try: + from sys import is_finalizing +except ImportError: + # Emulate it + def _get_emulated_is_finalizing(): + L = [] + atexit.register(lambda: L.append(None)) + + def is_finalizing(): + # Not referencing any globals here + return L != [] + + return is_finalizing + + is_finalizing = _get_emulated_is_finalizing() + + +class ObjectDict(_ObjectDictBase): """Makes a dictionary behave like an object, with attribute-style access. """ def __getattr__(self, name): + # type: (str) -> Any try: return self[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, value): + # type: (str, Any) -> None self[name] = value @@ -57,6 +112,7 @@ class GzipDecompressor(object): self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS) def decompress(self, value, max_length=None): + # type: (bytes, Optional[int]) -> bytes """Decompress a chunk, returning newly-available data. Some data may be buffered for later processing; `flush` must @@ -71,11 +127,13 @@ class GzipDecompressor(object): @property def unconsumed_tail(self): + # type: () -> bytes """Returns the unconsumed portion left over """ return self.decompressobj.unconsumed_tail def flush(self): + # type: () -> bytes """Return any remaining buffered data not yet returned by decompress. Also checks for errors such as truncated input. @@ -84,26 +142,8 @@ class GzipDecompressor(object): return self.decompressobj.flush() -# Fake unicode literal support: Python 3.2 doesn't have the u'' marker for -# literal strings, and alternative solutions like "from __future__ import -# unicode_literals" have other problems (see PEP 414). u() can be applied -# to ascii strings that include \u escapes (but they must not contain -# literal non-ascii characters). -if not isinstance(b'', type('')): - def u(s): - return s - unicode_type = str - basestring_type = str -else: - def u(s): - return s.decode('unicode_escape') - # These names don't exist in py3, so use noqa comments to disable - # warnings in flake8. - unicode_type = unicode # noqa - basestring_type = basestring # noqa - - def import_object(name): + # type: (_BaseString) -> Any """Imports an object by name. import_object('x') is equivalent to 'import x'. @@ -121,8 +161,8 @@ def import_object(name): ... ImportError: No module named missing_module """ - if isinstance(name, unicode_type) and str is not unicode_type: - # On python 2 a byte string is required. + if not isinstance(name, str): + # on python 2 a byte string is required. name = name.encode('utf-8') if name.count('.') == 0: return __import__(name, None, None) @@ -135,35 +175,39 @@ def import_object(name): raise ImportError("No module named %s" % parts[-1]) -# Deprecated alias that was used before we dropped py25 support. -# Left here in case anyone outside Tornado is using it. -bytes_type = bytes - -if sys.version_info > (3,): - exec(""" +# Stubs to make mypy happy (and later for actual type-checking). 
def raise_exc_info(exc_info): - raise exc_info[1].with_traceback(exc_info[2]) + # type: (Tuple[type, BaseException, types.TracebackType]) -> None + pass + def exec_in(code, glob, loc=None): - if isinstance(code, str): + # type: (Any, Dict[str, Any], Optional[Mapping[str, Any]]) -> Any + if isinstance(code, basestring_type): + # exec(string) inherits the caller's future imports; compile + # the string first to prevent that. code = compile(code, '', 'exec', dont_inherit=True) exec(code, glob, loc) + + +if PY3: + exec(""" +def raise_exc_info(exc_info): + try: + raise exc_info[1].with_traceback(exc_info[2]) + finally: + exc_info = None + """) else: exec(""" def raise_exc_info(exc_info): raise exc_info[0], exc_info[1], exc_info[2] - -def exec_in(code, glob, loc=None): - if isinstance(code, basestring): - # exec(string) inherits the caller's future imports; compile - # the string first to prevent that. - code = compile(code, '', 'exec', dont_inherit=True) - exec code in glob, loc """) def errno_from_exception(e): + # type: (BaseException) -> Optional[int] """Provides the errno from an Exception object. There are cases that the errno attribute was not set so we pull @@ -174,13 +218,41 @@ def errno_from_exception(e): """ if hasattr(e, 'errno'): - return e.errno + return e.errno # type: ignore elif e.args: return e.args[0] else: return None +_alphanum = frozenset( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") + + +def _re_unescape_replacement(match): + # type: (Match[str]) -> str + group = match.group(1) + if group[0] in _alphanum: + raise ValueError("cannot unescape '\\\\%s'" % group[0]) + return group + + +_re_unescape_pattern = re.compile(r'\\(.)', re.DOTALL) + + +def re_unescape(s): + # type: (str) -> str + """Unescape a string escaped by `re.escape`. + + May raise ``ValueError`` for regular expressions which could not + have been produced by `re.escape` (for example, strings containing + ``\d`` cannot be unescaped). + + .. versionadded:: 4.4 + """ + return _re_unescape_pattern.sub(_re_unescape_replacement, s) + + class Configurable(object): """Base class for configurable interfaces. @@ -201,8 +273,8 @@ class Configurable(object): `configurable_base` and `configurable_default`, and use the instance method `initialize` instead of ``__init__``. """ - __impl_class = None - __impl_kwargs = None + __impl_class = None # type: type + __impl_kwargs = None # type: Dict[str, Any] def __new__(cls, *args, **kwargs): base = cls.configurable_base() @@ -223,6 +295,9 @@ class Configurable(object): @classmethod def configurable_base(cls): + # type: () -> Any + # TODO: This class needs https://github.com/python/typing/issues/107 + # to be fully typeable. """Returns the base class of a configurable hierarchy. This will normally return the class in which it is defined. @@ -232,10 +307,12 @@ class Configurable(object): @classmethod def configurable_default(cls): + # type: () -> type """Returns the implementation class to be used if none is configured.""" raise NotImplementedError() def initialize(self): + # type: () -> None """Initialize a `Configurable` subclass instance. Configurable classes should use `initialize` instead of ``__init__``. @@ -246,6 +323,7 @@ class Configurable(object): @classmethod def configure(cls, impl, **kwargs): + # type: (Any, **Any) -> None """Sets the class to use when the base class is instantiated. Keyword arguments will be saved and added to the arguments passed @@ -253,7 +331,7 @@ class Configurable(object): some parameters. 
""" base = cls.configurable_base() - if isinstance(impl, (unicode_type, bytes)): + if isinstance(impl, (str, unicode_type)): impl = import_object(impl) if impl is not None and not issubclass(impl, cls): raise ValueError("Invalid subclass of %s" % cls) @@ -262,6 +340,7 @@ class Configurable(object): @classmethod def configured_class(cls): + # type: () -> type """Returns the currently configured class.""" base = cls.configurable_base() if cls.__impl_class is None: @@ -270,11 +349,13 @@ class Configurable(object): @classmethod def _save_configuration(cls): + # type: () -> Tuple[type, Dict[str, Any]] base = cls.configurable_base() return (base.__impl_class, base.__impl_kwargs) @classmethod def _restore_configuration(cls, saved): + # type: (Tuple[type, Dict[str, Any]]) -> None base = cls.configurable_base() base.__impl_class = saved[0] base.__impl_kwargs = saved[1] @@ -288,6 +369,7 @@ class ArgReplacer(object): and similar wrappers. """ def __init__(self, func, name): + # type: (Callable, str) -> None self.name = name try: self.arg_pos = self._getargnames(func).index(name) @@ -296,6 +378,7 @@ class ArgReplacer(object): self.arg_pos = None def _getargnames(self, func): + # type: (Callable) -> List[str] try: return getargspec(func).args except TypeError: @@ -306,11 +389,12 @@ class ArgReplacer(object): # getargspec that we need here. Note that for static # functions the @cython.binding(True) decorator must # be used (for methods it works out of the box). - code = func.func_code + code = func.func_code # type: ignore return code.co_varnames[:code.co_argcount] raise def get_old_value(self, args, kwargs, default=None): + # type: (List[Any], Dict[str, Any], Any) -> Any """Returns the old value of the named argument without replacing it. Returns ``default`` if the argument is not present. @@ -321,6 +405,7 @@ class ArgReplacer(object): return kwargs.get(self.name, default) def replace(self, new_value, args, kwargs): + # type: (Any, List[Any], Dict[str, Any]) -> Tuple[Any, List[Any], Dict[str, Any]] """Replace the named argument in ``args, kwargs`` with ``new_value``. Returns ``(old_value, args, kwargs)``. The returned ``args`` and @@ -343,11 +428,13 @@ class ArgReplacer(object): def timedelta_to_seconds(td): + # type: (datetime.timedelta) -> float """Equivalent to td.total_seconds() (introduced in python 2.7).""" return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6) def _websocket_mask_python(mask, data): + # type: (bytes, bytes) -> bytes """Websocket masking function. `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length. @@ -356,17 +443,18 @@ def _websocket_mask_python(mask, data): This pure-python implementation may be replaced by an optimized version when available. """ - mask = array.array("B", mask) - unmasked = array.array("B", data) + mask_arr = array.array("B", mask) + unmasked_arr = array.array("B", data) for i in xrange(len(data)): - unmasked[i] = unmasked[i] ^ mask[i % 4] - if hasattr(unmasked, 'tobytes'): + unmasked_arr[i] = unmasked_arr[i] ^ mask_arr[i % 4] + if PY3: # tostring was deprecated in py32. It hasn't been removed, # but since we turn on deprecation warnings in our tests # we need to use the right one. 
- return unmasked.tobytes() + return unmasked_arr.tobytes() else: - return unmasked.tostring() + return unmasked_arr.tostring() + if (os.environ.get('TORNADO_NO_EXTENSION') or os.environ.get('TORNADO_EXTENSION') == '0'): diff --git a/server/www/packages/packages-common/tornado/web.py b/server/www/packages/packages-common/tornado/web.py index 1c2ac8c..d79889f 100644 --- a/server/www/packages/packages-common/tornado/web.py +++ b/server/www/packages/packages-common/tornado/web.py @@ -56,7 +56,7 @@ request. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import base64 import binascii @@ -77,6 +77,7 @@ import time import tornado import traceback import types +from inspect import isclass from io import BytesIO from tornado.concurrent import Future @@ -89,25 +90,32 @@ from tornado.log import access_log, app_log, gen_log from tornado import stack_context from tornado import template from tornado.escape import utf8, _unicode -from tornado.util import (import_object, ObjectDict, raise_exc_info, - unicode_type, _websocket_mask) -from tornado.httputil import split_host_and_port +from tornado.routing import (AnyMatches, DefaultHostMatches, HostMatches, + ReversibleRouter, Rule, ReversibleRuleRouter, + URLSpec) +from tornado.util import (ObjectDict, raise_exc_info, + unicode_type, _websocket_mask, PY3) +url = URLSpec + +if PY3: + import http.cookies as Cookie + import urllib.parse as urlparse + from urllib.parse import urlencode +else: + import Cookie + import urlparse + from urllib import urlencode try: - import Cookie # py2 -except ImportError: - import http.cookies as Cookie # py3 + import typing # noqa -try: - import urlparse # py2 + # The following types are accepted by RequestHandler.set_header + # and related methods. + _HeaderTypes = typing.Union[bytes, unicode_type, + numbers.Integral, datetime.datetime] except ImportError: - import urllib.parse as urlparse # py3 - -try: - from urllib import urlencode # py2 -except ImportError: - from urllib.parse import urlencode # py3 + pass MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1 @@ -152,7 +160,7 @@ class RequestHandler(object): SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT", "OPTIONS") - _template_loaders = {} # {path: template.BaseLoader} + _template_loaders = {} # type: typing.Dict[str, template.BaseLoader] _template_loader_lock = threading.Lock() _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]") @@ -166,6 +174,7 @@ class RequestHandler(object): self._auto_finish = True self._transforms = None # will be set in _execute self._prepared_future = None + self._headers = None # type: httputil.HTTPHeaders self.path_args = None self.path_kwargs = None self.ui = ObjectDict((n, self._ui_method(m)) for n, m in @@ -183,7 +192,7 @@ class RequestHandler(object): self.initialize(**kwargs) def initialize(self): - """Hook for subclass initialization. + """Hook for subclass initialization. Called for each request. A dictionary passed as the third argument of a url spec will be supplied as keyword arguments to initialize(). 
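        Example (the handler and database names are placeholders)::

            class ProfileHandler(RequestHandler):
                def initialize(self, database):
                    self.database = database

                def get(self, username):
                    ...

            app = Application([
                (r'/user/(.*)', ProfileHandler, dict(database=database)),
            ])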
@@ -313,13 +322,14 @@ class RequestHandler(object): try: self._reason = httputil.responses[status_code] except KeyError: - raise ValueError("unknown status code %d", status_code) + raise ValueError("unknown status code %d" % status_code) def get_status(self): """Returns the status code for our response.""" return self._status_code def set_header(self, name, value): + # type: (str, _HeaderTypes) -> None """Sets the given response header name and value. If a datetime is given, we automatically format it according to the @@ -329,6 +339,7 @@ class RequestHandler(object): self._headers[name] = self._convert_header_value(value) def add_header(self, name, value): + # type: (str, _HeaderTypes) -> None """Adds the given response header and value. Unlike `set_header`, `add_header` may be called multiple times @@ -345,13 +356,25 @@ class RequestHandler(object): if name in self._headers: del self._headers[name] - _INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]") + _INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]") def _convert_header_value(self, value): - if isinstance(value, bytes): - pass - elif isinstance(value, unicode_type): - value = value.encode('utf-8') + # type: (_HeaderTypes) -> str + + # Convert the input value to a str. This type check is a bit + # subtle: The bytes case only executes on python 3, and the + # unicode case only executes on python 2, because the other + # cases are covered by the first match for str. + if isinstance(value, str): + retval = value + elif isinstance(value, bytes): # py3 + # Non-ascii characters in headers are not well supported, + # but if you pass bytes, use latin1 so they pass through as-is. + retval = value.decode('latin1') + elif isinstance(value, unicode_type): # py2 + # TODO: This is inconsistent with the use of latin1 above, + # but it's been that way for a long time. Should it change? + retval = escape.utf8(value) elif isinstance(value, numbers.Integral): # return immediately since we know the converted value will be safe return str(value) @@ -361,11 +384,11 @@ class RequestHandler(object): raise TypeError("Unsupported header value %r" % value) # If \n is allowed into the header, it is possible to inject # additional headers or split the request. - if RequestHandler._INVALID_HEADER_CHAR_RE.search(value): - raise ValueError("Unsafe header value %r", value) - return value + if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval): + raise ValueError("Unsafe header value %r", retval) + return retval - _ARG_DEFAULT = [] + _ARG_DEFAULT = object() def get_argument(self, name, default=_ARG_DEFAULT, strip=True): """Returns the value of the argument with the given name. @@ -509,7 +532,7 @@ class RequestHandler(object): Additional keyword arguments are set on the Cookie.Morsel directly. - See http://docs.python.org/library/cookie.html#morsel-objects + See https://docs.python.org/2/library/cookie.html#Cookie.Morsel for available attributes. 
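        For example (cookie name and value are illustrative)::

            self.set_cookie("session_id", "abc123", expires_days=30, httponly=True)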
""" # The cookie library only accepts type str, in both python 2 and 3 @@ -696,6 +719,8 @@ class RequestHandler(object): def render(self, template_name, **kwargs): """Renders the template with the given arguments as the response.""" + if self._finished: + raise RuntimeError("Cannot render() after finish()") html = self.render_string(template_name, **kwargs) # Insert the additional JS and CSS added by the modules on the page @@ -731,45 +756,21 @@ class RequestHandler(object): if body_part: html_bodies.append(utf8(body_part)) - def is_absolute(path): - return any(path.startswith(x) for x in ["/", "http:", "https:"]) if js_files: # Maintain order of JavaScript files given by modules - paths = [] - unique_paths = set() - for path in js_files: - if not is_absolute(path): - path = self.static_url(path) - if path not in unique_paths: - paths.append(path) - unique_paths.add(path) - js = ''.join('' - for p in paths) + js = self.render_linked_js(js_files) sloc = html.rindex(b'') html = html[:sloc] + utf8(js) + b'\n' + html[sloc:] if js_embed: - js = b'' + js = self.render_embed_js(js_embed) sloc = html.rindex(b'') html = html[:sloc] + js + b'\n' + html[sloc:] if css_files: - paths = [] - unique_paths = set() - for path in css_files: - if not is_absolute(path): - path = self.static_url(path) - if path not in unique_paths: - paths.append(path) - unique_paths.add(path) - css = ''.join('' - for p in paths) + css = self.render_linked_css(css_files) hloc = html.index(b'') html = html[:hloc] + utf8(css) + b'\n' + html[hloc:] if css_embed: - css = b'' + css = self.render_embed_css(css_embed) hloc = html.index(b'') html = html[:hloc] + css + b'\n' + html[hloc:] if html_heads: @@ -780,6 +781,64 @@ class RequestHandler(object): html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:] self.finish(html) + def render_linked_js(self, js_files): + """Default method used to render the final js links for the + rendered webpage. + + Override this method in a sub-classed controller to change the output. + """ + paths = [] + unique_paths = set() + + for path in js_files: + if not is_absolute(path): + path = self.static_url(path) + if path not in unique_paths: + paths.append(path) + unique_paths.add(path) + + return ''.join('' + for p in paths) + + def render_embed_js(self, js_embed): + """Default method used to render the final embedded js for the + rendered webpage. + + Override this method in a sub-classed controller to change the output. + """ + return b'' + + def render_linked_css(self, css_files): + """Default method used to render the final css links for the + rendered webpage. + + Override this method in a sub-classed controller to change the output. + """ + paths = [] + unique_paths = set() + + for path in css_files: + if not is_absolute(path): + path = self.static_url(path) + if path not in unique_paths: + paths.append(path) + unique_paths.add(path) + + return ''.join('' + for p in paths) + + def render_embed_css(self, css_embed): + """Default method used to render the final embedded css for the + rendered webpage. + + Override this method in a sub-classed controller to change the output. + """ + return b'' + def render_string(self, template_name, **kwargs): """Generate the given template with the given arguments. 
@@ -915,8 +974,8 @@ class RequestHandler(object): if self.check_etag_header(): self._write_buffer = [] self.set_status(304) - if self._status_code == 304: - assert not self._write_buffer, "Cannot send body with 304" + if self._status_code in (204, 304): + assert not self._write_buffer, "Cannot send body with %s" % self._status_code self._clear_headers_for_304() elif "Content-Length" not in self._headers: content_length = sum(len(part) for part in self._write_buffer) @@ -934,6 +993,9 @@ class RequestHandler(object): self._log() self._finished = True self.on_finish() + self._break_cycles() + + def _break_cycles(self): # Break up a reference cycle between this handler and the # _ui_module closures to allow for faster GC on CPython. self.ui = None @@ -1072,8 +1134,8 @@ class RequestHandler(object): def get_current_user(self): user_cookie = self.get_secure_cookie("user") - if user_cookie: - return json.loads(user_cookie) + if user_cookie: + return json.loads(user_cookie) return None * It may be set as a normal variable, typically from an overridden @@ -1089,7 +1151,7 @@ class RequestHandler(object): may not, so the latter form is necessary if loading the user requires asynchronous operations. - The user object may any type of the application's choosing. + The user object may be any type of the application's choosing. """ if not hasattr(self, "_current_user"): self._current_user = self.get_current_user() @@ -1265,6 +1327,8 @@ class RequestHandler(object): raise HTTPError(403, "'_xsrf' argument missing from POST") _, token, _ = self._decode_xsrf_token(token) _, expected_token, _ = self._get_raw_xsrf_token() + if not token: + raise HTTPError(403, "'_xsrf' argument has invalid format") if not _time_independent_equals(utf8(token), utf8(expected_token)): raise HTTPError(403, "XSRF cookie does not match POST argument") @@ -1385,7 +1449,9 @@ class RequestHandler(object): match = True else: # Use a weak comparison when comparing entity-tags. - val = lambda x: x[2:] if x.startswith(b'W/') else x + def val(x): + return x[2:] if x.startswith(b'W/') else x + for etag in etags: if val(etag) == val(computed_etag): match = True @@ -1603,6 +1669,7 @@ def asynchronous(method): result = method(self, *args, **kwargs) if result is not None: result = gen.convert_yielded(result) + # If @asynchronous is used with @gen.coroutine, (but # not @gen.engine), we can automatically finish the # request when the future resolves. Additionally, @@ -1642,9 +1709,8 @@ def stream_request_body(cls): * The regular HTTP method (``post``, ``put``, etc) will be called after the entire body has been read. - There is a subtle interaction between ``data_received`` and asynchronous - ``prepare``: The first call to ``data_received`` may occur at any point - after the call to ``prepare`` has returned *or yielded*. + See the `file receiver demo `_ + for example usage. """ if not issubclass(cls, RequestHandler): raise TypeError("expected subclass of RequestHandler, got %r", cls) @@ -1702,7 +1768,38 @@ def addslash(method): return wrapper -class Application(httputil.HTTPServerConnectionDelegate): +class _ApplicationRouter(ReversibleRuleRouter): + """Routing implementation used internally by `Application`. + + Provides a binding between `Application` and `RequestHandler`. + This implementation extends `~.routing.ReversibleRuleRouter` in a couple of ways: + * it allows to use `RequestHandler` subclasses as `~.routing.Rule` target and + * it allows to use a list/tuple of rules as `~.routing.Rule` target. 
+ ``process_rule`` implementation will substitute this list with an appropriate + `_ApplicationRouter` instance. + """ + + def __init__(self, application, rules=None): + assert isinstance(application, Application) + self.application = application + super(_ApplicationRouter, self).__init__(rules) + + def process_rule(self, rule): + rule = super(_ApplicationRouter, self).process_rule(rule) + + if isinstance(rule.target, (list, tuple)): + rule.target = _ApplicationRouter(self.application, rule.target) + + return rule + + def get_target_delegate(self, target, request, **target_params): + if isclass(target) and issubclass(target, RequestHandler): + return self.application.get_handler_delegate(request, target, **target_params) + + return super(_ApplicationRouter, self).get_target_delegate(target, request, **target_params) + + +class Application(ReversibleRouter): """A collection of request handlers that make up a web application. Instances of this class are callable and can be passed directly to @@ -1715,20 +1812,35 @@ class Application(httputil.HTTPServerConnectionDelegate): http_server.listen(8080) ioloop.IOLoop.current().start() - The constructor for this class takes in a list of `URLSpec` objects - or (regexp, request_class) tuples. When we receive requests, we - iterate over the list in order and instantiate an instance of the - first request class whose regexp matches the request path. - The request class can be specified as either a class object or a - (fully-qualified) name. + The constructor for this class takes in a list of `~.routing.Rule` + objects or tuples of values corresponding to the arguments of + `~.routing.Rule` constructor: ``(matcher, target, [target_kwargs], [name])``, + the values in square brackets being optional. The default matcher is + `~.routing.PathMatches`, so ``(regexp, target)`` tuples can also be used + instead of ``(PathMatches(regexp), target)``. - Each tuple can contain additional elements, which correspond to the - arguments to the `URLSpec` constructor. (Prior to Tornado 3.2, - only tuples of two or three elements were allowed). + A common routing target is a `RequestHandler` subclass, but you can also + use lists of rules as a target, which create a nested routing configuration:: - A dictionary may be passed as the third element of the tuple, - which will be used as keyword arguments to the handler's - constructor and `~RequestHandler.initialize` method. This pattern + application = web.Application([ + (HostMatches("example.com"), [ + (r"/", MainPageHandler), + (r"/feed", FeedHandler), + ]), + ]) + + In addition to this you can use nested `~.routing.Router` instances, + `~.httputil.HTTPMessageDelegate` subclasses and callables as routing targets + (see `~.routing` module docs for more information). + + When we receive requests, we iterate over the list in order and + instantiate an instance of the first request class whose regexp + matches the request path. The request class can be specified as + either a class object or a (fully-qualified) name. + + A dictionary may be passed as the third element (``target_kwargs``) + of the tuple, which will be used as keyword arguments to the handler's + constructor and `~RequestHandler.initialize` method. 
This pattern is used for the `StaticFileHandler` in this example (note that a `StaticFileHandler` can be installed automatically with the static_path setting described below):: @@ -1744,6 +1856,9 @@ class Application(httputil.HTTPServerConnectionDelegate): (r"/article/([0-9]+)", ArticleHandler), ]) + If there's no match for the current request's host, then ``default_host`` + parameter value is matched against host regular expressions. + You can serve static files by sending the ``static_path`` setting as a keyword argument. We will serve those files from the ``/static/`` URI (this is configurable with the @@ -1752,8 +1867,10 @@ class Application(httputil.HTTPServerConnectionDelegate): `StaticFileHandler` can be specified with the ``static_handler_class`` setting. + .. versionchanged:: 4.5 + Integration with the new `tornado.routing` module. """ - def __init__(self, handlers=None, default_host="", transforms=None, + def __init__(self, handlers=None, default_host=None, transforms=None, **settings): if transforms is None: self.transforms = [] @@ -1761,8 +1878,6 @@ class Application(httputil.HTTPServerConnectionDelegate): self.transforms.append(GZipContentEncoding) else: self.transforms = transforms - self.handlers = [] - self.named_handlers = {} self.default_host = default_host self.settings = settings self.ui_modules = {'linkify': _linkify, @@ -1785,8 +1900,6 @@ class Application(httputil.HTTPServerConnectionDelegate): r"/(favicon\.ico)", r"/(robots\.txt)"]: handlers.insert(0, (pattern, static_handler_class, static_handler_args)) - if handlers: - self.add_handlers(".*$", handlers) if self.settings.get('debug'): self.settings.setdefault('autoreload', True) @@ -1794,6 +1907,11 @@ class Application(httputil.HTTPServerConnectionDelegate): self.settings.setdefault('static_hash_cache', False) self.settings.setdefault('serve_traceback', True) + self.wildcard_router = _ApplicationRouter(self, handlers) + self.default_router = _ApplicationRouter(self, [ + Rule(AnyMatches(), self.wildcard_router) + ]) + # Automatically reload modified modules if self.settings.get('autoreload'): from tornado import autoreload @@ -1831,47 +1949,20 @@ class Application(httputil.HTTPServerConnectionDelegate): Host patterns are processed sequentially in the order they were added. All matching patterns will be considered. """ - if not host_pattern.endswith("$"): - host_pattern += "$" - handlers = [] - # The handlers with the wildcard host_pattern are a special - # case - they're added in the constructor but should have lower - # precedence than the more-precise handlers added later. - # If a wildcard handler group exists, it should always be last - # in the list, so insert new groups just before it. 
- if self.handlers and self.handlers[-1][0].pattern == '.*$': - self.handlers.insert(-1, (re.compile(host_pattern), handlers)) - else: - self.handlers.append((re.compile(host_pattern), handlers)) + host_matcher = HostMatches(host_pattern) + rule = Rule(host_matcher, _ApplicationRouter(self, host_handlers)) - for spec in host_handlers: - if isinstance(spec, (tuple, list)): - assert len(spec) in (2, 3, 4) - spec = URLSpec(*spec) - handlers.append(spec) - if spec.name: - if spec.name in self.named_handlers: - app_log.warning( - "Multiple handlers named %s; replacing previous value", - spec.name) - self.named_handlers[spec.name] = spec + self.default_router.rules.insert(-1, rule) + + if self.default_host is not None: + self.wildcard_router.add_rules([( + DefaultHostMatches(self, host_matcher.host_pattern), + host_handlers + )]) def add_transform(self, transform_class): self.transforms.append(transform_class) - def _get_host_handlers(self, request): - host = split_host_and_port(request.host.lower())[0] - matches = [] - for pattern, handlers in self.handlers: - if pattern.match(host): - matches.extend(handlers) - # Look for default host if not behind load balancer (for debugging) - if not matches and "X-Real-Ip" not in request.headers: - for pattern, handlers in self.handlers: - if pattern.match(self.default_host): - matches.extend(handlers) - return matches or None - def _load_ui_methods(self, methods): if isinstance(methods, types.ModuleType): self._load_ui_methods(dict((n, getattr(methods, n)) @@ -1901,16 +1992,40 @@ class Application(httputil.HTTPServerConnectionDelegate): except TypeError: pass - def start_request(self, server_conn, request_conn): - # Modern HTTPServer interface - return _RequestDispatcher(self, request_conn) - def __call__(self, request): # Legacy HTTPServer interface - dispatcher = _RequestDispatcher(self, None) - dispatcher.set_request(request) + dispatcher = self.find_handler(request) return dispatcher.execute() + def find_handler(self, request, **kwargs): + route = self.default_router.find_handler(request) + if route is not None: + return route + + if self.settings.get('default_handler_class'): + return self.get_handler_delegate( + request, + self.settings['default_handler_class'], + self.settings.get('default_handler_args', {})) + + return self.get_handler_delegate( + request, ErrorHandler, {'status_code': 404}) + + def get_handler_delegate(self, request, target_class, target_kwargs=None, + path_args=None, path_kwargs=None): + """Returns `~.httputil.HTTPMessageDelegate` that can serve a request + for application and `RequestHandler` subclass. + + :arg httputil.HTTPServerRequest request: current HTTP request. + :arg RequestHandler target_class: a `RequestHandler` class. + :arg dict target_kwargs: keyword arguments for ``target_class`` constructor. + :arg list path_args: positional arguments for ``target_class`` HTTP method that + will be executed while handling a request (``get``, ``post`` or any other). + :arg dict path_kwargs: keyword arguments for ``target_class`` HTTP method. + """ + return _HandlerDelegate( + self, request, target_class, target_kwargs, path_args, path_kwargs) + def reverse_url(self, name, *args): """Returns a URL path for handler named ``name`` @@ -1920,8 +2035,10 @@ class Application(httputil.HTTPServerConnectionDelegate): They will be converted to strings if necessary, encoded as utf8, and url-escaped. 
""" - if name in self.named_handlers: - return self.named_handlers[name].reverse(*args) + reversed_url = self.default_router.reverse_url(name, *args) + if reversed_url is not None: + return reversed_url + raise KeyError("%s not found in named urls" % name) def log_request(self, handler): @@ -1946,67 +2063,24 @@ class Application(httputil.HTTPServerConnectionDelegate): handler._request_summary(), request_time) -class _RequestDispatcher(httputil.HTTPMessageDelegate): - def __init__(self, application, connection): +class _HandlerDelegate(httputil.HTTPMessageDelegate): + def __init__(self, application, request, handler_class, handler_kwargs, + path_args, path_kwargs): self.application = application - self.connection = connection - self.request = None + self.connection = request.connection + self.request = request + self.handler_class = handler_class + self.handler_kwargs = handler_kwargs or {} + self.path_args = path_args or [] + self.path_kwargs = path_kwargs or {} self.chunks = [] - self.handler_class = None - self.handler_kwargs = None - self.path_args = [] - self.path_kwargs = {} + self.stream_request_body = _has_stream_request_body(self.handler_class) def headers_received(self, start_line, headers): - self.set_request(httputil.HTTPServerRequest( - connection=self.connection, start_line=start_line, - headers=headers)) if self.stream_request_body: self.request.body = Future() return self.execute() - def set_request(self, request): - self.request = request - self._find_handler() - self.stream_request_body = _has_stream_request_body(self.handler_class) - - def _find_handler(self): - # Identify the handler to use as soon as we have the request. - # Save url path arguments for later. - app = self.application - handlers = app._get_host_handlers(self.request) - if not handlers: - self.handler_class = RedirectHandler - self.handler_kwargs = dict(url="%s://%s/" - % (self.request.protocol, - app.default_host)) - return - for spec in handlers: - match = spec.regex.match(self.request.path) - if match: - self.handler_class = spec.handler_class - self.handler_kwargs = spec.kwargs - if spec.regex.groups: - # Pass matched groups to the handler. Since - # match.groups() includes both named and - # unnamed groups, we want to use either groups - # or groupdict but not both. - if spec.regex.groupindex: - self.path_kwargs = dict( - (str(k), _unquote_or_none(v)) - for (k, v) in match.groupdict().items()) - else: - self.path_args = [_unquote_or_none(s) - for s in match.groups()] - return - if app.settings.get('default_handler_class'): - self.handler_class = app.settings['default_handler_class'] - self.handler_kwargs = app.settings.get( - 'default_handler_args', {}) - else: - self.handler_class = ErrorHandler - self.handler_kwargs = dict(status_code=404) - def data_received(self, data): if self.stream_request_body: return self.handler.data_received(data) @@ -2163,13 +2237,32 @@ class RedirectHandler(RequestHandler): application = web.Application([ (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}), ]) + + `RedirectHandler` supports regular expression substitutions. E.g., to + swap the first and second parts of a path while preserving the remainder:: + + application = web.Application([ + (r"/(.*?)/(.*?)/(.*)", web.RedirectHandler, {"url": "/{1}/{0}/{2}"}), + ]) + + The final URL is formatted with `str.format` and the substrings that match + the capturing groups. 
In the above example, a request to "/a/b/c" would be + formatted like:: + + str.format("/{1}/{0}/{2}", "a", "b", "c") # -> "/b/a/c" + + Use Python's :ref:`format string syntax ` to customize how + values are substituted. + + .. versionchanged:: 4.5 + Added support for substitutions into the destination URL. """ def initialize(self, url, permanent=True): self._url = url self._permanent = permanent - def get(self): - self.redirect(self._url, permanent=self._permanent) + def get(self, *args): + self.redirect(self._url.format(*args), permanent=self._permanent) class StaticFileHandler(RequestHandler): @@ -2194,8 +2287,8 @@ class StaticFileHandler(RequestHandler): the ``path`` argument to the get() method (different than the constructor argument above); see `URLSpec` for details. - To serve a file like ``admin_index.mako`` automatically when a directory is - requested, set ``static_handler_args=dict(default_filename="admin_index.mako")`` + To serve a file like ``index.html`` automatically when a directory is + requested, set ``static_handler_args=dict(default_filename="index.html")`` in your application settings, or add ``default_filename`` as an initializer argument for your ``StaticFileHandler``. @@ -2240,7 +2333,7 @@ class StaticFileHandler(RequestHandler): """ CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years - _static_hashes = {} + _static_hashes = {} # type: typing.Dict _lock = threading.Lock() # protects _static_hashes def initialize(self, path, default_filename=None): @@ -2693,6 +2786,7 @@ class OutputTransform(object): pass def transform_first_chunk(self, status_code, headers, chunk, finishing): + # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] return status_code, headers, chunk def transform_chunk(self, chunk, finishing): @@ -2713,7 +2807,8 @@ class GZipContentEncoding(OutputTransform): # beginning with "text/"). CONTENT_TYPES = set(["application/javascript", "application/x-javascript", "application/xml", "application/atom+xml", - "application/json", "application/xhtml+xml"]) + "application/json", "application/xhtml+xml", + "image/svg+xml"]) # Python's GzipFile defaults to level 9, while most other gzip # tools (including gzip itself) default to 6, which is probably a # better CPU/size tradeoff. @@ -2732,10 +2827,12 @@ class GZipContentEncoding(OutputTransform): return ctype.startswith('text/') or ctype in self.CONTENT_TYPES def transform_first_chunk(self, status_code, headers, chunk, finishing): + # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] + # TODO: can/should this type be inherited from the superclass? if 'Vary' in headers: - headers['Vary'] += b', Accept-Encoding' + headers['Vary'] += ', Accept-Encoding' else: - headers['Vary'] = b'Accept-Encoding' + headers['Vary'] = 'Accept-Encoding' if self._gzipping: ctype = _unicode(headers.get("Content-Type", "")).split(";")[0] self._gzipping = self._compressible_type(ctype) and \ @@ -2961,90 +3058,6 @@ class _UIModuleNamespace(object): raise AttributeError(str(e)) -class URLSpec(object): - """Specifies mappings between URLs and handlers.""" - def __init__(self, pattern, handler, kwargs=None, name=None): - """Parameters: - - * ``pattern``: Regular expression to be matched. Any groups - in the regex will be passed in to the handler's get/post/etc - methods as arguments. - - * ``handler``: `RequestHandler` subclass to be invoked. - - * ``kwargs`` (optional): A dictionary of additional arguments - to be passed to the handler's constructor. 
- - * ``name`` (optional): A name for this handler. Used by - `Application.reverse_url`. - """ - if not pattern.endswith('$'): - pattern += '$' - self.regex = re.compile(pattern) - assert len(self.regex.groupindex) in (0, self.regex.groups), \ - ("groups in url regexes must either be all named or all " - "positional: %r" % self.regex.pattern) - - if isinstance(handler, str): - # import the Module and instantiate the class - # Must be a fully qualified name (module.ClassName) - handler = import_object(handler) - - self.handler_class = handler - self.kwargs = kwargs or {} - self.name = name - self._path, self._group_count = self._find_groups() - - def __repr__(self): - return '%s(%r, %s, kwargs=%r, name=%r)' % \ - (self.__class__.__name__, self.regex.pattern, - self.handler_class, self.kwargs, self.name) - - def _find_groups(self): - """Returns a tuple (reverse string, group count) for a url. - - For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method - would return ('/%s/%s/', 2). - """ - pattern = self.regex.pattern - if pattern.startswith('^'): - pattern = pattern[1:] - if pattern.endswith('$'): - pattern = pattern[:-1] - - if self.regex.groups != pattern.count('('): - # The pattern is too complicated for our simplistic matching, - # so we can't support reversing it. - return (None, None) - - pieces = [] - for fragment in pattern.split('('): - if ')' in fragment: - paren_loc = fragment.index(')') - if paren_loc >= 0: - pieces.append('%s' + fragment[paren_loc + 1:]) - else: - pieces.append(fragment) - - return (''.join(pieces), self.regex.groups) - - def reverse(self, *args): - assert self._path is not None, \ - "Cannot reverse url regex " + self.regex.pattern - assert len(args) == self._group_count, "required number of arguments "\ - "not found" - if not len(args): - return self._path - converted_args = [] - for a in args: - if not isinstance(a, (unicode_type, bytes)): - a = str(a) - converted_args.append(escape.url_escape(utf8(a), plus=False)) - return self._path % tuple(converted_args) - -url = URLSpec - - if hasattr(hmac, 'compare_digest'): # python 3.3 _time_independent_equals = hmac.compare_digest else: @@ -3109,6 +3122,7 @@ def create_signed_value(secret, name, value, version=None, clock=None, else: raise ValueError("Unsupported version %d" % version) + # A leading version number in decimal # with no leading zeros, followed by a pipe. _signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$") @@ -3267,13 +3281,5 @@ def _create_signature_v2(secret, s): return utf8(hash.hexdigest()) -def _unquote_or_none(s): - """None-safe wrapper around url_unescape to handle unamteched optional - groups correctly. - - Note that args are passed as bytes so the handler can decide what - encoding to use. - """ - if s is None: - return s - return escape.url_unescape(s, encoding=None, plus=False) +def is_absolute(path): + return any(path.startswith(x) for x in ["/", "http:", "https:"]) diff --git a/server/www/packages/packages-common/tornado/websocket.py b/server/www/packages/packages-common/tornado/websocket.py index 11e5266..69437ee 100644 --- a/server/www/packages/packages-common/tornado/websocket.py +++ b/server/www/packages/packages-common/tornado/websocket.py @@ -16,7 +16,7 @@ the protocol (known as "draft 76") and are not compatible with this module. Removed support for the draft 76 protocol version. 
""" -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function # Author: Jacob Kristhammar, 2010 import base64 @@ -30,24 +30,20 @@ import zlib from tornado.concurrent import TracebackFuture from tornado.escape import utf8, native_str, to_unicode -from tornado import httpclient, httputil -from tornado.ioloop import IOLoop +from tornado import gen, httpclient, httputil +from tornado.ioloop import IOLoop, PeriodicCallback from tornado.iostream import StreamClosedError from tornado.log import gen_log, app_log from tornado import simple_httpclient from tornado.tcpclient import TCPClient -from tornado.util import _websocket_mask +from tornado.util import _websocket_mask, PY3 -try: +if PY3: from urllib.parse import urlparse # py2 -except ImportError: + xrange = range +else: from urlparse import urlparse # py3 -try: - xrange # py2 -except NameError: - xrange = range # py3 - class WebSocketError(Exception): pass @@ -69,6 +65,10 @@ class WebSocketHandler(tornado.web.RequestHandler): override `open` and `on_close` to handle opened and closed connections. + Custom upgrade response headers can be sent by overriding + `~tornado.web.RequestHandler.set_default_headers` or + `~tornado.web.RequestHandler.prepare`. + See http://dev.w3.org/html5/websockets/ for details on the JavaScript interface. The protocol is specified at http://tools.ietf.org/html/rfc6455. @@ -126,10 +126,20 @@ class WebSocketHandler(tornado.web.RequestHandler): to show the "accept this certificate" dialog but has nowhere to show it. You must first visit a regular HTML page using the same certificate to accept it before the websocket connection will succeed. + + If the application setting ``websocket_ping_interval`` has a non-zero + value, a ping will be sent periodically, and the connection will be + closed if a response is not received before the ``websocket_ping_timeout``. + + Messages larger than the ``websocket_max_message_size`` application setting + (default 10MiB) will not be accepted. + + .. versionchanged:: 4.5 + Added ``websocket_ping_interval``, ``websocket_ping_timeout``, and + ``websocket_max_message_size``. """ def __init__(self, application, request, **kwargs): - tornado.web.RequestHandler.__init__(self, application, request, - **kwargs) + super(WebSocketHandler, self).__init__(application, request, **kwargs) self.ws_connection = None self.close_code = None self.close_reason = None @@ -181,18 +191,42 @@ class WebSocketHandler(tornado.web.RequestHandler): gen_log.debug(log_msg) return - self.stream = self.request.connection.detach() - self.stream.set_close_callback(self.on_connection_close) - self.ws_connection = self.get_websocket_protocol() if self.ws_connection: self.ws_connection.accept_connection() else: - if not self.stream.closed(): - self.stream.write(tornado.escape.utf8( - "HTTP/1.1 426 Upgrade Required\r\n" - "Sec-WebSocket-Version: 7, 8, 13\r\n\r\n")) - self.stream.close() + self.set_status(426, "Upgrade Required") + self.set_header("Sec-WebSocket-Version", "7, 8, 13") + self.finish() + + stream = None + + @property + def ping_interval(self): + """The interval for websocket keep-alive pings. + + Set websocket_ping_interval = 0 to disable pings. + """ + return self.settings.get('websocket_ping_interval', None) + + @property + def ping_timeout(self): + """If no ping is received in this many seconds, + close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). 
+ Default is max of 3 pings or 30 seconds. + """ + return self.settings.get('websocket_ping_timeout', None) + + @property + def max_message_size(self): + """Maximum allowed message size. + + If the remote peer sends a message larger than this, the connection + will be closed. + + Default is 10MiB. + """ + return self.settings.get('websocket_max_message_size', None) def write_message(self, message, binary=False): """Sends the given message to the client of this Web Socket. @@ -236,11 +270,22 @@ class WebSocketHandler(tornado.web.RequestHandler): If this method returns None (the default), compression will be disabled. If it returns a dict (even an empty one), it will be enabled. The contents of the dict may be used to - control the memory and CPU usage of the compression, - but no such options are currently implemented. + control the following compression options: + + ``compression_level`` specifies the compression level. + + ``mem_level`` specifies the amount of memory used for the internal compression state. + + These parameters are documented in details here: + https://docs.python.org/3.6/library/zlib.html#zlib.compressobj .. versionadded:: 4.1 + + .. versionchanged:: 4.5 + + Added ``compression_level`` and ``mem_level``. """ + # TODO: Add wbits option. return None def open(self, *args, **kwargs): @@ -256,6 +301,10 @@ class WebSocketHandler(tornado.web.RequestHandler): """Handle incoming messages on the WebSocket This method must be overridden. + + .. versionchanged:: 4.5 + + ``on_message`` can be a coroutine. """ raise NotImplementedError @@ -269,6 +318,10 @@ class WebSocketHandler(tornado.web.RequestHandler): """Invoked when the response to a ping frame is received.""" pass + def on_ping(self, data): + """Invoked when the a ping frame is received.""" + pass + def on_close(self): """Invoked when the WebSocket is closed. @@ -320,6 +373,19 @@ class WebSocketHandler(tornado.web.RequestHandler): browsers, since WebSockets are allowed to bypass the usual same-origin policies and don't use CORS headers. + .. warning:: + + This is an important security measure; don't disable it + without understanding the security implications. In + particular, if your authentication is cookie-based, you + must either restrict the origins allowed by + ``check_origin()`` or implement your own XSRF-like + protection for websocket connections. See `these + `_ + `articles + `_ + for more. + To accept all cross-origin traffic (which was the default prior to Tornado 4.0), simply override this method to always return true:: @@ -334,6 +400,7 @@ class WebSocketHandler(tornado.web.RequestHandler): return parsed_origin.netloc.endswith(".mydomain.com") .. versionadded:: 4.0 + """ parsed_origin = urlparse(origin) origin = parsed_origin.netloc @@ -367,6 +434,16 @@ class WebSocketHandler(tornado.web.RequestHandler): if not self._on_close_called: self._on_close_called = True self.on_close() + self._break_cycles() + + def _break_cycles(self): + # WebSocketHandlers call finish() early, but we don't want to + # break up reference cycles (which makes it impossible to call + # self.render_string) until after we've really closed the + # connection (if it was established in the first place, + # indicated by status code 101). 
+ if self.get_status() != 101 or self._on_close_called: + super(WebSocketHandler, self)._break_cycles() def send_error(self, *args, **kwargs): if self.stream is None: @@ -384,18 +461,17 @@ class WebSocketHandler(tornado.web.RequestHandler): return WebSocketProtocol13( self, compression_options=self.get_compression_options()) + def _attach_stream(self): + self.stream = self.request.connection.detach() + self.stream.set_close_callback(self.on_connection_close) + # disable non-WS methods + for method in ["write", "redirect", "set_header", "set_cookie", + "set_status", "flush", "finish"]: + setattr(self, method, _raise_not_supported_for_websockets) -def _wrap_method(method): - def _disallow_for_websocket(self, *args, **kwargs): - if self.stream is None: - method(self, *args, **kwargs) - else: - raise RuntimeError("Method not supported for Web Sockets") - return _disallow_for_websocket -for method in ["write", "redirect", "set_header", "set_cookie", - "set_status", "flush", "finish"]: - setattr(WebSocketHandler, method, - _wrap_method(getattr(WebSocketHandler, method))) + +def _raise_not_supported_for_websockets(*args, **kwargs): + raise RuntimeError("Method not supported for Web Sockets") class WebSocketProtocol(object): @@ -411,14 +487,20 @@ class WebSocketProtocol(object): def _run_callback(self, callback, *args, **kwargs): """Runs the given callback with exception handling. - On error, aborts the websocket connection and returns False. + If the callback is a coroutine, returns its Future. On error, aborts the + websocket connection and returns None. """ try: - callback(*args, **kwargs) + result = callback(*args, **kwargs) except Exception: app_log.error("Uncaught exception in %s", - self.request.path, exc_info=True) + getattr(self.request, 'path', None), exc_info=True) self._abort() + else: + if result is not None: + result = gen.convert_yielded(result) + self.stream.io_loop.add_future(result, lambda f: f.result()) + return result def on_connection_close(self): self._abort() @@ -432,7 +514,7 @@ class WebSocketProtocol(object): class _PerMessageDeflateCompressor(object): - def __init__(self, persistent, max_wbits): + def __init__(self, persistent, max_wbits, compression_options=None): if max_wbits is None: max_wbits = zlib.MAX_WBITS # There is no symbolic constant for the minimum wbits value. 
@@ -440,14 +522,24 @@ class _PerMessageDeflateCompressor(object): raise ValueError("Invalid max_wbits value %r; allowed range 8-%d", max_wbits, zlib.MAX_WBITS) self._max_wbits = max_wbits + + if compression_options is None or 'compression_level' not in compression_options: + self._compression_level = tornado.web.GZipContentEncoding.GZIP_LEVEL + else: + self._compression_level = compression_options['compression_level'] + + if compression_options is None or 'mem_level' not in compression_options: + self._mem_level = 8 + else: + self._mem_level = compression_options['mem_level'] + if persistent: self._compressor = self._create_compressor() else: self._compressor = None def _create_compressor(self): - return zlib.compressobj(tornado.web.GZipContentEncoding.GZIP_LEVEL, - zlib.DEFLATED, -self._max_wbits) + return zlib.compressobj(self._compression_level, zlib.DEFLATED, -self._max_wbits, self._mem_level) def compress(self, data): compressor = self._compressor or self._create_compressor() @@ -458,7 +550,7 @@ class _PerMessageDeflateCompressor(object): class _PerMessageDeflateDecompressor(object): - def __init__(self, persistent, max_wbits): + def __init__(self, persistent, max_wbits, compression_options=None): if max_wbits is None: max_wbits = zlib.MAX_WBITS if not (8 <= max_wbits <= zlib.MAX_WBITS): @@ -517,6 +609,9 @@ class WebSocketProtocol13(WebSocketProtocol): # the effect of compression, frame overhead, and control frames. self._wire_bytes_in = 0 self._wire_bytes_out = 0 + self.ping_callback = None + self.last_ping = 0 + self.last_pong = 0 def accept_connection(self): try: @@ -553,46 +648,42 @@ class WebSocketProtocol13(WebSocketProtocol): self.request.headers.get("Sec-Websocket-Key")) def _accept_connection(self): - subprotocol_header = '' subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '') subprotocols = [s.strip() for s in subprotocols.split(',')] if subprotocols: selected = self.handler.select_subprotocol(subprotocols) if selected: assert selected in subprotocols - subprotocol_header = ("Sec-WebSocket-Protocol: %s\r\n" - % selected) + self.handler.set_header("Sec-WebSocket-Protocol", selected) - extension_header = '' extensions = self._parse_extensions_header(self.request.headers) for ext in extensions: if (ext[0] == 'permessage-deflate' and self._compression_options is not None): # TODO: negotiate parameters if compression_options # specifies limits. - self._create_compressors('server', ext[1]) + self._create_compressors('server', ext[1], self._compression_options) if ('client_max_window_bits' in ext[1] and ext[1]['client_max_window_bits'] is None): # Don't echo an offered client_max_window_bits # parameter with no value. 
del ext[1]['client_max_window_bits'] - extension_header = ('Sec-WebSocket-Extensions: %s\r\n' % - httputil._encode_header( - 'permessage-deflate', ext[1])) + self.handler.set_header("Sec-WebSocket-Extensions", + httputil._encode_header( + 'permessage-deflate', ext[1])) break - if self.stream.closed(): - self._abort() - return - self.stream.write(tornado.escape.utf8( - "HTTP/1.1 101 Switching Protocols\r\n" - "Upgrade: websocket\r\n" - "Connection: Upgrade\r\n" - "Sec-WebSocket-Accept: %s\r\n" - "%s%s" - "\r\n" % (self._challenge_response(), - subprotocol_header, extension_header))) + self.handler.clear_header("Content-Type") + self.handler.set_status(101) + self.handler.set_header("Upgrade", "websocket") + self.handler.set_header("Connection", "Upgrade") + self.handler.set_header("Sec-WebSocket-Accept", self._challenge_response()) + self.handler.finish() + self.handler._attach_stream() + self.stream = self.handler.stream + + self.start_pinging() self._run_callback(self.handler.open, *self.handler.open_args, **self.handler.open_kwargs) self._receive_frame() @@ -622,7 +713,7 @@ class WebSocketProtocol13(WebSocketProtocol): else: raise ValueError("unsupported extension %r", ext) - def _get_compressor_options(self, side, agreed_parameters): + def _get_compressor_options(self, side, agreed_parameters, compression_options=None): """Converts a websocket agreed_parameters set to keyword arguments for our compressor objects. """ @@ -633,9 +724,10 @@ class WebSocketProtocol13(WebSocketProtocol): options['max_wbits'] = zlib.MAX_WBITS else: options['max_wbits'] = int(wbits_header) + options['compression_options'] = compression_options return options - def _create_compressors(self, side, agreed_parameters): + def _create_compressors(self, side, agreed_parameters, compression_options=None): # TODO: handle invalid parameters gracefully allowed_keys = set(['server_no_context_takeover', 'client_no_context_takeover', @@ -646,9 +738,9 @@ class WebSocketProtocol13(WebSocketProtocol): raise ValueError("unsupported compression parameter %r" % key) other_side = 'client' if (side == 'server') else 'server' self._compressor = _PerMessageDeflateCompressor( - **self._get_compressor_options(side, agreed_parameters)) + **self._get_compressor_options(side, agreed_parameters, compression_options)) self._decompressor = _PerMessageDeflateDecompressor( - **self._get_compressor_options(other_side, agreed_parameters)) + **self._get_compressor_options(other_side, agreed_parameters, compression_options)) def _write_frame(self, fin, opcode, data, flags=0): if fin: @@ -729,8 +821,7 @@ class WebSocketProtocol13(WebSocketProtocol): if self._masked_frame: self.stream.read_bytes(4, self._on_masking_key) else: - self.stream.read_bytes(self._frame_length, - self._on_frame_data) + self._read_frame_data(False) elif payloadlen == 126: self.stream.read_bytes(2, self._on_frame_length_16) elif payloadlen == 127: @@ -738,6 +829,17 @@ class WebSocketProtocol13(WebSocketProtocol): except StreamClosedError: self._abort() + def _read_frame_data(self, masked): + new_len = self._frame_length + if self._fragmented_message_buffer is not None: + new_len += len(self._fragmented_message_buffer) + if new_len > (self.handler.max_message_size or 10 * 1024 * 1024): + self.close(1009, "message too big") + return + self.stream.read_bytes( + self._frame_length, + self._on_masked_frame_data if masked else self._on_frame_data) + def _on_frame_length_16(self, data): self._wire_bytes_in += len(data) self._frame_length = struct.unpack("!H", data)[0] @@ -745,7 
+847,7 @@ class WebSocketProtocol13(WebSocketProtocol): if self._masked_frame: self.stream.read_bytes(4, self._on_masking_key) else: - self.stream.read_bytes(self._frame_length, self._on_frame_data) + self._read_frame_data(False) except StreamClosedError: self._abort() @@ -756,7 +858,7 @@ class WebSocketProtocol13(WebSocketProtocol): if self._masked_frame: self.stream.read_bytes(4, self._on_masking_key) else: - self.stream.read_bytes(self._frame_length, self._on_frame_data) + self._read_frame_data(False) except StreamClosedError: self._abort() @@ -764,8 +866,7 @@ class WebSocketProtocol13(WebSocketProtocol): self._wire_bytes_in += len(data) self._frame_mask = data try: - self.stream.read_bytes(self._frame_length, - self._on_masked_frame_data) + self._read_frame_data(True) except StreamClosedError: self._abort() @@ -774,6 +875,8 @@ class WebSocketProtocol13(WebSocketProtocol): self._on_frame_data(_websocket_mask(self._frame_mask, data)) def _on_frame_data(self, data): + handled_future = None + self._wire_bytes_in += len(data) if self._frame_opcode_is_control: # control frames may be interleaved with a series of fragmented @@ -806,12 +909,18 @@ class WebSocketProtocol13(WebSocketProtocol): self._fragmented_message_buffer = data if self._final_frame: - self._handle_message(opcode, data) + handled_future = self._handle_message(opcode, data) if not self.client_terminated: - self._receive_frame() + if handled_future: + # on_message is a coroutine, process more frames once it's done. + handled_future.add_done_callback( + lambda future: self._receive_frame()) + else: + self._receive_frame() def _handle_message(self, opcode, data): + """Execute on_message, returning its Future if it is a coroutine.""" if self.client_terminated: return @@ -826,11 +935,11 @@ class WebSocketProtocol13(WebSocketProtocol): except UnicodeDecodeError: self._abort() return - self._run_callback(self.handler.on_message, decoded) + return self._run_callback(self.handler.on_message, decoded) elif opcode == 0x2: # Binary data self._message_bytes_in += len(data) - self._run_callback(self.handler.on_message, data) + return self._run_callback(self.handler.on_message, data) elif opcode == 0x8: # Close self.client_terminated = True @@ -843,9 +952,11 @@ class WebSocketProtocol13(WebSocketProtocol): elif opcode == 0x9: # Ping self._write_frame(True, 0xA, data) + self._run_callback(self.handler.on_ping, data) elif opcode == 0xA: # Pong - self._run_callback(self.handler.on_pong, data) + self.last_pong = IOLoop.current().time() + return self._run_callback(self.handler.on_pong, data) else: self._abort() @@ -874,6 +985,51 @@ class WebSocketProtocol13(WebSocketProtocol): self._waiting = self.stream.io_loop.add_timeout( self.stream.io_loop.time() + 5, self._abort) + @property + def ping_interval(self): + interval = self.handler.ping_interval + if interval is not None: + return interval + return 0 + + @property + def ping_timeout(self): + timeout = self.handler.ping_timeout + if timeout is not None: + return timeout + return max(3 * self.ping_interval, 30) + + def start_pinging(self): + """Start sending periodic pings to keep the connection alive""" + if self.ping_interval > 0: + self.last_ping = self.last_pong = IOLoop.current().time() + self.ping_callback = PeriodicCallback( + self.periodic_ping, self.ping_interval * 1000) + self.ping_callback.start() + + def periodic_ping(self): + """Send a ping to keep the websocket alive + + Called periodically if the websocket_ping_interval is set and non-zero. 
+ """ + if self.stream.closed() and self.ping_callback is not None: + self.ping_callback.stop() + return + + # Check for timeout on pong. Make sure that we really have + # sent a recent ping in case the machine with both server and + # client has been suspended since the last ping. + now = IOLoop.current().time() + since_last_pong = now - self.last_pong + since_last_ping = now - self.last_ping + if (since_last_ping < 2 * self.ping_interval and + since_last_pong > self.ping_timeout): + self.close() + return + + self.write_ping(b'') + self.last_ping = now + class WebSocketClientConnection(simple_httpclient._HTTPConnection): """WebSocket client connection. @@ -882,7 +1038,8 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): `websocket_connect` function instead. """ def __init__(self, io_loop, request, on_message_callback=None, - compression_options=None): + compression_options=None, ping_interval=None, ping_timeout=None, + max_message_size=None): self.compression_options = compression_options self.connect_future = TracebackFuture() self.protocol = None @@ -891,6 +1048,9 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): self.key = base64.b64encode(os.urandom(16)) self._on_message_callback = on_message_callback self.close_code = self.close_reason = None + self.ping_interval = ping_interval + self.ping_timeout = ping_timeout + self.max_message_size = max_message_size scheme, sep, rest = request.url.partition(':') scheme = {'ws': 'http', 'wss': 'https'}[scheme] @@ -954,6 +1114,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): self.headers = headers self.protocol = self.get_websocket_protocol() self.protocol._process_server_headers(self.key, self.headers) + self.protocol.start_pinging() self.protocol._receive_frame() if self._timeout is not None: @@ -1007,13 +1168,18 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): def on_pong(self, data): pass + def on_ping(self, data): + pass + def get_websocket_protocol(self): return WebSocketProtocol13(self, mask_outgoing=True, compression_options=self.compression_options) def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None, - on_message_callback=None, compression_options=None): + on_message_callback=None, compression_options=None, + ping_interval=None, ping_timeout=None, + max_message_size=None): """Client-side websocket support. Takes a url and returns a Future whose result is a @@ -1042,6 +1208,10 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None, .. versionchanged:: 4.1 Added ``compression_options`` and ``on_message_callback``. The ``io_loop`` argument is deprecated. + + .. versionchanged:: 4.5 + Added the ``ping_interval``, ``ping_timeout``, and ``max_message_size`` + arguments, which have the same meaning as in `WebSocketHandler`. 
""" if io_loop is None: io_loop = IOLoop.current() @@ -1057,7 +1227,10 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None, request, httpclient.HTTPRequest._DEFAULTS) conn = WebSocketClientConnection(io_loop, request, on_message_callback=on_message_callback, - compression_options=compression_options) + compression_options=compression_options, + ping_interval=ping_interval, + ping_timeout=ping_timeout, + max_message_size=max_message_size) if callback is not None: io_loop.add_future(conn.connect_future, callback) return conn.connect_future diff --git a/server/www/packages/packages-common/tornado/wsgi.py b/server/www/packages/packages-common/tornado/wsgi.py index 59e6c55..68a7615 100644 --- a/server/www/packages/packages-common/tornado/wsgi.py +++ b/server/www/packages/packages-common/tornado/wsgi.py @@ -29,7 +29,7 @@ provides WSGI support in two ways: and Tornado handlers in a single server. """ -from __future__ import absolute_import, division, print_function, with_statement +from __future__ import absolute_import, division, print_function import sys from io import BytesIO @@ -41,12 +41,12 @@ from tornado import httputil from tornado.log import access_log from tornado import web from tornado.escape import native_str -from tornado.util import unicode_type +from tornado.util import unicode_type, PY3 -try: +if PY3: import urllib.parse as urllib_parse # py3 -except ImportError: +else: import urllib as urllib_parse # PEP 3333 specifies that WSGI on python 3 generally deals with byte strings diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libfreetype.6.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libfreetype.6.dylib new file mode 100644 index 0000000..11af9fe Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libfreetype.6.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libjpeg.9.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libjpeg.9.dylib new file mode 100644 index 0000000..0f6141e Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libjpeg.9.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/liblcms2.2.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/liblcms2.2.dylib new file mode 100644 index 0000000..81c171f Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/liblcms2.2.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libpng16.16.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libpng16.16.dylib new file mode 100644 index 0000000..58a5d7a Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libpng16.16.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libtiff.5.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libtiff.5.dylib new file mode 100644 index 0000000..7b89182 Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libtiff.5.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebp.5.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebp.5.dylib new file mode 100644 index 0000000..5a868be Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebp.5.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebpdemux.1.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebpdemux.1.dylib new file mode 100644 index 
0000000..8a17324 Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebpdemux.1.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebpmux.1.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebpmux.1.dylib new file mode 100644 index 0000000..d34898f Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libwebpmux.1.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/.dylibs/libz.1.2.8.dylib b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libz.1.2.8.dylib new file mode 100644 index 0000000..c068d34 Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/.dylibs/libz.1.2.8.dylib differ diff --git a/server/www/packages/packages-darwin/x64/PIL/BdfFontFile.py b/server/www/packages/packages-darwin/x64/PIL/BdfFontFile.py new file mode 100644 index 0000000..e6cc22f --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/BdfFontFile.py @@ -0,0 +1,132 @@ +# +# The Python Imaging Library +# $Id$ +# +# bitmap distribution font (bdf) file parser +# +# history: +# 1996-05-16 fl created (as bdf2pil) +# 1997-08-25 fl converted to FontFile driver +# 2001-05-25 fl removed bogus __init__ call +# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) +# 2003-04-22 fl more robustification (from Graham Dumpleton) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import FontFile + + +# -------------------------------------------------------------------- +# parse X Bitmap Distribution Format (BDF) +# -------------------------------------------------------------------- + +bdf_slant = { + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other" +} + +bdf_spacing = { + "P": "Proportional", + "M": "Monospaced", + "C": "Cell" +} + + +def bdf_char(f): + # skip to STARTCHAR + while True: + s = f.readline() + if not s: + return None + if s[:9] == b"STARTCHAR": + break + id = s[9:].strip().decode('ascii') + + # load symbol properties + props = {} + while True: + s = f.readline() + if not s or s[:6] == b"BITMAP": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + + # load bitmap + bitmap = [] + while True: + s = f.readline() + if not s or s[:7] == b"ENDCHAR": + break + bitmap.append(s[:-1]) + bitmap = b"".join(bitmap) + + [x, y, l, d] = [int(p) for p in props["BBX"].split()] + [dx, dy] = [int(p) for p in props["DWIDTH"].split()] + + bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y) + + try: + im = Image.frombytes("1", (x, y), bitmap, "hex", "1") + except ValueError: + # deal with zero-width characters + im = Image.new("1", (x, y)) + + return id, int(props["ENCODING"]), bbox, im + + +## +# Font file plugin for the X11 BDF format. 
+ +class BdfFontFile(FontFile.FontFile): + + def __init__(self, fp): + + FontFile.FontFile.__init__(self) + + s = fp.readline() + if s[:13] != b"STARTFONT 2.1": + raise SyntaxError("not a valid BDF file") + + props = {} + comments = [] + + while True: + s = fp.readline() + if not s or s[:13] == b"ENDPROPERTIES": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + if s[:i] in [b"COMMENT", b"COPYRIGHT"]: + if s.find(b"LogicalFontDescription") < 0: + comments.append(s[i+1:-1].decode('ascii')) + + # font = props["FONT"].split("-") + + # font[4] = bdf_slant[font[4].upper()] + # font[11] = bdf_spacing[font[11].upper()] + + # ascent = int(props["FONT_ASCENT"]) + # descent = int(props["FONT_DESCENT"]) + + # fontname = ";".join(font[1:]) + + # print "#", fontname + # for i in comments: + # print "#", i + + while True: + c = bdf_char(fp) + if not c: + break + id, ch, (xy, dst, src), im = c + if 0 <= ch < len(self.glyph): + self.glyph[ch] = xy, dst, src, im diff --git a/server/www/packages/packages-darwin/x64/PIL/BmpImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/BmpImagePlugin.py new file mode 100644 index 0000000..e398445 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/BmpImagePlugin.py @@ -0,0 +1,288 @@ +# +# The Python Imaging Library. +# $Id$ +# +# BMP file handler +# +# Windows (and OS/2) native bitmap storage format. +# +# history: +# 1995-09-01 fl Created +# 1996-04-30 fl Added save +# 1997-08-27 fl Fixed save of 1-bit images +# 1998-03-06 fl Load P images as L where possible +# 1998-07-03 fl Load P images as 1 where possible +# 1998-12-29 fl Handle small palettes +# 2002-12-30 fl Fixed load of 1-bit palette images +# 2003-04-21 fl Fixed load of 1-bit monochrome images +# 2003-04-23 fl Added limited support for BI_BITFIELDS compression +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary +import math + +__version__ = "0.7" + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +# +# -------------------------------------------------------------------- +# Read BMP file + +BIT2MODE = { + # bits => mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix): + return prefix[:2] == b"BM" + + +# ============================================================================== +# Image plugin for the Windows BMP format. 
+# ============================================================================== +class BmpImageFile(ImageFile.ImageFile): + """ Image plugin for the Windows Bitmap format (BMP) """ + + # -------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + # --------------------------------------------------- BMP Compression values + COMPRESSIONS = {'RAW': 0, 'RLE8': 1, 'RLE4': 2, 'BITFIELDS': 3, 'JPEG': 4, 'PNG': 5} + RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5 + + def _bitmap(self, header=0, offset=0): + """ Read relevant info about the BMP """ + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + file_info = dict() + file_info['header_size'] = i32(read(4)) # read bmp header size @offset 14 (this is part of the header size) + file_info['direction'] = -1 + # --------------------- If requested, read header at a specific position + header_data = ImageFile._safe_read(self.fp, file_info['header_size'] - 4) # read the rest of the bmp header, without its size + # --------------------------------------------------- IBM OS/2 Bitmap v1 + # ------ This format has different offsets because of width/height types + if file_info['header_size'] == 12: + file_info['width'] = i16(header_data[0:2]) + file_info['height'] = i16(header_data[2:4]) + file_info['planes'] = i16(header_data[4:6]) + file_info['bits'] = i16(header_data[6:8]) + file_info['compression'] = self.RAW + file_info['palette_padding'] = 3 + # ---------------------------------------------- Windows Bitmap v2 to v5 + elif file_info['header_size'] in (40, 64, 108, 124): # v3, OS/2 v2, v4, v5 + if file_info['header_size'] >= 40: # v3 and OS/2 + file_info['y_flip'] = i8(header_data[7]) == 0xff + file_info['direction'] = 1 if file_info['y_flip'] else -1 + file_info['width'] = i32(header_data[0:4]) + file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8]) + file_info['planes'] = i16(header_data[8:10]) + file_info['bits'] = i16(header_data[10:12]) + file_info['compression'] = i32(header_data[12:16]) + file_info['data_size'] = i32(header_data[16:20]) # byte size of pixel data + file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28])) + file_info['colors'] = i32(header_data[28:32]) + file_info['palette_padding'] = 4 + self.info["dpi"] = tuple( + map(lambda x: int(math.ceil(x / 39.3701)), + file_info['pixels_per_meter'])) + if file_info['compression'] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']): + file_info[mask] = i32(header_data[36+idx*4:40+idx*4]) + else: + for mask in ['r_mask', 'g_mask', 'b_mask', 'a_mask']: + file_info[mask] = i32(read(4)) + file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask']) + file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask']) + else: + raise IOError("Unsupported BMP header type (%d)" % file_info['header_size']) + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 16 + self.size = file_info['width'], file_info['height'] + # -------- If color count was not found in the header, compute from bits + file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits']) + # -------------------------------- Check abnormal values for DOS attacks + if file_info['width'] * file_info['height'] > 
2**31: + raise IOError("Unsupported BMP Size: (%dx%d)" % self.size) + # ----------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None)) + if self.mode is None: + raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits']) + # ----------------- Process BMP with Bitfields compression (not palette) + if file_info['compression'] == self.BITFIELDS: + SUPPORTED = { + 32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0)], + 24: [(0xff0000, 0xff00, 0xff)], + 16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)] + } + MASK_MODES = { + (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX", + (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA", + (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xff0000, 0xff00, 0xff)): "BGR", + (16, (0xf800, 0x7e0, 0x1f)): "BGR;16", + (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15" + } + if file_info['bits'] in SUPPORTED: + if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])] + self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode + elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])] + else: + raise IOError("Unsupported BMP bitfields layout") + else: + raise IOError("Unsupported BMP bitfields layout") + elif file_info['compression'] == self.RAW: + if file_info['bits'] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self.mode = "BGRA", "RGBA" + else: + raise IOError("Unsupported BMP compression (%d)" % file_info['compression']) + # ---------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + # ----------------------------------------------------- 1-bit images + if not (0 < file_info['colors'] <= 65536): + raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors']) + else: + padding = file_info['palette_padding'] + palette = read(padding * file_info['colors']) + greyscale = True + indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors'])) + # ------------------ Check if greyscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind*padding:ind*padding + 3] + if rgb != o8(val) * 3: + greyscale = False + # -------- If all colors are grey, white or black, ditch palette + if greyscale: + self.mode = "1" if file_info['colors'] == 2 else "L" + raw_mode = self.mode + else: + self.mode = "P" + self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette) + + # ----------------------------- Finally set the tile data for the plugin + self.info['compression'] = file_info['compression'] + self.tile = [('raw', (0, 0, file_info['width'], file_info['height']), offset or self.fp.tell(), + (raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction']) + )] + + def _open(self): + """ Open file, check magic number and read header """ + # read 14 bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if head_data[0:2] != b"BM": + raise SyntaxError("Not a BMP file") + # read the start position of the BMP image data (u32) + offset = i32(head_data[10:14]) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +# 
============================================================================== +# Image plugin for the DIB format (BMP alias) +# ============================================================================== +class DibImageFile(BmpImageFile): + + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self): + self._bitmap() + +# +# -------------------------------------------------------------------- +# Write BMP file + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _save(im, fp, filename, check=0): + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as BMP" % im.mode) + + if check: + return check + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(map(lambda x: int(x * 39.3701), dpi)) + + stride = ((im.size[0]*bits+7)//8+3) & (~3) + header = 40 # or 64 for OS/2 version 2 + offset = 14 + header + colors * 4 + image = stride * im.size[1] + + # bitmap header + fp.write(b"BM" + # file type (magic) + o32(offset+image) + # file size + o32(0) + # reserved + o32(offset)) # image data offset + + # bitmap info header + fp.write(o32(header) + # info header size + o32(im.size[0]) + # width + o32(im.size[1]) + # height + o16(1) + # planes + o16(bits) + # depth + o32(0) + # compression (0=uncompressed) + o32(image) + # size of bitmap + o32(ppm[0]) + o32(ppm[1]) + # resolution + o32(colors) + # colors used + o32(colors)) # colors important + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if im.mode == "1": + for i in (0, 255): + fp.write(o8(i) * 4) + elif im.mode == "L": + for i in range(256): + fp.write(o8(i) * 4) + elif im.mode == "P": + fp.write(im.im.getpalette("RGB", "BGRX")) + + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, + (rawmode, stride, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BmpImageFile.format, BmpImageFile, _accept) +Image.register_save(BmpImageFile.format, _save) + +Image.register_extension(BmpImageFile.format, ".bmp") + +Image.register_mime(BmpImageFile.format, "image/bmp") diff --git a/server/www/packages/packages-darwin/x64/PIL/BufrStubImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/BufrStubImagePlugin.py new file mode 100644 index 0000000..45ee547 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/BufrStubImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library +# $Id$ +# +# BUFR stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific BUFR image handler. +# +# @param handler Handler object. 
+ +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" + + +class BufrStubImageFile(ImageFile.StubImageFile): + + format = "BUFR" + format_description = "BUFR" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a BUFR file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("BUFR save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) +Image.register_save(BufrStubImageFile.format, _save) + +Image.register_extension(BufrStubImageFile.format, ".bufr") diff --git a/server/www/packages/packages-darwin/x64/PIL/ContainerIO.py b/server/www/packages/packages-darwin/x64/PIL/ContainerIO.py new file mode 100644 index 0000000..262f2af --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ContainerIO.py @@ -0,0 +1,117 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a class to read from a container file +# +# History: +# 1995-06-18 fl Created +# 1995-09-07 fl Added readline(), readlines() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +## +# A file object that provides read access to a part of an existing +# file (for example a TAR file). + + +class ContainerIO(object): + + ## + # Create file object. + # + # @param file Existing file. + # @param offset Start of region, in bytes. + # @param length Size of region, in bytes. + + def __init__(self, file, offset, length): + self.fh = file + self.pos = 0 + self.offset = offset + self.length = length + self.fh.seek(offset) + + ## + # Always false. + + def isatty(self): + return 0 + + ## + # Move file pointer. + # + # @param offset Offset in bytes. + # @param mode Starting position. Use 0 for beginning of region, 1 + # for current offset, and 2 for end of region. You cannot move + # the pointer outside the defined region. + + def seek(self, offset, mode=0): + if mode == 1: + self.pos = self.pos + offset + elif mode == 2: + self.pos = self.length + offset + else: + self.pos = offset + # clamp + self.pos = max(0, min(self.pos, self.length)) + self.fh.seek(self.offset + self.pos) + + ## + # Get current file pointer. + # + # @return Offset from start of region, in bytes. + + def tell(self): + return self.pos + + ## + # Read data. + # + # @def read(bytes=0) + # @param bytes Number of bytes to read. If omitted or zero, + # read until end of region. + # @return An 8-bit string. + + def read(self, n=0): + if n: + n = min(n, self.length - self.pos) + else: + n = self.length - self.pos + if not n: # EOF + return "" + self.pos = self.pos + n + return self.fh.read(n) + + ## + # Read a line of text. + # + # @return An 8-bit string. + + def readline(self): + s = "" + while True: + c = self.read(1) + if not c: + break + s = s + c + if c == "\n": + break + return s + + ## + # Read multiple lines of text. + # + # @return A list of 8-bit strings. 
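+    #
+    # Minimal sketch (offset/length values are illustrative; like the rest
+    # of this module it assumes Python 2 style 8-bit strings):
+    #
+    #     fh = open("archive.tar", "rb")
+    #     member = ContainerIO(fh, offset=512, length=1024)
+    #     for line in member.readlines():
+    #         print(line)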
+ + def readlines(self): + l = [] + while True: + s = self.readline() + if not s: + break + l.append(s) + return l diff --git a/server/www/packages/packages-darwin/x64/PIL/CurImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/CurImagePlugin.py new file mode 100644 index 0000000..4db4c40 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/CurImagePlugin.py @@ -0,0 +1,88 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Cursor support for PIL +# +# notes: +# uses BmpImagePlugin.py to read the bitmap data. +# +# history: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, BmpImagePlugin, _binary + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le + + +def _accept(prefix): + return prefix[:4] == b"\0\0\2\0" + + +## +# Image plugin for Windows Cursor files. + +class CurImageFile(BmpImagePlugin.BmpImageFile): + + format = "CUR" + format_description = "Windows Cursor" + + def _open(self): + + offset = self.fp.tell() + + # check magic + s = self.fp.read(6) + if not _accept(s): + raise SyntaxError("not a CUR file") + + # pick the largest cursor in the file + m = b"" + for i in range(i16(s[4:])): + s = self.fp.read(16) + if not m: + m = s + elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]): + m = s + # print "width", i8(s[0]) + # print "height", i8(s[1]) + # print "colors", i8(s[2]) + # print "reserved", i8(s[3]) + # print "hotspot x", i16(s[4:]) + # print "hotspot y", i16(s[6:]) + # print "bytes", i32(s[8:]) + # print "offset", i32(s[12:]) + if not m: + raise TypeError("No cursors were found") + + # load as bitmap + self._bitmap(i32(m[12:]) + offset) + + # patch up the bitmap height + self.size = self.size[0], self.size[1]//2 + d, e, o, a = self.tile[0] + self.tile[0] = d, (0, 0)+self.size, o, a + + return + + +# +# -------------------------------------------------------------------- + +Image.register_open(CurImageFile.format, CurImageFile, _accept) + +Image.register_extension(CurImageFile.format, ".cur") diff --git a/server/www/packages/packages-darwin/x64/PIL/DcxImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/DcxImagePlugin.py new file mode 100644 index 0000000..f9034d1 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/DcxImagePlugin.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# DCX file handling +# +# DCX is a container file format defined by Intel, commonly used +# for fax applications. Each DCX file consists of a directory +# (a list of file offsets) followed by a set of (usually 1-bit) +# PCX files. +# +# History: +# 1995-09-09 fl Created +# 1996-03-20 fl Properly derived from PcxImageFile. +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2002-07-30 fl Fixed file handling +# +# Copyright (c) 1997-98 by Secret Labs AB. +# Copyright (c) 1995-96 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, _binary +from PIL.PcxImagePlugin import PcxImageFile + +__version__ = "0.2" + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? + +i32 = _binary.i32le + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == MAGIC + + +## +# Image plugin for the Intel DCX format. 
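+#
+# Illustrative usage sketch (the file name is hypothetical): iterate the
+# PCX frames stored in a DCX fax file through the seek/tell interface:
+#
+#     from PIL import Image
+#
+#     im = Image.open("document.dcx")
+#     for frame in range(im.n_frames):
+#         im.seek(frame)
+#         im.save("page_%d.png" % frame)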
+ +class DcxImageFile(PcxImageFile): + + format = "DCX" + format_description = "Intel DCX" + + def _open(self): + + # Header + s = self.fp.read(4) + if i32(s) != MAGIC: + raise SyntaxError("not a DCX file") + + # Component directory + self._offset = [] + for i in range(1024): + offset = i32(self.fp.read(4)) + if not offset: + break + self._offset.append(offset) + + self.__fp = self.fp + self.seek(0) + + @property + def n_frames(self): + return len(self._offset) + + @property + def is_animated(self): + return len(self._offset) > 1 + + def seek(self, frame): + if frame >= len(self._offset): + raise EOFError("attempt to seek outside DCX directory") + self.frame = frame + self.fp = self.__fp + self.fp.seek(self._offset[frame]) + PcxImageFile._open(self) + + def tell(self): + return self.frame + + +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) + +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/server/www/packages/packages-darwin/x64/PIL/EpsImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/EpsImagePlugin.py new file mode 100644 index 0000000..a950c52 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/EpsImagePlugin.py @@ -0,0 +1,428 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EPS file handling +# +# History: +# 1995-09-01 fl Created (0.1) +# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2) +# 1996-08-22 fl Don't choke on floating point BoundingBox values +# 1996-08-23 fl Handle files from Macintosh (0.3) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5) +# 2014-05-07 e Handling of EPS with binary preview and fixed resolution +# resizing +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
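+#
+# Note that rendering EPS data relies on an external Ghostscript ("gs")
+# binary.  Minimal sketch (file name illustrative), assuming Ghostscript
+# is installed and on the PATH:
+#
+#     from PIL import Image
+#
+#     im = Image.open("figure.eps")
+#     im.load(scale=2)   # rasterise at twice the declared bounding box size
+#     im.save("figure.png")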
+# + +import re +import io +import sys +from PIL import Image, ImageFile, _binary + +__version__ = "0.5" + +# +# -------------------------------------------------------------------- + +i32 = _binary.i32le +o32 = _binary.o32le + +split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$") +field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$") + +gs_windows_binary = None +if sys.platform.startswith('win'): + import shutil + if hasattr(shutil, 'which'): + which = shutil.which + else: + # Python < 3.3 + import distutils.spawn + which = distutils.spawn.find_executable + for binary in ('gswin32c', 'gswin64c', 'gs'): + if which(binary) is not None: + gs_windows_binary = binary + break + else: + gs_windows_binary = False + + +def has_ghostscript(): + if gs_windows_binary: + return True + if not sys.platform.startswith('win'): + import subprocess + try: + gs = subprocess.Popen(['gs', '--version'], stdout=subprocess.PIPE) + gs.stdout.read() + return True + except OSError: + # no ghostscript + pass + return False + + +def Ghostscript(tile, size, fp, scale=1): + """Render an image using Ghostscript""" + + # Unpack decoder tile + decoder, tile, offset, data = tile[0] + length, bbox = data + + # Hack to support hi-res rendering + scale = int(scale) or 1 + # orig_size = size + # orig_bbox = bbox + size = (size[0] * scale, size[1] * scale) + # resolution is dependent on bbox and size + res = (float((72.0 * size[0]) / (bbox[2]-bbox[0])), + float((72.0 * size[1]) / (bbox[3]-bbox[1]))) + # print("Ghostscript", scale, size, orig_size, bbox, orig_bbox, res) + + import os + import subprocess + import tempfile + + out_fd, outfile = tempfile.mkstemp() + os.close(out_fd) + + infile_temp = None + if hasattr(fp, 'name') and os.path.exists(fp.name): + infile = fp.name + else: + in_fd, infile_temp = tempfile.mkstemp() + os.close(in_fd) + infile = infile_temp + + # ignore length and offset! + # ghostscript can read it + # copy whole file to read in ghostscript + with open(infile_temp, 'wb') as f: + # fetch length of fp + fp.seek(0, 2) + fsize = fp.tell() + # ensure start position + # go back + fp.seek(0) + lengthfile = fsize + while lengthfile > 0: + s = fp.read(min(lengthfile, 100*1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) + + # Build ghostscript command + command = ["gs", + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dNOPAUSE", # don't pause between pages, + "-dSAFER", # safe mode + "-sDEVICE=ppmraw", # ppm driver + "-sOutputFile=%s" % outfile, # output file + "-c", "%d %d translate" % (-bbox[0], -bbox[1]), + # adjust for image origin + "-f", infile, # input file + ] + + if gs_windows_binary is not None: + if not gs_windows_binary: + raise WindowsError('Unable to locate Ghostscript on paths') + command[0] = gs_windows_binary + + # push data through ghostscript + try: + gs = subprocess.Popen(command, stdin=subprocess.PIPE, + stdout=subprocess.PIPE) + gs.stdin.close() + status = gs.wait() + if status: + raise IOError("gs failed (status %d)" % status) + im = Image.core.open_ppm(outfile) + finally: + try: + os.unlink(outfile) + if infile_temp: + os.unlink(infile_temp) + except OSError: + pass + + return im + + +class PSFile(object): + """ + Wrapper for bytesio object that treats either CR or LF as end of line. 
+ """ + def __init__(self, fp): + self.fp = fp + self.char = None + + def seek(self, offset, whence=0): + self.char = None + self.fp.seek(offset, whence) + + def readline(self): + s = self.char or b"" + self.char = None + + c = self.fp.read(1) + while c not in b"\r\n": + s = s + c + c = self.fp.read(1) + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return s.decode('latin-1') + + +def _accept(prefix): + return prefix[:4] == b"%!PS" or \ + (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + +## +# Image plugin for Encapsulated Postscript. This plugin supports only +# a few variants of this format. + + +class EpsImageFile(ImageFile.ImageFile): + """EPS File Parser for the Python Imaging Library""" + + format = "EPS" + format_description = "Encapsulated Postscript" + + mode_map = {1: "L", 2: "LAB", 3: "RGB"} + + def _open(self): + (length, offset) = self._find_offset(self.fp) + + # Rewrap the open file pointer in something that will + # convert line endings and decode to latin-1. + try: + if bytes is str: + # Python2, no encoding conversion necessary + fp = open(self.fp.name, "Ur") + else: + # Python3, can use bare open command. + fp = open(self.fp.name, "Ur", encoding='latin-1') + except: + # Expect this for bytesio/stringio + fp = PSFile(self.fp) + + # go to offset - start of "%!PS" + fp.seek(offset) + + box = None + + self.mode = "RGB" + self.size = 1, 1 # FIXME: huh? + + # + # Load EPS header + + s = fp.readline().strip('\r\n') + + while s: + if len(s) > 255: + raise SyntaxError("not an EPS file") + + try: + m = split.match(s) + except re.error as v: + raise SyntaxError("not an EPS file") + + if m: + k, v = m.group(1, 2) + self.info[k] = v + if k == "BoundingBox": + try: + # Note: The DSC spec says that BoundingBox + # fields should be integers, but some drivers + # put floating point values there anyway. + box = [int(float(i)) for i in v.split()] + self.size = box[2] - box[0], box[3] - box[1] + self.tile = [("eps", (0, 0) + self.size, offset, + (length, box))] + except: + pass + + else: + m = field.match(s) + if m: + k = m.group(1) + + if k == "EndComments": + break + if k[:8] == "PS-Adobe": + self.info[k[:8]] = k[9:] + else: + self.info[k] = "" + elif s[0] == '%': + # handle non-DSC Postscript comments that some + # tools mistakenly put in the Comments section + pass + else: + raise IOError("bad EPS header") + + s = fp.readline().strip('\r\n') + + if s[:1] != "%": + break + + # + # Scan for an "ImageData" descriptor + + while s[:1] == "%": + + if len(s) > 255: + raise SyntaxError("not an EPS file") + + if s[:11] == "%ImageData:": + # Encoded bitmapped image. 
+ x, y, bi, mo = s[11:].split(None, 7)[:4] + + if int(bi) != 8: + break + try: + self.mode = self.mode_map[int(mo)] + except ValueError: + break + + self.size = int(x), int(y) + return + + s = fp.readline().strip('\r\n') + if not s: + break + + if not box: + raise IOError("cannot determine EPS bounding box") + + def _find_offset(self, fp): + + s = fp.read(160) + + if s[:4] == b"%!PS": + # for HEAD without binary preview + fp.seek(0, 2) + length = fp.tell() + offset = 0 + elif i32(s[0:4]) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + offset = i32(s[4:8]) + length = i32(s[8:12]) + else: + raise SyntaxError("not an EPS file") + + return (length, offset) + + def load(self, scale=1): + # Load EPS via Ghostscript + if not self.tile: + return + self.im = Ghostscript(self.tile, self.size, self.fp, scale) + self.mode = self.im.mode + self.size = self.im.size + self.tile = [] + + def load_seek(self, *args, **kwargs): + # we can't incrementally load, so force ImageFile.parser to + # use our custom load method by defining this method. + pass + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename, eps=1): + """EPS Writer for the Python Imaging Library.""" + + # + # make sure image data is available + im.load() + + # + # determine postscript image mode + if im.mode == "L": + operator = (8, 1, "image") + elif im.mode == "RGB": + operator = (8, 3, "false 3 colorimage") + elif im.mode == "CMYK": + operator = (8, 4, "false 4 colorimage") + else: + raise ValueError("image mode is not supported") + + class NoCloseStream(object): + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, name): + return getattr(self.fp, name) + + def close(self): + pass + + base_fp = fp + if fp != sys.stdout: + fp = NoCloseStream(fp) + if sys.version_info[0] > 2: + fp = io.TextIOWrapper(fp, encoding='latin-1') + + if eps: + # + # write EPS header + fp.write("%!PS-Adobe-3.0 EPSF-3.0\n") + fp.write("%%Creator: PIL 0.1 EpsEncode\n") + # fp.write("%%CreationDate: %s"...) 
+ fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size) + fp.write("%%Pages: 1\n") + fp.write("%%EndComments\n") + fp.write("%%Page: 1 1\n") + fp.write("%%ImageData: %d %d " % im.size) + fp.write("%d %d 0 1 1 \"%s\"\n" % operator) + + # + # image header + fp.write("gsave\n") + fp.write("10 dict begin\n") + fp.write("/buf %d string def\n" % (im.size[0] * operator[1])) + fp.write("%d %d scale\n" % im.size) + fp.write("%d %d 8\n" % im.size) # <= bits + fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) + fp.write("{ currentfile buf readhexstring pop } bind\n") + fp.write(operator[2] + "\n") + if hasattr(fp, "flush"): + fp.flush() + + ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)]) + + fp.write("\n%%%%EndBinary\n") + fp.write("grestore end\n") + if hasattr(fp, "flush"): + fp.flush() + +# +# -------------------------------------------------------------------- + +Image.register_open(EpsImageFile.format, EpsImageFile, _accept) + +Image.register_save(EpsImageFile.format, _save) + +Image.register_extension(EpsImageFile.format, ".ps") +Image.register_extension(EpsImageFile.format, ".eps") + +Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/server/www/packages/packages-darwin/x64/PIL/ExifTags.py b/server/www/packages/packages-darwin/x64/PIL/ExifTags.py new file mode 100644 index 0000000..52e145f --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ExifTags.py @@ -0,0 +1,193 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EXIF tags +# +# Copyright (c) 2003 by Secret Labs AB +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known EXIF tags. +## + +## +# Maps EXIF tags to tag names. 
+ +TAGS = { + + # possibly incomplete + 0x00fe: "NewSubfileType", + 0x00ff: "SubfileType", + 0x0100: "ImageWidth", + 0x0101: "ImageLength", + 0x0102: "BitsPerSample", + 0x0103: "Compression", + 0x0106: "PhotometricInterpretation", + 0x0107: "Threshholding", + 0x0108: "CellWidth", + 0x0109: "CellLenght", + 0x010a: "FillOrder", + 0x010d: "DocumentName", + 0x011d: "PageName", + 0x010e: "ImageDescription", + 0x010f: "Make", + 0x0110: "Model", + 0x0111: "StripOffsets", + 0x0112: "Orientation", + 0x0115: "SamplesPerPixel", + 0x0116: "RowsPerStrip", + 0x0117: "StripByteConunts", + 0x0118: "MinSampleValue", + 0x0119: "MaxSampleValue", + 0x011a: "XResolution", + 0x011b: "YResolution", + 0x011c: "PlanarConfiguration", + 0x0120: "FreeOffsets", + 0x0121: "FreeByteCounts", + 0x0122: "GrayResponseUnit", + 0x0123: "GrayResponseCurve", + 0x0128: "ResolutionUnit", + 0x012d: "TransferFunction", + 0x0131: "Software", + 0x0132: "DateTime", + 0x013b: "Artist", + 0x013c: "HostComputer", + 0x013e: "WhitePoint", + 0x013f: "PrimaryChromaticities", + 0x0140: "ColorMap", + 0x0152: "ExtraSamples", + 0x0201: "JpegIFOffset", + 0x0202: "JpegIFByteCount", + 0x0211: "YCbCrCoefficients", + 0x0212: "YCbCrSubSampling", + 0x0213: "YCbCrPositioning", + 0x0214: "ReferenceBlackWhite", + 0x1000: "RelatedImageFileFormat", + 0x1001: "RelatedImageWidth", + 0x1002: "RelatedImageLength", + 0x828d: "CFARepeatPatternDim", + 0x828e: "CFAPattern", + 0x828f: "BatteryLevel", + 0x8298: "Copyright", + 0x829a: "ExposureTime", + 0x829d: "FNumber", + 0x8769: "ExifOffset", + 0x8773: "InterColorProfile", + 0x8822: "ExposureProgram", + 0x8824: "SpectralSensitivity", + 0x8825: "GPSInfo", + 0x8827: "ISOSpeedRatings", + 0x8828: "OECF", + 0x8829: "Interlace", + 0x882a: "TimeZoneOffset", + 0x882b: "SelfTimerMode", + 0x9000: "ExifVersion", + 0x9003: "DateTimeOriginal", + 0x9004: "DateTimeDigitized", + 0x9101: "ComponentsConfiguration", + 0x9102: "CompressedBitsPerPixel", + 0x9201: "ShutterSpeedValue", + 0x9202: "ApertureValue", + 0x9203: "BrightnessValue", + 0x9204: "ExposureBiasValue", + 0x9205: "MaxApertureValue", + 0x9206: "SubjectDistance", + 0x9207: "MeteringMode", + 0x9208: "LightSource", + 0x9209: "Flash", + 0x920a: "FocalLength", + 0x920b: "FlashEnergy", + 0x920c: "SpatialFrequencyResponse", + 0x920d: "Noise", + 0x9211: "ImageNumber", + 0x9212: "SecurityClassification", + 0x9213: "ImageHistory", + 0x9214: "SubjectLocation", + 0x9215: "ExposureIndex", + 0x9216: "TIFF/EPStandardID", + 0x927c: "MakerNote", + 0x9286: "UserComment", + 0x9290: "SubsecTime", + 0x9291: "SubsecTimeOriginal", + 0x9292: "SubsecTimeDigitized", + 0xa000: "FlashPixVersion", + 0xa001: "ColorSpace", + 0xa002: "ExifImageWidth", + 0xa003: "ExifImageHeight", + 0xa004: "RelatedSoundFile", + 0xa005: "ExifInteroperabilityOffset", + 0xa20b: "FlashEnergy", + 0xa20c: "SpatialFrequencyResponse", + 0xa20e: "FocalPlaneXResolution", + 0xa20f: "FocalPlaneYResolution", + 0xa210: "FocalPlaneResolutionUnit", + 0xa214: "SubjectLocation", + 0xa215: "ExposureIndex", + 0xa217: "SensingMethod", + 0xa300: "FileSource", + 0xa301: "SceneType", + 0xa302: "CFAPattern", + 0xa401: "CustomRendered", + 0xa402: "ExposureMode", + 0xa403: "WhiteBalance", + 0xa404: "DigitalZoomRatio", + 0xa405: "FocalLengthIn35mmFilm", + 0xa406: "SceneCaptureType", + 0xa407: "GainControl", + 0xa408: "Contrast", + 0xa409: "Saturation", + 0xa40a: "Sharpness", + 0xa40b: "DeviceSettingDescription", + 0xa40c: "SubjectDistanceRange", + 0xa420: "ImageUniqueID", + 0xa430: "CameraOwnerName", + 0xa431: "BodySerialNumber", + 0xa432: 
"LensSpecification", + 0xa433: "LensMake", + 0xa434: "LensModel", + 0xa435: "LensSerialNumber", + 0xa500: "Gamma", + +} + +## +# Maps EXIF GPS tags to tag names. + +GPSTAGS = { + 0: "GPSVersionID", + 1: "GPSLatitudeRef", + 2: "GPSLatitude", + 3: "GPSLongitudeRef", + 4: "GPSLongitude", + 5: "GPSAltitudeRef", + 6: "GPSAltitude", + 7: "GPSTimeStamp", + 8: "GPSSatellites", + 9: "GPSStatus", + 10: "GPSMeasureMode", + 11: "GPSDOP", + 12: "GPSSpeedRef", + 13: "GPSSpeed", + 14: "GPSTrackRef", + 15: "GPSTrack", + 16: "GPSImgDirectionRef", + 17: "GPSImgDirection", + 18: "GPSMapDatum", + 19: "GPSDestLatitudeRef", + 20: "GPSDestLatitude", + 21: "GPSDestLongitudeRef", + 22: "GPSDestLongitude", + 23: "GPSDestBearingRef", + 24: "GPSDestBearing", + 25: "GPSDestDistanceRef", + 26: "GPSDestDistance", + 27: "GPSProcessingMethod", + 28: "GPSAreaInformation", + 29: "GPSDateStamp", + 30: "GPSDifferential", + 31: "GPSHPositioningError", +} diff --git a/server/www/packages/packages-darwin/x64/PIL/FitsStubImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/FitsStubImagePlugin.py new file mode 100644 index 0000000..7aefff2 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/FitsStubImagePlugin.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library +# $Id$ +# +# FITS stub adapter +# +# Copyright (c) 1998-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + +## +# Install application-specific FITS image handler. +# +# @param handler Handler object. + + +def register_handler(handler): + global _handler + _handler = handler + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:6] == b"SIMPLE" + + +class FITSStubImageFile(ImageFile.StubImageFile): + + format = "FITS" + format_description = "FITS" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(6)): + raise SyntaxError("Not a FITS file") + + # FIXME: add more sanity checks here; mandatory header items + # include SIMPLE, BITPIX, NAXIS, etc. + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("FITS save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(FITSStubImageFile.format, FITSStubImageFile, _accept) +Image.register_save(FITSStubImageFile.format, _save) + +Image.register_extension(FITSStubImageFile.format, ".fit") +Image.register_extension(FITSStubImageFile.format, ".fits") diff --git a/server/www/packages/packages-darwin/x64/PIL/FliImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/FliImagePlugin.py new file mode 100644 index 0000000..a07dc29 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/FliImagePlugin.py @@ -0,0 +1,188 @@ +# +# The Python Imaging Library. +# $Id$ +# +# FLI/FLC file handling. +# +# History: +# 95-09-01 fl Created +# 97-01-03 fl Fixed parser, setup decoder tile +# 98-07-15 fl Renamed offset attribute to avoid name clash +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. 
+# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.2" + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le +o8 = _binary.o8 + + +# +# decoder + +def _accept(prefix): + return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12] + + +## +# Image plugin for the FLI/FLC animation format. Use the seek +# method to load individual frames. + +class FliImageFile(ImageFile.ImageFile): + + format = "FLI" + format_description = "Autodesk FLI/FLC Animation" + + def _open(self): + + # HEAD + s = self.fp.read(128) + magic = i16(s[4:6]) + if not (magic in [0xAF11, 0xAF12] and + i16(s[14:16]) in [0, 3] and # flags + s[20:22] == b"\x00\x00"): # reserved + raise SyntaxError("not an FLI/FLC file") + + # image characteristics + self.mode = "P" + self.size = i16(s[8:10]), i16(s[10:12]) + + # animation speed + duration = i32(s[16:20]) + if magic == 0xAF11: + duration = (duration * 1000) / 70 + self.info["duration"] = duration + + # look for palette + palette = [(a, a, a) for a in range(256)] + + s = self.fp.read(16) + + self.__offset = 128 + + if i16(s[4:6]) == 0xF100: + # prefix chunk; ignore it + self.__offset = self.__offset + i32(s) + s = self.fp.read(16) + + if i16(s[4:6]) == 0xF1FA: + # look for palette chunk + s = self.fp.read(6) + if i16(s[4:6]) == 11: + self._palette(palette, 2) + elif i16(s[4:6]) == 4: + self._palette(palette, 0) + + palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette] + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + # set things up to decode first frame + self.__frame = -1 + self.__fp = self.fp + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self.seek(0) + + def _palette(self, palette, shift): + # load palette + + i = 0 + for e in range(i16(self.fp.read(2))): + s = self.fp.read(2) + i = i + i8(s[0]) + n = i8(s[1]) + if n == 0: + n = 256 + s = self.fp.read(n * 3) + for n in range(0, len(s), 3): + r = i8(s[n]) << shift + g = i8(s[n+1]) << shift + b = i8(s[n+2]) << shift + palette[i] = (r, g, b) + i += 1 + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in FLI file") + + def _seek(self, frame): + if frame == 0: + self.__frame = -1 + self.__fp.seek(self.__rewind) + self.__offset = 128 + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + # move to next frame + self.fp = self.__fp + self.fp.seek(self.__offset) + + s = self.fp.read(4) + if not s: + raise EOFError + + framesize = i32(s) + + self.decodermaxblock = framesize + self.tile = [("fli", (0, 0)+self.size, self.__offset, None)] + + self.__offset += framesize + + def tell(self): + return self.__frame + +# +# registry + +Image.register_open(FliImageFile.format, FliImageFile, _accept) + +Image.register_extension(FliImageFile.format, ".fli") 
+Image.register_extension(FliImageFile.format, ".flc") diff --git a/server/www/packages/packages-darwin/x64/PIL/FontFile.py b/server/www/packages/packages-darwin/x64/PIL/FontFile.py new file mode 100644 index 0000000..db8e6be --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/FontFile.py @@ -0,0 +1,115 @@ +# +# The Python Imaging Library +# $Id$ +# +# base class for raster font file parsers +# +# history: +# 1997-06-05 fl created +# 1997-08-19 fl restrict image width +# +# Copyright (c) 1997-1998 by Secret Labs AB +# Copyright (c) 1997-1998 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import os +from PIL import Image, _binary + +WIDTH = 800 + + +def puti16(fp, values): + # write network order (big-endian) 16-bit sequence + for v in values: + if v < 0: + v += 65536 + fp.write(_binary.o16be(v)) + + +## +# Base class for raster font file handlers. + +class FontFile(object): + + bitmap = None + + def __init__(self): + + self.info = {} + self.glyph = [None] * 256 + + def __getitem__(self, ix): + return self.glyph[ix] + + def compile(self): + "Create metrics and bitmap" + + if self.bitmap: + return + + # create bitmap large enough to hold all data + h = w = maxwidth = 0 + lines = 1 + for glyph in self: + if glyph: + d, dst, src, im = glyph + h = max(h, src[3] - src[1]) + w = w + (src[2] - src[0]) + if w > WIDTH: + lines += 1 + w = (src[2] - src[0]) + maxwidth = max(maxwidth, w) + + xsize = maxwidth + ysize = lines * h + + if xsize == 0 and ysize == 0: + return "" + + self.ysize = h + + # paste glyphs into bitmap + self.bitmap = Image.new("1", (xsize, ysize)) + self.metrics = [None] * 256 + x = y = 0 + for i in range(256): + glyph = self[i] + if glyph: + d, dst, src, im = glyph + xx = src[2] - src[0] + # yy = src[3] - src[1] + x0, y0 = x, y + x = x + xx + if x > WIDTH: + x, y = 0, y + h + x0, y0 = x, y + x = xx + s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 + self.bitmap.paste(im.crop(src), s) + # print chr(i), dst, s + self.metrics[i] = d, dst, s + + def save(self, filename): + "Save font" + + self.compile() + + # font data + self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") + + # font metrics + fp = open(os.path.splitext(filename)[0] + ".pil", "wb") + fp.write(b"PILfont\n") + fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii')) # HACK!!! + fp.write(b"DATA\n") + for id in range(256): + m = self.metrics[id] + if not m: + puti16(fp, [0] * 10) + else: + puti16(fp, m[0] + m[1] + m[2]) + fp.close() + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/FpxImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/FpxImagePlugin.py new file mode 100644 index 0000000..aefc574 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/FpxImagePlugin.py @@ -0,0 +1,226 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library. +# $Id$ +# +# FlashPix support for PIL +# +# History: +# 97-01-25 fl Created (reads uncompressed RGB images only) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. 
+# + + +from PIL import Image, ImageFile +from PIL.OleFileIO import i8, i32, MAGIC, OleFileIO + +__version__ = "0.1" + + +# we map from colour field tuples to (mode, rawmode) descriptors +MODES = { + # opacity + (0x00007ffe): ("A", "L"), + # monochrome + (0x00010000,): ("L", "L"), + (0x00018000, 0x00017ffe): ("RGBA", "LA"), + # photo YCC + (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"), + # standard RGB (NIFRGB) + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"), +} + + +# +# -------------------------------------------------------------------- + +def _accept(prefix): + return prefix[:8] == MAGIC + + +## +# Image plugin for the FlashPix images. + +class FpxImageFile(ImageFile.ImageFile): + + format = "FPX" + format_description = "FlashPix" + + def _open(self): + # + # read the OLE directory and see if this is a likely + # to be a FlashPix file + + try: + self.ole = OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an FPX file; invalid OLE file") + + if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": + raise SyntaxError("not an FPX file; bad root CLSID") + + self._open_index(1) + + def _open_index(self, index=1): + # + # get the Image Contents Property Set + + prop = self.ole.getproperties([ + "Data Object Store %06d" % index, + "\005Image Contents" + ]) + + # size (highest resolution) + + self.size = prop[0x1000002], prop[0x1000003] + + size = max(self.size) + i = 1 + while size > 64: + size = size / 2 + i += 1 + self.maxid = i - 1 + + # mode. instead of using a single field for this, flashpix + # requires you to specify the mode for each channel in each + # resolution subimage, and leaves it to the decoder to make + # sure that they all match. for now, we'll cheat and assume + # that this is always the case. 
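+        # (the colour-space property for resolution level n is stored under
+        # id 0x2000002 | (n << 16), so only the highest resolution is read)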
+ + id = self.maxid << 16 + + s = prop[0x2000002 | id] + + colors = [] + for i in range(i32(s, 4)): + # note: for now, we ignore the "uncalibrated" flag + colors.append(i32(s, 8+i*4) & 0x7fffffff) + + self.mode, self.rawmode = MODES[tuple(colors)] + + # load JPEG tables, if any + self.jpeg = {} + for i in range(256): + id = 0x3000001 | (i << 16) + if id in prop: + self.jpeg[i] = prop[id] + + # print len(self.jpeg), "tables loaded" + + self._open_subimage(1, self.maxid) + + def _open_subimage(self, index=1, subimage=0): + # + # setup tile descriptors for a given subimage + + stream = [ + "Data Object Store %06d" % index, + "Resolution %04d" % subimage, + "Subimage 0000 Header" + ] + + fp = self.ole.openstream(stream) + + # skip prefix + fp.read(28) + + # header stream + s = fp.read(36) + + size = i32(s, 4), i32(s, 8) + # tilecount = i32(s, 12) + tilesize = i32(s, 16), i32(s, 20) + # channels = i32(s, 24) + offset = i32(s, 28) + length = i32(s, 32) + + # print size, self.mode, self.rawmode + + if size != self.size: + raise IOError("subimage mismatch") + + # get tile descriptors + fp.seek(28 + offset) + s = fp.read(i32(s, 12) * length) + + x = y = 0 + xsize, ysize = size + xtile, ytile = tilesize + self.tile = [] + + for i in range(0, len(s), length): + + compression = i32(s, i+8) + + if compression == 0: + self.tile.append(("raw", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode))) + + elif compression == 1: + + # FIXME: the fill decoder is not implemented + self.tile.append(("fill", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode, s[12:16]))) + + elif compression == 2: + + internal_color_conversion = i8(s[14]) + jpeg_tables = i8(s[15]) + rawmode = self.rawmode + + if internal_color_conversion: + # The image is stored as usual (usually YCbCr). + if rawmode == "RGBA": + # For "RGBA", data is stored as YCbCrA based on + # negative RGB. The following trick works around + # this problem : + jpegmode, rawmode = "YCbCrK", "CMYK" + else: + jpegmode = None # let the decoder decide + + else: + # The image is stored as defined by rawmode + jpegmode = rawmode + + self.tile.append(("jpeg", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (rawmode, jpegmode))) + + # FIXME: jpeg tables are tile dependent; the prefix + # data must be placed in the tile descriptor itself! + + if jpeg_tables: + self.tile_prefix = self.jpeg[jpeg_tables] + + else: + raise IOError("unknown/invalid compression") + + x = x + xtile + if x >= xsize: + x, y = 0, y + ytile + if y >= ysize: + break # isn't really required + + self.stream = stream + self.fp = None + + def load(self): + + if not self.fp: + self.fp = self.ole.openstream(self.stream[:2] + + ["Subimage 0000 Data"]) + + ImageFile.ImageFile.load(self) + +# +# -------------------------------------------------------------------- + +Image.register_open(FpxImageFile.format, FpxImageFile, _accept) + +Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/server/www/packages/packages-darwin/x64/PIL/GbrImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/GbrImagePlugin.py new file mode 100644 index 0000000..15282ec --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/GbrImagePlugin.py @@ -0,0 +1,71 @@ +# +# The Python Imaging Library +# $Id$ +# +# load a GIMP brush file +# +# History: +# 96-03-14 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. 
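+#
+# Illustrative sketch (file name hypothetical): a brush opens as a single
+# greyscale ("L") image, with the embedded comment kept in im.info:
+#
+#     from PIL import Image
+#
+#     brush = Image.open("pepper.gbr")
+#     print(brush.size, brush.info.get("comment"))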
+# + +from PIL import Image, ImageFile, _binary + +i32 = _binary.i32be + + +def _accept(prefix): + return len(prefix) >= 8 and i32(prefix) >= 20 and i32(prefix[4:8]) == 1 + + +## +# Image plugin for the GIMP brush format. + +class GbrImageFile(ImageFile.ImageFile): + + format = "GBR" + format_description = "GIMP brush file" + + def _open(self): + + header_size = i32(self.fp.read(4)) + version = i32(self.fp.read(4)) + if header_size < 20 or version != 1: + raise SyntaxError("not a GIMP brush") + + width = i32(self.fp.read(4)) + height = i32(self.fp.read(4)) + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0 or color_depth != 1: + raise SyntaxError("not a GIMP brush") + + comment = self.fp.read(header_size - 20)[:-1] + + self.mode = "L" + self.size = width, height + + self.info["comment"] = comment + + # Since the brush is so small, we read the data immediately + self.data = self.fp.read(width * height) + + def load(self): + + if not self.data: + return + + # create an image out of the brush data block + self.im = Image.core.new(self.mode, self.size) + self.im.frombytes(self.data) + self.data = b"" + +# +# registry + +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) + +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/server/www/packages/packages-darwin/x64/PIL/GdImageFile.py b/server/www/packages/packages-darwin/x64/PIL/GdImageFile.py new file mode 100644 index 0000000..ae3500f --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/GdImageFile.py @@ -0,0 +1,92 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GD file handling +# +# History: +# 1996-04-12 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +# NOTE: This format cannot be automatically recognized, so the +# class is not registered for use with Image.open(). To open a +# gd file, use the GdImageFile.open() function instead. + +# THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This +# implementation is provided for convenience and demonstrational +# purposes only. + + +from PIL import ImageFile, ImagePalette, _binary +from PIL._util import isPath + +__version__ = "0.1" + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +i16 = _binary.i16be + + +## +# Image plugin for the GD uncompressed format. Note that this format +# is not supported by the standard Image.open function. To use +# this plugin, you have to import the GdImageFile module and +# use the GdImageFile.open function. + +class GdImageFile(ImageFile.ImageFile): + + format = "GD" + format_description = "GD uncompressed images" + + def _open(self): + + # Header + s = self.fp.read(775) + + self.mode = "L" # FIXME: "P" + self.size = i16(s[0:2]), i16(s[2:4]) + + # transparency index + tindex = i16(s[5:7]) + if tindex < 256: + self.info["transparent"] = tindex + + self.palette = ImagePalette.raw("RGB", s[7:]) + + self.tile = [("raw", (0, 0)+self.size, 775, ("L", 0, -1))] + + +## +# Load texture from a GD image file. +# +# @param filename GD file name, or an opened file handle. +# @param mode Optional mode. In this version, if the mode argument +# is given, it must be "r". +# @return An image instance. +# @exception IOError If the image could not be read. 
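+#
+# Minimal sketch (file name illustrative); the module has to be used
+# directly, since the format is not registered with Image.open():
+#
+#     from PIL import GdImageFile
+#
+#     im = GdImageFile.open("texture.gd")
+#     print(im.size)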
+ +def open(fp, mode="r"): + + if mode != "r": + raise ValueError("bad mode") + + if isPath(fp): + filename = fp + fp = builtins.open(fp, "rb") + else: + filename = "" + + try: + return GdImageFile(fp, filename) + except SyntaxError: + raise IOError("cannot identify this image file") diff --git a/server/www/packages/packages-darwin/x64/PIL/GifImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/GifImagePlugin.py new file mode 100644 index 0000000..b9d2588 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/GifImagePlugin.py @@ -0,0 +1,685 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, ImagePalette, \ + ImageChops, ImageSequence, _binary + +__version__ = "0.9" + + +# -------------------------------------------------------------------- +# Helpers + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 +o16 = _binary.o16le + + +# -------------------------------------------------------------------- +# Identify/read GIF files + +def _accept(prefix): + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. 
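+#
+# Illustrative usage sketch (file name hypothetical): walk the frames of an
+# animated GIF via the standard seek/tell interface:
+#
+#     from PIL import Image
+#
+#     im = Image.open("animation.gif")
+#     for frame in range(im.n_frames):
+#         im.seek(frame)
+#         im.convert("RGB").save("frame_%03d.png" % frame)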
+ +class GifImageFile(ImageFile.ImageFile): + + format = "GIF" + format_description = "Compuserve GIF" + global_palette = None + + def data(self): + s = self.fp.read(1) + if s and i8(s): + return self.fp.read(i8(s)) + return None + + def _open(self): + + # Screen + s = self.fp.read(13) + if s[:6] not in [b"GIF87a", b"GIF89a"]: + raise SyntaxError("not a GIF file") + + self.info["version"] = s[:6] + self.size = i16(s[6:]), i16(s[8:]) + self.tile = [] + flags = i8(s[10]) + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = i8(s[11]) + # check if palette contains colour indices + p = self.fp.read(3 << bits) + for i in range(0, len(p), 3): + if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + break + + self.__fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in GIF file") + + def _seek(self, frame): + + if frame == 0: + # rewind + self.__offset = 0 + self.dispose = None + self.dispose_extent = [0, 0, 0, 0] # x0, y0, x1, y1 + self.__frame = -1 + self.__fp.seek(self.__rewind) + self._prev_im = None + self.disposal_method = 0 + else: + # ensure that the previous frame was loaded + if not self.im: + self.load() + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + self.tile = [] + + self.fp = self.__fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + from copy import copy + self.palette = copy(self.global_palette) + + while True: + + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if i8(s) == 249: + # + # graphic control extension + # + flags = i8(block[0]) + if flags & 1: + self.info["transparency"] = i8(block[3]) + self.info["duration"] = i16(block[1:3]) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + if dispose_bits: + # only set the dispose if it is not + # unspecified. 
I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif i8(s) == 255: + # + # application extension + # + self.info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if len(block) >= 3 and i8(block[0]) == 1: + self.info["loop"] = i16(block[1:3]) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s[0:]), i16(s[2:]) + x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) + self.dispose_extent = x0, y0, x1, y1 + flags = i8(s[8]) + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + self.palette =\ + ImagePalette.raw("RGB", self.fp.read(3 << bits)) + + # image data + bits = i8(self.fp.read(1)) + self.__offset = self.fp.tell() + self.tile = [("gif", + (x0, y0, x1, y1), + self.__offset, + (bits, interlace))] + break + + else: + pass + # raise IOError, "illegal GIF tag `%x`" % i8(s) + + try: + if self.disposal_method < 2: + # do not dispose or none specified + self.dispose = None + elif self.disposal_method == 2: + # replace with background colour + self.dispose = Image.core.fill("P", self.size, + self.info["background"]) + else: + # replace with previous contents + if self.im: + self.dispose = self.im.copy() + + # only dispose the extent in this frame + if self.dispose: + self.dispose = self.dispose.crop(self.dispose_extent) + except (AttributeError, KeyError): + pass + + if not self.tile: + # self.__fp = None + raise EOFError + + self.mode = "L" + if self.palette: + self.mode = "P" + + def tell(self): + return self.__frame + + def load_end(self): + ImageFile.ImageFile.load_end(self) + + # if the disposal method is 'do not dispose', transparent + # pixels should show the content of the previous frame + if self._prev_im and self.disposal_method == 1: + # we do this by pasting the updated area onto the previous + # frame which we then use as the current image content + updated = self.im.crop(self.dispose_extent) + self._prev_im.paste(updated, self.dispose_extent, + updated.convert('RGBA')) + self.im = self._prev_im + self._prev_im = self.im.copy() + +# -------------------------------------------------------------------- +# Write GIF files + +try: + import _imaging_gif +except ImportError: + _imaging_gif = None + +RAWMODE = { + "1": "L", + "L": "L", + "P": "P", +} + + +def _convert_mode(im, initial_call=False): + # convert on the fly (EXPERIMENTAL -- I'm not sure PIL + # should automatically convert images on save...) 
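+    # (modes based on "RGB" are quantised to "P", anything else is reduced
+    # to greyscale "L"; on the initial call the palette size is capped at
+    # that of the source palette, when the image carries one)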
+ if Image.getmodebase(im.mode) == "RGB": + if initial_call: + palette_size = 256 + if im.palette: + palette_size = len(im.palette.getdata()[1]) // 3 + return im.convert("P", palette=1, colors=palette_size) + else: + return im.convert("P") + return im.convert("L") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, save_all=False): + + im.encoderinfo.update(im.info) + if _imaging_gif: + # call external driver + try: + _imaging_gif.save(im, fp, filename) + return + except IOError: + pass # write uncompressed file + + if im.mode in RAWMODE: + im_out = im.copy() + else: + im_out = _convert_mode(im, True) + + # header + try: + palette = im.encoderinfo["palette"] + except KeyError: + palette = None + im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) + + if save_all: + previous = None + + first_frame = None + for im_frame in ImageSequence.Iterator(im): + im_frame = _convert_mode(im_frame) + + # To specify duration, add the time in milliseconds to getdata(), + # e.g. getdata(im_frame, duration=1000) + if not previous: + # global header + first_frame = getheader(im_frame, palette, im.encoderinfo)[0] + first_frame += getdata(im_frame, (0, 0), **im.encoderinfo) + else: + if first_frame: + for s in first_frame: + fp.write(s) + first_frame = None + + # delta frame + delta = ImageChops.subtract_modulo(im_frame, previous.copy()) + bbox = delta.getbbox() + + if bbox: + # compress difference + for s in getdata(im_frame.crop(bbox), + bbox[:2], **im.encoderinfo): + fp.write(s) + else: + # FIXME: what should we do in this case? + pass + previous = im_frame + if first_frame: + save_all = False + if not save_all: + header = getheader(im_out, palette, im.encoderinfo)[0] + for s in header: + fp.write(s) + + flags = 0 + + if get_interlace(im): + flags = flags | 64 + + # local image header + _get_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0, + RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + fp.write(b";") # end of file + + if hasattr(fp, "flush"): + fp.flush() + + +def get_interlace(im): + try: + interlace = im.encoderinfo["interlace"] + except KeyError: + interlace = 1 + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _get_local_header(fp, im, offset, flags): + transparent_color_exists = False + try: + transparency = im.encoderinfo["transparency"] + except KeyError: + pass + else: + transparency = int(transparency) + # optimize the block away if transparent color is not used + transparent_color_exists = True + + if _get_optimize(im, im.encoderinfo): + used_palette_colors = _get_used_palette_colors(im) + + # adjust the transparency index after optimize + if len(used_palette_colors) < 256: + for i in range(len(used_palette_colors)): + if used_palette_colors[i] == transparency: + transparency = i + transparent_color_exists = True + break + else: + transparent_color_exists = False + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + if transparent_color_exists or duration != 0: + transparency_flag = 1 if transparent_color_exists else 0 + if not transparent_color_exists: + transparency = 0 + + fp.write(b"!" 
+ + o8(249) + # extension intro + o8(4) + # length + o8(transparency_flag) + # transparency info present + o16(duration) + # duration + o8(transparency) + # transparency index + o8(0)) + + if "loop" in im.encoderinfo: + number_of_loops = im.encoderinfo["loop"] + fp.write(b"!" + + o8(255) + # extension intro + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(number_of_loops) + # number of loops + o8(0)) + fp.write(b"," + + o16(offset[0]) + # offset + o16(offset[1]) + + o16(im.size[0]) + # size + o16(im.size[1]) + + o8(flags) + # flags + o8(8)) # bits + + +def _save_netpbm(im, fp, filename): + + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. + + import os + from subprocess import Popen, check_call, PIPE, CalledProcessError + import tempfile + file = im._dump() + + if im.mode != "RGB": + with open(filename, 'wb') as f: + stderr = tempfile.TemporaryFile() + check_call(["ppmtogif", file], stdout=f, stderr=stderr) + else: + with open(filename, 'wb') as f: + + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (file, filename) + quant_cmd = ["ppmquant", "256", file] + togif_cmd = ["ppmtogif"] + stderr = tempfile.TemporaryFile() + quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=stderr) + stderr = tempfile.TemporaryFile() + togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, stdout=f, + stderr=stderr) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise CalledProcessError(retcode, togif_cmd) + + try: + os.unlink(file) + except OSError: + pass + + +# -------------------------------------------------------------------- +# GIF utilities + +def _get_optimize(im, info): + return im.mode in ("P", "L") and info and info.get("optimize", 0) + + +def _get_used_palette_colors(im): + used_palette_colors = [] + + # check which colors are used + i = 0 + for count in im.histogram(): + if count: + used_palette_colors.append(i) + i += 1 + + return used_palette_colors + + +def getheader(im, palette=None, info=None): + """Return a list of strings representing a GIF header""" + + # Header Block + # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + for extensionKey in ["transparency", "duration", "loop"]: + if info and extensionKey in info and \ + not (extensionKey == "duration" and info[extensionKey] == 0): + version = b"89a" + break + else: + if im.info.get("version") == "89a": + version = b"89a" + + header = [ + b"GIF"+version + # signature + version + o16(im.size[0]) + # canvas width + o16(im.size[1]) # canvas height + ] + + if im.mode == "P": + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = bytearray([i//3 for i in range(768)]) + + used_palette_colors = palette_bytes = None + + if _get_optimize(im, info): + used_palette_colors = _get_used_palette_colors(im) + + # create the new palette if not every color is used + if len(used_palette_colors) < 256: + palette_bytes = b"" + new_positions = {} + + i = 0 + # pick only the used colors from the palette + for oldPosition in used_palette_colors: + palette_bytes += 
source_palette[oldPosition*3:oldPosition*3+3]
+                new_positions[oldPosition] = i
+                i += 1
+
+            # replace the palette color id of all pixel with the new id
+            image_bytes = bytearray(im.tobytes())
+            for i in range(len(image_bytes)):
+                image_bytes[i] = new_positions[image_bytes[i]]
+            im.frombytes(bytes(image_bytes))
+            new_palette_bytes = (palette_bytes +
+                                 (768 - len(palette_bytes)) * b'\x00')
+            im.putpalette(new_palette_bytes)
+            im.palette = ImagePalette.ImagePalette("RGB",
+                                                   palette=palette_bytes,
+                                                   size=len(palette_bytes))
+
+    if not palette_bytes:
+        palette_bytes = source_palette
+
+    # Logical Screen Descriptor
+    # calculate the palette size for the header
+    import math
+    color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1
+    if color_table_size < 0:
+        color_table_size = 0
+    # size of global color table + global color table flag
+    header.append(o8(color_table_size + 128))
+    # background + reserved/aspect
+    if info and "background" in info:
+        background = info["background"]
+    elif "background" in im.info:
+        # This elif is redundant within GifImagePlugin
+        # since im.info parameters are bundled into the info dictionary
+        # However, external scripts may call getheader directly
+        # So this maintains earlier behaviour
+        background = im.info["background"]
+    else:
+        background = 0
+    header.append(o8(background) + o8(0))
+    # end of Logical Screen Descriptor
+
+    # add the missing amount of bytes
+    # the palette has to be 2<<n in size
+    actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3
+    if actual_target_size_diff > 0:
+        palette_bytes += o8(0) * 3 * actual_target_size_diff
+
+    # Header + Logical Screen Descriptor + Global Color Table
+    header.append(palette_bytes)
+    return header, used_palette_colors
+
+
+def getdata(im, offset=(0, 0), **params):
+    """Return a list of strings representing this image.
+       The first string is a local image header, the rest contains
+       encoded image data."""
+
+    class Collector(object):
+        data = []
+
+        def write(self, data):
+            self.data.append(data)
+
+    im.load()  # make sure raster data is available
+
+    fp = Collector()
+
+    try:
+        im.encoderinfo = params
+
+        # local image header
+        _get_local_header(fp, im, offset, 0)
+
+        ImageFile._save(im, fp, [("gif", (0, 0)+im.size, 0, RAWMODE[im.mode])])
+
+        fp.write(b"\0")  # end of image data
+
+    finally:
+        del im.encoderinfo
+
+    return fp.data
+
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(GifImageFile.format, GifImageFile, _accept)
+Image.register_save(GifImageFile.format, _save)
+Image.register_save_all(GifImageFile.format, _save_all)
+Image.register_extension(GifImageFile.format, ".gif")
+Image.register_mime(GifImageFile.format, "image/gif")
+
+#
+# Uncomment the following line if you wish to use NETPBM/PBMPLUS
+# instead of the built-in "uncompressed" GIF encoder
+
+# Image.register_save(GifImageFile.format, _save_netpbm)
diff --git a/server/www/packages/packages-darwin/x64/PIL/GimpGradientFile.py b/server/www/packages/packages-darwin/x64/PIL/GimpGradientFile.py
new file mode 100644
index 0000000..45af573
--- /dev/null
+++ b/server/www/packages/packages-darwin/x64/PIL/GimpGradientFile.py
@@ -0,0 +1,137 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read (and render) GIMP gradient files
+#
+# History:
+# 97-08-23 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
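+#
+# Illustrative sketch (file name hypothetical): render a .ggr gradient into
+# a 256-entry RGBA palette:
+#
+#     with open("Sunrise.ggr", "rb") as fp:
+#         data, rawmode = GimpGradientFile(fp).getpalette(256)
+#     # data holds 256 * 4 packed bytes, rawmode == "RGBA"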
+# + +from math import pi, log, sin, sqrt +from PIL._binary import o8 + +# -------------------------------------------------------------------- +# Stuff to translate curve segments to palette values (derived from +# the corresponding code in GIMP, written by Federico Mena Quintero. +# See the GIMP distribution for more information.) +# + +EPSILON = 1e-10 + + +def linear(middle, pos): + if pos <= middle: + if middle < EPSILON: + return 0.0 + else: + return 0.5 * pos / middle + else: + pos = pos - middle + middle = 1.0 - middle + if middle < EPSILON: + return 1.0 + else: + return 0.5 + 0.5 * pos / middle + + +def curved(middle, pos): + return pos ** (log(0.5) / log(max(middle, EPSILON))) + + +def sine(middle, pos): + return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + + +def sphere_increasing(middle, pos): + return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + + +def sphere_decreasing(middle, pos): + return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) + +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] + + +class GradientFile(object): + + gradient = None + + def getpalette(self, entries=256): + + palette = [] + + ix = 0 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + for i in range(entries): + + x = i / float(entries-1) + + while x1 < x: + ix += 1 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + w = x1 - x0 + + if w < EPSILON: + scale = segment(0.5, 0.5) + else: + scale = segment((xm - x0) / w, (x - x0) / w) + + # expand to RGBA + r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) + g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) + b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) + a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) + + # add to palette + palette.append(r + g + b + a) + + return b"".join(palette), "RGBA" + + +## +# File handler for GIMP's gradient format. + +class GimpGradientFile(GradientFile): + + def __init__(self, fp): + + if fp.readline()[:13] != b"GIMP Gradient": + raise SyntaxError("not a GIMP gradient file") + + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) + + gradient = [] + + for i in range(count): + + s = fp.readline().split() + w = [float(x) for x in s[:11]] + + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] + + segment = SEGMENTS[int(s[11])] + cspace = int(s[12]) + + if cspace != 0: + raise IOError("cannot handle HSV colour space") + + gradient.append((x0, x1, xm, rgb0, rgb1, segment)) + + self.gradient = gradient diff --git a/server/www/packages/packages-darwin/x64/PIL/GimpPaletteFile.py b/server/www/packages/packages-darwin/x64/PIL/GimpPaletteFile.py new file mode 100644 index 0000000..4bf3ca3 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/GimpPaletteFile.py @@ -0,0 +1,62 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read GIMP palette files +# +# History: +# 1997-08-23 fl Created +# 2004-09-07 fl Support GIMP 2.0 palette files. +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1997-2004. +# +# See the README file for information on usage and redistribution. +# + +import re +from PIL._binary import o8 + + +## +# File handler for GIMP's palette format. 
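The segment functions above (linear, curved, sine and the two sphere variants) define how GradientFile.getpalette() blends the two RGBA endpoints of each gradient segment. A small standalone check of that interpolation, using made-up endpoint values rather than a real .ggr file, might look like this:

# Standalone check of the segment blending used by GradientFile.getpalette;
# endpoint colours and positions are illustrative only.
EPSILON = 1e-10

def linear(middle, pos):
    # remap pos in [0, 1] so that the segment midpoint maps to 0.5
    if pos <= middle:
        return 0.0 if middle < EPSILON else 0.5 * pos / middle
    pos, middle = pos - middle, 1.0 - middle
    return 1.0 if middle < EPSILON else 0.5 + 0.5 * pos / middle

rgb0 = (0.0, 0.0, 0.0, 1.0)       # left endpoint (RGBA floats in 0..1)
rgb1 = (1.0, 1.0, 1.0, 1.0)       # right endpoint
scale = linear(0.5, 0.25)         # a quarter of the way through a centred segment
pixel = tuple(int(255 * ((b - a) * scale + a) + 0.5) for a, b in zip(rgb0, rgb1))
print(pixel)                      # (64, 64, 64, 255)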
+ +class GimpPaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [o8(i)*3 for i in range(256)] + + if fp.readline()[:12] != b"GIMP Palette": + raise SyntaxError("not a GIMP palette file") + + i = 0 + + while i <= 255: + + s = fp.readline() + + if not s: + break + # skip fields and comment lines + if re.match(b"\w+:|#", s): + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = tuple(map(int, s.split()[:3])) + if len(v) != 3: + raise ValueError("bad palette entry") + + if 0 <= i <= 255: + self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) + + i += 1 + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/server/www/packages/packages-darwin/x64/PIL/GribStubImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/GribStubImagePlugin.py new file mode 100644 index 0000000..8ffad81 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/GribStubImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library +# $Id$ +# +# GRIB stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific GRIB image handler. +# +# @param handler Handler object. + +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[0:4] == b"GRIB" and prefix[7] == b'\x01' + + +class GribStubImageFile(ImageFile.StubImageFile): + + format = "GRIB" + format_description = "GRIB" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a GRIB file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("GRIB save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) +Image.register_save(GribStubImageFile.format, _save) + +Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/server/www/packages/packages-darwin/x64/PIL/Hdf5StubImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/Hdf5StubImagePlugin.py new file mode 100644 index 0000000..f7945be --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/Hdf5StubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# HDF5 stub adapter +# +# Copyright (c) 2000-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific HDF5 image handler. +# +# @param handler Handler object. 
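Both stub plugins only delegate: register_handler() installs an application object that does the real decoding and encoding. A hedged sketch of such a handler for the GRIB stub follows; DummyGribHandler and its behaviour are hypothetical, while the open()/save() calls come from the plugin above and load() is the hook ImageFile.StubImageFile invokes when the image is actually read.

# Hypothetical GRIB handler for the stub plugin above (sketch only).
from PIL import Image, GribStubImagePlugin

class DummyGribHandler(object):
    def open(self, im):
        # called from GribStubImageFile._open(); may fix up mode and size
        im.mode = "F"
        im.size = (1, 1)

    def load(self, im):
        # must return a fully loaded PIL image for the stub to adopt
        return Image.new("F", im.size)

    def save(self, im, fp, filename):
        raise IOError("writing GRIB is not implemented in this sketch")

GribStubImagePlugin.register_handler(DummyGribHandler())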
+ +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[:8] == b"\x89HDF\r\n\x1a\n" + + +class HDF5StubImageFile(ImageFile.StubImageFile): + + format = "HDF5" + format_description = "HDF5" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not an HDF file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("HDF5 save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) +Image.register_save(HDF5StubImageFile.format, _save) + +Image.register_extension(HDF5StubImageFile.format, ".h5") +Image.register_extension(HDF5StubImageFile.format, ".hdf") diff --git a/server/www/packages/packages-darwin/x64/PIL/IcnsImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/IcnsImagePlugin.py new file mode 100644 index 0000000..a4366e9 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/IcnsImagePlugin.py @@ -0,0 +1,366 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Mac OS X icns file decoder, based on icns.py by Bob Ippolito. +# +# history: +# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. +# +# Copyright (c) 2004 by Bob Ippolito. +# Copyright (c) 2004 by Secret Labs. +# Copyright (c) 2004 by Fredrik Lundh. +# Copyright (c) 2014 by Alastair Houghton. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, PngImagePlugin, _binary +import io +import os +import shutil +import struct +import sys +import tempfile + +enable_jpeg2k = hasattr(Image.core, 'jp2klib_version') +if enable_jpeg2k: + from PIL import Jpeg2KImagePlugin + +i8 = _binary.i8 + +HEADERSIZE = 8 + + +def nextheader(fobj): + return struct.unpack('>4sI', fobj.read(HEADERSIZE)) + + +def read_32t(fobj, start_length, size): + # The 128x128 icon seems to have an extra header for some reason. + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(4) + if sig != b'\x00\x00\x00\x00': + raise SyntaxError('Unknown signature, expecting 0x00000000') + return read_32(fobj, (start + 4, length - 4), size) + + +def read_32(fobj, start_length, size): + """ + Read a 32bit RGB icon resource. Seems to be either uncompressed or + an RLE packbits-like scheme. 
+ """ + (start, length) = start_length + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + if length == sizesq * 3: + # uncompressed ("RGBRGBGB") + indata = fobj.read(length) + im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) + else: + # decode image + im = Image.new("RGB", pixel_size, None) + for band_ix in range(3): + data = [] + bytesleft = sizesq + while bytesleft > 0: + byte = fobj.read(1) + if not byte: + break + byte = i8(byte) + if byte & 0x80: + blocksize = byte - 125 + byte = fobj.read(1) + for i in range(blocksize): + data.append(byte) + else: + blocksize = byte + 1 + data.append(fobj.read(blocksize)) + bytesleft -= blocksize + if bytesleft <= 0: + break + if bytesleft != 0: + raise SyntaxError( + "Error reading channel [%r left]" % bytesleft + ) + band = Image.frombuffer( + "L", pixel_size, b"".join(data), "raw", "L", 0, 1 + ) + im.im.putband(band.im, band_ix) + return {"RGB": im} + + +def read_mk(fobj, start_length, size): + # Alpha masks seem to be uncompressed + start = start_length[0] + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + band = Image.frombuffer( + "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1 + ) + return {"A": band} + + +def read_png_or_jpeg2000(fobj, start_length, size): + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(12) + if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a': + fobj.seek(start) + im = PngImagePlugin.PngImageFile(fobj) + return {"RGBA": im} + elif sig[:4] == b'\xff\x4f\xff\x51' \ + or sig[:4] == b'\x0d\x0a\x87\x0a' \ + or sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + if not enable_jpeg2k: + raise ValueError('Unsupported icon subimage format (rebuild PIL ' + 'with JPEG 2000 support to fix this)') + # j2k, jpc or j2c + fobj.seek(start) + jp2kstream = fobj.read(length) + f = io.BytesIO(jp2kstream) + im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) + if im.mode != 'RGBA': + im = im.convert('RGBA') + return {"RGBA": im} + else: + raise ValueError('Unsupported icon subimage format') + + +class IcnsFile(object): + + SIZES = { + (512, 512, 2): [ + (b'ic10', read_png_or_jpeg2000), + ], + (512, 512, 1): [ + (b'ic09', read_png_or_jpeg2000), + ], + (256, 256, 2): [ + (b'ic14', read_png_or_jpeg2000), + ], + (256, 256, 1): [ + (b'ic08', read_png_or_jpeg2000), + ], + (128, 128, 2): [ + (b'ic13', read_png_or_jpeg2000), + ], + (128, 128, 1): [ + (b'ic07', read_png_or_jpeg2000), + (b'it32', read_32t), + (b't8mk', read_mk), + ], + (64, 64, 1): [ + (b'icp6', read_png_or_jpeg2000), + ], + (32, 32, 2): [ + (b'ic12', read_png_or_jpeg2000), + ], + (48, 48, 1): [ + (b'ih32', read_32), + (b'h8mk', read_mk), + ], + (32, 32, 1): [ + (b'icp5', read_png_or_jpeg2000), + (b'il32', read_32), + (b'l8mk', read_mk), + ], + (16, 16, 2): [ + (b'ic11', read_png_or_jpeg2000), + ], + (16, 16, 1): [ + (b'icp4', read_png_or_jpeg2000), + (b'is32', read_32), + (b's8mk', read_mk), + ], + } + + def __init__(self, fobj): + """ + fobj is a file-like object as an icns resource + """ + # signature : (start, length) + self.dct = dct = {} + self.fobj = fobj + sig, filesize = nextheader(fobj) + if sig != b'icns': + raise SyntaxError('not an icns file') + i = HEADERSIZE + while i < filesize: + sig, blocksize = nextheader(fobj) + if blocksize <= 0: + raise SyntaxError('invalid block header') + i += HEADERSIZE + blocksize -= HEADERSIZE + dct[sig] = (i, blocksize) + fobj.seek(blocksize, 1) + i += blocksize + + def itersizes(self): + sizes 
= [] + for size, fmts in self.SIZES.items(): + for (fmt, reader) in fmts: + if fmt in self.dct: + sizes.append(size) + break + return sizes + + def bestsize(self): + sizes = self.itersizes() + if not sizes: + raise SyntaxError("No 32bit icon resources found") + return max(sizes) + + def dataforsize(self, size): + """ + Get an icon resource as {channel: array}. Note that + the arrays are bottom-up like windows bitmaps and will likely + need to be flipped or transposed in some way. + """ + dct = {} + for code, reader in self.SIZES[size]: + desc = self.dct.get(code) + if desc is not None: + dct.update(reader(self.fobj, desc, size)) + return dct + + def getimage(self, size=None): + if size is None: + size = self.bestsize() + if len(size) == 2: + size = (size[0], size[1], 1) + channels = self.dataforsize(size) + + im = channels.get('RGBA', None) + if im: + return im + + im = channels.get("RGB").copy() + try: + im.putalpha(channels["A"]) + except KeyError: + pass + return im + + +## +# Image plugin for Mac OS icons. + +class IcnsImageFile(ImageFile.ImageFile): + """ + PIL image support for Mac OS .icns files. + Chooses the best resolution, but will possibly load + a different size image if you mutate the size attribute + before calling 'load'. + + The info dictionary has a key 'sizes' that is a list + of sizes that the icns file has. + """ + + format = "ICNS" + format_description = "Mac OS icns resource" + + def _open(self): + self.icns = IcnsFile(self.fp) + self.mode = 'RGBA' + self.best_size = self.icns.bestsize() + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + self.info['sizes'] = self.icns.itersizes() + # Just use this to see if it's loaded or not yet. + self.tile = ('',) + + def load(self): + if len(self.size) == 3: + self.best_size = self.size + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + + Image.Image.load(self) + if not self.tile: + return + self.load_prepare() + # This is likely NOT the best way to do it, but whatever. + im = self.icns.getimage(self.best_size) + + # If this is a PNG or JPEG 2000, it won't be loaded yet + im.load() + + self.im = im.im + self.mode = im.mode + self.size = im.size + self.fp = None + self.icns = None + self.tile = () + self.load_end() + + +def _save(im, fp, filename): + """ + Saves the image as a series of PNG files, + that are then converted to a .icns file + using the OS X command line utility 'iconutil'. + + OS X only. 
+ """ + if hasattr(fp, "flush"): + fp.flush() + + # create the temporary set of pngs + iconset = tempfile.mkdtemp('.iconset') + last_w = None + last_im = None + for w in [16, 32, 128, 256, 512]: + prefix = 'icon_{}x{}'.format(w, w) + + if last_w == w: + im_scaled = last_im + else: + im_scaled = im.resize((w, w), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'.png')) + + im_scaled = im.resize((w*2, w*2), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'@2x.png')) + last_im = im_scaled + + # iconutil -c icns -o {} {} + from subprocess import Popen, PIPE, CalledProcessError + + convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset] + stderr = tempfile.TemporaryFile() + convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=stderr) + + convert_proc.stdout.close() + + retcode = convert_proc.wait() + + # remove the temporary files + shutil.rmtree(iconset) + + if retcode: + raise CalledProcessError(retcode, convert_cmd) + +Image.register_open(IcnsImageFile.format, IcnsImageFile, + lambda x: x[:4] == b'icns') +Image.register_extension(IcnsImageFile.format, '.icns') + +if sys.platform == 'darwin': + Image.register_save(IcnsImageFile.format, _save) + + Image.register_mime(IcnsImageFile.format, "image/icns") + + +if __name__ == '__main__': + imf = IcnsImageFile(open(sys.argv[1], 'rb')) + for size in imf.info['sizes']: + imf.size = size + imf.load() + im = imf.im + im.save('out-%s-%s-%s.png' % size) + im = Image.open(open(sys.argv[1], "rb")) + im.save("out.png") + if sys.platform == 'windows': + os.startfile("out.png") diff --git a/server/www/packages/packages-darwin/x64/PIL/IcoImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/IcoImagePlugin.py new file mode 100644 index 0000000..4aa7687 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/IcoImagePlugin.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Icon support for PIL +# +# History: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . +# https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin +# +# Icon format references: +# * https://en.wikipedia.org/wiki/ICO_(file_format) +# * http://msdn.microsoft.com/en-us/library/ms997538.aspx + + +import struct +from io import BytesIO + +from PIL import Image, ImageFile, BmpImagePlugin, PngImagePlugin, _binary +from math import log, ceil + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le + +_MAGIC = b"\0\0\1\0" + + +def _save(im, fp, filename): + fp.write(_MAGIC) # (2+2) + sizes = im.encoderinfo.get("sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), + (64, 64), (128, 128), (255, 255)]) + width, height = im.size + filter(lambda x: False if (x[0] > width or x[1] > height or + x[0] > 255 or x[1] > 255) else True, sizes) + fp.write(struct.pack("=8bpp) + 'reserved': i8(s[3]), + 'planes': i16(s[4:]), + 'bpp': i16(s[6:]), + 'size': i32(s[8:]), + 'offset': i32(s[12:]) + } + + # See Wikipedia + for j in ('width', 'height'): + if not icon_header[j]: + icon_header[j] = 256 + + # See Wikipedia notes about color depth. 
+ # We need this just to differ images with equal sizes + icon_header['color_depth'] = (icon_header['bpp'] or + (icon_header['nb_color'] != 0 and + ceil(log(icon_header['nb_color'], + 2))) or 256) + + icon_header['dim'] = (icon_header['width'], icon_header['height']) + icon_header['square'] = (icon_header['width'] * + icon_header['height']) + + self.entry.append(icon_header) + + self.entry = sorted(self.entry, key=lambda x: x['color_depth']) + # ICO images are usually squares + # self.entry = sorted(self.entry, key=lambda x: x['width']) + self.entry = sorted(self.entry, key=lambda x: x['square']) + self.entry.reverse() + + def sizes(self): + """ + Get a list of all available icon sizes and color depths. + """ + return set((h['width'], h['height']) for h in self.entry) + + def getimage(self, size, bpp=False): + """ + Get an image from the icon + """ + for (i, h) in enumerate(self.entry): + if size == h['dim'] and (bpp is False or bpp == h['color_depth']): + return self.frame(i) + return self.frame(0) + + def frame(self, idx): + """ + Get an image from frame idx + """ + + header = self.entry[idx] + + self.buf.seek(header['offset']) + data = self.buf.read(8) + self.buf.seek(header['offset']) + + if data[:8] == PngImagePlugin._MAGIC: + # png frame + im = PngImagePlugin.PngImageFile(self.buf) + else: + # XOR + AND mask bmp frame + im = BmpImagePlugin.DibImageFile(self.buf) + + # change tile dimension to only encompass XOR image + im.size = (im.size[0], int(im.size[1] / 2)) + d, e, o, a = im.tile[0] + im.tile[0] = d, (0, 0) + im.size, o, a + + # figure out where AND mask image starts + mode = a[0] + bpp = 8 + for k in BmpImagePlugin.BIT2MODE.keys(): + if mode == BmpImagePlugin.BIT2MODE[k][1]: + bpp = k + break + + if 32 == bpp: + # 32-bit color depth icon image allows semitransparent areas + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. + + # Back up to start of bmp data + self.buf.seek(o) + # extract every 4th byte (eg. 3,7,11,15,...) + alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] + + # convert to an 8bpp grayscale image + mask = Image.frombuffer( + 'L', # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + 'raw', # raw decoder + ('L', 0, -1) # 8bpp inverted, unpadded, reversed + ) + else: + # get AND image from end of bitmap + w = im.size[0] + if (w % 32) > 0: + # bitmap row data is aligned to word boundaries + w += 32 - (im.size[0] % 32) + + # the total mask data is + # padded row size * height / bits per char + + and_mask_offset = o + int(im.size[0] * im.size[1] * + (bpp / 8.0)) + total_bytes = int((w * im.size[1]) / 8) + + self.buf.seek(and_mask_offset) + maskData = self.buf.read(total_bytes) + + # convert raw data to image + mask = Image.frombuffer( + '1', # 1 bpp + im.size, # (w, h) + maskData, # source chars + 'raw', # raw decoder + ('1;I', int(w/8), -1) # 1bpp inverted, padded, reversed + ) + + # now we have two images, im is XOR image and mask is AND image + + # apply mask image as alpha channel + im = im.convert('RGBA') + im.putalpha(mask) + + return im + + +## +# Image plugin for Windows Icon files. + +class IcoImageFile(ImageFile.ImageFile): + """ + PIL read-only image support for Microsoft Windows .ico files. + + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. + + The info dictionary has a key 'sizes' that is a list of the sizes available + in the icon file. 
+ + Handles classic, XP and Vista icon formats. + + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . + https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin + """ + format = "ICO" + format_description = "Windows Icon" + + def _open(self): + self.ico = IcoFile(self.fp) + self.info['sizes'] = self.ico.sizes() + self.size = self.ico.entry[0]['dim'] + self.load() + + def load(self): + im = self.ico.getimage(self.size) + # if tile is PNG, it won't really be loaded yet + im.load() + self.im = im.im + self.mode = im.mode + self.size = im.size + + def load_seek(self): + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. + pass +# +# -------------------------------------------------------------------- + +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") diff --git a/server/www/packages/packages-darwin/x64/PIL/ImImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/ImImagePlugin.py new file mode 100644 index 0000000..dd4f829 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImImagePlugin.py @@ -0,0 +1,355 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IFUNC IM file handling for PIL +# +# history: +# 1995-09-01 fl Created. +# 1997-01-03 fl Save palette images +# 1997-01-08 fl Added sequence support +# 1997-01-23 fl Added P and RGB save support +# 1997-05-31 fl Read floating point images +# 1997-06-22 fl Save floating point images +# 1997-08-27 fl Read and save 1-bit images +# 1998-06-25 fl Added support for RGB+LUT images +# 1998-07-02 fl Added support for YCC images +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 1998-12-29 fl Added I;16 support +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# 2003-09-26 fl Added LA/PA support +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
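Reading mirrors the ICO docstring above: the largest frame is loaded by default, and assigning a different entry from info['sizes'] to the size attribute before load() selects another frame. A usage sketch with a placeholder path:

# Usage sketch for the ICO reader above; "app.ico" is a placeholder path.
from PIL import Image

im = Image.open("app.ico")
print(im.info["sizes"])     # e.g. set([(16, 16), (32, 32), (48, 48), (256, 256)])
im.size = (16, 16)          # pick a smaller frame
im.load()                   # re-reads that frame from the icon directory
im.save("app-16.png")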
+# + + +import re +from PIL import Image, ImageFile, ImagePalette +from PIL._binary import i8 + +__version__ = "0.7" + + +# -------------------------------------------------------------------- +# Standard tags + +COMMENT = "Comment" +DATE = "Date" +EQUIPMENT = "Digitalization equipment" +FRAMES = "File size (no of images)" +LUT = "Lut" +NAME = "Name" +SCALE = "Scale (x,y)" +SIZE = "Image size (x*y)" +MODE = "Image type" + +TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0, + SCALE: 0, SIZE: 0, MODE: 0} + +OPEN = { + # ifunc93/p3cfunc formats + "0 1 image": ("1", "1"), + "L 1 image": ("1", "1"), + "Greyscale image": ("L", "L"), + "Grayscale image": ("L", "L"), + "RGB image": ("RGB", "RGB;L"), + "RLB image": ("RGB", "RLB"), + "RYB image": ("RGB", "RLB"), + "B1 image": ("1", "1"), + "B2 image": ("P", "P;2"), + "B4 image": ("P", "P;4"), + "X 24 image": ("RGB", "RGB"), + "L 32 S image": ("I", "I;32"), + "L 32 F image": ("F", "F;32"), + # old p3cfunc formats + "RGB3 image": ("RGB", "RGB;T"), + "RYB3 image": ("RGB", "RYB;T"), + # extensions + "LA image": ("LA", "LA;L"), + "RGBA image": ("RGBA", "RGBA;L"), + "RGBX image": ("RGBX", "RGBX;L"), + "CMYK image": ("CMYK", "CMYK;L"), + "YCC image": ("YCbCr", "YCbCr;L"), +} + +# ifunc95 extensions +for i in ["8", "8S", "16", "16S", "32", "32F"]: + OPEN["L %s image" % i] = ("F", "F;%s" % i) + OPEN["L*%s image" % i] = ("F", "F;%s" % i) +for i in ["16", "16L", "16B"]: + OPEN["L %s image" % i] = ("I;%s" % i, "I;%s" % i) + OPEN["L*%s image" % i] = ("I;%s" % i, "I;%s" % i) +for i in ["32S"]: + OPEN["L %s image" % i] = ("I", "I;%s" % i) + OPEN["L*%s image" % i] = ("I", "I;%s" % i) +for i in range(2, 33): + OPEN["L*%s image" % i] = ("F", "F;%s" % i) + + +# -------------------------------------------------------------------- +# Read IM directory + +split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + + +def number(s): + try: + return int(s) + except ValueError: + return float(s) + + +## +# Image plugin for the IFUNC IM file format. + +class ImImageFile(ImageFile.ImageFile): + + format = "IM" + format_description = "IFUNC Image Memory" + + def _open(self): + + # Quick rejection: if there's not an LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + n = 0 + + # Default values + self.info[MODE] = "L" + self.info[SIZE] = (512, 512) + self.info[FRAMES] = 1 + + self.rawmode = "L" + + while True: + + s = self.fp.read(1) + + # Some versions of IFUNC uses \n\r instead of \r\n... + if s == b"\r": + continue + + if not s or s == b'\0' or s == b'\x1A': + break + + # FIXME: this may read whole file if not a text file + s = s + self.fp.readline() + + if len(s) > 100: + raise SyntaxError("not an IM file") + + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] == b'\n': + s = s[:-1] + + try: + m = split.match(s) + except re.error as v: + raise SyntaxError("not an IM file") + + if m: + + k, v = m.group(1, 2) + + # Don't know if this is the correct encoding, + # but a decent guess (I guess) + k = k.decode('latin-1', 'replace') + v = v.decode('latin-1', 'replace') + + # Convert value as appropriate + if k in [FRAMES, SCALE, SIZE]: + v = v.replace("*", ",") + v = tuple(map(number, v.split(","))) + if len(v) == 1: + v = v[0] + elif k == MODE and v in OPEN: + v, self.rawmode = OPEN[v] + + # Add to dictionary. Note that COMMENT tags are + # combined into a list of strings. 
+ if k == COMMENT: + if k in self.info: + self.info[k].append(v) + else: + self.info[k] = [v] + else: + self.info[k] = v + + if k in TAGS: + n += 1 + + else: + + raise SyntaxError("Syntax error in IM header: " + + s.decode('ascii', 'replace')) + + if not n: + raise SyntaxError("Not an IM file") + + # Basic attributes + self.size = self.info[SIZE] + self.mode = self.info[MODE] + + # Skip forward to start of image data + while s and s[0:1] != b'\x1A': + s = self.fp.read(1) + if not s: + raise SyntaxError("File truncated") + + if LUT in self.info: + # convert lookup table to palette or lut attribute + palette = self.fp.read(768) + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette + for i in range(256): + if palette[i] == palette[i+256] == palette[i+512]: + if i8(palette[i]) != i: + linear = 0 + else: + greyscale = 0 + if self.mode == "L" or self.mode == "LA": + if greyscale: + if not linear: + self.lut = [i8(c) for c in palette[:256]] + else: + if self.mode == "L": + self.mode = self.rawmode = "P" + elif self.mode == "LA": + self.mode = self.rawmode = "PA" + self.palette = ImagePalette.raw("RGB;L", palette) + elif self.mode == "RGB": + if not greyscale or not linear: + self.lut = [i8(c) for c in palette] + + self.frame = 0 + + self.__offset = offs = self.fp.tell() + + self.__fp = self.fp # FIXME: hack + + if self.rawmode[:2] == "F;": + + # ifunc95 formats + try: + # use bit decoder (if necessary) + bits = int(self.rawmode[2:]) + if bits not in [8, 16, 32]: + self.tile = [("bit", (0, 0)+self.size, offs, + (bits, 8, 3, 0, -1))] + return + except ValueError: + pass + + if self.rawmode in ["RGB;T", "RYB;T"]: + # Old LabEye/3PC files. Would be very surprised if anyone + # ever stumbled upon such a file ;-) + size = self.size[0] * self.size[1] + self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)), + ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)), + ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))] + else: + # LabEye/IFUNC files + self.tile = [("raw", (0, 0)+self.size, offs, + (self.rawmode, 0, -1))] + + @property + def n_frames(self): + return self.info[FRAMES] + + @property + def is_animated(self): + return self.info[FRAMES] > 1 + + def seek(self, frame): + + if frame < 0 or frame >= self.info[FRAMES]: + raise EOFError("seek outside sequence") + + if self.frame == frame: + return + + self.frame = frame + + if self.mode == "1": + bits = 1 + else: + bits = 8 * len(self.mode) + + size = ((self.size[0] * bits + 7) // 8) * self.size[1] + offs = self.__offset + frame * size + + self.fp = self.__fp + + self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))] + + def tell(self): + + return self.frame + +# +# -------------------------------------------------------------------- +# Save IM files + +SAVE = { + # mode: (im type, raw mode) + "1": ("0 1", "1"), + "L": ("Greyscale", "L"), + "LA": ("LA", "LA;L"), + "P": ("Greyscale", "P"), + "PA": ("LA", "PA;L"), + "I": ("L 32S", "I;32S"), + "I;16": ("L 16", "I;16"), + "I;16L": ("L 16L", "I;16L"), + "I;16B": ("L 16B", "I;16B"), + "F": ("L 32F", "F;32F"), + "RGB": ("RGB", "RGB;L"), + "RGBA": ("RGBA", "RGBA;L"), + "RGBX": ("RGBX", "RGBX;L"), + "CMYK": ("CMYK", "CMYK;L"), + "YCbCr": ("YCC", "YCbCr;L") +} + + +def _save(im, fp, filename, check=0): + + try: + image_type, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as IM" % im.mode) + + try: + frames = im.encoderinfo["frames"] + except KeyError: + frames = 1 + + if check: + return check + + fp.write(("Image type: %s image\r\n" % 
image_type).encode('ascii')) + if filename: + fp.write(("Name: %s\r\n" % filename).encode('ascii')) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii')) + fp.write(("File size (no of images): %d\r\n" % frames).encode('ascii')) + if im.mode == "P": + fp.write(b"Lut: 1\r\n") + fp.write(b"\000" * (511-fp.tell()) + b"\032") + if im.mode == "P": + fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) + +Image.register_extension(ImImageFile.format, ".im") diff --git a/server/www/packages/packages-darwin/x64/PIL/Image.py b/server/www/packages/packages-darwin/x64/PIL/Image.py new file mode 100644 index 0000000..89549ea --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/Image.py @@ -0,0 +1,2500 @@ +# +# The Python Imaging Library. +# $Id$ +# +# the Image class wrapper +# +# partial release history: +# 1995-09-09 fl Created +# 1996-03-11 fl PIL release 0.0 (proof of concept) +# 1996-04-30 fl PIL release 0.1b1 +# 1999-07-28 fl PIL release 1.0 final +# 2000-06-07 fl PIL release 1.1 +# 2000-10-20 fl PIL release 1.1.1 +# 2001-05-07 fl PIL release 1.1.2 +# 2002-03-15 fl PIL release 1.1.3 +# 2003-05-10 fl PIL release 1.1.4 +# 2005-03-28 fl PIL release 1.1.5 +# 2006-12-02 fl PIL release 1.1.6 +# 2009-11-15 fl PIL release 1.1.7 +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import VERSION, PILLOW_VERSION, _plugins + +import logging +import warnings + +logger = logging.getLogger(__name__) + + +class DecompressionBombWarning(RuntimeWarning): + pass + + +class _imaging_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imaging C module is not installed") + + +# Limit to around a quarter gigabyte for a 24 bit (3 bpp) image +MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 / 4 / 3) + +try: + # give Tk a chance to set up the environment, in case we're + # using an _imaging module linked against libtcl/libtk (use + # __import__ to hide this from naive packagers; we don't really + # depend on Tk unless ImageTk is used, and that module already + # imports Tkinter) + __import__("FixTk") +except ImportError: + pass + +try: + # If the _imaging C module is not present, Pillow will not load. + # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. + from PIL import _imaging as core + if PILLOW_VERSION != getattr(core, 'PILLOW_VERSION', None): + raise ImportError("The _imaging extension was built for another " + " version of Pillow or PIL") + +except ImportError as v: + core = _imaging_not_installed() + # Explanations for ways that we know we might have an import error + if str(v).startswith("Module use of python"): + # The _imaging C module is present, but not compiled for + # the right version (windows only). Print a warning, if + # possible. 
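The MAX_IMAGE_PIXELS constant defined above encodes the "quarter gigabyte at 3 bytes per pixel" limit described in its comment; a quick check of that arithmetic:

# Verify the decompression-bomb threshold: 1 GiB / 4, at 3 bytes per 24-bit pixel.
max_pixels = int(1024 * 1024 * 1024 / 4 / 3)
print(max_pixels)                      # 89478485 (~89.5 million pixels)
print(max_pixels * 3 / (1024.0 ** 2))  # ~256 MiB of raw RGB data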
+ warnings.warn( + "The _imaging extension was built for another version " + "of Python.", + RuntimeWarning + ) + elif str(v).startswith("The _imaging extension"): + warnings.warn(str(v), RuntimeWarning) + elif "Symbol not found: _PyUnicodeUCS2_FromString" in str(v): + warnings.warn( + "The _imaging extension was built for Python with UCS2 support; " + "recompile PIL or build Python --without-wide-unicode. ", + RuntimeWarning + ) + elif "Symbol not found: _PyUnicodeUCS4_FromString" in str(v): + warnings.warn( + "The _imaging extension was built for Python with UCS4 support; " + "recompile PIL or build Python --with-wide-unicode. ", + RuntimeWarning + ) + # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting-pil-to-pillow.rst + raise + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +from PIL import ImageMode +from PIL._binary import i8 +from PIL._util import isPath +from PIL._util import isStringType +from PIL._util import deferred_error + +import os +import sys +import io +import struct + +# type stuff +import collections +import numbers + +# works everywhere, win for pypy, not cpython +USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info') +try: + import cffi + HAS_CFFI = True +except ImportError: + HAS_CFFI = False + + +def isImageType(t): + """ + Checks if an object is an image object. + + .. warning:: + + This function is for internal use only. + + :param t: object to check if it's an image + :returns: True if the object is an image + """ + return hasattr(t, "im") + +# +# Constants (also defined in _imagingmodule.c!) + +NONE = 0 + +# transpose +FLIP_LEFT_RIGHT = 0 +FLIP_TOP_BOTTOM = 1 +ROTATE_90 = 2 +ROTATE_180 = 3 +ROTATE_270 = 4 +TRANSPOSE = 5 + +# transforms +AFFINE = 0 +EXTENT = 1 +PERSPECTIVE = 2 +QUAD = 3 +MESH = 4 + +# resampling filters +NEAREST = NONE = 0 +LANCZOS = ANTIALIAS = 1 +BILINEAR = LINEAR = 2 +BICUBIC = CUBIC = 3 + +# dithers +NONE = 0 +NEAREST = 0 +ORDERED = 1 # Not yet implemented +RASTERIZE = 2 # Not yet implemented +FLOYDSTEINBERG = 3 # default + +# palettes/quantizers +WEB = 0 +ADAPTIVE = 1 + +MEDIANCUT = 0 +MAXCOVERAGE = 1 +FASTOCTREE = 2 + +# categories +NORMAL = 0 +SEQUENCE = 1 +CONTAINER = 2 + +if hasattr(core, 'DEFAULT_STRATEGY'): + DEFAULT_STRATEGY = core.DEFAULT_STRATEGY + FILTERED = core.FILTERED + HUFFMAN_ONLY = core.HUFFMAN_ONLY + RLE = core.RLE + FIXED = core.FIXED + + +# -------------------------------------------------------------------- +# Registries + +ID = [] +OPEN = {} +MIME = {} +SAVE = {} +SAVE_ALL = {} +EXTENSION = {} + +# -------------------------------------------------------------------- +# Modes supported by this version + +_MODEINFO = { + # NOTE: this table will be removed in future versions. use + # getmode* functions or ImageMode descriptors instead. + + # official modes + "1": ("L", "L", ("1",)), + "L": ("L", "L", ("L",)), + "I": ("L", "I", ("I",)), + "F": ("L", "F", ("F",)), + "P": ("RGB", "L", ("P",)), + "RGB": ("RGB", "L", ("R", "G", "B")), + "RGBX": ("RGB", "L", ("R", "G", "B", "X")), + "RGBA": ("RGB", "L", ("R", "G", "B", "A")), + "CMYK": ("RGB", "L", ("C", "M", "Y", "K")), + "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")), + "LAB": ("RGB", "L", ("L", "A", "B")), + "HSV": ("RGB", "L", ("H", "S", "V")), + + # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and + # BGR;24. Use these modes only if you know exactly what you're + # doing... 
+ +} + +if sys.byteorder == 'little': + _ENDIAN = '<' +else: + _ENDIAN = '>' + +_MODE_CONV = { + # official modes + "1": ('|b1', None), # broken + "L": ('|u1', None), + "I": (_ENDIAN + 'i4', None), + "F": (_ENDIAN + 'f4', None), + "P": ('|u1', None), + "RGB": ('|u1', 3), + "RGBX": ('|u1', 4), + "RGBA": ('|u1', 4), + "CMYK": ('|u1', 4), + "YCbCr": ('|u1', 3), + "LAB": ('|u1', 3), # UNDONE - unsigned |u1i1i1 + "HSV": ('|u1', 3), + # I;16 == I;16L, and I;32 == I;32L + "I;16": ('u2', None), + "I;16L": ('i2', None), + "I;16LS": ('u4', None), + "I;32L": ('i4', None), + "I;32LS": ('= 1: + return + + try: + from PIL import BmpImagePlugin + except ImportError: + pass + try: + from PIL import GifImagePlugin + except ImportError: + pass + try: + from PIL import JpegImagePlugin + except ImportError: + pass + try: + from PIL import PpmImagePlugin + except ImportError: + pass + try: + from PIL import PngImagePlugin + except ImportError: + pass +# try: +# import TiffImagePlugin +# except ImportError: +# pass + + _initialized = 1 + + +def init(): + """ + Explicitly initializes the Python Imaging Library. This function + loads all available file format drivers. + """ + + global _initialized + if _initialized >= 2: + return 0 + + for plugin in _plugins: + try: + logger.debug("Importing %s", plugin) + __import__("PIL.%s" % plugin, globals(), locals(), []) + except ImportError as e: + logger.debug("Image: failed to import %s: %s", plugin, e) + + if OPEN or SAVE: + _initialized = 2 + return 1 + + +# -------------------------------------------------------------------- +# Codec factories (used by tobytes/frombytes and ImageFile.load) + +def _getdecoder(mode, decoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + # get decoder + decoder = getattr(core, decoder_name + "_decoder") + # print(decoder, mode, args + extra) + return decoder(mode, *args + extra) + except AttributeError: + raise IOError("decoder %s not available" % decoder_name) + + +def _getencoder(mode, encoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + # get encoder + encoder = getattr(core, encoder_name + "_encoder") + # print(encoder, mode, args + extra) + return encoder(mode, *args + extra) + except AttributeError: + raise IOError("encoder %s not available" % encoder_name) + + +# -------------------------------------------------------------------- +# Simple expression analyzer + +def coerce_e(value): + return value if isinstance(value, _E) else _E(value) + + +class _E(object): + def __init__(self, data): + self.data = data + + def __add__(self, other): + return _E((self.data, "__add__", coerce_e(other).data)) + + def __mul__(self, other): + return _E((self.data, "__mul__", coerce_e(other).data)) + + +def _getscaleoffset(expr): + stub = ["stub"] + data = expr(_E(stub)).data + try: + (a, b, c) = data # simplified syntax + if (a is stub and b == "__mul__" and isinstance(c, numbers.Number)): + return c, 0.0 + if a is stub and b == "__add__" and isinstance(c, numbers.Number): + return 1.0, c + except TypeError: + pass + try: + ((a, b, c), d, e) = data # full syntax + if (a is stub and b == "__mul__" and isinstance(c, numbers.Number) and + d == "__add__" and isinstance(e, numbers.Number)): + return c, e + except TypeError: + pass + raise ValueError("illegal expression") + + +# -------------------------------------------------------------------- +# Implementation 
wrapper + +class Image(object): + """ + This class represents an image object. To create + :py:class:`~PIL.Image.Image` objects, use the appropriate factory + functions. There's hardly ever any reason to call the Image constructor + directly. + + * :py:func:`~PIL.Image.open` + * :py:func:`~PIL.Image.new` + * :py:func:`~PIL.Image.frombytes` + """ + format = None + format_description = None + + def __init__(self): + # FIXME: take "new" parameters / other image? + # FIXME: turn mode and size into delegating properties? + self.im = None + self.mode = "" + self.size = (0, 0) + self.palette = None + self.info = {} + self.category = NORMAL + self.readonly = 0 + self.pyaccess = None + + @property + def width(self): + return self.size[0] + + @property + def height(self): + return self.size[1] + + def _new(self, im): + new = Image() + new.im = im + new.mode = im.mode + new.size = im.size + if self.palette: + new.palette = self.palette.copy() + if im.mode == "P" and not new.palette: + from PIL import ImagePalette + new.palette = ImagePalette.ImagePalette() + try: + new.info = self.info.copy() + except AttributeError: + # fallback (pre-1.5.2) + new.info = {} + for k, v in self.info: + new.info[k] = v + return new + + _makeself = _new # compatibility + + # Context Manager Support + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + """ + Closes the file pointer, if possible. + + This operation will destroy the image core and release its memory. + The image data will be unusable afterward. + + This function is only required to close images that have not + had their file read and closed by the + :py:meth:`~PIL.Image.Image.load` method. + """ + try: + self.fp.close() + except Exception as msg: + logger.debug("Error closing: %s", msg) + + # Instead of simply setting to None, we're setting up a + # deferred error that will better explain that the core image + # object is gone. + self.im = deferred_error(ValueError("Operation on closed image")) + + def _copy(self): + self.load() + self.im = self.im.copy() + self.pyaccess = None + self.readonly = 0 + + def _dump(self, file=None, format=None): + import tempfile + suffix = '' + if format: + suffix = '.'+format + if not file: + f, file = tempfile.mkstemp(suffix) + os.close(f) + + self.load() + if not format or format == "PPM": + self.im.save_ppm(file) + else: + if not file.endswith(format): + file = file + "." 
+ format + self.save(file, format) + return file + + def __eq__(self, other): + if self.__class__.__name__ != other.__class__.__name__: + return False + a = (self.mode == other.mode) + b = (self.size == other.size) + c = (self.getpalette() == other.getpalette()) + d = (self.info == other.info) + e = (self.category == other.category) + f = (self.readonly == other.readonly) + g = (self.tobytes() == other.tobytes()) + return a and b and c and d and e and f and g + + def __ne__(self, other): + eq = (self == other) + return not eq + + def __repr__(self): + return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( + self.__class__.__module__, self.__class__.__name__, + self.mode, self.size[0], self.size[1], + id(self) + ) + + def _repr_png_(self): + """ iPython display hook support + + :returns: png version of the image as bytes + """ + from io import BytesIO + b = BytesIO() + self.save(b, 'PNG') + return b.getvalue() + + def __getattr__(self, name): + if name == "__array_interface__": + # numpy array interface support + new = {} + shape, typestr = _conv_type_shape(self) + new['shape'] = shape + new['typestr'] = typestr + new['data'] = self.tobytes() + new['version'] = 3 + return new + raise AttributeError(name) + + def __getstate__(self): + return [ + self.info, + self.mode, + self.size, + self.getpalette(), + self.tobytes()] + + def __setstate__(self, state): + Image.__init__(self) + self.tile = [] + info, mode, size, palette, data = state + self.info = info + self.mode = mode + self.size = size + self.im = core.new(mode, size) + if mode in ("L", "P") and palette: + self.putpalette(palette) + self.frombytes(data) + + def tobytes(self, encoder_name="raw", *args): + """ + Return image as a bytes object. + + .. warning:: + + This method returns the raw image data from the internal + storage. For compressed image data (e.g. PNG, JPEG) use + :meth:`~.save`, with a BytesIO parameter for in-memory + data. + + :param encoder_name: What encoder to use. The default is to + use the standard "raw" encoder. + :param args: Extra arguments to the encoder. + :rtype: A bytes object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if encoder_name == "raw" and args == (): + args = self.mode + + self.load() + + # unpack data + e = _getencoder(self.mode, encoder_name, args) + e.setimage(self.im) + + bufsize = max(65536, self.size[0] * 4) # see RawEncode.c + + data = [] + while True: + l, s, d = e.encode(bufsize) + data.append(d) + if s: + break + if s < 0: + raise RuntimeError("encoder error %d in tobytes" % s) + + return b"".join(data) + + def tostring(self, *args, **kw): + raise Exception("tostring() has been removed. " + + "Please call tobytes() instead.") + + def tobitmap(self, name="image"): + """ + Returns the image converted to an X11 bitmap. + + .. note:: This method only works for mode "1" images. + + :param name: The name prefix to use for the bitmap variables. + :returns: A string containing an X11 bitmap. + :raises ValueError: If the mode is not "1" + """ + + self.load() + if self.mode != "1": + raise ValueError("not a bitmap") + data = self.tobytes("xbm") + return b"".join([ + ("#define %s_width %d\n" % (name, self.size[0])).encode('ascii'), + ("#define %s_height %d\n" % (name, self.size[1])).encode('ascii'), + ("static char %s_bits[] = {\n" % name).encode('ascii'), data, b"};" + ]) + + def frombytes(self, data, decoder_name="raw", *args): + """ + Loads this image with pixel data from a bytes object. 
+ + This method is similar to the :py:func:`~PIL.Image.frombytes` function, + but loads data into this image instead of creating a new image object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + # default format + if decoder_name == "raw" and args == (): + args = self.mode + + # unpack data + d = _getdecoder(self.mode, decoder_name, args) + d.setimage(self.im) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") + + def fromstring(self, *args, **kw): + raise Exception("fromstring() has been removed. " + + "Please call frombytes() instead.") + + def load(self): + """ + Allocates storage for the image and loads the pixel data. In + normal cases, you don't need to call this method, since the + Image class automatically loads an opened image when it is + accessed for the first time. This method will close the file + associated with the image. + + :returns: An image access object. + :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` + """ + if self.im and self.palette and self.palette.dirty: + # realize palette + self.im.putpalette(*self.palette.getdata()) + self.palette.dirty = 0 + self.palette.mode = "RGB" + self.palette.rawmode = None + if "transparency" in self.info: + if isinstance(self.info["transparency"], int): + self.im.putpalettealpha(self.info["transparency"], 0) + else: + self.im.putpalettealphas(self.info["transparency"]) + self.palette.mode = "RGBA" + + if self.im: + if HAS_CFFI and USE_CFFI_ACCESS: + if self.pyaccess: + return self.pyaccess + from PIL import PyAccess + self.pyaccess = PyAccess.new(self, self.readonly) + if self.pyaccess: + return self.pyaccess + return self.im.pixel_access(self.readonly) + + def verify(self): + """ + Verifies the contents of a file. For data read from a file, this + method attempts to determine if the file is broken, without + actually decoding the image data. If this method finds any + problems, it raises suitable exceptions. If you need to load + the image after using this method, you must reopen the image + file. + """ + pass + + def convert(self, mode=None, matrix=None, dither=None, + palette=WEB, colors=256): + """ + Returns a converted copy of this image. For the "P" mode, this + method translates pixels through the palette. If mode is + omitted, a mode is chosen so that all information in the image + and the palette can be represented without a palette. + + The current version supports all possible conversions between + "L", "RGB" and "CMYK." The **matrix** argument only supports "L" + and "RGB". + + When translating a color image to black and white (mode "L"), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + + The default method of converting a greyscale ("L") or "RGB" + image into a bilevel (mode "1") image uses Floyd-Steinberg + dither to approximate the original image luminosity levels. If + dither is NONE, all non-zero values are set to 255 (white). To + use other thresholds, use the :py:meth:`~PIL.Image.Image.point` + method. + + :param mode: The requested mode. See: :ref:`concept-modes`. + :param matrix: An optional conversion matrix. If given, this + should be 4- or 12-tuple containing floating point values. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are NONE or FLOYDSTEINBERG (default). 
+ :param palette: Palette to use when converting from mode "RGB" + to "P". Available palettes are WEB or ADAPTIVE. + :param colors: Number of colors to use for the ADAPTIVE palette. + Defaults to 256. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if not mode: + # determine default mode + if self.mode == "P": + self.load() + if self.palette: + mode = self.palette.mode + else: + mode = "RGB" + else: + return self.copy() + + self.load() + + if matrix: + # matrix conversion + if mode not in ("L", "RGB"): + raise ValueError("illegal conversion") + im = self.im.convert_matrix(mode, matrix) + return self._new(im) + + if mode == "P" and self.mode == "RGBA": + return self.quantize(colors) + + trns = None + delete_trns = False + # transparency handling + if "transparency" in self.info and \ + self.info['transparency'] is not None: + if self.mode in ('L', 'RGB') and mode == 'RGBA': + # Use transparent conversion to promote from transparent + # color to an alpha channel. + return self._new(self.im.convert_transparent( + mode, self.info['transparency'])) + elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'): + t = self.info['transparency'] + if isinstance(t, bytes): + # Dragons. This can't be represented by a single color + warnings.warn('Palette images with Transparency ' + + ' expressed in bytes should be converted ' + + 'to RGBA images') + delete_trns = True + else: + # get the new transparency color. + # use existing conversions + trns_im = Image()._new(core.new(self.mode, (1, 1))) + if self.mode == 'P': + trns_im.putpalette(self.palette) + trns_im.putpixel((0, 0), t) + + if mode in ('L', 'RGB'): + trns_im = trns_im.convert(mode) + else: + # can't just retrieve the palette number, got to do it + # after quantization. + trns_im = trns_im.convert('RGB') + trns = trns_im.getpixel((0, 0)) + + elif self.mode == 'P' and mode == 'RGBA': + t = self.info['transparency'] + delete_trns = True + + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + raise ValueError("Transparency for P mode should" + + " be bytes or int") + + if mode == "P" and palette == ADAPTIVE: + im = self.im.quantize(colors) + new = self._new(im) + from PIL import ImagePalette + new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB")) + if delete_trns: + # This could possibly happen if we requantize to fewer colors. + # The transparency would be totally off in that case. + del(new.info['transparency']) + if trns is not None: + try: + new.info['transparency'] = new.palette.getcolor(trns) + except: + # if we can't make a transparent color, don't leave the old + # transparency hanging around to mess us up. + del(new.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + return new + + # colorspace conversion + if dither is None: + dither = FLOYDSTEINBERG + + try: + im = self.im.convert(mode, dither) + except ValueError: + try: + # normalize source image and try again + im = self.im.convert(getmodebase(self.mode)) + im = im.convert(mode, dither) + except KeyError: + raise ValueError("illegal conversion") + + new_im = self._new(im) + if delete_trns: + # crash fail if we leave a bytes transparency in an rgb/l mode. 
+ del(new_im.info['transparency']) + if trns is not None: + if new_im.mode == 'P': + try: + new_im.info['transparency'] = new_im.palette.getcolor(trns) + except: + del(new_im.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + else: + new_im.info['transparency'] = trns + return new_im + + def quantize(self, colors=256, method=None, kmeans=0, palette=None): + """ + Convert the image to 'P' mode with the specified number + of colors. + + :param colors: The desired number of colors, <= 256 + :param method: 0 = median cut + 1 = maximum coverage + 2 = fast octree + :param kmeans: Integer + :param palette: Quantize to the :py:class:`PIL.ImagingPalette` palette. + :returns: A new image + + """ + + self.load() + + if method is None: + # defaults: + method = 0 + if self.mode == 'RGBA': + method = 2 + + if self.mode == 'RGBA' and method != 2: + # Caller specified an invalid mode. + raise ValueError('Fast Octree (method == 2) is the ' + + ' only valid method for quantizing RGBA images') + + if palette: + # use palette from reference image + palette.load() + if palette.mode != "P": + raise ValueError("bad mode for palette image") + if self.mode != "RGB" and self.mode != "L": + raise ValueError( + "only RGB or L mode images can be quantized to a palette" + ) + im = self.im.convert("P", 1, palette.im) + return self._makeself(im) + + im = self.im.quantize(colors, method, kmeans) + return self._new(im) + + def copy(self): + """ + Copies this image. Use this method if you wish to paste things + into an image, but still retain the original. + + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + self.load() + im = self.im.copy() + return self._new(im) + + def crop(self, box=None): + """ + Returns a rectangular region from this image. The box is a + 4-tuple defining the left, upper, right, and lower pixel + coordinate. + + This is a lazy operation. Changes to the source image may or + may not be reflected in the cropped image. To break the + connection, call the :py:meth:`~PIL.Image.Image.load` method on + the cropped copy. + + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + if box is None: + return self.copy() + + # lazy operation + return _ImageCrop(self, box) + + def draft(self, mode, size): + """ + Configures the image file loader so it returns a version of the + image that as closely as possible matches the given mode and + size. For example, you can use this method to convert a color + JPEG to greyscale while loading it, or to extract a 128x192 + version from a PCD file. + + Note that this method modifies the :py:class:`~PIL.Image.Image` object + in place. If the image has already been loaded, this method has no + effect. + + :param mode: The requested mode. + :param size: The requested size. + """ + pass + + def _expand(self, xmargin, ymargin=None): + if ymargin is None: + ymargin = xmargin + self.load() + return self._new(self.im.expand(xmargin, ymargin, 0)) + + def filter(self, filter): + """ + Filters this image using the given filter. For a list of + available filters, see the :py:mod:`~PIL.ImageFilter` module. + + :param filter: Filter kernel. + :returns: An :py:class:`~PIL.Image.Image` object. 
""" + + self.load() + + if isinstance(filter, collections.Callable): + filter = filter() + if not hasattr(filter, "filter"): + raise TypeError("filter argument should be ImageFilter.Filter " + + "instance or class") + + if self.im.bands == 1: + return self._new(filter.filter(self.im)) + # fix to handle multiband images since _imaging doesn't + ims = [] + for c in range(self.im.bands): + ims.append(self._new(filter.filter(self.im.getband(c)))) + return merge(self.mode, ims) + + def getbands(self): + """ + Returns a tuple containing the name of each band in this image. + For example, **getbands** on an RGB image returns ("R", "G", "B"). + + :returns: A tuple containing band names. + :rtype: tuple + """ + return ImageMode.getmode(self.mode).bands + + def getbbox(self): + """ + Calculates the bounding box of the non-zero regions in the + image. + + :returns: The bounding box is returned as a 4-tuple defining the + left, upper, right, and lower pixel coordinate. If the image + is completely empty, this method returns None. + + """ + + self.load() + return self.im.getbbox() + + def getcolors(self, maxcolors=256): + """ + Returns a list of colors used in this image. + + :param maxcolors: Maximum number of colors. If this number is + exceeded, this method returns None. The default limit is + 256 colors. + :returns: An unsorted list of (count, pixel) values. + """ + + self.load() + if self.mode in ("1", "L", "P"): + h = self.im.histogram() + out = [] + for i in range(256): + if h[i]: + out.append((h[i], i)) + if len(out) > maxcolors: + return None + return out + return self.im.getcolors(maxcolors) + + def getdata(self, band=None): + """ + Returns the contents of this image as a sequence object + containing pixel values. The sequence object is flattened, so + that values for line one follow directly after the values of + line zero, and so on. + + Note that the sequence object returned by this method is an + internal PIL data type, which only supports certain sequence + operations. To convert it to an ordinary sequence (e.g. for + printing), use **list(im.getdata())**. + + :param band: What band to return. The default is to return + all bands. To return a single band, pass in the index + value (e.g. 0 to get the "R" band from an "RGB" image). + :returns: A sequence-like object. + """ + + self.load() + if band is not None: + return self.im.getband(band) + return self.im # could be abused + + def getextrema(self): + """ + Gets the the minimum and maximum pixel values for each band in + the image. + + :returns: For a single-band image, a 2-tuple containing the + minimum and maximum pixel value. For a multi-band image, + a tuple containing one 2-tuple for each band. + """ + + self.load() + if self.im.bands > 1: + extrema = [] + for i in range(self.im.bands): + extrema.append(self.im.getband(i).getextrema()) + return tuple(extrema) + return self.im.getextrema() + + def getim(self): + """ + Returns a capsule that points to the internal image memory. + + :returns: A capsule object. + """ + + self.load() + return self.im.ptr + + def getpalette(self): + """ + Returns the image palette as a list. + + :returns: A list of color values [r, g, b, ...], or None if the + image has no palette. + """ + + self.load() + try: + if bytes is str: + return [i8(c) for c in self.im.getpalette()] + else: + return list(self.im.getpalette()) + except ValueError: + return None # no palette + + def getpixel(self, xy): + """ + Returns the pixel value at a given position. + + :param xy: The coordinate, given as (x, y). 
+ :returns: The pixel value. If the image is a multi-layer image, + this method returns a tuple. + """ + + self.load() + if self.pyaccess: + return self.pyaccess.getpixel(xy) + return self.im.getpixel(xy) + + def getprojection(self): + """ + Get projection to x and y axes + + :returns: Two sequences, indicating where there are non-zero + pixels along the X-axis and the Y-axis, respectively. + """ + + self.load() + x, y = self.im.getprojection() + return [i8(c) for c in x], [i8(c) for c in y] + + def histogram(self, mask=None, extrema=None): + """ + Returns a histogram for the image. The histogram is returned as + a list of pixel counts, one for each pixel value in the source + image. If the image has more than one band, the histograms for + all bands are concatenated (for example, the histogram for an + "RGB" image contains 768 values). + + A bilevel image (mode "1") is treated as a greyscale ("L") image + by this method. + + If a mask is provided, the method returns a histogram for those + parts of the image where the mask image is non-zero. The mask + image must have the same size as the image, and be either a + bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :returns: A list containing pixel counts. + """ + self.load() + if mask: + mask.load() + return self.im.histogram((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.histogram(extrema) + return self.im.histogram() + + def offset(self, xoffset, yoffset=None): + raise Exception("offset() has been removed. " + + "Please call ImageChops.offset() instead.") + + def paste(self, im, box=None, mask=None): + """ + Pastes another image into this image. The box argument is either + a 2-tuple giving the upper left corner, a 4-tuple defining the + left, upper, right, and lower pixel coordinate, or None (same as + (0, 0)). If a 4-tuple is given, the size of the pasted image + must match the size of the region. + + If the modes don't match, the pasted image is converted to the mode of + this image (see the :py:meth:`~PIL.Image.Image.convert` method for + details). + + Instead of an image, the source can be a integer or tuple + containing pixel values. The method then fills the region + with the given color. When creating RGB images, you can + also use color strings as supported by the ImageColor module. + + If a mask is given, this method updates only the regions + indicated by the mask. You can use either "1", "L" or "RGBA" + images (in the latter case, the alpha band is used as mask). + Where the mask is 255, the given image is copied as is. Where + the mask is 0, the current value is preserved. Intermediate + values will mix the two images together, including their alpha + channels if they have them. + + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. + + :param im: Source image or pixel value (integer or tuple). + :param box: An optional 4-tuple giving the region to paste into. + If a 2-tuple is used instead, it's treated as the upper left + corner. If omitted or None, the source is pasted into the + upper left corner. + + If an image is given as the second argument and there is no + third, the box defaults to (0, 0), and the second argument + is interpreted as a mask image. + :param mask: An optional mask image. 
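For the histogram() and getpixel() behaviour described above, a brief sketch with a placeholder file name:

.. code-block:: python

    from PIL import Image

    im = Image.open("photo.png")                     # placeholder file name
    counts = im.histogram()                          # 768 values for an RGB image
    top_left = im.getpixel((0, 0))                   # tuple for multi-band modes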
+ """ + + if isImageType(box) and mask is None: + # abbreviated paste(im, mask) syntax + mask = box + box = None + + if box is None: + # cover all of self + box = (0, 0) + self.size + + if len(box) == 2: + # lower left corner given; get size from image or mask + if isImageType(im): + size = im.size + elif isImageType(mask): + size = mask.size + else: + # FIXME: use self.size here? + raise ValueError( + "cannot determine region size; use 4-item box" + ) + box = box + (box[0]+size[0], box[1]+size[1]) + + if isStringType(im): + from PIL import ImageColor + im = ImageColor.getcolor(im, self.mode) + + elif isImageType(im): + im.load() + if self.mode != im.mode: + if self.mode != "RGB" or im.mode not in ("RGBA", "RGBa"): + # should use an adapter for this! + im = im.convert(self.mode) + im = im.im + + self.load() + if self.readonly: + self._copy() + + if mask: + mask.load() + self.im.paste(im, box, mask.im) + else: + self.im.paste(im, box) + + def point(self, lut, mode=None): + """ + Maps this image through a lookup table or function. + + :param lut: A lookup table, containing 256 (or 65336 if + self.mode=="I" and mode == "L") values per band in the + image. A function can be used instead, it should take a + single argument. The function is called once for each + possible pixel value, and the resulting table is applied to + all bands of the image. + :param mode: Output mode (default is same as input). In the + current version, this can only be used if the source image + has mode "L" or "P", and the output has mode "1" or the + source image mode is "I" and the output mode is "L". + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if isinstance(lut, ImagePointHandler): + return lut.point(self) + + if callable(lut): + # if it isn't a list, it should be a function + if self.mode in ("I", "I;16", "F"): + # check if the function can be used with point_transform + # UNDONE wiredfool -- I think this prevents us from ever doing + # a gamma function point transform on > 8bit images. + scale, offset = _getscaleoffset(lut) + return self._new(self.im.point_transform(scale, offset)) + # for other modes, convert the function to a table + lut = [lut(i) for i in range(256)] * self.im.bands + + if self.mode == "F": + # FIXME: _imaging returns a confusing error message for this case + raise ValueError("point operation not supported for this mode") + + return self._new(self.im.point(lut, mode)) + + def putalpha(self, alpha): + """ + Adds or replaces the alpha layer in this image. If the image + does not have an alpha layer, it's converted to "LA" or "RGBA". + The new layer must be either "L" or "1". + + :param alpha: The new alpha layer. This can either be an "L" or "1" + image having the same size as this image, or an integer or + other color value. 
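A common idiom for paste() with an alpha mask, plus a point() lookup function, as described above; the file names are placeholders:

.. code-block:: python

    from PIL import Image

    base = Image.open("background.png").convert("RGBA")   # placeholder file names
    logo = Image.open("logo.png").convert("RGBA")
    base.paste(logo, (10, 10), logo)                 # the logo's alpha band is the mask

    negative = base.convert("L").point(lambda v: 255 - v)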
+ """ + + self.load() + if self.readonly: + self._copy() + + if self.mode not in ("LA", "RGBA"): + # attempt to promote self to a matching alpha mode + try: + mode = getmodebase(self.mode) + "A" + try: + self.im.setmode(mode) + self.pyaccess = None + except (AttributeError, ValueError): + # do things the hard way + im = self.im.convert(mode) + if im.mode not in ("LA", "RGBA"): + raise ValueError # sanity check + self.im = im + self.pyaccess = None + self.mode = self.im.mode + except (KeyError, ValueError): + raise ValueError("illegal image mode") + + if self.mode == "LA": + band = 1 + else: + band = 3 + + if isImageType(alpha): + # alpha layer + if alpha.mode not in ("1", "L"): + raise ValueError("illegal image mode") + alpha.load() + if alpha.mode == "1": + alpha = alpha.convert("L") + else: + # constant alpha + try: + self.im.fillband(band, alpha) + except (AttributeError, ValueError): + # do things the hard way + alpha = new("L", self.size, alpha) + else: + return + + self.im.putband(alpha.im, band) + + def putdata(self, data, scale=1.0, offset=0.0): + """ + Copies pixel data to this image. This method copies data from a + sequence object into the image, starting at the upper left + corner (0, 0), and continuing until either the image or the + sequence ends. The scale and offset values are used to adjust + the sequence values: **pixel = value*scale + offset**. + + :param data: A sequence object. + :param scale: An optional scale value. The default is 1.0. + :param offset: An optional offset value. The default is 0.0. + """ + + self.load() + if self.readonly: + self._copy() + + self.im.putdata(data, scale, offset) + + def putpalette(self, data, rawmode="RGB"): + """ + Attaches a palette to this image. The image must be a "P" or + "L" image, and the palette sequence must contain 768 integer + values, where each group of three values represent the red, + green, and blue values for the corresponding pixel + index. Instead of an integer sequence, you can use an 8-bit + string. + + :param data: A palette sequence (either a list or a string). + """ + from PIL import ImagePalette + + if self.mode not in ("L", "P"): + raise ValueError("illegal image mode") + self.load() + if isinstance(data, ImagePalette.ImagePalette): + palette = ImagePalette.raw(data.rawmode, data.palette) + else: + if not isinstance(data, bytes): + if bytes is str: + data = "".join(chr(x) for x in data) + else: + data = bytes(data) + palette = ImagePalette.raw(rawmode, data) + self.mode = "P" + self.palette = palette + self.palette.mode = "RGB" + self.load() # install new palette + + def putpixel(self, xy, value): + """ + Modifies the pixel at the given position. The color is given as + a single numerical value for single-band images, and a tuple for + multi-band images. + + Note that this method is relatively slow. For more extensive changes, + use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` + module instead. + + See: + + * :py:meth:`~PIL.Image.Image.paste` + * :py:meth:`~PIL.Image.Image.putdata` + * :py:mod:`~PIL.ImageDraw` + + :param xy: The pixel coordinate, given as (x, y). + :param value: The pixel value. + """ + + self.load() + if self.readonly: + self._copy() + self.pyaccess = None + self.load() + + if self.pyaccess: + return self.pyaccess.putpixel(xy, value) + return self.im.putpixel(xy, value) + + def resize(self, size, resample=NEAREST): + """ + Returns a resized copy of this image. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). 
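A sketch of the in-place pixel writers covered above (putalpha, putdata, putpixel); names are illustrative:

.. code-block:: python

    from PIL import Image

    im = Image.open("photo.png").convert("RGB")      # placeholder file name
    im.putalpha(128)                                 # promotes in place to RGBA

    tiny = Image.new("L", (2, 2))
    tiny.putdata([0, 64, 128, 255])
    tiny.putpixel((0, 0), 255)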
+ :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation), + :py:attr:`PIL.Image.BICUBIC` (cubic spline interpolation), or + :py:attr:`PIL.Image.LANCZOS` (a high-quality downsampling filter). + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS): + raise ValueError("unknown resampling filter") + + self.load() + + size = tuple(size) + if self.size == size: + return self._new(self.im) + + if self.mode in ("1", "P"): + resample = NEAREST + + if self.mode == 'RGBA': + return self.convert('RGBa').resize(size, resample).convert('RGBA') + + return self._new(self.im.resize(size, resample)) + + def rotate(self, angle, resample=NEAREST, expand=0): + """ + Returns a rotated copy of this image. This method returns a + copy of this image, rotated the given number of degrees counter + clockwise around its centre. + + :param angle: In degrees counter clockwise. + :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` + (cubic spline interpolation in a 4x4 environment). + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. + :param expand: Optional expansion flag. If true, expands the output + image to make it large enough to hold the entire rotated image. + If false or omitted, make the output image the same size as the + input image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if expand: + import math + angle = -angle * math.pi / 180 + matrix = [ + math.cos(angle), math.sin(angle), 0.0, + -math.sin(angle), math.cos(angle), 0.0 + ] + + def transform(x, y, matrix=matrix): + (a, b, c, d, e, f) = matrix + return a*x + b*y + c, d*x + e*y + f + + # calculate output size + w, h = self.size + xx = [] + yy = [] + for x, y in ((0, 0), (w, 0), (w, h), (0, h)): + x, y = transform(x, y) + xx.append(x) + yy.append(y) + w = int(math.ceil(max(xx)) - math.floor(min(xx))) + h = int(math.ceil(max(yy)) - math.floor(min(yy))) + + # adjust center + x, y = transform(w / 2.0, h / 2.0) + matrix[2] = self.size[0] / 2.0 - x + matrix[5] = self.size[1] / 2.0 - y + + return self.transform((w, h), AFFINE, matrix, resample) + + if resample not in (NEAREST, BILINEAR, BICUBIC): + raise ValueError("unknown resampling filter") + + self.load() + + if self.mode in ("1", "P"): + resample = NEAREST + + return self._new(self.im.rotate(angle, resample, expand)) + + def save(self, fp, format=None, **params): + """ + Saves this image under the given filename. If no format is + specified, the format to use is determined from the filename + extension, if possible. + + Keyword options can be used to provide additional instructions + to the writer. If a writer doesn't recognise an option, it is + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. + + You can use a file object instead of a filename. In this case, + you must always specify the format. The file object must + implement the ``seek``, ``tell``, and ``write`` + methods, and be opened in binary mode. + + :param fp: A filename (string), pathlib.Path object or file object. 
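A minimal sketch of resize() and rotate() with explicit resampling filters, as documented above; the input file is hypothetical:

.. code-block:: python

    from PIL import Image

    im = Image.open("photo.png")                     # placeholder file name
    small = im.resize((320, 240), resample=Image.LANCZOS)
    turned = im.rotate(30, resample=Image.BICUBIC, expand=True)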
+ :param format: Optional format override. If omitted, the + format to use is determined from the filename extension. + If a file object was used instead of a filename, this + parameter should always be used. + :param options: Extra parameters to the image writer. + :returns: None + :exception KeyError: If the output format could not be determined + from the file name. Use the format option to solve this. + :exception IOError: If the file could not be written. The file + may have been created, and may contain partial data. + """ + + filename = "" + open_fp = False + if isPath(fp): + filename = fp + open_fp = True + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp.resolve()) + open_fp = True + elif hasattr(fp, "name") and isPath(fp.name): + # only set the name for metadata purposes + filename = fp.name + + # may mutate self! + self.load() + + save_all = False + if 'save_all' in params: + save_all = params['save_all'] + del params['save_all'] + self.encoderinfo = params + self.encoderconfig = () + + preinit() + + ext = os.path.splitext(filename)[1].lower() + + if not format: + if ext not in EXTENSION: + init() + format = EXTENSION[ext] + + if format.upper() not in SAVE: + init() + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] + + if open_fp: + fp = builtins.open(filename, "wb") + + try: + save_handler(self, fp, filename) + finally: + # do what we can to clean up + if open_fp: + fp.close() + + def seek(self, frame): + """ + Seeks to the given frame in this sequence file. If you seek + beyond the end of the sequence, the method raises an + **EOFError** exception. When a sequence file is opened, the + library automatically seeks to frame 0. + + Note that in the current version of the library, most sequence + formats only allows you to seek to the next frame. + + See :py:meth:`~PIL.Image.Image.tell`. + + :param frame: Frame number, starting at 0. + :exception EOFError: If the call attempts to seek beyond the end + of the sequence. + """ + + # overridden by file handlers + if frame != 0: + raise EOFError + + def show(self, title=None, command=None): + """ + Displays this image. This method is mainly intended for + debugging purposes. + + On Unix platforms, this method saves the image to a temporary + PPM file, and calls the **xv** utility. + + On Windows, it saves the image to a temporary BMP file, and uses + the standard BMP display utility to show it (usually Paint). + + :param title: Optional title to use for the image window, + where possible. + :param command: command used to show the image + """ + + _show(self, title=title, command=command) + + def split(self): + """ + Split this image into individual bands. This method returns a + tuple of individual image bands from an image. For example, + splitting an "RGB" image creates three new images each + containing a copy of one of the original bands (red, green, + blue). + + :returns: A tuple containing bands. + """ + + self.load() + if self.im.bands == 1: + ims = [self.copy()] + else: + ims = [] + for i in range(self.im.bands): + ims.append(self._new(self.im.getband(i))) + return tuple(ims) + + def tell(self): + """ + Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`. + + :returns: Frame number, starting with 0. + """ + return 0 + + def thumbnail(self, size, resample=BICUBIC): + """ + Make this image into a thumbnail. 
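A sketch of save() with a filename, save() to a file object, and seek() on a multi-frame file, per the docstrings above; all file names are placeholders:

.. code-block:: python

    import io
    from PIL import Image

    im = Image.open("photo.png")
    im.convert("RGB").save("photo.jpg", quality=90)  # format inferred from ".jpg"

    buf = io.BytesIO()
    im.save(buf, format="PNG")                       # file objects need an explicit format

    frames = Image.open("animation.gif")
    frames.seek(1)                                   # EOFError when seeking past the end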
This method modifies the + image to contain a thumbnail version of itself, no larger than + the given size. This method calculates an appropriate thumbnail + size to preserve the aspect of the image, calls the + :py:meth:`~PIL.Image.Image.draft` method to configure the file reader + (where applicable), and finally resizes the image. + + Note that this function modifies the :py:class:`~PIL.Image.Image` + object in place. If you need to use the full resolution image as well, + apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original + image. + + :param size: Requested size. + :param resample: Optional resampling filter. This can be one + of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`, + :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`. + If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`. + (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0) + :returns: None + """ + + # preserve aspect ratio + x, y = self.size + if x > size[0]: + y = int(max(y * size[0] / x, 1)) + x = int(size[0]) + if y > size[1]: + x = int(max(x * size[1] / y, 1)) + y = int(size[1]) + size = x, y + + if size == self.size: + return + + self.draft(None, size) + + im = self.resize(size, resample) + + self.im = im.im + self.mode = im.mode + self.size = size + + self.readonly = 0 + self.pyaccess = None + + # FIXME: the different transform methods need further explanation + # instead of bloating the method docs, add a separate chapter. + def transform(self, size, method, data=None, resample=NEAREST, fill=1): + """ + Transforms this image. This method creates a new image with the + given size, and the same mode as the original, and copies data + to the new image using the given transform. + + :param size: The output size. + :param method: The transformation method. This is one of + :py:attr:`PIL.Image.EXTENT` (cut out a rectangular subregion), + :py:attr:`PIL.Image.AFFINE` (affine transform), + :py:attr:`PIL.Image.PERSPECTIVE` (perspective transform), + :py:attr:`PIL.Image.QUAD` (map a quadrilateral to a rectangle), or + :py:attr:`PIL.Image.MESH` (map a number of source quadrilaterals + in one operation). + :param data: Extra data to the transformation method. + :param resample: Optional resampling filter. It can be one of + :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` (cubic spline + interpolation in a 4x4 environment). If omitted, or if the image + has mode "1" or "P", it is set to :py:attr:`PIL.Image.NEAREST`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if self.mode == 'RGBA': + return self.convert('RGBa').transform( + size, method, data, resample, fill).convert('RGBA') + + if isinstance(method, ImageTransformHandler): + return method.transform(size, self, resample=resample, fill=fill) + if hasattr(method, "getdata"): + # compatibility w. old-style transform objects + method, data = method.getdata() + if data is None: + raise ValueError("missing method data") + + im = new(self.mode, size, None) + if method == MESH: + # list of quads + for box, quad in data: + im.__transformer(box, self, QUAD, quad, resample, fill) + else: + im.__transformer((0, 0)+size, self, method, data, resample, fill) + + return im + + def __transformer(self, box, image, method, data, + resample=NEAREST, fill=1): + + # FIXME: this should be turned into a lazy operation (?) 
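A short sketch of thumbnail() (in-place) and a simple EXTENT transform(), as described above; file names are illustrative:

.. code-block:: python

    from PIL import Image

    im = Image.open("photo.png")                     # placeholder file names
    im.thumbnail((128, 128))                         # in place, aspect ratio preserved
    im.save("thumb.png")

    zoomed = im.transform((64, 64), Image.EXTENT, (0, 0, 32, 32))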
+ + w = box[2]-box[0] + h = box[3]-box[1] + + if method == AFFINE: + # change argument order to match implementation + data = (data[2], data[0], data[1], + data[5], data[3], data[4]) + elif method == EXTENT: + # convert extent to an affine transform + x0, y0, x1, y1 = data + xs = float(x1 - x0) / w + ys = float(y1 - y0) / h + method = AFFINE + data = (x0 + xs/2, xs, 0, y0 + ys/2, 0, ys) + elif method == PERSPECTIVE: + # change argument order to match implementation + data = (data[2], data[0], data[1], + data[5], data[3], data[4], + data[6], data[7]) + elif method == QUAD: + # quadrilateral warp. data specifies the four corners + # given as NW, SW, SE, and NE. + nw = data[0:2] + sw = data[2:4] + se = data[4:6] + ne = data[6:8] + x0, y0 = nw + As = 1.0 / w + At = 1.0 / h + data = (x0, (ne[0]-x0)*As, (sw[0]-x0)*At, + (se[0]-sw[0]-ne[0]+x0)*As*At, + y0, (ne[1]-y0)*As, (sw[1]-y0)*At, + (se[1]-sw[1]-ne[1]+y0)*As*At) + else: + raise ValueError("unknown transformation method") + + if resample not in (NEAREST, BILINEAR, BICUBIC): + raise ValueError("unknown resampling filter") + + image.load() + + self.load() + + if image.mode in ("1", "P"): + resample = NEAREST + + self.im.transform2(box, image.im, method, data, resample, fill) + + def transpose(self, method): + """ + Transpose image (flip or rotate in 90 degree steps) + + :param method: One of :py:attr:`PIL.Image.FLIP_LEFT_RIGHT`, + :py:attr:`PIL.Image.FLIP_TOP_BOTTOM`, :py:attr:`PIL.Image.ROTATE_90`, + :py:attr:`PIL.Image.ROTATE_180`, :py:attr:`PIL.Image.ROTATE_270` or + :py:attr:`PIL.Image.TRANSPOSE`. + :returns: Returns a flipped or rotated copy of this image. + """ + + self.load() + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance): + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + im = self.im.effect_spread(distance) + return self._new(im) + + def toqimage(self): + """Returns a QImage copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqpixmap(self) + + +# -------------------------------------------------------------------- +# Lazy operations + +class _ImageCrop(Image): + + def __init__(self, im, box): + + Image.__init__(self) + + x0, y0, x1, y1 = box + if x1 < x0: + x1 = x0 + if y1 < y0: + y1 = y0 + + self.mode = im.mode + self.size = x1-x0, y1-y0 + + self.__crop = x0, y0, x1, y1 + + self.im = im.im + + def load(self): + + # lazy evaluation! + if self.__crop: + self.im = self.im.crop(self.__crop) + self.__crop = None + + if self.im: + return self.im.pixel_access(self.readonly) + + # FIXME: future versions should optimize crop/paste + # sequences! + + +# -------------------------------------------------------------------- +# Abstract handlers. 
+ +class ImagePointHandler(object): + # used as a mixin by point transforms (for use with im.point) + pass + + +class ImageTransformHandler(object): + # used as a mixin by geometry transforms (for use with im.transform) + pass + + +# -------------------------------------------------------------------- +# Factories + +# +# Debugging + +def _wedge(): + "Create greyscale wedge (for debugging only)" + + return Image()._new(core.wedge("L")) + + +def new(mode, size, color=0): + """ + Creates a new image with the given mode and size. + + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. + :param size: A 2-tuple, containing (width, height) in pixels. + :param color: What color to use for the image. Default is black. + If given, this should be a single integer or floating point value + for single-band modes, and a tuple for multi-band modes (one value + per band). When creating RGB images, you can also use color + strings as supported by the ImageColor module. If the color is + None, the image is not initialised. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if color is None: + # don't initialize + return Image()._new(core.new(mode, size)) + + if isStringType(color): + # css3-style specifier + + from PIL import ImageColor + color = ImageColor.getcolor(color, mode) + + return Image()._new(core.fill(mode, size, color)) + + +def frombytes(mode, size, data, decoder_name="raw", *args): + """ + Creates a copy of an image memory from pixel data in a buffer. + + In its simplest form, this function takes three arguments + (mode, size, and unpacked pixel data). + + You can also use any pixel decoder supported by PIL. For more + information on available decoders, see the section + :ref:`Writing Your Own File Decoder `. + + Note that this function decodes pixel data only, not entire images. + If you have an entire image in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load + it. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A byte buffer containing raw data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw" and args == (): + args = mode + + im = new(mode, size) + im.frombytes(data, decoder_name, args) + return im + + +def fromstring(*args, **kw): + raise Exception("fromstring() has been removed. " + + "Please call frombytes() instead.") + + +def frombuffer(mode, size, data, decoder_name="raw", *args): + """ + Creates an image memory referencing pixel data in a byte buffer. + + This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data + in the byte buffer, where possible. This means that changes to the + original buffer object are reflected in this image). Not all modes can + share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK". + + Note that this function decodes pixel data only, not entire images. + If you have an entire image file in a string, wrap it in a + **BytesIO** object, and use :py:func:`~PIL.Image.open` to load it. + + In the current version, the default parameters used for the "raw" decoder + differs from that used for :py:func:`~PIL.Image.fromstring`. This is a + bug, and will probably be fixed in a future release. 
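A minimal sketch of the new() and frombytes() factories defined above; the color and byte values are arbitrary examples:

.. code-block:: python

    from PIL import Image

    red = Image.new("RGB", (64, 64), "red")          # CSS3-style color string
    raw = bytes([0, 64, 128, 255])
    grey = Image.frombytes("L", (2, 2), raw)         # default "raw" decoder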
The current release + issues a warning if you do this; to disable the warning, you should provide + the full set of parameters. See below for details. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A bytes or other buffer object containing raw + data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. For the + default encoder ("raw"), it's recommended that you provide the + full set of parameters:: + + frombuffer(mode, size, data, "raw", mode, 0, 1) + + :returns: An :py:class:`~PIL.Image.Image` object. + + .. versionadded:: 1.1.4 + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw": + if args == (): + warnings.warn( + "the frombuffer defaults may change in a future release; " + "for portability, change the call to read:\n" + " frombuffer(mode, size, data, 'raw', mode, 0, 1)", + RuntimeWarning, stacklevel=2 + ) + args = mode, 0, -1 # may change to (mode, 0, 1) post-1.1.6 + if args[0] in _MAPMODES: + im = new(mode, (1, 1)) + im = im._new( + core.map_buffer(data, size, decoder_name, None, 0, args) + ) + im.readonly = 1 + return im + + return frombytes(mode, size, data, decoder_name, args) + + +def fromarray(obj, mode=None): + """ + Creates an image memory from an object exporting the array interface + (using the buffer protocol). + + If obj is not contiguous, then the tobytes method is called + and :py:func:`~PIL.Image.frombuffer` is used. + + :param obj: Object with array interface + :param mode: Mode to use (will be determined from type if None) + See: :ref:`concept-modes`. + :returns: An image object. + + .. versionadded:: 1.1.6 + """ + arr = obj.__array_interface__ + shape = arr['shape'] + ndim = len(shape) + try: + strides = arr['strides'] + except KeyError: + strides = None + if mode is None: + try: + typekey = (1, 1) + shape[2:], arr['typestr'] + mode, rawmode = _fromarray_typemap[typekey] + except KeyError: + # print typekey + raise TypeError("Cannot handle this data type") + else: + rawmode = mode + if mode in ["1", "L", "I", "P", "F"]: + ndmax = 2 + elif mode == "RGB": + ndmax = 3 + else: + ndmax = 4 + if ndim > ndmax: + raise ValueError("Too many dimensions: %d > %d." 
% (ndim, ndmax)) + + size = shape[1], shape[0] + if strides is not None: + if hasattr(obj, 'tobytes'): + obj = obj.tobytes() + else: + obj = obj.tostring() + + return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqpixmap(im) + +_fromarray_typemap = { + # (shape, typestr) => mode, rawmode + # first two members of shape are set to one + # ((1, 1), "|b1"): ("1", "1"), # broken + ((1, 1), "|u1"): ("L", "L"), + ((1, 1), "|i1"): ("I", "I;8"), + ((1, 1), "i2"): ("I", "I;16B"), + ((1, 1), "i4"): ("I", "I;32B"), + ((1, 1), "f4"): ("F", "F;32BF"), + ((1, 1), "f8"): ("F", "F;64BF"), + ((1, 1, 3), "|u1"): ("RGB", "RGB"), + ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), + } + +# shortcuts +_fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I") +_fromarray_typemap[((1, 1), _ENDIAN + "f4")] = ("F", "F") + + +def _decompression_bomb_check(size): + if MAX_IMAGE_PIXELS is None: + return + + pixels = size[0] * size[1] + + if pixels > MAX_IMAGE_PIXELS: + warnings.warn( + "Image size (%d pixels) exceeds limit of %d pixels, " + "could be decompression bomb DOS attack." % + (pixels, MAX_IMAGE_PIXELS), + DecompressionBombWarning) + + +def open(fp, mode="r"): + """ + Opens and identifies the given image file. + + This is a lazy operation; this function identifies the file, but + the file remains open and the actual image data is not read from + the file until you try to process the data (or call the + :py:meth:`~PIL.Image.Image.load` method). See + :py:func:`~PIL.Image.new`. + + :param fp: A filename (string), pathlib.Path object or a file object. + The file object must implement :py:meth:`~file.read`, + :py:meth:`~file.seek`, and :py:meth:`~file.tell` methods, + and be opened in binary mode. + :param mode: The mode. If given, this argument must be "r". + :returns: An :py:class:`~PIL.Image.Image` object. + :exception IOError: If the file cannot be found, or the image cannot be + opened and identified. + """ + + if mode != "r": + raise ValueError("bad mode %r" % mode) + + filename = "" + if isPath(fp): + filename = fp + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp.resolve()) + if filename: + fp = builtins.open(filename, "rb") + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) + + prefix = fp.read(16) + + preinit() + + def _open_core(fp, filename, prefix): + for i in ID: + try: + factory, accept = OPEN[i] + if not accept or accept(prefix): + fp.seek(0) + im = factory(fp, filename) + _decompression_bomb_check(im.size) + return im + except (SyntaxError, IndexError, TypeError, struct.error): + # Leave disabled by default, spams the logs with image + # opening failures that are entirely expected. + # logger.debug("", exc_info=True) + continue + return None + + im = _open_core(fp, filename, prefix) + + if im is None: + if init(): + im = _open_core(fp, filename, prefix) + + if im: + return im + + raise IOError("cannot identify image file %r" + % (filename if filename else fp)) + +# +# Image processing. + + +def alpha_composite(im1, im2): + """ + Alpha composite im2 over im1. 
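A sketch of lazy open() plus fromarray(), per the functions above; NumPy is an assumption of the example and is not part of this patch, and the file name is a placeholder:

.. code-block:: python

    import numpy as np                               # assumed dependency, for illustration
    from PIL import Image

    im = Image.open("photo.png")                     # lazy: only the header is read
    im.load()                                        # force the pixel data in

    arr = np.zeros((100, 200, 3), dtype=np.uint8)    # rows x cols x bands
    from_arr = Image.fromarray(arr)                  # a 200x100 "RGB" image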
+ + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.alpha_composite(im1.im, im2.im)) + + +def blend(im1, im2, alpha): + """ + Creates a new image by interpolating between two input images, using + a constant alpha.:: + + out = image1 * (1.0 - alpha) + image2 * alpha + + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :param alpha: The interpolation alpha factor. If alpha is 0.0, a + copy of the first image is returned. If alpha is 1.0, a copy of + the second image is returned. There are no restrictions on the + alpha value. If necessary, the result is clipped to fit into + the allowed output range. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.blend(im1.im, im2.im, alpha)) + + +def composite(image1, image2, mask): + """ + Create composite image by blending images using a transparency mask. + + :param image1: The first image. + :param image2: The second image. Must have the same mode and + size as the first image. + :param mask: A mask image. This image can have mode + "1", "L", or "RGBA", and must have the same size as the + other two images. + """ + + image = image2.copy() + image.paste(image1, None, mask) + return image + + +def eval(image, *args): + """ + Applies the function (which should take one argument) to each pixel + in the given image. If the image has more than one band, the same + function is applied to each band. Note that the function is + evaluated once for each possible pixel value, so you cannot use + random components or other generators. + + :param image: The input image. + :param function: A function object, taking one integer argument. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + return image.point(args[0]) + + +def merge(mode, bands): + """ + Merge a set of single band images into a new multiband image. + + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. + :param bands: A sequence containing one single-band image for + each band in the output image. All bands must have the + same size. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if getmodebands(mode) != len(bands) or "*" in mode: + raise ValueError("wrong number of bands") + for im in bands[1:]: + if im.mode != getmodetype(mode): + raise ValueError("mode mismatch") + if im.size != bands[0].size: + raise ValueError("size mismatch") + im = core.new(mode, bands[0].size) + for i in range(getmodebands(mode)): + bands[i].load() + im.putband(bands[i].im, i) + return bands[0]._new(im) + + +# -------------------------------------------------------------------- +# Plugin registry + +def register_open(id, factory, accept=None): + """ + Register an image file plugin. This function should not be used + in application code. + + :param id: An image format identifier. + :param factory: An image file factory method. + :param accept: An optional function that can be used to quickly + reject images having another format. + """ + id = id.upper() + ID.append(id) + OPEN[id] = factory, accept + + +def register_mime(id, mimetype): + """ + Registers an image MIME type. This function should not be used + in application code. + + :param id: An image format identifier. + :param mimetype: The image MIME type for this format. 
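A brief sketch of the module-level blend(), split()/merge(), and composite() helpers documented above; the images are synthetic:

.. code-block:: python

    from PIL import Image

    a = Image.new("RGB", (64, 64), "red")
    b = Image.new("RGB", (64, 64), "blue")
    mix = Image.blend(a, b, 0.5)                     # 50/50 interpolation

    r, g, bl = a.split()
    swapped = Image.merge("RGB", (bl, g, r))

    mask = Image.new("L", (64, 64), 128)
    comp = Image.composite(a, b, mask)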
+ """ + MIME[id.upper()] = mimetype + + +def register_save(id, driver): + """ + Registers an image save function. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE[id.upper()] = driver + + +def register_save_all(id, driver): + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE_ALL[id.upper()] = driver + + +def register_extension(id, extension): + """ + Registers an image extension. This function should not be + used in application code. + + :param id: An image format identifier. + :param extension: An extension used for this format. + """ + EXTENSION[extension.lower()] = id.upper() + + +# -------------------------------------------------------------------- +# Simple display support. User code may override this. + +def _show(image, **options): + # override me, as necessary + _showxv(image, **options) + + +def _showxv(image, title=None, **options): + from PIL import ImageShow + ImageShow.show(image, title, **options) + + +# -------------------------------------------------------------------- +# Effects + +def effect_mandelbrot(size, extent, quality): + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y2). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size, sigma): + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageChops.py b/server/www/packages/packages-darwin/x64/PIL/ImageChops.py new file mode 100644 index 0000000..ba5350e --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageChops.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard channel operations +# +# History: +# 1996-03-24 fl Created +# 1996-08-13 fl Added logical operations (for "1" images) +# 2000-10-12 fl Added offset method (from Image.py) +# +# Copyright (c) 1997-2000 by Secret Labs AB +# Copyright (c) 1996-2000 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +def constant(image, value): + """Fill a channel with a given grey level. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.new("L", image.size, value) + + +def duplicate(image): + """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return image.copy() + + +def invert(image): + """ + Invert an image (channel). + + .. code-block:: python + + out = MAX - image + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image.load() + return image._new(image.im.chop_invert()) + + +def lighter(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the lighter values. + + .. 
code-block:: python + + out = max(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_lighter(image2.im)) + + +def darker(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image + containing the darker values. + + .. code-block:: python + + out = min(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_darker(image2.im)) + + +def difference(image1, image2): + """ + Returns the absolute value of the pixel-by-pixel difference between the two + images. + + .. code-block:: python + + out = abs(image1 - image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_difference(image2.im)) + + +def multiply(image1, image2): + """ + Superimposes two images on top of each other. + + If you multiply an image with a solid black image, the result is black. If + you multiply with a solid white image, the image is unaffected. + + .. code-block:: python + + out = image1 * image2 / MAX + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_multiply(image2.im)) + + +def screen(image1, image2): + """ + Superimposes two inverted images on top of each other. + + .. code-block:: python + + out = MAX - ((MAX - image1) * (MAX - image2) / MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_screen(image2.im)) + + +def add(image1, image2, scale=1.0, offset=0): + """ + Adds two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 + image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add(image2.im, scale, offset)) + + +def subtract(image1, image2, scale=1.0, offset=0): + """ + Subtracts two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 - image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) + + +def add_modulo(image1, image2): + """Add two images, without clipping the result. + + .. code-block:: python + + out = ((image1 + image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add_modulo(image2.im)) + + +def subtract_modulo(image1, image2): + """Subtract two images, without clipping the result. + + .. code-block:: python + + out = ((image1 - image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract_modulo(image2.im)) + + +def logical_and(image1, image2): + """Logical AND between two images. + + .. code-block:: python + + out = ((image1 and image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_and(image2.im)) + + +def logical_or(image1, image2): + """Logical OR between two images. + + .. 
code-block:: python + + out = ((image1 or image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_or(image2.im)) + + +def logical_xor(image1, image2): + """Logical XOR between two images. + + .. code-block:: python + + out = ((bool(image1) != bool(image2)) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_xor(image2.im)) + + +def blend(image1, image2, alpha): + """Blend images using constant transparency weight. Alias for + :py:meth:`PIL.Image.Image.blend`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.blend(image1, image2, alpha) + + +def composite(image1, image2, mask): + """Create composite using transparency mask. Alias for + :py:meth:`PIL.Image.Image.composite`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.composite(image1, image2, mask) + + +def offset(image, xoffset, yoffset=None): + """Returns a copy of the image where data has been offset by the given + distances. Data wraps around the edges. If **yoffset** is omitted, it + is assumed to be equal to **xoffset**. + + :param xoffset: The horizontal distance. + :param yoffset: The vertical distance. If omitted, both + distances are set to the same value. + :rtype: :py:class:`~PIL.Image.Image` + """ + + if yoffset is None: + yoffset = xoffset + image.load() + return image._new(image.im.offset(xoffset, yoffset)) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageCms.py b/server/www/packages/packages-darwin/x64/PIL/ImageCms.py new file mode 100644 index 0000000..ba5504a --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageCms.py @@ -0,0 +1,970 @@ +# The Python Imaging Library. +# $Id$ + +# Optional color managment support, based on Kevin Cazabon's PyCMS +# library. + +# History: + +# 2009-03-08 fl Added to PIL. + +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. + +from __future__ import print_function +import sys + +DESCRIPTION = """ +pyCMS + + a Python / PIL interface to the littleCMS ICC Color Management System + Copyright (C) 2002-2003 Kevin Cazabon + kevin@cazabon.com + http://www.cazabon.com + + pyCMS home page: http://www.cazabon.com/pyCMS + littleCMS home page: http://www.littlecms.com + (littleCMS is Copyright (C) 1998-2001 Marti Maria) + + Originally released under LGPL. Graciously donated to PIL in + March 2009, for distribution under the standard PIL license + + The pyCMS.py module provides a "clean" interface between Python/PIL and + pyCMSdll, taking care of some of the more complex handling of the direct + pyCMSdll functions, as well as error-checking and making sure that all + relevant data is kept together. + + While it is possible to call pyCMSdll functions directly, it's not highly + recommended. + + Version History: + + 1.0.0 pil Oct 2013 Port to LCMS 2. + + 0.1.0 pil mod March 10, 2009 + + Renamed display profile to proof profile. The proof + profile is the profile of the device that is being + simulated, not the profile of the device which is + actually used to display/print the final simulation + (that'd be the output profile) - also see LCMSAPI.txt + input colorspace -> using 'renderingIntent' -> proof + colorspace -> using 'proofRenderingIntent' -> output + colorspace + + Added LCMS FLAGS support. 
+ Added FLAGS["SOFTPROOFING"] as default flag for + buildProofTransform (otherwise the proof profile/intent + would be ignored). + + 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms + + 0.0.2 alpha Jan 6, 2002 + + Added try/except statements around type() checks of + potential CObjects... Python won't let you use type() + on them, and raises a TypeError (stupid, if you ask + me!) + + Added buildProofTransformFromOpenProfiles() function. + Additional fixes in DLL, see DLL code for details. + + 0.0.1 alpha first public release, Dec. 26, 2002 + + Known to-do list with current version (of Python interface, not pyCMSdll): + + none + +""" + +VERSION = "1.0.0 pil" + +# --------------------------------------------------------------------. + +from PIL import Image +try: + from PIL import _imagingcms +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from _util import deferred_error + _imagingcms = deferred_error(ex) +from PIL._util import isStringType + +core = _imagingcms + +# +# intent/direction values + +INTENT_PERCEPTUAL = 0 +INTENT_RELATIVE_COLORIMETRIC = 1 +INTENT_SATURATION = 2 +INTENT_ABSOLUTE_COLORIMETRIC = 3 + +DIRECTION_INPUT = 0 +DIRECTION_OUTPUT = 1 +DIRECTION_PROOF = 2 + +# +# flags + +FLAGS = { + "MATRIXINPUT": 1, + "MATRIXOUTPUT": 2, + "MATRIXONLY": (1 | 2), + "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot + # Don't create prelinearization tables on precalculated transforms + # (internal use): + "NOPRELINEARIZATION": 16, + "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink) + "NOTCACHE": 64, # Inhibit 1-pixel cache + "NOTPRECALC": 256, + "NULLTRANSFORM": 512, # Don't transform anyway + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources + "WHITEBLACKCOMPENSATION": 8192, + "BLACKPOINTCOMPENSATION": 8192, + "GAMUTCHECK": 4096, # Out of Gamut alarm + "SOFTPROOFING": 16384, # Do softproofing + "PRESERVEBLACK": 32768, # Black preservation + "NODEFAULTRESOURCEDEF": 16777216, # CRD special + "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16 # Gridpoints +} + +_MAX_FLAG = 0 +for flag in FLAGS.values(): + if isinstance(flag, int): + _MAX_FLAG = _MAX_FLAG | flag + + +# --------------------------------------------------------------------. +# Experimental PIL-level API +# --------------------------------------------------------------------. + +## +# Profile. + +class ImageCmsProfile(object): + + def __init__(self, profile): + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + + if isStringType(profile): + self._set(core.profile_open(profile), profile) + elif hasattr(profile, "read"): + self._set(core.profile_frombytes(profile.read())) + else: + self._set(profile) # assume it's already a profile + + def _set(self, profile, filename=None): + self.profile = profile + self.filename = filename + if profile: + self.product_name = None # profile.product_name + self.product_info = None # profile.product_info + else: + self.product_name = None + self.product_info = None + + def tobytes(self): + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + + +class ImageCmsTransform(Image.ImagePointHandler): + + # Transform. This can be used with the procedural API, or with the + # standard Image.point() method. 
+ # + # Will return the output profile in the output.info['icc_profile']. + + def __init__(self, input, output, input_mode, output_mode, + intent=INTENT_PERCEPTUAL, proof=None, + proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0): + if proof is None: + self.transform = core.buildTransform( + input.profile, output.profile, + input_mode, output_mode, + intent, + flags + ) + else: + self.transform = core.buildProofTransform( + input.profile, output.profile, proof.profile, + input_mode, output_mode, + intent, proof_intent, + flags + ) + # Note: inputMode and outputMode are for pyCMS compatibility only + self.input_mode = self.inputMode = input_mode + self.output_mode = self.outputMode = output_mode + + self.output_profile = output + + def point(self, im): + return self.apply(im) + + def apply(self, im, imOut=None): + im.load() + if imOut is None: + imOut = Image.new(self.output_mode, im.size, None) + self.transform.apply(im.im.id, imOut.im.id) + imOut.info['icc_profile'] = self.output_profile.tobytes() + return imOut + + def apply_in_place(self, im): + im.load() + if im.mode != self.output_mode: + raise ValueError("mode mismatch") # wrong output mode + self.transform.apply(im.im.id, im.im.id) + im.info['icc_profile'] = self.output_profile.tobytes() + return im + + +def get_display_profile(handle=None): + """ (experimental) Fetches the profile for the current display device. + :returns: None if the profile is not known. + """ + + if sys.platform == "win32": + from PIL import ImageWin + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(handle, 1) + else: + profile = core.get_display_profile_win32(handle or 0) + else: + try: + get = _imagingcms.get_display_profile + except AttributeError: + return None + else: + profile = get() + return ImageCmsProfile(profile) + + +# --------------------------------------------------------------------. +# pyCMS compatible layer +# --------------------------------------------------------------------. + +class PyCMSError(Exception): + + """ (pyCMS) Exception class. + This is used for all errors in the pyCMS API. """ + pass + + +def profileToProfile( + im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL, + outputMode=None, inPlace=0, flags=0): + """ + (pyCMS) Applies an ICC transformation to a given image, mapping from + inputProfile to outputProfile. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. If inPlace == TRUE and outputMode != im.mode, + a PyCMSError will be raised. If an error occurs during application of + the profiles, a PyCMSError will be raised. If outputMode is not a mode + supported by the outputProfile (or by pyCMS), a PyCMSError will be + raised. + + This function applies an ICC transformation to im from inputProfile's + color space to outputProfile's color space using the specified rendering + intent to decide how to handle out-of-gamut colors. + + OutputMode can be used to specify that a color mode conversion is to + be done using these profiles, but the specified profiles must be able + to handle that mode. I.e., if converting im from RGB to CMYK using + profiles, the input profile must handle RGB data, and the output + profile must handle CMYK data. + + :param im: An open PIL image object (i.e. Image.new(...) or + Image.open(...), etc.) 
+ :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this image, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this image, or a profile object + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param outputMode: A valid PIL mode for the output image (i.e. "RGB", + "CMYK", etc.). Note: if rendering the image "inPlace", outputMode + MUST be the same mode as the input, or omitted completely. If + omitted, the outputMode will be the same as the mode of the input + image (im.mode) + :param inPlace: Boolean (1 = True, None or 0 = False). If True, the + original image is modified in-place, and None is returned. If False + (default), a new Image object is returned with the transform applied. + :param flags: Integer (0-...) specifying additional flags + :returns: Either None or a new PIL image object, depending on value of + inPlace + :exception PyCMSError: + """ + + if outputMode is None: + outputMode = im.mode + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + transform = ImageCmsTransform( + inputProfile, outputProfile, im.mode, outputMode, + renderingIntent, flags=flags + ) + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def getOpenProfile(profileFilename): + """ + (pyCMS) Opens an ICC profile file. + + The PyCMSProfile object can be passed back into pyCMS for use in creating + transforms and such (as in ImageCms.buildTransformFromOpenProfiles()). + + If profileFilename is not a vaild filename for an ICC profile, a PyCMSError + will be raised. + + :param profileFilename: String, as a valid filename path to the ICC profile + you wish to open, or a file-like object. + :returns: A CmsProfile class object. + :exception PyCMSError: + """ + + try: + return ImageCmsProfile(profileFilename) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def buildTransform( + inputProfile, outputProfile, inMode, outMode, + renderingIntent=INTENT_PERCEPTUAL, flags=0): + """ + (pyCMS) Builds an ICC transform mapping from the inputProfile to the + outputProfile. Use applyTransform to apply the transform to a given + image. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. If an error occurs during creation of the + transform, a PyCMSError will be raised. + + If inMode or outMode are not a mode supported by the outputProfile (or + by pyCMS), a PyCMSError will be raised. 
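A minimal sketch of getOpenProfile() and profileToProfile() as documented above; the ICC file paths and image name are placeholders:

.. code-block:: python

    from PIL import Image, ImageCms

    im = Image.open("photo.jpg")                         # placeholder file names
    src = ImageCms.getOpenProfile("sRGB.icc")
    dst = ImageCms.getOpenProfile("printer_cmyk.icc")
    cmyk = ImageCms.profileToProfile(im, src, dst, outputMode="CMYK")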
+ + This function builds and returns an ICC transform from the inputProfile + to the outputProfile using the renderingIntent to determine what to do + with out-of-gamut colors. It will ONLY work for converting images that + are in inMode to images that are in outMode color format (PIL mode, + i.e. "RGB", "RGBA", "CMYK", etc.). + + Building the transform is a fair part of the overhead in + ImageCms.profileToProfile(), so if you're planning on converting multiple + images using the same input/output settings, this can save you time. + Once you have a transform object, it can be used with + ImageCms.applyProfile() to convert images without the need to re-compute + the lookup table for the transform. + + The reason pyCMS returns a class object rather than a handle directly + to the transform is that it needs to keep track of the PIL input/output + modes that the transform is meant for. These attributes are stored in + the "inMode" and "outMode" attributes of the object (which can be + manually overridden if you really want to, but I don't know of any + time that would be of use, or would even work). + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. + :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, + renderingIntent, flags=flags) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def buildProofTransform( + inputProfile, outputProfile, proofProfile, inMode, outMode, + renderingIntent=INTENT_PERCEPTUAL, + proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC, + flags=FLAGS["SOFTPROOFING"]): + """ + (pyCMS) Builds an ICC transform mapping from the inputProfile to the + outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device. + + If the input, output, or proof profiles specified are not valid + filenames, a PyCMSError will be raised. + + If an error occurs during creation of the transform, a PyCMSError will + be raised. 
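# --- Editor's illustrative sketch (not part of the vendored diff) ---
# Building the transform once and reusing it for many images, as the
# buildTransform() docstring above recommends; applyTransform() is defined
# further down in this module.  Profile paths are hypothetical.
import glob
from PIL import Image, ImageCms

rgb2cmyk = ImageCms.buildTransform(
    "sRGB.icc", "USWebCoatedSWOP.icc",           # hypothetical profile files
    inMode="RGB", outMode="CMYK",
    renderingIntent=ImageCms.INTENT_RELATIVE_COLORIMETRIC)

for name in glob.glob("*.jpg"):
    im = Image.open(name).convert("RGB")
    out = ImageCms.applyTransform(im, rgb2cmyk)  # reuses the cached transform
    out.save(name + ".cmyk.tif")
# --- end of editor's sketch ---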
+ + If inMode or outMode are not a mode supported by the outputProfile + (or by pyCMS), a PyCMSError will be raised. + + This function builds and returns an ICC transform from the inputProfile + to the outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device using renderingIntent and + proofRenderingIntent to determine what to do with out-of-gamut + colors. This is known as "soft-proofing". It will ONLY work for + converting images that are in inMode to images that are in outMode + color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.). + + Usage of the resulting transform object is exactly the same as with + ImageCms.buildTransform(). + + Proof profiling is generally used when using an output device to get a + good idea of what the final printed/displayed image would look like on + the proofProfile device when it's quicker and easier to use the + output device for judging color. Generally, this means that the + output device is a monitor, or a dye-sub printer (etc.), and the simulated + device is something more expensive, complicated, or time consuming + (making it difficult to make a real print for color judgement purposes). + + Soft-proofing basically functions by adjusting the colors on the + output device to match the colors of the device being simulated. However, + when the simulated device has a much wider gamut than the output + device, you may obtain marginal results. + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + (monitor, usually) profile you wish to use for this transform, or a + profile object + :param proofProfile: String, as a valid filename path to the ICC proof + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the input->proof (simulated) transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param proofRenderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for proof->output transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. 
+ :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + if not isinstance(proofProfile, ImageCmsProfile): + proofProfile = ImageCmsProfile(proofProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, renderingIntent, + proofProfile, proofRenderingIntent, flags) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + +buildTransformFromOpenProfiles = buildTransform +buildProofTransformFromOpenProfiles = buildProofTransform + + +def applyTransform(im, transform, inPlace=0): + """ + (pyCMS) Applies a transform to a given image. + + If im.mode != transform.inMode, a PyCMSError is raised. + + If inPlace == TRUE and transform.inMode != transform.outMode, a + PyCMSError is raised. + + If im.mode, transfer.inMode, or transfer.outMode is not supported by + pyCMSdll or the profiles you used for the transform, a PyCMSError is + raised. + + If an error occurs while the transform is being applied, a PyCMSError + is raised. + + This function applies a pre-calculated transform (from + ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles()) + to an image. The transform can be used for multiple images, saving + considerable calculation time if doing the same conversion multiple times. + + If you want to modify im in-place instead of receiving a new image as + the return value, set inPlace to TRUE. This can only be done if + transform.inMode and transform.outMode are the same, because we can't + change the mode in-place (the buffer sizes for some modes are + different). The default behavior is to return a new Image object of + the same dimensions in mode transform.outMode. + + :param im: A PIL Image object, and im.mode must be the same as the inMode + supported by the transform. + :param transform: A valid CmsTransform class object + :param inPlace: Bool (1 == True, 0 or None == False). If True, im is + modified in place and None is returned, if False, a new Image object + with the transform applied is returned (and im is not changed). The + default is False. + :returns: Either None, or a new PIL Image object, depending on the value of + inPlace. The profile will be returned in the image's info['icc_profile']. + :exception PyCMSError: + """ + + try: + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def createProfile(colorSpace, colorTemp=-1): + """ + (pyCMS) Creates a profile. + + If colorSpace not in ["LAB", "XYZ", "sRGB"], a PyCMSError is raised + + If using LAB and colorTemp != a positive integer, a PyCMSError is raised. + + If an error occurs while creating the profile, a PyCMSError is raised. + + Use this function to create common profiles on-the-fly instead of + having to supply a profile on disk and knowing the path to it. It + returns a normal CmsProfile object that can be passed to + ImageCms.buildTransformFromOpenProfiles() to create a transform to apply + to images. 
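# --- Editor's illustrative sketch (not part of the vendored diff) ---
# Soft-proofing with buildProofTransform(): render an on-screen preview of
# how the image would look on the simulated (proof) device.  All profile
# paths are hypothetical placeholders.
from PIL import Image, ImageCms

proof = ImageCms.buildProofTransform(
    "sRGB.icc",               # input profile: how the pixel data is encoded
    "monitor.icc",            # output profile: the device we actually display on
    "USWebCoatedSWOP.icc",    # proof profile: the press being simulated
    inMode="RGB", outMode="RGB")      # the SOFTPROOFING flag is already the default

preview = ImageCms.applyTransform(Image.open("photo.jpg"), proof)
# --- end of editor's sketch ---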
+ + :param colorSpace: String, the color space of the profile you wish to + create. + Currently only "LAB", "XYZ", and "sRGB" are supported. + :param colorTemp: Positive integer for the white point for the profile, in + degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50 + illuminant if omitted (5000k). colorTemp is ONLY applied to LAB + profiles, and is ignored for XYZ and sRGB. + :returns: A CmsProfile class object + :exception PyCMSError: + """ + + if colorSpace not in ["LAB", "XYZ", "sRGB"]: + raise PyCMSError( + "Color space not supported for on-the-fly profile creation (%s)" + % colorSpace) + + if colorSpace == "LAB": + try: + colorTemp = float(colorTemp) + except: + raise PyCMSError( + "Color temperature must be numeric, \"%s\" not valid" + % colorTemp) + + try: + return core.createProfile(colorSpace, colorTemp) + except (TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileName(profile): + """ + + (pyCMS) Gets the internal product name for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised If an error occurs while trying to obtain the + name tag, a PyCMSError is raised. + + Use this function to obtain the INTERNAL name of the profile (stored + in an ICC tag in the profile itself), usually the one used when the + profile was originally created. Sometimes this tag also contains + additional information supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal name of the profile as stored + in an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # do it in python, not c. + # // name was "%s - %s" (model, manufacturer) || Description , + # // but if the Model and Manufacturer were the same or the model + # // was long, Just the model, in 1.x + model = profile.profile.product_model + manufacturer = profile.profile.product_manufacturer + + if not (model or manufacturer): + return profile.profile.product_description + "\n" + if not manufacturer or len(model) > 30: + return model + "\n" + return "%s - %s\n" % (model, manufacturer) + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileInfo(profile): + """ + (pyCMS) Gets the internal product information for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the info tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + info tag. This often contains details about the profile, and how it + was created, as supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # add an extra newline to preserve pyCMS compatibility + # Python, not C. the white point bits weren't working well, + # so skipping. 
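# --- Editor's illustrative sketch (not part of the vendored diff) ---
# Creating profiles on the fly with createProfile() and converting to Lab
# without any profile files on disk.  The input file name is hypothetical.
from PIL import Image, ImageCms

srgb = ImageCms.createProfile("sRGB")
lab = ImageCms.createProfile("LAB", colorTemp=6500)   # white point only applies to LAB

rgb2lab = ImageCms.buildTransformFromOpenProfiles(srgb, lab, "RGB", "LAB")
lab_im = ImageCms.applyTransform(Image.open("photo.jpg"), rgb2lab)
# --- end of editor's sketch ---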
+ # // info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.product_description + cpright = profile.profile.product_copyright + arr = [] + for elt in (description, cpright): + if elt: + arr.append(elt) + return "\r\n\r\n".join(arr) + "\r\n\r\n" + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileCopyright(profile): + """ + (pyCMS) Gets the copyright for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the copyright tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + copyright tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_copyright + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileManufacturer(profile): + """ + (pyCMS) Gets the manufacturer for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the manufacturer tag, a + PyCMSError is raised + + Use this function to obtain the information stored in the profile's + manufacturer tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_manufacturer + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileModel(profile): + """ + (pyCMS) Gets the model for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the model tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + model tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_model + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileDescription(profile): + """ + (pyCMS) Gets the description for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the description tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + description tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. 
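# --- Editor's illustrative sketch (not part of the vendored diff) ---
# Reading the informational tags of a profile with the helpers above.  The
# profile path is a hypothetical placeholder.
from PIL import ImageCms

p = ImageCms.getOpenProfile("sRGB.icc")
print(ImageCms.getProfileName(p))           # internal product name
print(ImageCms.getProfileInfo(p))           # description and copyright block
print(ImageCms.getProfileManufacturer(p))
print(ImageCms.getProfileModel(p))
# --- end of editor's sketch ---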
+ :returns: A string containing the internal profile information stored in an + ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_description + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getDefaultIntent(profile): + """ + (pyCMS) Gets the default intent name for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the default intent, a + PyCMSError is raised. + + Use this function to determine the default (and usually best optimized) + rendering intent for this profile. Most profiles support multiple + rendering intents, but are intended mostly for one type of conversion. + If you wish to use a different intent than returned, use + ImageCms.isIntentSupported() to verify it will work first. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: Integer 0-3 specifying the default rendering intent for this + profile. + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.rendering_intent + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def isIntentSupported(profile, intent, direction): + """ + (pyCMS) Checks if a given intent is supported. + + Use this function to verify that you can use your desired + renderingIntent with profile, and that profile can be used for the + input/output/proof profile as you desire. + + Some profiles are created specifically for one "direction", can cannot + be used for others. Some profiles can only be used for certain + rendering intents... so it's best to either verify this before trying + to create a transform with them (using this function), or catch the + potential PyCMSError that will occur if they don't support the modes + you select. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :param intent: Integer (0-3) specifying the rendering intent you wish to + use with this profile + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param direction: Integer specifying if the profile is to be used for input, + output, or proof + + INPUT = 0 (or use ImageCms.DIRECTION_INPUT) + OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT) + PROOF = 2 (or use ImageCms.DIRECTION_PROOF) + + :returns: 1 if the intent/direction are supported, -1 if they are not. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # FIXME: I get different results for the same data w. 
different + # compilers. Bug in LittleCMS or in the binding? + if profile.profile.is_intent_supported(intent, direction): + return 1 + else: + return -1 + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def versions(): + """ + (pyCMS) Fetches versions. + """ + + return ( + VERSION, core.littlecms_version, + sys.version.split()[0], Image.VERSION + ) + +# -------------------------------------------------------------------- + +if __name__ == "__main__": + # create a cheap manual from the __doc__ strings for the functions above + + print(__doc__) + + for f in dir(sys.modules[__name__]): + doc = None + try: + exec("doc = %s.__doc__" % (f)) + if "pyCMS" in doc: + # so we don't get the __doc__ string for imported modules + print("=" * 80) + print("%s" % f) + print(doc) + except (AttributeError, TypeError): + pass + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageColor.py b/server/www/packages/packages-darwin/x64/PIL/ImageColor.py new file mode 100644 index 0000000..fc95e6d --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageColor.py @@ -0,0 +1,279 @@ +# +# The Python Imaging Library +# $Id$ +# +# map CSS3-style colour description strings to RGB +# +# History: +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-15 fl Added RGBA support +# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2 +# 2004-07-19 fl Fixed gray/grey spelling issues +# 2009-03-05 fl Fixed rounding error in grayscale calculation +# +# Copyright (c) 2002-2004 by Secret Labs AB +# Copyright (c) 2002-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +import re + + +def getrgb(color): + """ + Convert a color string to an RGB tuple. If the string cannot be parsed, + this function raises a :py:exc:`ValueError` exception. + + .. 
versionadded:: 1.1.4 + + :param color: A color string + :return: ``(red, green, blue[, alpha])`` + """ + try: + rgb = colormap[color] + except KeyError: + try: + # fall back on case-insensitive lookup + rgb = colormap[color.lower()] + except KeyError: + rgb = None + # found color in cache + if rgb: + if isinstance(rgb, tuple): + return rgb + colormap[color] = rgb = getrgb(rgb) + return rgb + # check for known string formats + m = re.match("#\w\w\w$", color) + if m: + return ( + int(color[1]*2, 16), + int(color[2]*2, 16), + int(color[3]*2, 16) + ) + m = re.match("#\w\w\w\w\w\w$", color) + if m: + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16) + ) + m = re.match("rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)) + ) + m = re.match("rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + return ( + int((int(m.group(1)) * 255) / 100.0 + 0.5), + int((int(m.group(2)) * 255) / 100.0 + 0.5), + int((int(m.group(3)) * 255) / 100.0 + 0.5) + ) + m = re.match("hsl\(\s*(\d+)\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + from colorsys import hls_to_rgb + rgb = hls_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(3)) / 100.0, + float(m.group(2)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5) + ) + m = re.match("rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", + color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)), + int(m.group(4)) + ) + raise ValueError("unknown color specifier: %r" % color) + + +def getcolor(color, mode): + """ + Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a + greyscale value if the mode is not color or a palette image. If the string + cannot be parsed, this function raises a :py:exc:`ValueError` exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :return: ``(graylevel [, alpha]) or (red, green, blue[, alpha])`` + """ + # same as getrgb, but converts the result to the given mode + color, alpha = getrgb(color), 255 + if len(color) == 4: + color, alpha = color[0:3], color[3] + + if Image.getmodebase(mode) == "L": + r, g, b = color + color = (r*299 + g*587 + b*114)//1000 + if mode[-1] == 'A': + return (color, alpha) + else: + if mode[-1] == 'A': + return color + (alpha,) + return color + +colormap = { + # X11 colour table (from "CSS3 module: Color working draft"), with + # gray/grey spelling issues fixed. This is a superset of HTML 4.0 + # colour names used in CSS 1. 
+ "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgrey": "#a9a9a9", + "darkgreen": "#006400", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + "darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "grey": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgreen": "#90ee90", + "lightgray": "#d3d3d3", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", + "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + "mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": "#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + "steelblue": "#4682b4", + 
"tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", +} diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageDraw.py b/server/www/packages/packages-darwin/x64/PIL/ImageDraw.py new file mode 100644 index 0000000..f5b72ef --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageDraw.py @@ -0,0 +1,407 @@ +# +# The Python Imaging Library +# $Id$ +# +# drawing interface operations +# +# History: +# 1996-04-13 fl Created (experimental) +# 1996-08-07 fl Filled polygons, ellipses. +# 1996-08-13 fl Added text support +# 1998-06-28 fl Handle I and F images +# 1998-12-29 fl Added arc; use arc primitive to draw ellipses +# 1999-01-10 fl Added shape stuff (experimental) +# 1999-02-06 fl Added bitmap support +# 1999-02-11 fl Changed all primitives to take options +# 1999-02-20 fl Fixed backwards compatibility +# 2000-10-12 fl Copy on write, when necessary +# 2001-02-18 fl Use default ink for bitmap/text also in fill mode +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing +# 2002-12-11 fl Refactored low-level drawing API (work in progress) +# 2004-08-26 fl Made Draw() a factory function, added getdraw() support +# 2004-09-04 fl Added width support to line primitive +# 2004-09-10 fl Added font mode handling +# 2006-06-19 fl Added font bearing support (getmask2) +# +# Copyright (c) 1997-2006 by Secret Labs AB +# Copyright (c) 1996-2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import numbers +import warnings + +from PIL import Image, ImageColor +from PIL._util import isStringType + +## +# A simple 2D drawing interface for PIL images. +#

+# Application code should use the Draw factory, instead of +# directly. + + +class ImageDraw(object): + + ## + # Create a drawing instance. + # + # @param im The image to draw in. + # @param mode Optional mode to use for color values. For RGB + # images, this argument can be RGB or RGBA (to blend the + # drawing into the image). For all other modes, this argument + # must be the same as the image mode. If omitted, the mode + # defaults to the mode of the image. + + def __init__(self, im, mode=None): + im.load() + if im.readonly: + im._copy() # make it writeable + blend = 0 + if mode is None: + mode = im.mode + if mode != im.mode: + if mode == "RGBA" and im.mode == "RGB": + blend = 1 + else: + raise ValueError("mode mismatch") + if mode == "P": + self.palette = im.palette + else: + self.palette = None + self.im = im.im + self.draw = Image.core.draw(self.im, blend) + self.mode = mode + if mode in ("I", "F"): + self.ink = self.draw.draw_ink(1, mode) + else: + self.ink = self.draw.draw_ink(-1, mode) + if mode in ("1", "P", "I", "F"): + # FIXME: fix Fill2 to properly support matte for I+F images + self.fontmode = "1" + else: + self.fontmode = "L" # aliasing is okay for other modes + self.fill = 0 + self.font = None + + def setink(self, ink): + raise Exception("setink() has been removed. " + + "Please use keyword arguments instead.") + + def setfill(self, onoff): + raise Exception("setfill() has been removed. " + + "Please use keyword arguments instead.") + + def setfont(self, font): + warnings.warn("setfont() is deprecated. " + + "Please set the attribute directly instead.") + # compatibility + self.font = font + + ## + # Get the current default font. + + def getfont(self): + if not self.font: + # FIXME: should add a font repository + from PIL import ImageFont + self.font = ImageFont.load_default() + return self.font + + def _getink(self, ink, fill=None): + if ink is None and fill is None: + if self.fill: + fill = self.ink + else: + ink = self.ink + else: + if ink is not None: + if isStringType(ink): + ink = ImageColor.getcolor(ink, self.mode) + if self.palette and not isinstance(ink, numbers.Number): + ink = self.palette.getcolor(ink) + ink = self.draw.draw_ink(ink, self.mode) + if fill is not None: + if isStringType(fill): + fill = ImageColor.getcolor(fill, self.mode) + if self.palette and not isinstance(fill, numbers.Number): + fill = self.palette.getcolor(fill) + fill = self.draw.draw_ink(fill, self.mode) + return ink, fill + + ## + # Draw an arc. + + def arc(self, xy, start, end, fill=None): + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_arc(xy, start, end, ink) + + ## + # Draw a bitmap. + + def bitmap(self, xy, bitmap, fill=None): + bitmap.load() + ink, fill = self._getink(fill) + if ink is None: + ink = fill + if ink is not None: + self.draw.draw_bitmap(xy, bitmap.im, ink) + + ## + # Draw a chord. + + def chord(self, xy, start, end, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_chord(xy, start, end, fill, 1) + if ink is not None: + self.draw.draw_chord(xy, start, end, ink, 0) + + ## + # Draw an ellipse. + + def ellipse(self, xy, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_ellipse(xy, fill, 1) + if ink is not None: + self.draw.draw_ellipse(xy, ink, 0) + + ## + # Draw a line, or a connected sequence of line segments. 
+ + def line(self, xy, fill=None, width=0): + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_lines(xy, ink, width) + + ## + # (Experimental) Draw a shape. + + def shape(self, shape, fill=None, outline=None): + # experimental + shape.close() + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_outline(shape, fill, 1) + if ink is not None: + self.draw.draw_outline(shape, ink, 0) + + ## + # Draw a pieslice. + + def pieslice(self, xy, start, end, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_pieslice(xy, start, end, fill, 1) + if ink is not None: + self.draw.draw_pieslice(xy, start, end, ink, 0) + + ## + # Draw one or more individual pixels. + + def point(self, xy, fill=None): + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_points(xy, ink) + + ## + # Draw a polygon. + + def polygon(self, xy, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_polygon(xy, fill, 1) + if ink is not None: + self.draw.draw_polygon(xy, ink, 0) + + ## + # Draw a rectangle. + + def rectangle(self, xy, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_rectangle(xy, fill, 1) + if ink is not None: + self.draw.draw_rectangle(xy, ink, 0) + + ## + # Draw text. + + def _multiline_check(self, text): + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return split_character in text + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return text.split(split_character) + + def text(self, xy, text, fill=None, font=None, anchor=None): + if self._multiline_check(text): + return self.multiline_text(xy, text, fill, font, anchor) + + ink, fill = self._getink(fill) + if font is None: + font = self.getfont() + if ink is None: + ink = fill + if ink is not None: + try: + mask, offset = font.getmask2(text, self.fontmode) + xy = xy[0] + offset[0], xy[1] + offset[1] + except AttributeError: + try: + mask = font.getmask(text, self.fontmode) + except TypeError: + mask = font.getmask(text) + self.draw.draw_bitmap(xy, mask, ink) + + def multiline_text(self, xy, text, fill=None, font=None, anchor=None, + spacing=4, align="left"): + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font) + widths.append(line_width) + max_width = max(max_width, line_width) + left, top = xy + for idx, line in enumerate(lines): + if align == "left": + pass # left = x + elif align == "center": + left += (max_width - widths[idx]) / 2.0 + elif align == "right": + left += (max_width - widths[idx]) + else: + assert False, 'align must be "left", "center" or "right"' + self.text((left, top), line, fill, font, anchor) + top += line_spacing + left = xy[0] + + ## + # Get the size of a given string, in pixels. 
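# --- Editor's illustrative sketch (not part of the vendored diff) ---
# Typical use of the drawing interface above through the Draw() factory.
# Colours given as CSS-style strings are resolved via ImageColor.getcolor();
# text containing "\n" is routed to multiline_text().
from PIL import Image, ImageDraw

im = Image.new("RGB", (200, 100), "white")
d = ImageDraw.Draw(im)
d.rectangle([10, 10, 190, 90], outline="#000000")
d.line([(10, 50), (190, 50)], fill="rgb(255, 0, 0)", width=3)
d.ellipse([80, 30, 120, 70], fill="navy")
d.text((20, 20), "hello\nworld", fill="black")   # uses the default bitmap font
im.save("drawing.png")                           # hypothetical output path
# --- end of editor's sketch ---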
+ + def textsize(self, text, font=None): + if self._multiline_check(text): + return self.multiline_textsize(text, font) + + if font is None: + font = self.getfont() + return font.getsize(text) + + def multiline_textsize(self, text, font=None, spacing=4): + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font) + max_width = max(max_width, line_width) + return max_width, len(lines)*line_spacing + + +## +# A simple 2D drawing interface for PIL images. +# +# @param im The image to draw in. +# @param mode Optional mode to use for color values. For RGB +# images, this argument can be RGB or RGBA (to blend the +# drawing into the image). For all other modes, this argument +# must be the same as the image mode. If omitted, the mode +# defaults to the mode of the image. + +def Draw(im, mode=None): + try: + return im.getdraw(mode) + except AttributeError: + return ImageDraw(im, mode) + +# experimental access to the outline API +try: + Outline = Image.core.outline +except AttributeError: + Outline = None + + +## +# (Experimental) A more advanced 2D drawing interface for PIL images, +# based on the WCK interface. +# +# @param im The image to draw in. +# @param hints An optional list of hints. +# @return A (drawing context, drawing resource factory) tuple. + +def getdraw(im=None, hints=None): + # FIXME: this needs more work! + # FIXME: come up with a better 'hints' scheme. + handler = None + if not hints or "nicest" in hints: + try: + from PIL import _imagingagg as handler + except ImportError: + pass + if handler is None: + from PIL import ImageDraw2 as handler + if im: + im = handler.Draw(im) + return im, handler + + +## +# (experimental) Fills a bounded region with a given color. +# +# @param image Target image. +# @param xy Seed position (a 2-item coordinate tuple). +# @param value Fill color. +# @param border Optional border value. If given, the region consists of +# pixels with a color different from the border color. If not given, +# the region consists of pixels having the same color as the seed +# pixel. + +def floodfill(image, xy, value, border=None): + "Fill bounded region." + # based on an implementation by Eric S. 
Raymond + pixel = image.load() + x, y = xy + try: + background = pixel[x, y] + if background == value: + return # seed point already has fill color + pixel[x, y] = value + except IndexError: + return # seed point outside image + edge = [(x, y)] + if border is None: + while edge: + newedge = [] + for (x, y) in edge: + for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): + try: + p = pixel[s, t] + except IndexError: + pass + else: + if p == background: + pixel[s, t] = value + newedge.append((s, t)) + edge = newedge + else: + while edge: + newedge = [] + for (x, y) in edge: + for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): + try: + p = pixel[s, t] + except IndexError: + pass + else: + if p != value and p != border: + pixel[s, t] = value + newedge.append((s, t)) + edge = newedge diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageDraw2.py b/server/www/packages/packages-darwin/x64/PIL/ImageDraw2.py new file mode 100644 index 0000000..62ee116 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageDraw2.py @@ -0,0 +1,111 @@ +# +# The Python Imaging Library +# $Id$ +# +# WCK-style drawing interface operations +# +# History: +# 2003-12-07 fl created +# 2005-05-15 fl updated; added to PIL as ImageDraw2 +# 2005-05-15 fl added text support +# 2005-05-20 fl added arc/chord/pieslice support +# +# Copyright (c) 2003-2005 by Secret Labs AB +# Copyright (c) 2003-2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageColor, ImageDraw, ImageFont, ImagePath + + +class Pen(object): + def __init__(self, color, width=1, opacity=255): + self.color = ImageColor.getrgb(color) + self.width = width + + +class Brush(object): + def __init__(self, color, opacity=255): + self.color = ImageColor.getrgb(color) + + +class Font(object): + def __init__(self, color, file, size=12): + # FIXME: add support for bitmap fonts + self.color = ImageColor.getrgb(color) + self.font = ImageFont.truetype(file, size) + + +class Draw(object): + + def __init__(self, image, size=None, color=None): + if not hasattr(image, "im"): + image = Image.new(image, size, color) + self.draw = ImageDraw.Draw(image) + self.image = image + self.transform = None + + def flush(self): + return self.image + + def render(self, op, xy, pen, brush=None): + # handle color arguments + outline = fill = None + width = 1 + if isinstance(pen, Pen): + outline = pen.color + width = pen.width + elif isinstance(brush, Pen): + outline = brush.color + width = brush.width + if isinstance(brush, Brush): + fill = brush.color + elif isinstance(pen, Brush): + fill = pen.color + # handle transformation + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + # render the item + if op == "line": + self.draw.line(xy, fill=outline, width=width) + else: + getattr(self.draw, op)(xy, fill=fill, outline=outline) + + def settransform(self, offset): + (xoffset, yoffset) = offset + self.transform = (1, 0, xoffset, 0, 1, yoffset) + + def arc(self, xy, start, end, *options): + self.render("arc", xy, start, end, *options) + + def chord(self, xy, start, end, *options): + self.render("chord", xy, start, end, *options) + + def ellipse(self, xy, *options): + self.render("ellipse", xy, *options) + + def line(self, xy, *options): + self.render("line", xy, *options) + + def pieslice(self, xy, start, end, *options): + self.render("pieslice", xy, start, end, *options) + + def polygon(self, xy, *options): + self.render("polygon", xy, *options) + + def rectangle(self, xy, 
*options): + self.render("rectangle", xy, *options) + + def symbol(self, xy, symbol, *options): + raise NotImplementedError("not in this version") + + def text(self, xy, text, font): + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + self.draw.text(xy, text, font=font.font, fill=font.color) + + def textsize(self, text, font): + return self.draw.textsize(text, font=font.font) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageEnhance.py b/server/www/packages/packages-darwin/x64/PIL/ImageEnhance.py new file mode 100644 index 0000000..56b5c01 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageEnhance.py @@ -0,0 +1,100 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image enhancement classes +# +# For a background, see "Image Processing By Interpolation and +# Extrapolation", Paul Haeberli and Douglas Voorhies. Available +# at http://www.graficaobscura.com/interp/index.html +# +# History: +# 1996-03-23 fl Created +# 2009-06-16 fl Fixed mean calculation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFilter, ImageStat + + +class _Enhance(object): + + def enhance(self, factor): + """ + Returns an enhanced image. + + :param factor: A floating point value controlling the enhancement. + Factor 1.0 always returns a copy of the original image, + lower factors mean less color (brightness, contrast, + etc), and higher values more. There are no restrictions + on this value. + :rtype: :py:class:`~PIL.Image.Image` + """ + return Image.blend(self.degenerate, self.image, factor) + + +class Color(_Enhance): + """Adjust image color balance. + + This class can be used to adjust the colour balance of an image, in + a manner similar to the controls on a colour TV set. An enhancement + factor of 0.0 gives a black and white image. A factor of 1.0 gives + the original image. + """ + def __init__(self, image): + self.image = image + self.intermediate_mode = 'L' + if 'A' in image.getbands(): + self.intermediate_mode = 'LA' + + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) + + +class Contrast(_Enhance): + """Adjust image contrast. + + This class can be used to control the contrast of an image, similar + to the contrast control on a TV set. An enhancement factor of 0.0 + gives a solid grey image. A factor of 1.0 gives the original image. + """ + def __init__(self, image): + self.image = image + mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) + self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + + +class Brightness(_Enhance): + """Adjust image brightness. + + This class can be used to control the brightness of an image. An + enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the + original image. + """ + def __init__(self, image): + self.image = image + self.degenerate = Image.new(image.mode, image.size, 0) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + + +class Sharpness(_Enhance): + """Adjust image sharpness. + + This class can be used to adjust the sharpness of an image. An + enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the + original image, and a factor of 2.0 gives a sharpened image. 
+ """ + def __init__(self, image): + self.image = image + self.degenerate = image.filter(ImageFilter.SMOOTH) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageFile.py b/server/www/packages/packages-darwin/x64/PIL/ImageFile.py new file mode 100644 index 0000000..9617ffb --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageFile.py @@ -0,0 +1,513 @@ +# +# The Python Imaging Library. +# $Id$ +# +# base class for image file handlers +# +# history: +# 1995-09-09 fl Created +# 1996-03-11 fl Fixed load mechanism. +# 1996-04-15 fl Added pcx/xbm decoders. +# 1996-04-30 fl Added encoders. +# 1996-12-14 fl Added load helpers +# 1997-01-11 fl Use encode_to_file where possible +# 1997-08-27 fl Flush output in _save +# 1998-03-05 fl Use memory mapping for some modes +# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" +# 1999-05-31 fl Added image parser +# 2000-10-12 fl Set readonly flag on memory-mapped images +# 2002-03-20 fl Use better messages for common decoder errors +# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available +# 2003-10-30 fl Added StubImageFile class +# 2004-02-25 fl Made incremental parser more robust +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isPath +import io +import os +import sys +import struct + +MAXBLOCK = 65536 + +SAFEBLOCK = 1024*1024 + +LOAD_TRUNCATED_IMAGES = False + +ERRORS = { + -1: "image buffer overrun error", + -2: "decoding error", + -3: "unknown error", + -8: "bad configuration", + -9: "out of memory error" +} + + +def raise_ioerror(error): + try: + message = Image.core.getcodecstatus(error) + except AttributeError: + message = ERRORS.get(error) + if not message: + message = "decoder error %d" % error + raise IOError(message + " when reading image file") + + +# +# -------------------------------------------------------------------- +# Helpers + +def _tilesort(t): + # sort on offset + return t[2] + + +# +# -------------------------------------------------------------------- +# ImageFile base class + +class ImageFile(Image.Image): + "Base class for image file format handlers." + + def __init__(self, fp=None, filename=None): + Image.Image.__init__(self) + + self.tile = None + self.readonly = 1 # until we know better + + self.decoderconfig = () + self.decodermaxblock = MAXBLOCK + + if isPath(fp): + # filename + self.fp = open(fp, "rb") + self.filename = fp + else: + # stream + self.fp = fp + self.filename = filename + + try: + self._open() + except (IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error) as v: + raise SyntaxError(v) + + if not self.mode or self.size[0] <= 0: + raise SyntaxError("not identified by this driver") + + def draft(self, mode, size): + "Set draft mode" + + pass + + def verify(self): + "Check file integrity" + + # raise exception if something's wrong. must be called + # directly after open, and closes file when finished. 
+ self.fp = None + + def load(self): + "Load image data based on tile list" + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. + use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info') + + readonly = 0 + + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: + # try memory mapping + d, e, o, a = self.tile[0] + if d == "raw" and a[0] == self.mode and a[0] in Image._MAPMODES: + try: + if hasattr(Image.core, "map"): + # use built-in mapper + self.map = Image.core.map(self.filename) + self.map.seek(o) + self.im = self.map.readimage( + self.mode, self.size, a[1], a[2] + ) + else: + # use mmap, if possible + import mmap + fp = open(self.filename, "r+") + size = os.path.getsize(self.filename) + # FIXME: on Unix, use PROT_READ etc + self.map = mmap.mmap(fp.fileno(), size) + self.im = Image.core.map_buffer( + self.map, self.size, d, e, o, a + ) + readonly = 1 + except (AttributeError, EnvironmentError, ImportError): + self.map = None + + self.load_prepare() + + if not self.map: + # sort tiles in file order + self.tile.sort(key=_tilesort) + + try: + # FIXME: This is a hack to handle TIFF's JpegTables tag. + prefix = self.tile_prefix + except AttributeError: + prefix = b"" + + for d, e, o, a in self.tile: + d = Image._getdecoder(self.mode, d, a, self.decoderconfig) + seek(o) + try: + d.setimage(self.im, e) + except ValueError: + continue + b = prefix + while True: + try: + s = read(self.decodermaxblock) + except (IndexError, struct.error): # truncated png/gif + if LOAD_TRUNCATED_IMAGES: + break + else: + raise IOError("image file is truncated") + + if not s and not d.handles_eof: # truncated jpeg + self.tile = [] + + # JpegDecode needs to clean things up here either way + # If we don't destroy the decompressor, + # we have a memory leak. + d.cleanup() + + if LOAD_TRUNCATED_IMAGES: + break + else: + raise IOError("image file is truncated " + "(%d bytes not processed)" % len(b)) + + b = b + s + n, e = d.decode(b) + if n < 0: + break + b = b[n:] + # Need to cleanup here to prevent leaks in PyPy + d.cleanup() + + self.tile = [] + self.readonly = readonly + + self.fp = None # might be shared + + if not self.map and not LOAD_TRUNCATED_IMAGES and e < 0: + # still raised if decoder fails to return anything + raise_ioerror(e) + + # post processing + if hasattr(self, "tile_post_rotate"): + # FIXME: This is a hack to handle rotated PCD's + self.im = self.im.rotate(self.tile_post_rotate) + self.size = self.im.size + + self.load_end() + + return Image.Image.load(self) + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.new(self.mode, self.size) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def load_end(self): + # may be overridden + pass + + # may be defined for contained formats + # def load_seek(self, pos): + # pass + + # may be defined for blocked formats (e.g. PNG) + # def load_read(self, bytes): + # pass + + +class StubImageFile(ImageFile): + """ + Base class for stub image loaders. 
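# --- Editor's illustrative sketch (not part of the vendored diff) ---
# A minimal ImageFile subclass for a made-up fixed-layout format, showing how
# _open() fills in mode, size and the tile list that the load() machinery
# above consumes.  The "XYZ" format, magic bytes and extension are invented
# for illustration only.
import struct
from PIL import Image, ImageFile

class XyzImageFile(ImageFile.ImageFile):
    format = "XYZ"
    format_description = "hypothetical raw RGB format"

    def _open(self):
        header = self.fp.read(8)
        if header[:4] != b"XYZ1":
            raise SyntaxError("not an XYZ file")
        self.size = struct.unpack(">HH", header[4:])        # (width, height)
        self.mode = "RGB"
        # one raw tile: decoder name, bounding box, data offset, parameters
        self.tile = [("raw", (0, 0) + self.size, 8, ("RGB", 0, 1))]

Image.register_open(XyzImageFile.format, XyzImageFile)
Image.register_extension(XyzImageFile.format, ".xyz")

# For damaged files, setting ImageFile.LOAD_TRUNCATED_IMAGES = True makes
# load() keep partial data instead of raising "image file is truncated".
# --- end of editor's sketch ---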
+ + A stub loader is an image loader that can identify files of a + certain format, but relies on external code to load the file. + """ + + def _open(self): + raise NotImplementedError( + "StubImageFile subclass must implement _open" + ) + + def load(self): + loader = self._load() + if loader is None: + raise IOError("cannot find loader for this %s file" % self.format) + image = loader.load(self) + assert image is not None + # become the other object (!) + self.__class__ = image.__class__ + self.__dict__ = image.__dict__ + + def _load(self): + "(Hook) Find actual image loader." + raise NotImplementedError( + "StubImageFile subclass must implement _load" + ) + + +class Parser(object): + """ + Incremental image parser. This class implements the standard + feed/close consumer interface. + + In Python 2.x, this is an old-style class. + """ + incremental = None + image = None + data = None + decoder = None + offset = 0 + finished = 0 + + def reset(self): + """ + (Consumer) Reset the parser. Note that you can only call this + method immediately after you've created a parser; parser + instances cannot be reused. + """ + assert self.data is None, "cannot reuse parsers" + + def feed(self, data): + """ + (Consumer) Feed data to the parser. + + :param data: A string buffer. + :exception IOError: If the parser failed to parse the image file. + """ + # collect data + + if self.finished: + return + + if self.data is None: + self.data = data + else: + self.data = self.data + data + + # parse what we have + if self.decoder: + + if self.offset > 0: + # skip header + skip = min(len(self.data), self.offset) + self.data = self.data[skip:] + self.offset = self.offset - skip + if self.offset > 0 or not self.data: + return + + n, e = self.decoder.decode(self.data) + + if n < 0: + # end of stream + self.data = None + self.finished = 1 + if e < 0: + # decoding error + self.image = None + raise_ioerror(e) + else: + # end of image + return + self.data = self.data[n:] + + elif self.image: + + # if we end up here with no decoder, this file cannot + # be incrementally parsed. wait until we've gotten all + # available data + pass + + else: + + # attempt to open this file + try: + try: + fp = io.BytesIO(self.data) + im = Image.open(fp) + finally: + fp.close() # explicitly close the virtual file + except IOError: + # traceback.print_exc() + pass # not enough data + else: + flag = hasattr(im, "load_seek") or hasattr(im, "load_read") + if flag or len(im.tile) != 1: + # custom load code, or multiple tiles + self.decode = None + else: + # initialize decoder + im.load_prepare() + d, e, o, a = im.tile[0] + im.tile = [] + self.decoder = Image._getdecoder( + im.mode, d, a, im.decoderconfig + ) + self.decoder.setimage(im.im, e) + + # calculate decoder offset + self.offset = o + if self.offset <= len(self.data): + self.data = self.data[self.offset:] + self.offset = 0 + + self.image = im + + def close(self): + """ + (Consumer) Close the stream. + + :returns: An image object. + :exception IOError: If the parser failed to parse the image file either + because it cannot be identified or cannot be + decoded. 
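# --- Editor's illustrative sketch (not part of the vendored diff) ---
# Incremental decoding with the Parser above: feed() chunks as they arrive,
# then close() to obtain the finished image.  The local file stands in for a
# socket or other stream and is a hypothetical placeholder.
from PIL import ImageFile

parser = ImageFile.Parser()
with open("photo.jpg", "rb") as fp:
    while True:
        chunk = fp.read(1024)
        if not chunk:
            break
        parser.feed(chunk)
im = parser.close()
print(im.mode, im.size)
# --- end of editor's sketch ---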
+ """ + # finish decoding + if self.decoder: + # get rid of what's left in the buffers + self.feed(b"") + self.data = self.decoder = None + if not self.finished: + raise IOError("image was incomplete") + if not self.image: + raise IOError("cannot parse this image") + if self.data: + # incremental parsing not possible; reopen the file + # not that we have all data + try: + fp = io.BytesIO(self.data) + self.image = Image.open(fp) + finally: + self.image.load() + fp.close() # explicitly close the virtual file + return self.image + + +# -------------------------------------------------------------------- + +def _save(im, fp, tile, bufsize=0): + """Helper to save image based on tile list + + :param im: Image object. + :param fp: File object. + :param tile: Tile list. + :param bufsize: Optional buffer size + """ + + im.load() + if not hasattr(im, "encoderconfig"): + im.encoderconfig = () + tile.sort(key=_tilesort) + # FIXME: make MAXBLOCK a configuration parameter + # It would be great if we could have the encoder specify what it needs + # But, it would need at least the image size in most cases. RawEncode is + # a tricky case. + bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + if fp == sys.stdout: + fp.flush() + return + try: + fh = fp.fileno() + fp.flush() + except (AttributeError, io.UnsupportedOperation): + # compress to Python file-compatible object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + while True: + l, s, d = e.encode(bufsize) + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + else: + # slight speedup: compress to real file object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + s = e.encode_to_file(fh, bufsize) + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + if hasattr(fp, "flush"): + fp.flush() + + +def _safe_read(fp, size): + """ + Reads large blocks in a safe way. Unlike fp.read(n), this function + doesn't trust the user. If the requested size is larger than + SAFEBLOCK, the file is read block by block. + + :param fp: File handle. Must implement a read method. + :param size: Number of bytes to read. + :returns: A string containing up to size bytes of data. + """ + if size <= 0: + return b"" + if size <= SAFEBLOCK: + return fp.read(size) + data = [] + while size > 0: + block = fp.read(min(size, SAFEBLOCK)) + if not block: + break + data.append(block) + size -= len(block) + return b"".join(data) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageFilter.py b/server/www/packages/packages-darwin/x64/PIL/ImageFilter.py new file mode 100644 index 0000000..baa168a --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageFilter.py @@ -0,0 +1,275 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard filters +# +# History: +# 1995-11-27 fl Created +# 2002-06-08 fl Added rank and mode filters +# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2002 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import functools + + +class Filter(object): + pass + + +class Kernel(Filter): + """ + Create a convolution kernel. The current version only + supports 3x3 and 5x5 integer and floating point kernels. 
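# --- Editor's illustrative sketch (not part of the vendored diff) ---
# A custom 3x3 convolution with the Kernel filter described above.  scale
# defaults to the sum of the weights, so this box blur divides by 9
# automatically.  The input file name is hypothetical.
from PIL import Image, ImageFilter

im = Image.open("photo.jpg").convert("RGB")     # kernels work on "L" and "RGB"
box_blur = ImageFilter.Kernel((3, 3), [1, 1, 1,
                                       1, 1, 1,
                                       1, 1, 1])
blurred = im.filter(box_blur)
edges = im.filter(ImageFilter.FIND_EDGES)       # one of the built-in filters below
# --- end of editor's sketch ---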
+ + In the current version, kernels can only be applied to + "L" and "RGB" images. + + :param size: Kernel size, given as (width, height). In the current + version, this must be (3,3) or (5,5). + :param kernel: A sequence containing kernel weights. + :param scale: Scale factor. If given, the result for each pixel is + divided by this value. the default is the sum of the + kernel weights. + :param offset: Offset. If given, this value is added to the result, + after it has been divided by the scale factor. + """ + + def __init__(self, size, kernel, scale=None, offset=0): + if scale is None: + # default scale is sum of kernel + scale = functools.reduce(lambda a, b: a+b, kernel) + if size[0] * size[1] != len(kernel): + raise ValueError("not enough coefficients in kernel") + self.filterargs = size, scale, offset, kernel + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + return image.filter(*self.filterargs) + + +class BuiltinFilter(Kernel): + def __init__(self): + pass + + +class RankFilter(Filter): + """ + Create a rank filter. The rank filter sorts all pixels in + a window of the given size, and returns the **rank**'th value. + + :param size: The kernel size, in pixels. + :param rank: What pixel value to pick. Use 0 for a min filter, + ``size * size / 2`` for a median filter, ``size * size - 1`` + for a max filter, etc. + """ + name = "Rank" + + def __init__(self, size, rank): + self.size = size + self.rank = rank + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + image = image.expand(self.size//2, self.size//2) + return image.rankfilter(self.size, self.rank) + + +class MedianFilter(RankFilter): + """ + Create a median filter. Picks the median pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Median" + + def __init__(self, size=3): + self.size = size + self.rank = size*size//2 + + +class MinFilter(RankFilter): + """ + Create a min filter. Picks the lowest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Min" + + def __init__(self, size=3): + self.size = size + self.rank = 0 + + +class MaxFilter(RankFilter): + """ + Create a max filter. Picks the largest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Max" + + def __init__(self, size=3): + self.size = size + self.rank = size*size-1 + + +class ModeFilter(Filter): + """ + + Create a mode filter. Picks the most frequent pixel value in a box with the + given size. Pixel values that occur only once or twice are ignored; if no + pixel value occurs more than twice, the original pixel value is preserved. + + :param size: The kernel size, in pixels. + """ + name = "Mode" + + def __init__(self, size=3): + self.size = size + + def filter(self, image): + return image.modefilter(self.size) + + +class GaussianBlur(Filter): + """Gaussian blur filter. + + :param radius: Blur radius. + """ + name = "GaussianBlur" + + def __init__(self, radius=2): + self.radius = radius + + def filter(self, image): + return image.gaussian_blur(self.radius) + + +class UnsharpMask(Filter): + """Unsharp mask filter. + + See Wikipedia's entry on `digital unsharp masking`_ for an explanation of + the parameters. + + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + + .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + + """ + name = "UnsharpMask" + + def __init__(self, radius=2, percent=150, threshold=3): + self.radius = radius + self.percent = percent + self.threshold = threshold + + def filter(self, image): + return image.unsharp_mask(self.radius, self.percent, self.threshold) + + +class BLUR(BuiltinFilter): + name = "Blur" + filterargs = (5, 5), 16, 0, ( + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1 + ) + + +class CONTOUR(BuiltinFilter): + name = "Contour" + filterargs = (3, 3), 1, 255, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class DETAIL(BuiltinFilter): + name = "Detail" + filterargs = (3, 3), 6, 0, ( + 0, -1, 0, + -1, 10, -1, + 0, -1, 0 + ) + + +class EDGE_ENHANCE(BuiltinFilter): + name = "Edge-enhance" + filterargs = (3, 3), 2, 0, ( + -1, -1, -1, + -1, 10, -1, + -1, -1, -1 + ) + + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name = "Edge-enhance More" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 9, -1, + -1, -1, -1 + ) + + +class EMBOSS(BuiltinFilter): + name = "Emboss" + filterargs = (3, 3), 1, 128, ( + -1, 0, 0, + 0, 1, 0, + 0, 0, 0 + ) + + +class FIND_EDGES(BuiltinFilter): + name = "Find Edges" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class SMOOTH(BuiltinFilter): + name = "Smooth" + filterargs = (3, 3), 13, 0, ( + 1, 1, 1, + 1, 5, 1, + 1, 1, 1 + ) + + +class SMOOTH_MORE(BuiltinFilter): + name = "Smooth More" + filterargs = (5, 5), 100, 0, ( + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1 + ) + + +class SHARPEN(BuiltinFilter): + name = "Sharpen" + filterargs = (3, 3), 16, 0, ( + -2, -2, -2, + -2, 32, -2, + -2, -2, -2 + ) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageFont.py b/server/www/packages/packages-darwin/x64/PIL/ImageFont.py new file mode 100644 index 0000000..af1166d --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageFont.py @@ -0,0 +1,437 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIL raster font management +# +# History: +# 1996-08-07 fl created (experimental) +# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 +# 1999-02-06 fl rewrote most font management stuff in C +# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) +# 2001-02-17 fl added freetype support +# 2001-05-09 fl added TransposedFont wrapper class +# 2002-03-04 fl make sure we have a "L" or "1" font +# 2002-12-04 fl skip non-directory entries in the system path +# 2003-04-29 fl add embedded default font +# 2003-09-27 fl added support for truetype charmap encodings +# +# Todo: +# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isDirectory, isPath +import os +import sys + + +class _imagingft_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imagingft C module is not installed") + +try: + from PIL import _imagingft as core +except ImportError: + core = _imagingft_not_installed() + +# FIXME: add support for pilfont2 format (see FontFile.py) + +# -------------------------------------------------------------------- +# Font metrics format: +# "PILfont" LF +# fontdescriptor LF +# (optional) key=value... 
LF +# "DATA" LF +# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) +# +# To place a character, cut out srcbox and paste at dstbox, +# relative to the character position. Then move the character +# position according to dx, dy. +# -------------------------------------------------------------------- + + +class ImageFont(object): + "PIL font wrapper" + + def _load_pilfont(self, filename): + + fp = open(filename, "rb") + + for ext in (".png", ".gif", ".pbm"): + try: + fullname = os.path.splitext(filename)[0] + ext + image = Image.open(fullname) + except: + pass + else: + if image and image.mode in ("1", "L"): + break + else: + raise IOError("cannot find glyph data file") + + self.file = fullname + + return self._load_pilfont_data(fp, image) + + def _load_pilfont_data(self, file, image): + + # read PILfont header + if file.readline() != b"PILfont\n": + raise SyntaxError("Not a PILfont file") + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary + while True: + s = file.readline() + if not s or s == b"DATA\n": + break + self.info.append(s) + + # read PILfont metrics + data = file.read(256*20) + + # check image + if image.mode not in ("1", "L"): + raise TypeError("invalid font image mode") + + image.load() + + self.font = Image.core.font(image.im, data) + + # delegate critical operations to internal type + self.getsize = self.font.getsize + self.getmask = self.font.getmask + + +## +# Wrapper for FreeType fonts. Application code should use the +# truetype factory function to create font objects. + +class FreeTypeFont(object): + "FreeType font wrapper (requires _imagingft service)" + + def __init__(self, font=None, size=10, index=0, encoding=""): + # FIXME: use service provider instead + + self.path = font + self.size = size + self.index = index + self.encoding = encoding + + if isPath(font): + self.font = core.getfont(font, size, index, encoding) + else: + self.font_bytes = font.read() + self.font = core.getfont( + "", size, index, encoding, self.font_bytes) + + def getname(self): + return self.font.family, self.font.style + + def getmetrics(self): + return self.font.ascent, self.font.descent + + def getsize(self, text): + size, offset = self.font.getsize(text) + return (size[0] + offset[0], size[1] + offset[1]) + + def getoffset(self, text): + return self.font.getsize(text)[1] + + def getmask(self, text, mode=""): + return self.getmask2(text, mode)[0] + + def getmask2(self, text, mode="", fill=Image.core.fill): + size, offset = self.font.getsize(text) + im = fill("L", size, 0) + self.font.render(text, im.id, mode == "1") + return im, offset + + def font_variant(self, font=None, size=None, index=None, encoding=None): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. + """ + return FreeTypeFont(font=self.path if font is None else font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else + encoding) + +## +# Wrapper that creates a transposed font from any existing font +# object. +# +# @param font A font object. +# @param orientation An optional orientation. If given, this should +# be one of Image.FLIP_LEFT_RIGHT, Image.FLIP_TOP_BOTTOM, +# Image.ROTATE_90, Image.ROTATE_180, or Image.ROTATE_270. 
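
As a rough usage sketch only (assuming Pillow's standard ImageDraw/ImageFont API; the font path "DejaVuSans.ttf" and the output file name are placeholders), the wrapper documented above and defined just below can be used to draw rotated text:

    from PIL import Image, ImageDraw, ImageFont

    canvas = Image.new("RGB", (80, 200), "white")
    base = ImageFont.truetype("DejaVuSans.ttf", 18)            # placeholder font path
    rotated = ImageFont.TransposedFont(base, Image.ROTATE_90)  # rotate rendered glyph masks 90 degrees
    ImageDraw.Draw(canvas).text((20, 20), "sideways", font=rotated, fill="black")
    canvas.save("rotated_text.png")                            # placeholder output path
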
+ + +class TransposedFont(object): + "Wrapper for writing rotated or mirrored text" + + def __init__(self, font, orientation=None): + self.font = font + self.orientation = orientation # any 'transpose' argument, or None + + def getsize(self, text): + w, h = self.font.getsize(text) + if self.orientation in (Image.ROTATE_90, Image.ROTATE_270): + return h, w + return w, h + + def getmask(self, text, mode=""): + im = self.font.getmask(text, mode) + if self.orientation is not None: + return im.transpose(self.orientation) + return im + + +def load(filename): + """ + Load a font file. This function loads a font object from the given + bitmap font file, and returns the corresponding font object. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. + """ + f = ImageFont() + f._load_pilfont(filename) + return f + + +def truetype(font=None, size=10, index=0, encoding=""): + """ + Load a TrueType or OpenType font file, and create a font object. + This function loads a font object from the given file, and creates + a font object for a font of the given size. + + This function requires the _imagingft service. + + :param font: A truetype font file. Under Windows, if the file + is not found in this filename, the loader also looks in + Windows :file:`fonts/` directory. + :param size: The requested size, in points. + :param index: Which font face to load (default is first available face). + :param encoding: Which font encoding to use (default is Unicode). Common + encodings are "unic" (Unicode), "symb" (Microsoft + Symbol), "ADOB" (Adobe Standard), "ADBE" (Adobe Expert), + and "armn" (Apple Roman). See the FreeType documentation + for more information. + :return: A font object. + :exception IOError: If the file could not be read. + """ + + try: + return FreeTypeFont(font, size, index, encoding) + except IOError: + ttf_filename = os.path.basename(font) + + dirs = [] + if sys.platform == "win32": + # check the windows font repository + # NOTE: must use uppercase WINDIR, to work around bugs in + # 1.5.2's os.environ.get() + windir = os.environ.get("WINDIR") + if windir: + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ('linux', 'linux2'): + lindirs = os.environ.get("XDG_DATA_DIRS", "") + if not lindirs: + # According to the freedesktop spec, XDG_DATA_DIRS should + # default to /usr/share + lindirs = '/usr/share' + dirs += [os.path.join(lindir, "fonts") + for lindir in lindirs.split(":")] + elif sys.platform == 'darwin': + dirs += ['/Library/Fonts', '/System/Library/Fonts', + os.path.expanduser('~/Library/Fonts')] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + return FreeTypeFont(fontpath, size, index, encoding) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == '.ttf': + return FreeTypeFont(fontpath, size, index, encoding) + if not ext and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return FreeTypeFont(first_font_with_a_different_extension, size, + index, encoding) + raise + + +def load_path(filename): + """ + Load font file. 
Same as :py:func:`~PIL.ImageFont.load`, but searches for a + bitmap font along the Python path. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. + """ + for directory in sys.path: + if isDirectory(directory): + if not isinstance(filename, str): + if bytes is str: + filename = filename.encode("utf-8") + else: + filename = filename.decode("utf-8") + try: + return load(os.path.join(directory, filename)) + except IOError: + pass + raise IOError("cannot find font file") + + +def load_default(): + """Load a "better than nothing" default font. + + .. versionadded:: 1.1.4 + + :return: A font object. + """ + from io import BytesIO + import base64 + f = ImageFont() + f._load_pilfont_data( + # courB08 + BytesIO(base64.decodestring(b''' +UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA +BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL +AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA +AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB +ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A +BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB +//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA +AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH +AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA +ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv +AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ +/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 +AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA +AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG +AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA +BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA +AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA +2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF +AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// ++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA +////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA +BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv +AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA +AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA 
+AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB +mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 +AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA +Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC +eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA 
+LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 +AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA +AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 +AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA +AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG +AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA +EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK +AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA +pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG +AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// ++QAGAAIAzgAKANUAEw== +''')), Image.open(BytesIO(base64.decodestring(b''' +iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u +Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 +M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g +LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F +IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA +Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 +NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx +in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 +SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY +AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt +y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG +ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY +lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H +/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 +AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 +c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ +/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw +pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv +oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR +evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA +AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// +Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR +w7IkEbzhVQAAAABJRU5ErkJggg== +''')))) + return f + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageGrab.py b/server/www/packages/packages-darwin/x64/PIL/ImageGrab.py new file mode 100644 index 0000000..febdb23 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageGrab.py @@ -0,0 +1,61 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber (OS X and Windows only) +# +# History: +# 2001-04-26 fl created +# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image + +import sys +if sys.platform not in ["win32", "darwin"]: + raise ImportError("ImageGrab is OS X and Windows only") + +if sys.platform == "win32": + grabber = Image.core.grabscreen +elif sys.platform == "darwin": + import os + import tempfile + import subprocess + + +def grab(bbox=None): + if sys.platform == "darwin": + f, file = tempfile.mkstemp('.png') + os.close(f) + subprocess.call(['screencapture', '-x', file]) + im = Image.open(file) + im.load() + os.unlink(file) + else: + size, data = grabber() + im = Image.frombytes( + "RGB", size, data, + # RGB, 32-bit line padding, origo in lower left corner + "raw", "BGR", (size[0]*3 + 3) & -4, -1 + ) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + raise NotImplementedError("Method is not implemented on OS X") + debug = 0 # temporary interface + data = Image.core.grabclipboard(debug) + if isinstance(data, bytes): + from PIL import BmpImagePlugin + import io + return BmpImagePlugin.DibImageFile(io.BytesIO(data)) + return data diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageMath.py b/server/www/packages/packages-darwin/x64/PIL/ImageMath.py new file mode 100644 index 0000000..f92d500 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageMath.py @@ -0,0 +1,270 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import _imagingmath + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +VERBOSE = 0 + + +def _isconstant(v): + return isinstance(v, int) or isinstance(v, float) + + +class _Operand(object): + # wraps an image operand, providing standard operators + + def __init__(self, im): + self.im = im + + def __fixup(self, im1): + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. 
+ if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + raise ValueError("unsupported mode: %s" % im1.im.mode) + else: + # argument was a constant + if _isconstant(im1) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply(self, op, im1, im2=None, mode=None): + im1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.unop(op, out.im.id, im1.im.id) + else: + # binary operation + im2 = self.__fixup(im2) + if im1.mode != im2.mode: + # convert both arguments to floating point + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") + if im1.mode != im2.mode: + raise ValueError("mode mismatch") + if im1.size != im2.size: + # crop both arguments to a common size + size = (min(im1.size[0], im2.size[0]), + min(im1.size[1], im2.size[1])) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) + out = Image.new(mode or im1.mode, size, None) + else: + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + im2.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self): + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + if bytes is str: + # Provide __nonzero__ for pre-Py3k + __nonzero__ = __bool__ + del __bool__ + + def __abs__(self): + return self.apply("abs", self) + + def __pos__(self): + return self + + def __neg__(self): + return self.apply("neg", self) + + # binary operators + def __add__(self, other): + return self.apply("add", self, other) + + def __radd__(self, other): + return self.apply("add", other, self) + + def __sub__(self, other): + return self.apply("sub", self, other) + + def __rsub__(self, other): + return self.apply("sub", other, self) + + def __mul__(self, other): + return self.apply("mul", self, other) + + def __rmul__(self, other): + return self.apply("mul", other, self) + + def __truediv__(self, other): + return self.apply("div", self, other) + + def __rtruediv__(self, other): + return self.apply("div", other, self) + + def __mod__(self, other): + return self.apply("mod", self, other) + + def __rmod__(self, other): + return self.apply("mod", other, self) + + def __pow__(self, other): + return self.apply("pow", self, other) + + def __rpow__(self, other): + return self.apply("pow", other, self) + + if bytes is str: + # Provide __div__ and __rdiv__ for pre-Py3k + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + del __truediv__ + del __rtruediv__ + + # bitwise + def __invert__(self): + return self.apply("invert", self) + + def __and__(self, other): + return self.apply("and", self, other) + + def __rand__(self, other): + return self.apply("and", other, self) + + def __or__(self, other): + return self.apply("or", self, other) + + def __ror__(self, other): + return self.apply("or", other, self) + + def __xor__(self, other): + return self.apply("xor", self, other) + + def __rxor__(self, other): + return self.apply("xor", other, self) + + def __lshift__(self, other): + 
return self.apply("lshift", self, other) + + def __rshift__(self, other): + return self.apply("rshift", self, other) + + # logical + def __eq__(self, other): + return self.apply("eq", self, other) + + def __ne__(self, other): + return self.apply("ne", self, other) + + def __lt__(self, other): + return self.apply("lt", self, other) + + def __le__(self, other): + return self.apply("le", self, other) + + def __gt__(self, other): + return self.apply("gt", self, other) + + def __ge__(self, other): + return self.apply("ge", self, other) + + +# conversions +def imagemath_int(self): + return _Operand(self.im.convert("I")) + + +def imagemath_float(self): + return _Operand(self.im.convert("F")) + + +# logical +def imagemath_equal(self, other): + return self.apply("eq", self, other, mode="I") + + +def imagemath_notequal(self, other): + return self.apply("ne", self, other, mode="I") + + +def imagemath_min(self, other): + return self.apply("min", self, other) + + +def imagemath_max(self, other): + return self.apply("max", self, other) + + +def imagemath_convert(self, mode): + return _Operand(self.im.convert(mode)) + +ops = {} +for k, v in list(globals().items()): + if k[:10] == "imagemath_": + ops[k[10:]] = v + + +def eval(expression, _dict={}, **kw): + """ + Evaluates an image expression. + + :param expression: A string containing a Python-style expression. + :param options: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + # build execution namespace + args = ops.copy() + args.update(_dict) + args.update(kw) + for k, v in list(args.items()): + if hasattr(v, "im"): + args[k] = _Operand(v) + + out = builtins.eval(expression, args) + try: + return out.im + except AttributeError: + return out diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageMode.py b/server/www/packages/packages-darwin/x64/PIL/ImageMode.py new file mode 100644 index 0000000..d896001 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageMode.py @@ -0,0 +1,52 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard mode descriptors +# +# History: +# 2006-03-20 fl Added +# +# Copyright (c) 2006 by Secret Labs AB. +# Copyright (c) 2006 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# mode descriptor cache +_modes = {} + + +## +# Wrapper for mode strings. + +class ModeDescriptor(object): + + def __init__(self, mode, bands, basemode, basetype): + self.mode = mode + self.bands = bands + self.basemode = basemode + self.basetype = basetype + + def __str__(self): + return self.mode + + +## +# Gets a mode descriptor for the given mode. 
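
A minimal illustrative sketch (not part of the vendored file) of the descriptor lookup defined just below, using only modes registered by this module:

    from PIL import ImageMode

    desc = ImageMode.getmode("RGB")
    print(str(desc))                  # "RGB" -- ModeDescriptor.__str__ returns the mode string
    print(desc.bands, desc.basetype)  # ('R', 'G', 'B') L
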
+ +def getmode(mode): + if not _modes: + # initialize mode cache + from PIL import Image + # core modes + for m, (basemode, basetype, bands) in Image._MODEINFO.items(): + _modes[m] = ModeDescriptor(m, bands, basemode, basetype) + # extra experimental modes + _modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L") + _modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L") + # mapping modes + _modes["I;16"] = ModeDescriptor("I;16", "I", "L", "L") + _modes["I;16L"] = ModeDescriptor("I;16L", "I", "L", "L") + _modes["I;16B"] = ModeDescriptor("I;16B", "I", "L", "L") + return _modes[mode] diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageMorph.py b/server/www/packages/packages-darwin/x64/PIL/ImageMorph.py new file mode 100644 index 0000000..902ed8d --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageMorph.py @@ -0,0 +1,251 @@ +# A binary morphology add-on for the Python Imaging Library +# +# History: +# 2014-06-04 Initial version. +# +# Copyright (c) 2014 Dov Grobgeld + +from PIL import Image +from PIL import _imagingmorph +import re + +LUT_SIZE = 1 << 9 + + +class LutBuilder(object): + """A class for building a MorphLut from a descriptive language + + The input patterns is a list of a strings sequences like these:: + + 4:(... + .1. + 111)->1 + + (whitespaces including linebreaks are ignored). The option 4 + describes a series of symmetry operations (in this case a + 4-rotation), the pattern is described by: + + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off + + The result of the operation is described after "->" string. + + The default is to return the current pixel value, which is + returned if no other match is found. + + Operations: + + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring + + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() + + """ + def __init__(self, patterns=None, op_name=None): + if patterns is not None: + self.patterns = patterns + else: + self.patterns = [] + self.lut = None + if op_name is not None: + known_patterns = { + 'corner': ['1:(... ... ...)->0', + '4:(00. 01. ...)->1'], + 'dilation4': ['4:(... .0. .1.)->1'], + 'dilation8': ['4:(... .0. .1.)->1', + '4:(... .0. ..1)->1'], + 'erosion4': ['4:(... .1. .0.)->0'], + 'erosion8': ['4:(... .1. .0.)->0', + '4:(... .1. ..0)->0'], + 'edge': ['1:(... ... ...)->0', + '4:(.0. .1. ...)->1', + '4:(01. .1. ...)->1'] + } + if op_name not in known_patterns: + raise Exception('Unknown pattern '+op_name+'!') + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns): + self.patterns += patterns + + def build_default_lut(self): + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray([symbols[(i & m) > 0] for i in range(LUT_SIZE)]) + + def get_lut(self): + return self.lut + + def _string_permute(self, pattern, permutation): + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. + """ + assert(len(permutation) == 9) + return ''.join([pattern[p] for p in permutation]) + + def _pattern_permute(self, basic_pattern, options, basic_result): + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. 
It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if '4' in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], [6, 3, 0, + 7, 4, 1, + 8, 5, 2]), res)) + # mirror + if 'M' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + patterns.append( + (self._string_permute(pattern, [2, 1, 0, + 5, 4, 3, + 8, 7, 6]), res)) + + # negate + if 'N' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + # Swap 0 and 1 + pattern = (pattern + .replace('0', 'Z') + .replace('1', '0') + .replace('Z', '1')) + res = '%d' % (1-int(res)) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self): + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search( + r'(\w*):?\s*\((.+?)\)\s*->\s*(\d)', p.replace('\n', '')) + if not m: + raise Exception('Syntax error in pattern "'+p+'"') + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(' ', '').replace('\n', '') + + patterns += self._pattern_permute(pattern, options, result) + +# # Debugging +# for p,r in patterns: +# print p,r +# print '--' + + # compile the patterns into regular expressions for speed + for i in range(len(patterns)): + p = patterns[i][0].replace('.', 'X').replace('X', '[01]') + p = re.compile(p) + patterns[i] = (p, patterns[i][1]) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ('0'*(9-len(bitpattern)) + bitpattern)[::-1] + + for p, r in patterns: + if p.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp(object): + """A class for binary morphological operators""" + + def __init__(self, + lut=None, + op_name=None, + patterns=None): + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image): + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply( + bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image): + """Get a list of coordinates matching the morphological operation on + an image. 
+ + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image): + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename): + """Load an operator from an mrl file""" + with open(filename, 'rb') as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != 8192: + self.lut = None + raise Exception('Wrong size operator file!') + + def save_lut(self, filename): + """Save an operator to an mrl file""" + if self.lut is None: + raise Exception('No operator loaded') + with open(filename, 'wb') as f: + f.write(self.lut) + + def set_lut(self, lut): + """Set the lut from an external source""" + self.lut = lut + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageOps.py b/server/www/packages/packages-darwin/x64/PIL/ImageOps.py new file mode 100644 index 0000000..f317645 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageOps.py @@ -0,0 +1,461 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isStringType +import operator +import functools + + +# +# helpers + +def _border(border): + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color, mode): + if isStringType(color): + from PIL import ImageColor + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image, lut): + if image.mode == "P": + # FIXME: apply to lookup table, not image data + raise NotImplementedError("mode P support coming soon") + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + raise IOError("not supported for this image mode") + +# +# actions + + +def autocontrast(image, cutoff=0, ignore=None): + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image, removes **cutoff** percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: How many percent to cut off from the histogram. + :param ignore: The background pixel value (use None for no background). + :return: An image. 
+ """ + histogram = image.histogram() + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer:layer+256] + if ignore is not None: + # get rid of outliers + try: + h[ignore] = 0 + except TypeError: + # assume sequence + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = n * cutoff // 100 + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the hi end + cut = n * cutoff // 100 + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize(image, black, white): + """ + Colorize grayscale image. The **black** and **white** + arguments should be RGB tuples; this function calculates a color + wedge mapping all black pixels in the source image to the first + color, and all white pixels to the second color. + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :return: An image. + """ + assert image.mode == "L" + black = _color(black, "RGB") + white = _color(white, "RGB") + red = [] + green = [] + blue = [] + for i in range(256): + red.append(black[0]+i*(white[0]-black[0])//255) + green.append(black[1]+i*(white[1]-black[1])//255) + blue.append(black[2]+i*(white[2]-black[2])//255) + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def crop(image, border=0): + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. + """ + left, top, right, bottom = _border(border) + return image.crop( + (left, top, image.size[0]-right, image.size[1]-bottom) + ) + + +def deform(image, deformer, resample=Image.BILINEAR): + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + **getmesh** method can be used. + :param resample: What resampling filter to use. + :return: An image. + """ + return image.transform( + image.size, Image.MESH, deformer.getmesh(image), resample + ) + + +def equalize(image, mask=None): + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. 
+ """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b:b+256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i+b] + return _lut(image, lut) + + +def expand(image, border=0, fill=0): + """ + Add border to the image + + :param image: The image to expand. + :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + out = Image.new(image.mode, (width, height), _color(fill, image.mode)) + out.paste(image, (left, top)) + return out + + +def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): + """ + Returns a sized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.NEAREST`. + :param bleed: Remove a border around the outside of the image (from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). + (0.0, 0.0) will crop from the top left corner (i.e. if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. 
+ """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # http://www.cazabon.com + + # ensure inputs are valid + if not isinstance(centering, list): + centering = [centering[0], centering[1]] + + if centering[0] > 1.0 or centering[0] < 0.0: + centering[0] = 0.50 + if centering[1] > 1.0 or centering[1] < 0.0: + centering[1] = 0.50 + + if bleed > 0.49999 or bleed < 0.0: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleedPixels = ( + int((float(bleed) * float(image.size[0])) + 0.5), + int((float(bleed) * float(image.size[1])) + 0.5) + ) + + liveArea = (0, 0, image.size[0], image.size[1]) + if bleed > 0.0: + liveArea = ( + bleedPixels[0], bleedPixels[1], image.size[0] - bleedPixels[0] - 1, + image.size[1] - bleedPixels[1] - 1 + ) + + liveSize = (liveArea[2] - liveArea[0], liveArea[3] - liveArea[1]) + + # calculate the aspect ratio of the liveArea + liveAreaAspectRatio = float(liveSize[0])/float(liveSize[1]) + + # calculate the aspect ratio of the output image + aspectRatio = float(size[0]) / float(size[1]) + + # figure out if the sides or top/bottom will be cropped off + if liveAreaAspectRatio >= aspectRatio: + # liveArea is wider than what's needed, crop the sides + cropWidth = int((aspectRatio * float(liveSize[1])) + 0.5) + cropHeight = liveSize[1] + else: + # liveArea is taller than what's needed, crop the top and bottom + cropWidth = liveSize[0] + cropHeight = int((float(liveSize[0])/aspectRatio) + 0.5) + + # make the crop + leftSide = int(liveArea[0] + (float(liveSize[0]-cropWidth) * centering[0])) + if leftSide < 0: + leftSide = 0 + topSide = int(liveArea[1] + (float(liveSize[1]-cropHeight) * centering[1])) + if topSide < 0: + topSide = 0 + + out = image.crop( + (leftSide, topSide, leftSide + cropWidth, topSide + cropHeight) + ) + + # resize the image and return it + return out.resize(size, method) + + +def flip(image): + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.FLIP_TOP_BOTTOM) + + +def grayscale(image): + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image): + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = [] + for i in range(256): + lut.append(255-i) + return _lut(image, lut) + + +def mirror(image): + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.FLIP_LEFT_RIGHT) + + +def posterize(image, bits): + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + lut = [] + mask = ~(2**(8-bits)-1) + for i in range(256): + lut.append(i & mask) + return _lut(image, lut) + + +def solarize(image, threshold=128): + """ + Invert all pixel values above a threshold. + + :param image: The image to solarize. + :param threshold: All pixels above this greyscale level are inverted. + :return: An image. + """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255-i) + return _lut(image, lut) + + +# -------------------------------------------------------------------- +# PIL USM components, from Kevin Cazabon. 
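
A short, hedged sketch of how the ImageOps helpers defined above might be chained; "input.jpg" and "thumb.jpg" are placeholder file names:

    from PIL import Image, ImageOps

    im = Image.open("input.jpg")                      # placeholder input file
    im = ImageOps.autocontrast(im, cutoff=2)          # clip 2% from each end of the histogram
    im = ImageOps.expand(im, border=8, fill="white")  # add an 8-pixel white border
    thumb = ImageOps.fit(im, (128, 128))              # crop to the requested aspect ratio and resize
    thumb.save("thumb.jpg")                           # placeholder output file
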
+ +def gaussian_blur(im, radius=None): + """ PIL_usm.gblur(im, [radius])""" + + if radius is None: + radius = 5.0 + + im.load() + + return im.im.gaussian_blur(radius) + +gblur = gaussian_blur + + +def unsharp_mask(im, radius=None, percent=None, threshold=None): + """ PIL_usm.usm(im, [radius, percent, threshold])""" + + if radius is None: + radius = 5.0 + if percent is None: + percent = 150 + if threshold is None: + threshold = 3 + + im.load() + + return im.im.unsharp_mask(radius, percent, threshold) + +usm = unsharp_mask + + +def box_blur(image, radius): + """ + Blur the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param image: The image to blur. + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + :return: An image. + """ + image.load() + + return image._new(image.im.box_blur(radius)) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImagePalette.py b/server/www/packages/packages-darwin/x64/PIL/ImagePalette.py new file mode 100644 index 0000000..fdc5a46 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImagePalette.py @@ -0,0 +1,237 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import array +from PIL import ImageColor + + +class ImagePalette(object): + """ + Color palette for palette mapped images + + :param mode: The mode to use for the Palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255 and of length ``size`` + times the number of colors in ``mode``. The list must be aligned + by channel (All R values must be contiguous in the list before G + and B values.) Defaults to 0 through 255 per channel. + :param size: An optional palette size. If given, it cannot be equal to + or greater than 256. Defaults to 0. + """ + + def __init__(self, mode="RGB", palette=None, size=0): + self.mode = mode + self.rawmode = None # if set, palette contains raw data + self.palette = palette or list(range(256))*len(self.mode) + self.colors = {} + self.dirty = None + if ((size == 0 and len(self.mode)*256 != len(self.palette)) or + (size != 0 and size != len(self.palette))): + raise ValueError("wrong palette size") + + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.colors = self.colors.copy() + new.dirty = self.dirty + + return new + + def getdata(self): + """ + Get palette contents in format suitable # for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. + """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode + ";L", self.tobytes() + + def tobytes(self): + """Convert palette to bytes. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + if hasattr(arr, 'tobytes'): + return arr.tobytes() + return arr.tostring() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def getcolor(self, color): + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(color, tuple): + try: + return self.colors[color] + except KeyError: + # allocate new color slot + if isinstance(self.palette, bytes): + self.palette = [int(x) for x in self.palette] + index = len(self.colors) + if index >= 256: + raise ValueError("cannot allocate more than 256 colors") + self.colors[color] = index + self.palette[index] = color[0] + self.palette[index+256] = color[1] + self.palette[index+512] = color[2] + self.dirty = 1 + return index + else: + raise ValueError("unknown color specifier: %r" % color) + + def save(self, fp): + """Save palette to text file. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write("# Mode: %s\n" % self.mode) + for i in range(256): + fp.write("%d" % i) + for j in range(i*len(self.mode), (i+1)*len(self.mode)): + try: + fp.write(" %d" % self.palette[j]) + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + +def raw(rawmode, data): + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + +def make_linear_lut(black, white): + lut = [] + if black == 0: + for i in range(256): + lut.append(white*i//255) + else: + raise NotImplementedError # FIXME + return lut + + +def make_gamma_lut(exp): + lut = [] + for i in range(256): + lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) + return lut + + +def negative(mode="RGB"): + palette = list(range(256)) + palette.reverse() + return ImagePalette(mode, palette * len(mode)) + + +def random(mode="RGB"): + from random import randint + palette = [] + for i in range(256*len(mode)): + palette.append(randint(0, 255)) + return ImagePalette(mode, palette) + + +def sepia(white="#fff0c0"): + r, g, b = ImageColor.getrgb(white) + r = make_linear_lut(0, r) + g = make_linear_lut(0, g) + b = make_linear_lut(0, b) + return ImagePalette("RGB", r + g + b) + + +def wedge(mode="RGB"): + return ImagePalette(mode, list(range(256)) * len(mode)) + + +def load(filename): + + # FIXME: supports GIMP gradients only + + fp = open(filename, "rb") + + lut = None + + if not lut: + try: + from PIL import GimpPaletteFile + fp.seek(0) + p = GimpPaletteFile.GimpPaletteFile(fp) + lut = p.getpalette() + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + + if not lut: + try: + from PIL import GimpGradientFile + fp.seek(0) + p = GimpGradientFile.GimpGradientFile(fp) + lut = p.getpalette() + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + + if not lut: + try: + from PIL import PaletteFile + fp.seek(0) + p = PaletteFile.PaletteFile(fp) + lut = p.getpalette() + except (SyntaxError, ValueError): + # import traceback + # 
traceback.print_exc() + pass + + if not lut: + raise IOError("cannot load palette") + + return lut # data, rawmode diff --git a/server/www/packages/packages-darwin/x64/PIL/ImagePath.py b/server/www/packages/packages-darwin/x64/PIL/ImagePath.py new file mode 100644 index 0000000..f23d014 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImagePath.py @@ -0,0 +1,66 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +# the Python class below is overridden by the C implementation. + + +class Path(object): + + def __init__(self, xy): + pass + + ## + # Compacts the path, by removing points that are close to each + # other. This method modifies the path in place. + + def compact(self, distance=2): + pass + + ## + # Gets the bounding box. + + def getbbox(self): + pass + + ## + # Maps the path through a function. + + def map(self, function): + pass + + ## + # Converts the path to Python list. + # + # @param flat By default, this function returns a list of 2-tuples + # [(x, y), ...]. If this argument is true, it returns a flat + # list [x, y, ...] instead. + # @return A list of coordinates. + + def tolist(self, flat=0): + pass + + ## + # Transforms the path. + + def transform(self, matrix): + pass + + +# override with C implementation +Path = Image.core.path diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageQt.py b/server/www/packages/packages-darwin/x64/PIL/ImageQt.py new file mode 100644 index 0000000..aece9d6 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageQt.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isPath +from io import BytesIO + +qt_is_installed = True +qt_version = None +try: + from PyQt5.QtGui import QImage, qRgba, QPixmap + from PyQt5.QtCore import QBuffer, QIODevice + qt_version = '5' +except ImportError: + try: + from PyQt4.QtGui import QImage, qRgba, QPixmap + from PyQt4.QtCore import QBuffer, QIODevice + qt_version = '4' + except ImportError: + try: + from PySide.QtGui import QImage, qRgba, QPixmap + from PySide.QtCore import QBuffer, QIODevice + qt_version = 'side' + except ImportError: + qt_is_installed = False + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. 
+ return (qRgba(r, g, b, a) & 0xffffffff) + + +# :param im A PIL Image object, or a file name +# (given either as Python string or a PyQt string object) + +def fromqimage(im): + buffer = QBuffer() + buffer.open(QIODevice.ReadWrite) + # preserve alha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, 'png') + else: + im.save(buffer, 'ppm') + + b = BytesIO() + try: + b.write(buffer.data()) + except TypeError: + # workaround for Python 2 + b.write(str(buffer.data())) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? + # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return Image.open(bytes_io) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = { + '1': 1, + 'L': 8, + 'P': 8, + }[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding) + + return b''.join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? + if str is bytes: + im = unicode(im.toUtf8(), "utf-8") + else: + im = str(im.toUtf8(), "utf-8") + if isPath(im): + im = Image.open(im) + + if im.mode == "1": + format = QImage.Format_Mono + elif im.mode == "L": + format = QImage.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = QImage.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i:i+3])) + elif im.mode == "RGB": + data = im.tobytes("raw", "BGRX") + format = QImage.Format_RGB32 + elif im.mode == "RGBA": + try: + data = im.tobytes("raw", "BGRA") + except SystemError: + # workaround for earlier versions + r, g, b, a = im.split() + im = Image.merge("RGBA", (b, g, r, a)) + format = QImage.Format_ARGB32 + else: + raise ValueError("unsupported image mode %r" % im.mode) + + # must keep a reference, or Qt will crash! + __data = data or align8to32(im.tobytes(), im.size[0], im.mode) + return { + 'data': __data, 'im': im, 'format': format, 'colortable': colortable + } + +## +# An PIL image wrapper for Qt. This is a subclass of PyQt's QImage +# class. +# +# @param im A PIL Image object, or a file name (given either as Python +# string or a PyQt string object). + +if qt_is_installed: + class ImageQt(QImage): + + def __init__(self, im): + im_data = _toqclass_helper(im) + QImage.__init__(self, + im_data['data'], im_data['im'].size[0], + im_data['im'].size[1], im_data['format']) + if im_data['colortable']: + self.setColorTable(im_data['colortable']) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. 
For now using a dumb approach. + # im_data = _toqclass_helper(im) + # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) + # result.loadFromData(im_data['data']) + # Fix some strange bug that causes + if im.mode == 'RGB': + im = im.convert('RGBA') + + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageSequence.py b/server/www/packages/packages-darwin/x64/PIL/ImageSequence.py new file mode 100644 index 0000000..256bcbe --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageSequence.py @@ -0,0 +1,42 @@ +# +# The Python Imaging Library. +# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## + + +class Iterator(object): + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. + """ + + def __init__(self, im): + if not hasattr(im, "seek"): + raise AttributeError("im must have seek method") + self.im = im + + def __getitem__(self, ix): + try: + if ix: + self.im.seek(ix) + return self.im + except EOFError: + raise IndexError # end of sequence diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageShow.py b/server/www/packages/packages-darwin/x64/PIL/ImageShow.py new file mode 100644 index 0000000..51417c3 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageShow.py @@ -0,0 +1,179 @@ +# +# The Python Imaging Library. +# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image +import os +import sys + +if sys.version_info >= (3, 3): + from shlex import quote +else: + from pipes import quote + +_viewers = [] + + +def register(viewer, order=1): + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + elif order < 0: + _viewers.insert(0, viewer) + + +## +# Displays a given image. +# +# @param image An image object. +# @param title Optional title. Not all viewers can display the title. +# @param **options Additional viewer options. +# @return True if a suitable viewer was found, false otherwise. + +def show(image, title=None, **options): + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return 1 + return 0 + + +## +# Base class for viewers. + +class Viewer(object): + + # main api + + def show(self, image, **options): + + # save temporary image to disk + if image.mode[:4] == "I;16": + # @PIL88 @PIL101 + # "I;16" isn't an 'official' mode, but we still want to + # provide a simple way to show 16-bit images. + base = "L" + # FIXME: auto-contrast if max() > 255? 
+ else: + base = Image.getmodebase(image.mode) + if base != image.mode and image.mode != "1": + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format = None + + def get_format(self, image): + # return format name, or None to save as PGM/PPM + return self.format + + def get_command(self, file, **options): + raise NotImplementedError + + def save_image(self, image): + # save to temporary file, and return filename + return image._dump(format=self.get_format(image)) + + def show_image(self, image, **options): + # display given image + return self.show_file(self.save_image(image), **options) + + def show_file(self, file, **options): + # display given file + os.system(self.get_command(file, **options)) + return 1 + +# -------------------------------------------------------------------- + +if sys.platform == "win32": + + class WindowsViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + return ('start "Pillow" /WAIT "%s" ' + '&& ping -n 2 127.0.0.1 >NUL ' + '&& del /f "%s"' % (file, file)) + + register(WindowsViewer) + +elif sys.platform == "darwin": + + class MacViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a /Applications/Preview.app" + command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), + quote(file)) + return command + + register(MacViewer) + +else: + + # unixoids + + def which(executable): + path = os.environ.get("PATH") + if not path: + return None + for dirname in path.split(os.pathsep): + filename = os.path.join(dirname, executable) + if os.path.isfile(filename): + # FIXME: make sure it's executable + return filename + return None + + class UnixViewer(Viewer): + def show_file(self, file, **options): + command, executable = self.get_command_ex(file, **options) + command = "(%s %s; rm -f %s)&" % (command, quote(file), + quote(file)) + os.system(command) + return 1 + + # implementations + + class DisplayViewer(UnixViewer): + def get_command_ex(self, file, **options): + command = executable = "display" + return command, executable + + if which("display"): + register(DisplayViewer) + + class XVViewer(UnixViewer): + def get_command_ex(self, file, title=None, **options): + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += " -name %s" % quote(title) + return command, executable + + if which("xv"): + register(XVViewer) + +if __name__ == "__main__": + # usage: python ImageShow.py imagefile [title] + print(show(Image.open(sys.argv[1]), *sys.argv[2:])) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageStat.py b/server/www/packages/packages-darwin/x64/PIL/ImageStat.py new file mode 100644 index 0000000..f3c138b --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageStat.py @@ -0,0 +1,147 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996-97. 
+# +# See the README file for information on usage and redistribution. +# + +import math +import operator +import functools + + +class Stat(object): + + def __init__(self, image_or_list, mask=None): + try: + if mask: + self.h = image_or_list.histogram(mask) + else: + self.h = image_or_list.histogram() + except AttributeError: + self.h = image_or_list # assume it to be a histogram list + if not isinstance(self.h, list): + raise TypeError("first argument must be image or list") + self.bands = list(range(len(self.h) // 256)) + + def __getattr__(self, id): + "Calculate missing attribute" + if id[:4] == "_get": + raise AttributeError(id) + # calculate missing attribute + v = getattr(self, "_get" + id)() + setattr(self, id, v) + return v + + def _getextrema(self): + "Get min/max values for each band in the image" + + def minmax(histogram): + n = 255 + x = 0 + for i in range(256): + if histogram[i]: + n = min(n, i) + x = max(x, i) + return n, x # returns (255, 0) if there's no data in the histogram + + v = [] + for i in range(0, len(self.h), 256): + v.append(minmax(self.h[i:])) + return v + + def _getcount(self): + "Get total number of pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + v.append(functools.reduce(operator.add, self.h[i:i+256])) + return v + + def _getsum(self): + "Get sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + layerSum = 0.0 + for j in range(256): + layerSum += j * self.h[i + j] + v.append(layerSum) + return v + + def _getsum2(self): + "Get squared sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j ** 2) * float(self.h[i + j]) + v.append(sum2) + return v + + def _getmean(self): + "Get average pixel level for each layer" + + v = [] + for i in self.bands: + v.append(self.sum[i] / self.count[i]) + return v + + def _getmedian(self): + "Get median pixel level for each layer" + + v = [] + for i in self.bands: + s = 0 + l = self.count[i]//2 + b = i * 256 + for j in range(256): + s = s + self.h[b+j] + if s > l: + break + v.append(j) + return v + + def _getrms(self): + "Get RMS for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.sum2[i] / self.count[i])) + return v + + def _getvar(self): + "Get variance for each layer" + + v = [] + for i in self.bands: + n = self.count[i] + v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n) + return v + + def _getstddev(self): + "Get standard deviation for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.var[i])) + return v + +Global = Stat # compatibility diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageTk.py b/server/www/packages/packages-darwin/x64/PIL/ImageTk.py new file mode 100644 index 0000000..68d388e --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageTk.py @@ -0,0 +1,292 @@ +# +# The Python Imaging Library. 
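
The Stat class above computes per-band statistics lazily through __getattr__. A short usage sketch, assuming a Pillow build that provides Image.linear_gradient; the gradient image is purely illustrative.

from PIL import Image, ImageStat

im = Image.linear_gradient("L").convert("RGB")   # 256x256 vertical ramp, values 0..255
stat = ImageStat.Stat(im)

print(stat.extrema)   # [(0, 255), (0, 255), (0, 255)] - per-band min/max
print(stat.mean)      # per-band mean, computed lazily on first access
print(stat.rms)       # per-band root-mean-square
print(stat.stddev)    # per-band standard deviation
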
+# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +try: + import tkinter +except ImportError: + import Tkinter + tkinter = Tkinter + del Tkinter + +from PIL import Image + + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check(): + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data="PIL:%d" % im.im.id) + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +# -------------------------------------------------------------------- +# PhotoImage + +class PhotoImage(object): + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the **file** or **data** options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). + """ + + def __init__(self, image=None, size=None, **kw): + + # Tk compatibility: file or data + if image is None: + if "file" in kw: + image = Image.open(kw["file"]) + del kw["file"] + elif "data" in kw: + from io import BytesIO + image = Image.open(BytesIO(kw["data"])) + del kw["data"] + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def __str__(self): + """ + Get the Tkinter photo image identifier. This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. 
+ + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im, box=None): + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and lower pixel + coordinate. If None is given instead of a tuple, all of + the image is assumed. + """ + + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + tk = self.__photo.tk + + try: + tk.call("PyImagingPhoto", self.__photo, block.id) + except tkinter.TclError: + # activate Tkinter hook + try: + from PIL import _imagingtk + try: + _imagingtk.tkinit(tk.interpaddr(), 1) + except AttributeError: + _imagingtk.tkinit(id(tk), 0) + tk.call("PyImagingPhoto", self.__photo, block.id) + except (ImportError, AttributeError, tkinter.TclError): + raise # configuration problem; cannot attach to Tkinter + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage(object): + """ + + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is **foreground**, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. + """ + + def __init__(self, image=None, **kw): + + # Tk compatibility: file or data + if image is None: + if "file" in kw: + image = Image.open(kw["file"]) + del kw["file"] + elif "data" in kw: + from io import BytesIO + image = Image.open(BytesIO(kw["data"])) + del kw["data"] + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = "PIL:%d" % image.im.id + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self): + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). 
+ """ + return str(self.__photo) + + +def getimage(photo): + """Copies the contents of a PhotoImage to a PIL image memory.""" + photo.tk.call("PyImagingPhotoGet", photo) + + +# -------------------------------------------------------------------- +# Helper for the Image.show method. + +def _show(image, title): + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + tkinter.Label.__init__(self, master, image=self.image, + bg="black", bd=0) + + if not tkinter._default_root: + raise IOError("tkinter not initialized") + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageTransform.py b/server/www/packages/packages-darwin/x64/PIL/ImageTransform.py new file mode 100644 index 0000000..81f9050 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageTransform.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class Transform(Image.ImageTransformHandler): + def __init__(self, data): + self.data = data + + def getdata(self): + return self.method, self.data + + def transform(self, size, image, **options): + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +## +# Define an affine image transform. +#

+# This function takes a 6-tuple (a, b, c, d, e, f) which +# contains the first two rows from an affine transform matrix. For +# each pixel (x, y) in the output image, the new value is +# taken from the position (a*x + b*y + c, +# d*x + e*y + f) in the input image, rounded to +# the nearest pixel. +#

+# This function can be used to scale, translate, rotate, and shear the +# original image. +# +# @def AffineTransform(matrix) +# @param matrix A 6-tuple (a, b, c, d, e, f) containing +# the first two rows from an affine transform matrix. +# @see Image#Image.transform + + +class AffineTransform(Transform): + method = Image.AFFINE + + +## +# Define a transform to extract a subregion from an image. +#

+# Maps a rectangle (defined by two corners) from the image to a +# rectangle of the given size. The resulting image will contain +# data sampled from between the corners, such that (x0, y0) +# in the input image will end up at (0,0) in the output image, +# and (x1, y1) at size. +#

+# This method can be used to crop, stretch, shrink, or mirror an +# arbitrary rectangle in the current image. It is slightly slower than +# crop, but about as fast as a corresponding resize +# operation. +# +# @def ExtentTransform(bbox) +# @param bbox A 4-tuple (x0, y0, x1, y1) which specifies +# two points in the input image's coordinate system. +# @see Image#Image.transform + +class ExtentTransform(Transform): + method = Image.EXTENT + + +## +# Define an quad image transform. +#

+# Maps a quadrilateral (a region defined by four corners) from the +# image to a rectangle of the given size. +# +# @def QuadTransform(xy) +# @param xy An 8-tuple (x0, y0, x1, y1, x2, y2, y3, y3) which +# contain the upper left, lower left, lower right, and upper right +# corner of the source quadrilateral. +# @see Image#Image.transform + +class QuadTransform(Transform): + method = Image.QUAD + + +## +# Define an mesh image transform. A mesh transform consists of one +# or more individual quad transforms. +# +# @def MeshTransform(data) +# @param data A list of (bbox, quad) tuples. +# @see Image#Image.transform + +class MeshTransform(Transform): + method = Image.MESH diff --git a/server/www/packages/packages-darwin/x64/PIL/ImageWin.py b/server/www/packages/packages-darwin/x64/PIL/ImageWin.py new file mode 100644 index 0000000..58894d6 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImageWin.py @@ -0,0 +1,239 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class HDC(object): + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + def __init__(self, dc): + self.dc = dc + + def __int__(self): + return self.dc + + +class HWND(object): + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + def __init__(self, wnd): + self.wnd = wnd + + def __int__(self): + return self.wnd + + +class Dib(object): + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 greylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 greylevels. + + To make sure that palettes work properly under Windows, you must call the + **palette** method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. + """ + + def __init__(self, image, size=None): + if hasattr(image, "mode") and hasattr(image, "size"): + mode = image.mode + size = image.size + else: + mode = image + image = None + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use the + :py:meth:`CDC.GetHandleAttrib` to get a suitable handle. 
+ """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. If the source and + the destination have different sizes, the image is resized as + necessary. + """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste(self, im, box=None): + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. If None is given instead of a + tuple, all of the image is assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer): + """ + Load display memory contents from byte data. + + :param buffer: A buffer containing display data (usually + data returned from tobytes) + """ + return self.image.frombytes(buffer) + + def tobytes(self): + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. + """ + return self.image.tobytes() + + def fromstring(self, *args, **kw): + raise Exception("fromstring() has been removed. " + + "Please use frombytes() instead.") + + def tostring(self, *args, **kw): + raise Exception("tostring() has been removed. " + + "Please use tobytes() instead.") + + +## +# Create a Window with the given title size. 
+ +class Window(object): + + def __init__(self, title="PIL", width=None, height=None): + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, "ui_handle_" + action)(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self): + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self): + Image.core.eventloop() + + +## +# Create an image window which displays the given image. + +class ImageWindow(Window): + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + Window.__init__(self, title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/server/www/packages/packages-darwin/x64/PIL/ImtImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/ImtImagePlugin.py new file mode 100644 index 0000000..63e8924 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/ImtImagePlugin.py @@ -0,0 +1,95 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from PIL import Image, ImageFile + +__version__ = "0.2" + + +# +# -------------------------------------------------------------------- + +field = re.compile(br"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. + +class ImtImageFile(ImageFile.ImageFile): + + format = "IMT" + format_description = "IM Tools" + + def _open(self): + + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + xsize = ysize = 0 + + while True: + + s = self.fp.read(1) + if not s: + break + + if s == b'\x0C': + + # image data begins + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), + (self.mode, 0, 1))] + + break + + else: + + # read key/value pair + # FIXME: dangerous, may read whole file + s = s + self.fp.readline() + if len(s) == 1 or len(s) > 100: + break + if s[0] == b"*": + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == "width": + xsize = int(v) + self.size = xsize, ysize + elif k == "height": + ysize = int(v) + self.size = xsize, ysize + elif k == "pixel" and v == "n8": + self.mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/server/www/packages/packages-darwin/x64/PIL/IptcImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/IptcImagePlugin.py new file mode 100644 index 0000000..56d1de4 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/IptcImagePlugin.py @@ -0,0 +1,267 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image, ImageFile, _binary +import os +import tempfile + +__version__ = "0.3" + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be +o8 = _binary.o8 + +COMPRESSION = { + 1: "raw", + 5: "jpeg" +} + +PAD = o8(0) * 4 + + +# +# Helpers + +def i(c): + return i32((PAD + c)[-4:]) + + +def dump(c): + for i in c: + print("%02x" % i8(i), end=' ') + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. + +class IptcImageFile(ImageFile.ImageFile): + + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key): + return i(self.info[key]) + + def field(self): + # + # get a IPTC field header + s = self.fp.read(5) + if not len(s): + return None, 0 + + tag = i8(s[1]), i8(s[2]) + + # syntax + if i8(s[0]) != 0x1C or tag[0] < 1 or tag[0] > 9: + raise SyntaxError("invalid IPTC/NAA file") + + # field size + size = i8(s[3]) + if size > 132: + raise IOError("illegal field length in IPTC/NAA file") + elif size == 128: + size = 0 + elif size > 128: + size = i(self.fp.read(size-128)) + else: + size = i16(s[3:]) + + return tag, size + + def _open(self): + + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in list(self.info.keys()): + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # print tag, self.info[tag] + + # mode + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0])-1 + else: + id = 0 + if layers == 1 and not component: + self.mode = "L" + elif layers == 3 and component: + self.mode = "RGB"[id] + elif layers == 4 and component: + self.mode = "CMYK"[id] + + # size + self.size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] + except KeyError: + raise IOError("Unknown IPTC image compression") + + # tile + if tag == (8, 10): + self.tile = [("iptc", (compression, offset), + (0, 0, self.size[0], self.size[1]))] + + def load(self): + + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + type, tile, box = self.tile[0] + + encoding, offset = tile + + self.fp.seek(offset) + + # Copy image data to temporary file + o_fd, outfile = tempfile.mkstemp(text=False) + o = os.fdopen(o_fd) + if encoding == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write("P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + o.close() + + try: + try: + # fast + self.im = Image.core.open_ppm(outfile) + except: + # slightly slower + im = Image.open(outfile) + im.load() + self.im = im.im + finally: + try: + os.unlink(outfile) + except OSError: + pass + + +Image.register_open(IptcImageFile.format, IptcImageFile) + 
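
The field() method above packs each dataset length into a one-byte, two-byte, or extended form. A standalone restatement of those rules in Python 3 syntax (the plugin itself also supports Python 2); the sample header bytes are illustrative only.

import io
import struct

def parse_field_header(s, fp):
    # s: the 5 header bytes already read; fp: the stream positioned just after them
    if s[0:1] != b"\x1c" or not (1 <= s[1] <= 9):
        raise SyntaxError("invalid IPTC/NAA field header")
    tag = (s[1], s[2])                     # (record number, dataset number)
    size = s[3]
    if size > 132:
        raise IOError("illegal field length")
    elif size == 128:
        size = 0
    elif size > 128:
        # extended form: the next (size - 128) bytes carry the real length
        size = int.from_bytes(fp.read(size - 128), "big")
    else:
        size = struct.unpack(">H", s[3:5])[0]
    return tag, size

header = b"\x1c\x02\x78\x00\x40"           # record 2, dataset 120 (caption), length 64
print(parse_field_header(header, io.BytesIO(b"")))   # ((2, 120), 64)
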
+Image.register_extension(IptcImageFile.format, ".iim") + + +## +# Get IPTC information from TIFF, JPEG, or IPTC file. +# +# @param im An image containing IPTC data. +# @return A dictionary containing IPTC information, or None if +# no IPTC information block was found. + +def getiptcinfo(im): + + from PIL import TiffImagePlugin, JpegImagePlugin + import io + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + try: + app = im.app["APP13"] + if app[:14] == b"Photoshop 3.0\x00": + app = app[14:] + # parse the image resource block + offset = 0 + while app[offset:offset+4] == b"8BIM": + offset += 4 + # resource code + code = i16(app, offset) + offset += 2 + # resource name (usually empty) + name_len = i8(app[offset]) + # name = app[offset+1:offset+1+name_len] + offset = 1 + offset + name_len + if offset & 1: + offset += 1 + # resource data block + size = i32(app, offset) + offset += 4 + if code == 0x0404: + # 0x0404 contains IPTC/NAA data + data = app[offset:offset+size] + break + offset = offset + size + if offset & 1: + offset += 1 + except (AttributeError, KeyError): + pass + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) + try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage(object): + pass + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = io.BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/server/www/packages/packages-darwin/x64/PIL/Jpeg2KImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000..b82acdd --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,276 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. 
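
With getiptcinfo above, IPTC records come back keyed by (record, dataset) tuples. A brief sketch of reading a caption and keywords from a JPEG; the file name and the presence of IPTC data in it are assumptions.

from PIL import Image, IptcImagePlugin

im = Image.open("photo.jpg")                 # hypothetical file assumed to carry IPTC data
info = IptcImagePlugin.getiptcinfo(im)
if info:
    caption = info.get((2, 120))             # Caption/Abstract, returned as bytes
    keywords = info.get((2, 25))             # Keywords; a list of bytes values if repeated
    print(caption, keywords)
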
+# +from PIL import Image, ImageFile +import struct +import os +import io + +__version__ = "0.1" + + +def _parse_codestream(fp): + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = struct.unpack('>H', hdr)[0] + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, xtsiz, ytsiz, \ + xtosiz, ytosiz, csiz \ + = struct.unpack('>HHIIIIIIIIH', siz[:38]) + ssiz = [None]*csiz + xrsiz = [None]*csiz + yrsiz = [None]*csiz + for i in range(csiz): + ssiz[i], xrsiz[i], yrsiz[i] \ + = struct.unpack('>BBB', siz[36 + 3 * i:39 + 3 * i]) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + if (yrsiz[0] & 0x7f) > 8: + mode = 'I;16' + else: + mode = 'L' + elif csiz == 2: + mode = 'LA' + elif csiz == 3: + mode = 'RGB' + elif csiz == 4: + mode = 'RGBA' + else: + mode = None + + return (size, mode) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count and + color space information, returning a PIL (size, mode) tuple.""" + + # Find the JP2 header box + header = None + while True: + lbox, tbox = struct.unpack('>I4s', fp.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', fp.read(8))[0] + hlen = 16 + else: + hlen = 8 + + if lbox < hlen: + raise SyntaxError('Invalid JP2 header length') + + if tbox == b'jp2h': + header = fp.read(lbox - hlen) + break + else: + fp.seek(lbox - hlen, os.SEEK_CUR) + + if header is None: + raise SyntaxError('could not find JP2 header') + + size = None + mode = None + bpc = None + + hio = io.BytesIO(header) + while True: + lbox, tbox = struct.unpack('>I4s', hio.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', hio.read(8))[0] + hlen = 16 + else: + hlen = 8 + + content = hio.read(lbox - hlen) + + if tbox == b'ihdr': + height, width, nc, bpc, c, unkc, ipr \ + = struct.unpack('>IIHBBBB', content) + size = (width, height) + if unkc: + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif tbox == b'colr': + meth, prec, approx = struct.unpack('>BBB', content[:3]) + if meth == 1: + cs = struct.unpack('>I', content[3:7])[0] + if cs == 16: # sRGB + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif cs == 17: # grayscale + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + break + elif cs == 18: # sYCC + if nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + + return (size, mode) + +## +# Image plugin for JPEG2000 images. 
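
The plugin class below decides between the two JPEG 2000 flavours by signature. A small sketch of that sniffing logic as a standalone helper; the helper name and input path are illustrative.

J2K_SIG = b"\xff\x4f\xff\x51"                        # SOC marker followed by SIZ
JP2_SIG = b"\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a"    # 12-byte JP2 signature box

def sniff_jpeg2000(path):
    # hypothetical helper: report which JPEG 2000 flavour a file looks like
    with open(path, "rb") as fp:
        head = fp.read(12)
    if head.startswith(J2K_SIG):
        return "j2k codestream"
    if head == JP2_SIG:
        return "jp2 container"
    return None
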
+ + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self): + sig = self.fp.read(4) + if sig == b'\xff\x4f\xff\x51': + self.codec = "j2k" + self.size, self.mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + self.codec = "jp2" + self.size, self.mode = _parse_jp2_header(self.fp) + else: + raise SyntaxError('not a JPEG 2000 file') + + if self.size is None or self.mode is None: + raise SyntaxError('unable to determine size/mode') + + self.reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, 2) + length = self.fp.tell() + self.fp.seek(pos, 0) + except: + length = -1 + + self.tile = [('jpeg2k', (0, 0) + self.size, 0, + (self.codec, self.reduce, self.layers, fd, length))] + + def load(self): + if self.reduce: + power = 1 << self.reduce + adjust = power >> 1 + self.size = (int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power)) + + if self.tile: + # Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self.reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + ImageFile.ImageFile.load(self) + + +def _accept(prefix): + return (prefix[:4] == b'\xff\x4f\xff\x51' or + prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') + + +# ------------------------------------------------------------ +# Save support + +def _save(im, fp, filename): + if filename.endswith('.j2k'): + kind = 'j2k' + else: + kind = 'jp2' + + # Get the keyword arguments + info = im.encoderinfo + + offset = info.get('offset', None) + tile_offset = info.get('tile_offset', None) + tile_size = info.get('tile_size', None) + quality_mode = info.get('quality_mode', 'rates') + quality_layers = info.get('quality_layers', None) + num_resolutions = info.get('num_resolutions', 0) + cblk_size = info.get('codeblock_size', None) + precinct_size = info.get('precinct_size', None) + irreversible = info.get('irreversible', False) + progression = info.get('progression', 'LRCP') + cinema_mode = info.get('cinema_mode', 'no') + fd = -1 + + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + fd + ) + + ImageFile._save(im, fp, [('jpeg2k', (0, 0)+im.size, 0, kind)]) + +# ------------------------------------------------------------ +# Registry stuff + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) + +Image.register_extension(Jpeg2KImageFile.format, '.jp2') +Image.register_extension(Jpeg2KImageFile.format, '.j2k') +Image.register_extension(Jpeg2KImageFile.format, '.jpc') +Image.register_extension(Jpeg2KImageFile.format, '.jpf') +Image.register_extension(Jpeg2KImageFile.format, '.jpx') +Image.register_extension(Jpeg2KImageFile.format, '.j2c') + +Image.register_mime(Jpeg2KImageFile.format, 'image/jp2') +Image.register_mime(Jpeg2KImageFile.format, 'image/jpx') diff --git a/server/www/packages/packages-darwin/x64/PIL/JpegImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/JpegImagePlugin.py new file mode 100644 index 0000000..beda4f7 --- /dev/null +++ 
b/server/www/packages/packages-darwin/x64/PIL/JpegImagePlugin.py @@ -0,0 +1,753 @@ +# +# The Python Imaging Library. +# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import array +import struct +import io +import warnings +from struct import unpack_from +from PIL import Image, ImageFile, TiffImagePlugin, _binary +from PIL.JpegPresets import presets +from PIL._util import isStringType + +i8 = _binary.i8 +o8 = _binary.o8 +i16 = _binary.i16be +i32 = _binary.i32be + +__version__ = "0.6" + + +# +# Parser + +def Skip(self, marker): + n = i16(self.fp.read(2))-2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = i8(s[7]) + jfif_density = i16(s, 8), i16(s, 10) + except: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:5] == b"Exif\0": + # extract Exif information (incomplete) + self.info["exif"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. 
The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. + self.icclist.append(s) + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = i8(s[1]) + except: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + +def COM(self, marker): + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self, marker): + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + self.size = i16(s[3:]), i16(s[1:]) + + self.bits = i8(s[0]) + if self.bits != 8: + raise SyntaxError("cannot handle %d-bit layers" % self.bits) + + self.layers = i8(s[5]) + if self.layers == 1: + self.mode = "L" + elif self.layers == 3: + self.mode = "RGB" + elif self.layers == 4: + self.mode = "CMYK" + else: + raise SyntaxError("cannot handle %d-layer images" % self.layers) + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if i8(self.icclist[0][13]) == len(self.icclist): + profile = [] + for p in self.icclist: + profile.append(p[14:]) + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = None + + for i in range(6, len(s), 3): + t = s[i:i+3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], i8(t[1])//16, i8(t[1]) & 15, i8(t[2]))) + + +def DQT(self, marker): + # + # Define quantization table. Support baseline 8-bit tables + # only. Note that there might be more than one table in + # each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + if len(s) < 65: + raise SyntaxError("bad quantization table marker") + v = i8(s[0]) + if v//16 == 0: + self.quantization[v & 15] = array.array("b", s[1:65]) + s = s[65:] + else: + return # FIXME: add code to read 16-bit tables! 
+ # raise SyntaxError, "bad quantization table element size" + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), + 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM) +} + + +def _accept(prefix): + return prefix[0:1] == b"\377" + + +## +# Image plugin for JPEG and JFIF images. 
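
A minimal sketch of walking a JPEG's header segments with the MARKER table above. It assumes a well-formed file with no fill bytes between segments, stops at SOS, and the input path is illustrative.

import struct
from PIL.JpegImagePlugin import MARKER

def list_markers(path):
    with open(path, "rb") as fp:
        if fp.read(2) != b"\xff\xd8":                  # SOI
            raise SyntaxError("not a JPEG file")
        while True:
            marker, = struct.unpack(">H", fp.read(2))
            name, description, _handler = MARKER.get(marker, ("?", "unknown", None))
            print("0x%04X %-6s %s" % (marker, name, description))
            if marker == 0xFFDA:                       # SOS: entropy-coded data follows
                break
            length, = struct.unpack(">H", fp.read(2))
            fp.seek(length - 2, 1)                     # skip the segment payload

list_markers("photo.jpg")                              # hypothetical input path
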
+ +class JpegImageFile(ImageFile.ImageFile): + + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + + s = self.fp.read(1) + + if i8(s) != 255: + raise SyntaxError("not a JPEG file") + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + + i = i8(s) + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + # print hex(i), name, description + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, + (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i == 0 or i == 0xFFFF: + # padded marker or junk; move on + s = b"\xff" + else: + raise SyntaxError("no marker found") + + def draft(self, mode, size): + + if len(self.tile) != 1: + return + + d, e, o, a = self.tile[0] + scale = 0 + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self.mode = mode + a = mode, "" + + if size: + scale = max(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1] + self.size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + return self + + def load_djpeg(self): + + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + import subprocess + import tempfile + import os + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + raise ValueError("Invalid Filename") + + try: + self.im = Image.core.open_ppm(path) + finally: + try: + os.unlink(path) + except OSError: + pass + + self.mode = self.im.mode + self.size = self.im.size + + self.tile = [] + + def _getexif(self): + return _getexif(self) + + def _getmp(self): + return _getmp(self) + + +def _fixup_dict(src_dict): + # Helper function for _getexif() + # returns a dict with any single item tuples/lists as individual values + def _fixup(value): + try: + if len(value) == 1 and type(value) != type({}): + return value[0] + except: pass + return value + + return dict([(k, _fixup(v)) for k, v in src_dict.items()]) + + +def _getexif(self): + # Extract EXIF information. This method is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). + try: + data = self.info["exif"] + except KeyError: + return None + file = io.BytesIO(data[6:]) + head = file.read(8) + # process dictionary + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif = dict(_fixup_dict(info)) + # get exif extension + try: + # exif field 0x8769 is an offset pointer to the location + # of the nested embedded exif ifd. + # It should be a long, but may be corrupted. 
+ file.seek(exif[0x8769]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif.update(_fixup_dict(info)) + # get gpsinfo extension + try: + # exif field 0x8825 is an offset pointer to the location + # of the nested embedded gps exif ifd. + # It should be a long, but may be corrupted. + file.seek(exif[0x8825]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif[0x8825] = _fixup_dict(info) + + return exif + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plug-in. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. + try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<' + # process dictionary + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + info.load(file_contents) + mp = dict(info) + except: + raise SyntaxError("malformed MP Index (unreadable directory)") + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError: + raise SyntaxError("malformed MP Index (no number of images)") + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = unpack_from( + '{0}LLLHH'.format(endianness), rawmpentries, entrynum * 16) + labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1', + 'EntryNo2') + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + 'DependentParentImageFlag': bool(mpentry['Attribute'] & + (1 << 31)), + 'DependentChildImageFlag': bool(mpentry['Attribute'] & + (1 << 30)), + 'RepresentativeImageFlag': bool(mpentry['Attribute'] & + (1 << 29)), + 'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27, + 'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24, + 'MPType': mpentry['Attribute'] & 0x00FFFFFF + } + if mpentryattr['ImageDataFormat'] == 0: + mpentryattr['ImageDataFormat'] = 'JPEG' + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: 'Undefined', + 0x010001: 'Large Thumbnail (VGA Equivalent)', + 0x010002: 'Large Thumbnail (Full HD Equivalent)', + 0x020001: 'Multi-Frame Image (Panorama)', + 0x020002: 'Multi-Frame Image: (Disparity)', + 0x020003: 'Multi-Frame Image: (Multi-Angle)', + 0x030000: 'Baseline MP Primary Image' + } + mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'], + 'Unknown') + mpentry['Attribute'] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError: + raise SyntaxError("malformed MP Index (bad MP Entry)") + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. 
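Each MP entry unpacked above is a 16-byte record whose 32-bit Attribute word packs several bit fields: flags in bits 31-29, reserved bits 28-27, the image data format in bits 26-24, and the MP type in the low 24 bits. A small sketch decoding one such word; the entry bytes are fabricated for illustration, not taken from a real MPO file:

```python
from struct import unpack_from

# fabricated 16-byte MP entry: attribute, size, data offset, two entry numbers
entry = bytes.fromhex("20030000" "0001e240" "00000000" "0000" "0000")
attribute, size, data_offset, dep1, dep2 = unpack_from(">LLLHH", entry, 0)

print(bool(attribute & (1 << 29)))    # RepresentativeImageFlag -> True
print((attribute & (7 << 24)) >> 24)  # ImageDataFormat -> 0, i.e. JPEG
print(hex(attribute & 0x00FFFFFF))    # MPType -> 0x30000, "Baseline MP Primary Image"
```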
+ return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBA": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +zigzag_index = (0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63) + +samplings = {(1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, + } + + +def convert_dict_qtables(qtables): + qtables = [qtables[key] for key in range(len(qtables)) if key in qtables] + for idx, table in enumerate(qtables): + qtables[idx] = [table[i] for i in zigzag_index] + return qtables + + +def get_sampling(im): + # There's no subsampling when image have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. + if not hasattr(im, 'layers') or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im, fp, filename): + + try: + rawmode = RAWMODE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as JPEG" % im.mode) + + info = im.encoderinfo + + dpi = info.get("dpi", (0, 0)) + + quality = info.get("quality", 0) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = 0 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = 0 + subsampling = preset.get('subsampling', -1) + qtables = preset.get('quantization') + elif not isinstance(quality, int): + raise ValueError("Invalid quality setting") + else: + if subsampling in presets: + subsampling = presets[subsampling].get('subsampling', -1) + if isStringType(qtables) and qtables in presets: + qtables = presets[qtables].get('quantization') + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:1:1": + subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isStringType(qtables): + try: + lines = [int(num) for line in qtables.splitlines() + for num in line.split('#', 1)[0].split()] + except ValueError: + raise ValueError("Invalid quantization table") + else: + qtables = [lines[s:s+64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = convert_dict_qtables(qtables) + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + raise ValueError("None or too many quantization tables") + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + raise + table = array.array('b', table) + except TypeError: + raise ValueError("Invalid quantization table") + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a 
JPEG") + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = b"" + + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_BYTES_IN_MARKER = 65533 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) + extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + + o8(len(markers)) + marker) + i += 1 + + # get keyword arguments + im.encoderconfig = ( + quality, + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + "progressive" in info or "progression" in info, + info.get("smooth", 0), + "optimize" in info, + info.get("streamtype", 0), + dpi[0], dpi[1], + subsampling, + qtables, + extra, + info.get("exif", b"") + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pizel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if "optimize" in info or "progressive" in info or "progression" in info: + # keep sets quality to 0, but the actual value may be high. + if quality >= 95 or quality == 0: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + + # The exif info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(info.get("exif", b"")) + 5) + + ImageFile._save(im, fp, [("jpeg", (0, 0)+im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im, fp, filename): + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. + import os + import subprocess + tempfile = im._dump() + subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) + try: + os.unlink(tempfile) + except OSError: + pass + + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + im = MpoImageFile(fp, filename) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn("Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file") + return im + + +# -------------------------------------------------------------------q- +# Registry stuff + +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) + +Image.register_extension(JpegImageFile.format, ".jfif") +Image.register_extension(JpegImageFile.format, ".jpe") +Image.register_extension(JpegImageFile.format, ".jpg") +Image.register_extension(JpegImageFile.format, ".jpeg") + +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/server/www/packages/packages-darwin/x64/PIL/JpegPresets.py b/server/www/packages/packages-darwin/x64/PIL/JpegPresets.py new file mode 100644 index 0000000..c5a36b9 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/JpegPresets.py @@ -0,0 +1,241 @@ +""" +JPEG quality settings equivalent to the Photoshop settings. 
+ +More presets can be added to the presets dict if needed. + +Can be use when saving JPEG file. + +To apply the preset, specify:: + + quality="preset_name" + +To apply only the quantization table:: + + qtables="preset_name" + +To apply only the subsampling setting:: + + subsampling="preset_name" + +Example:: + + im.save("image_name.jpg", quality="web_high") + + +Subsampling +----------- + +Subsampling is the practice of encoding images by implementing less resolution +for chroma information than for luma information. +(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling) + +Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and +4:1:1 (or 4:2:0?). + +You can get the subsampling of a JPEG with the +`JpegImagePlugin.get_subsampling(im)` function. + + +Quantization tables +------------------- + +They are values use by the DCT (Discrete cosine transform) to remove +*unnecessary* information from the image (the lossy part of the compression). +(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices, +https://en.wikipedia.org/wiki/JPEG#Quantization) + +You can get the quantization tables of a JPEG with:: + + im.quantization + +This will return a dict with a number of arrays. You can pass this dict +directly as the qtables argument when saving a JPEG. + +The tables format between im.quantization and quantization in presets differ in +3 ways: + +1. The base container of the preset is a list with sublists instead of dict. + dict[0] -> list[0], dict[1] -> list[1], ... +2. Each table in a preset is a list instead of an array. +3. The zigzag order is remove in the preset (needed by libjpeg >= 6a). + +You can convert the dict format to the preset format with the +`JpegImagePlugin.convert_dict_qtables(dict_qtables)` function. 
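For example, the quantization tables and subsampling of an existing JPEG can be read back through the plugin helpers referred to above and reused when re-saving. A minimal sketch, where "photo.jpg" is only a placeholder and `get_sampling` is the helper actually defined in JpegImagePlugin:

```python
from PIL import Image, JpegImagePlugin

im = Image.open("photo.jpg")                      # hypothetical input file
print(JpegImagePlugin.get_sampling(im))           # 0, 1, 2 or -1 (see `samplings` above)
tables = JpegImagePlugin.convert_dict_qtables(im.quantization)
print(len(tables), [len(t) for t in tables])      # e.g. 2 tables of 64 values each
im.save("copy.jpg", quality="keep")               # reuse quality, tables and subsampling
```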
+ +Libjpeg ref.: http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +presets = { + 'web_low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 
12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} diff --git a/server/www/packages/packages-darwin/x64/PIL/McIdasImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000..b753603 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/McIdasImagePlugin.py @@ -0,0 +1,74 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. +# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +import struct +from PIL import Image, ImageFile + +__version__ = "0.2" + + +def _accept(s): + return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. 
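The `_open` method that follows parses a 256-byte McIdas area directory as 64 big-endian 32-bit words and keeps 1-based indexing by prepending a zero. A minimal standalone sketch of that header read, with a hypothetical file name:

```python
import struct

with open("area.ara", "rb") as fp:                 # hypothetical McIdas area file
    s = fp.read(256)
w = [0] + list(struct.unpack("!64i", s))           # 1-based word indexing, as in _open
print("size:", w[10], "x", w[9])                   # elements per line x number of lines
print("bytes per element:", w[11])                 # 1 -> "L", 2 -> "I;16B", 4 -> "I;32B"
```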
+ +class McIdasImageFile(ImageFile.ImageFile): + + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self): + + # parse area file directory + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + raise SyntaxError("not an McIdas area file") + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + raise SyntaxError("unsupported McIdas format") + + self.mode = mode + self.size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10]*w[11]*w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/server/www/packages/packages-darwin/x64/PIL/MicImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/MicImagePlugin.py new file mode 100644 index 0000000..3c91244 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/MicImagePlugin.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, TiffImagePlugin +from PIL.OleFileIO import MAGIC, OleFileIO + +__version__ = "0.1" + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. + +class MicImageFile(TiffImagePlugin.TiffImageFile): + + format = "MIC" + format_description = "Microsoft Image Composer" + + def _open(self): + + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an MIC file; invalid OLE file") + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [] + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) + + # if we didn't find any images, this is probably not + # an MIC file. 
+ if not self.images: + raise SyntaxError("not an MIC file; no image entries") + + self.__fp = self.fp + self.frame = 0 + + if len(self.images) > 1: + self.category = Image.CONTAINER + + self.seek(0) + + @property + def n_frames(self): + return len(self.images) + + @property + def is_animated(self): + return len(self.images) > 1 + + def seek(self, frame): + + try: + filename = self.images[frame] + except IndexError: + raise EOFError("no such frame") + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self): + + return self.frame + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/server/www/packages/packages-darwin/x64/PIL/MpegImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/MpegImagePlugin.py new file mode 100644 index 0000000..6671b86 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/MpegImagePlugin.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile +from PIL._binary import i8 + +__version__ = "0.1" + + +# +# Bitstream parser + +class BitStream(object): + + def __init__(self, fp): + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self): + return i8(self.fp.read(1)) + + def peek(self, bits): + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits): + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits): + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. + +class MpegImageFile(ImageFile.ImageFile): + + format = "MPEG" + format_description = "MPEG" + + def _open(self): + + s = BitStream(self.fp) + + if s.read(32) != 0x1B3: + raise SyntaxError("not an MPEG file") + + self.mode = "RGB" + self.size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile) + +Image.register_extension(MpegImageFile.format, ".mpg") +Image.register_extension(MpegImageFile.format, ".mpeg") + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/server/www/packages/packages-darwin/x64/PIL/MpoImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..1d26021 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/MpoImagePlugin.py @@ -0,0 +1,99 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. 
While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. +# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, JpegImagePlugin + +__version__ = "0.1" + + +def _accept(prefix): + return JpegImagePlugin._accept(prefix) + + +def _save(im, fp, filename): + # Note that we can only save the current frame at present + return JpegImagePlugin._save(im, fp, filename) + + +## +# Image plugin for MPO images. + +class MpoImageFile(JpegImagePlugin.JpegImageFile): + + format = "MPO" + format_description = "MPO (CIPA DC-007)" + + def _open(self): + self.fp.seek(0) # prep the fp in order to pass the JPEG test + JpegImagePlugin.JpegImageFile._open(self) + self.mpinfo = self._getmp() + self.__framecount = self.mpinfo[0xB001] + self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset'] + for mpent in self.mpinfo[0xB002]] + self.__mpoffsets[0] = 0 + # Note that the following assertion will only be invalid if something + # gets broken within JpegImagePlugin. + assert self.__framecount == len(self.__mpoffsets) + del self.info['mpoffset'] # no longer needed + self.__fp = self.fp # FIXME: hack + self.__fp.seek(self.__mpoffsets[0]) # get ready to read first frame + self.__frame = 0 + self.offset = 0 + # for now we can only handle reading and individual frame extraction + self.readonly = 1 + + def load_seek(self, pos): + self.__fp.seek(pos) + + @property + def n_frames(self): + return self.__framecount + + @property + def is_animated(self): + return self.__framecount > 1 + + def seek(self, frame): + if frame < 0 or frame >= self.__framecount: + raise EOFError("no more images in MPO file") + else: + self.fp = self.__fp + self.offset = self.__mpoffsets[frame] + self.tile = [ + ("jpeg", (0, 0) + self.size, self.offset, (self.mode, "")) + ] + self.__frame = frame + + def tell(self): + return self.__frame + + +# -------------------------------------------------------------------q- +# Registry stuff + +# Note that since MPO shares a factory with JPEG, we do not need to do a +# separate registration for it here. +# Image.register_open(MpoImageFile.format, +# JpegImagePlugin.jpeg_factory, _accept) +Image.register_save(MpoImageFile.format, _save) + +Image.register_extension(MpoImageFile.format, ".mpo") + +Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/server/www/packages/packages-darwin/x64/PIL/MspImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/MspImagePlugin.py new file mode 100644 index 0000000..85f8e76 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/MspImagePlugin.py @@ -0,0 +1,104 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MSP file handling +# +# This is the format used by the Paint program in Windows 1 and 2. +# +# History: +# 95-09-05 fl Created +# 97-01-03 fl Read/write MSP images +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + + +# +# read MSP files + +i16 = _binary.i16le + + +def _accept(prefix): + return prefix[:4] in [b"DanM", b"LinS"] + + +## +# Image plugin for Windows MSP images. This plugin supports both +# uncompressed (Windows 1.0). 
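Because `jpeg_factory` hands MPO files to the MpoImageFile defined above, multi-frame access goes through the standard seek/tell frame API. A usage sketch with a hypothetical file name:

```python
from PIL import Image

im = Image.open("stereo_pair.mpo")        # hypothetical file; jpeg_factory may return an MpoImageFile
print(im.format, getattr(im, "n_frames", 1))
for frame in range(getattr(im, "n_frames", 1)):
    im.seek(frame)                        # retargets the tile at that frame's JPEG offset
    im.load()
    print(frame, im.size)
```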
+ +class MspImageFile(ImageFile.ImageFile): + + format = "MSP" + format_description = "Windows Paint" + + def _open(self): + + # Header + s = self.fp.read(32) + if s[:4] not in [b"DanM", b"LinS"]: + raise SyntaxError("not an MSP file") + + # Header checksum + checksum = 0 + for i in range(0, 32, 2): + checksum = checksum ^ i16(s[i:i+2]) + if checksum != 0: + raise SyntaxError("bad MSP checksum") + + self.mode = "1" + self.size = i16(s[4:]), i16(s[6:]) + + if s[:4] == b"DanM": + self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))] + else: + self.tile = [("msp", (0, 0)+self.size, 32+2*self.size[1], None)] + +# +# write MSP files (uncompressed only) + +o16 = _binary.o16le + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as MSP" % im.mode) + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in header: + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))]) + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/server/www/packages/packages-darwin/x64/PIL/OleFileIO-README.md b/server/www/packages/packages-darwin/x64/PIL/OleFileIO-README.md new file mode 100644 index 0000000..0962a5a --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/OleFileIO-README.md @@ -0,0 +1,180 @@ +olefile (formerly OleFileIO_PL) +=============================== + +[olefile](http://www.decalage.info/olefile) is a Python package to parse, read and write +[Microsoft OLE2 files](http://en.wikipedia.org/wiki/Compound_File_Binary_Format) +(also called Structured Storage, Compound File Binary Format or Compound Document File Format), +such as Microsoft Office 97-2003 documents, vbaProject.bin in MS Office 2007+ files, Image Composer +and FlashPix files, Outlook messages, StickyNotes, several Microscopy file formats, McAfee antivirus quarantine files, +etc. + + +**Quick links:** [Home page](http://www.decalage.info/olefile) - +[Download/Install](https://bitbucket.org/decalage/olefileio_pl/wiki/Install) - +[Documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) - +[Report Issues/Suggestions/Questions](https://bitbucket.org/decalage/olefileio_pl/issues?status=new&status=open) - +[Contact the author](http://decalage.info/contact) - +[Repository](https://bitbucket.org/decalage/olefileio_pl) - +[Updates on Twitter](https://twitter.com/decalage2) + + +News +---- + +Follow all updates and news on Twitter: + +- **2015-01-25 v0.42**: improved handling of special characters in stream/storage names on Python 2.x (using UTF-8 + instead of Latin-1), fixed bug in listdir with empty storages. +- 2014-11-25 v0.41: OleFileIO.open and isOleFile now support OLE files stored in byte strings, fixed installer for + python 3, added support for Jython (Niko Ehrenfeuchter) +- 2014-10-01 v0.40: renamed OleFileIO_PL to olefile, added initial write support for streams >4K, updated doc and + license, improved the setup script. +- 2014-07-27 v0.31: fixed support for large files with 4K sectors, thanks to Niko Ehrenfeuchter, Martijn Berger and + Dave Jones. 
Added test scripts from Pillow (by hugovk). Fixed setup for Python 3 (Martin Panter) +- 2014-02-04 v0.30: now compatible with Python 3.x, thanks to Martin Panter who did most of the hard work. +- 2013-07-24 v0.26: added methods to parse stream/storage timestamps, improved listdir to include storages, fixed + parsing of direntry timestamps +- 2013-05-27 v0.25: improved metadata extraction, properties parsing and exception handling, fixed + [issue #12](https://bitbucket.org/decalage/olefileio_pl/issue/12/error-when-converting-timestamps-in-ole) +- 2013-05-07 v0.24: new features to extract metadata (get\_metadata method and OleMetadata class), improved + getproperties to convert timestamps to Python datetime +- 2012-10-09: published [python-oletools](http://www.decalage.info/python/oletools), a package of analysis tools based + on OleFileIO_PL +- 2012-09-11 v0.23: added support for file-like objects, fixed [issue #8](https://bitbucket.org/decalage/olefileio_pl/issue/8/bug-with-file-object) +- 2012-02-17 v0.22: fixed issues #7 (bug in getproperties) and #2 (added close method) +- 2011-10-20: code hosted on bitbucket to ease contributions and bug tracking +- 2010-01-24 v0.21: fixed support for big-endian CPUs, such as PowerPC Macs. +- 2009-12-11 v0.20: small bugfix in OleFileIO.open when filename is not plain str. +- 2009-12-10 v0.19: fixed support for 64 bits platforms (thanks to Ben G. and Martijn for reporting the bug) +- see changelog in source code for more info. + +Download/Install +---------------- + +If you have pip or setuptools installed (pip is included in Python 2.7.9+), you may simply run **pip install olefile** +or **easy_install olefile** for the first installation. + +To update olefile, run **pip install -U olefile**. + +Otherwise, see https://bitbucket.org/decalage/olefileio_pl/wiki/Install + +Features +-------- + +- Parse, read and write any OLE file such as Microsoft Office 97-2003 legacy document formats (Word .doc, Excel .xls, + PowerPoint .ppt, Visio .vsd, Project .mpp), Image Composer and FlashPix files, Outlook messages, StickyNotes, + Zeiss AxioVision ZVI files, Olympus FluoView OIB files, etc +- List all the streams and storages contained in an OLE file +- Open streams as files +- Parse and read property streams, containing metadata of the file +- Portable, pure Python module, no dependency + +olefile can be used as an independent package or with PIL/Pillow. + +olefile is mostly meant for developers. If you are looking for tools to analyze OLE files or to extract data (especially +for security purposes such as malware analysis and forensics), then please also check my +[python-oletools](http://www.decalage.info/python/oletools), which are built upon olefile and provide a higher-level interface. + + +History +------- + +olefile is based on the OleFileIO module from [PIL](http://www.pythonware.com/products/pil/index.htm), the excellent +Python Imaging Library, created and maintained by Fredrik Lundh. The olefile API is still compatible with PIL, but +since 2005 I have improved the internal implementation significantly, with new features, bugfixes and a more robust +design. From 2005 to 2014 the project was called OleFileIO_PL, and in 2014 I changed its name to olefile to celebrate +its 9 years and its new write features. + +As far as I know, olefile is the most complete and robust Python implementation to read MS OLE2 files, portable on +several operating systems. 
(please tell me if you know other similar Python modules) + +Since 2014 olefile/OleFileIO_PL has been integrated into [Pillow](http://python-imaging.github.io/), the friendly fork +of PIL. olefile will continue to be improved as a separate project, and new versions will be merged into Pillow +regularly. + + +Main improvements over the original version of OleFileIO in PIL: +---------------------------------------------------------------- + +- Compatible with Python 3.x and 2.6+ +- Many bug fixes +- Support for files larger than 6.8MB +- Support for 64 bits platforms and big-endian CPUs +- Robust: many checks to detect malformed files +- Runtime option to choose if malformed files should be parsed or raise exceptions +- Improved API +- Metadata extraction, stream/storage timestamps (e.g. for document forensics) +- Can open file-like objects +- Added setup.py and install.bat to ease installation +- More convenient slash-based syntax for stream paths +- Write features + +Documentation +------------- + +Please see the [online documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) for more information, +especially the [OLE overview](https://bitbucket.org/decalage/olefileio_pl/wiki/OLE_Overview) and the +[API page](https://bitbucket.org/decalage/olefileio_pl/wiki/API) which describe how to use olefile in Python applications. +A copy of the same documentation is also provided in the doc subfolder of the olefile package. + + +## Real-life examples ## + +A real-life example: [using OleFileIO_PL for malware analysis and forensics](http://blog.gregback.net/2011/03/using-remnux-for-forensic-puzzle-6/). + +See also [this paper](https://computer-forensics.sans.org/community/papers/gcfa/grow-forensic-tools-taxonomy-python-libraries-helpful-forensic-analysis_6879) about python tools for forensics, which features olefile. + + +License +------- + +olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec +([http://www.decalage.info](http://www.decalage.info)) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +---------- + +olefile is based on source code from the OleFileIO module of the Python Imaging Library (PIL) published by Fredrik +Lundh under the following license: + +The Python Imaging Library (PIL) is + +- Copyright (c) 1997-2005 by Secret Labs AB +- Copyright (c) 1995-2005 by Fredrik Lundh + +By obtaining, using, and/or copying this software and/or its associated documentation, you agree that you have read, +understood, and will comply with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its associated documentation for any purpose and +without fee is hereby granted, provided that the above copyright notice appears in all copies, and that both that +copyright notice and this permission notice appear in supporting documentation, and that the name of Secret Labs AB or +the author not be used in advertising or publicity pertaining to distribution of the software without specific, written +prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. diff --git a/server/www/packages/packages-darwin/x64/PIL/OleFileIO.py b/server/www/packages/packages-darwin/x64/PIL/OleFileIO.py new file mode 100644 index 0000000..4cf106d --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/OleFileIO.py @@ -0,0 +1,2305 @@ +#!/usr/bin/env python + +# olefile (formerly OleFileIO_PL) version 0.42 2015-01-25 +# +# Module to read/write Microsoft OLE2 files (also called Structured Storage or +# Microsoft Compound Document File Format), such as Microsoft Office 97-2003 +# documents, Image Composer and FlashPix files, Outlook messages, ... +# This version is compatible with Python 2.6+ and 3.x +# +# Project website: http://www.decalage.info/olefile +# +# olefile is copyright (c) 2005-2015 Philippe Lagadec (http://www.decalage.info) +# +# olefile is based on the OleFileIO module from the PIL library v1.1.6 +# See: http://www.pythonware.com/products/pil/index.htm +# +# The Python Imaging Library (PIL) is +# Copyright (c) 1997-2005 by Secret Labs AB +# Copyright (c) 1995-2005 by Fredrik Lundh +# +# See source code and LICENSE.txt for information on usage and redistribution. + + +# Since OleFileIO_PL v0.30, only Python 2.6+ and 3.x is supported +# This import enables print() as a function rather than a keyword +# (main requirement to be compatible with Python 3.x) +# The comment on the line below should be printed on Python 2.5 or older: +from __future__ import print_function # This version of olefile requires Python 2.6+ or 3.x. + + +__author__ = "Philippe Lagadec" +__date__ = "2015-01-25" +__version__ = '0.42b' + +#--- LICENSE ------------------------------------------------------------------ + +# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec +# (http://www.decalage.info) +# +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# ---------- +# PIL License: +# +# olefile is based on source code from the OleFileIO module of the Python +# Imaging Library (PIL) published by Fredrik Lundh under the following license: + +# The Python Imaging Library (PIL) is +# Copyright (c) 1997-2005 by Secret Labs AB +# Copyright (c) 1995-2005 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its associated +# documentation, you agree that you have read, understood, and will comply with +# the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and its +# associated documentation for any purpose and without fee is hereby granted, +# provided that the above copyright notice appears in all copies, and that both +# that copyright notice and this permission notice appear in supporting +# documentation, and that the name of Secret Labs AB or the author(s) not be used +# in advertising or publicity pertaining to distribution of the software +# without specific, written prior permission. +# +# SECRET LABS AB AND THE AUTHORS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +# IN NO EVENT SHALL SECRET LABS AB OR THE AUTHORS BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +#----------------------------------------------------------------------------- +# CHANGELOG: (only olefile/OleFileIO_PL changes compared to PIL 1.1.6) +# 2005-05-11 v0.10 PL: - a few fixes for Python 2.4 compatibility +# (all changes flagged with [PL]) +# 2006-02-22 v0.11 PL: - a few fixes for some Office 2003 documents which raise +# exceptions in _OleStream.__init__() +# 2006-06-09 v0.12 PL: - fixes for files above 6.8MB (DIFAT in loadfat) +# - added some constants +# - added header values checks +# - added some docstrings +# - getsect: bugfix in case sectors >512 bytes +# - getsect: added conformity checks +# - DEBUG_MODE constant to activate debug display +# 2007-09-04 v0.13 PL: - improved/translated (lots of) comments +# - updated license +# - converted tabs to 4 spaces +# 2007-11-19 v0.14 PL: - added OleFileIO._raise_defect() to adapt sensitivity +# - improved _unicode() to use Python 2.x unicode support +# - fixed bug in _OleDirectoryEntry +# 2007-11-25 v0.15 PL: - added safety checks to detect FAT loops +# - fixed _OleStream which didn't check stream size +# - added/improved many docstrings and comments +# - moved helper functions _unicode and _clsid out of +# OleFileIO class +# - improved OleFileIO._find() to add Unix path syntax +# - OleFileIO._find() is now case-insensitive +# - added get_type() and get_rootentry_name() +# - rewritten loaddirectory and _OleDirectoryEntry +# 2007-11-27 v0.16 PL: - added _OleDirectoryEntry.kids_dict +# - added detection of duplicate filenames in storages +# - added detection of duplicate references to streams +# - added get_size() and exists() to _OleDirectoryEntry +# - added isOleFile to check header before parsing +# - added __all__ list to control public keywords in pydoc +# 2007-12-04 v0.17 PL: - added _load_direntry to fix a bug in loaddirectory +# - improved _unicode(), added workarounds for Python <2.3 +# - added set_debug_mode and -d option to set debug mode +# - fixed bugs in OleFileIO.open and _OleDirectoryEntry +# - added safety check in main for large or binary +# properties +# - allow size>0 for storages for some implementations +# 2007-12-05 v0.18 PL: - fixed several bugs in handling of FAT, MiniFAT and +# streams +# - added option '-c' in main to check all streams +# 2009-12-10 v0.19 PL: - bugfix for 32 bit arrays on 64 bits platforms +# (thanks to Ben G. 
and Martijn for reporting the bug) +# 2009-12-11 v0.20 PL: - bugfix in OleFileIO.open when filename is not plain str +# 2010-01-22 v0.21 PL: - added support for big-endian CPUs such as PowerPC Macs +# 2012-02-16 v0.22 PL: - fixed bug in getproperties, patch by chuckleberryfinn +# (https://bitbucket.org/decalage/olefileio_pl/issue/7) +# - added close method to OleFileIO (fixed issue #2) +# 2012-07-25 v0.23 PL: - added support for file-like objects (patch by mete0r_kr) +# 2013-05-05 v0.24 PL: - getproperties: added conversion from filetime to python +# datetime +# - main: displays properties with date format +# - new class OleMetadata to parse standard properties +# - added get_metadata method +# 2013-05-07 v0.24 PL: - a few improvements in OleMetadata +# 2013-05-24 v0.25 PL: - getproperties: option to not convert some timestamps +# - OleMetaData: total_edit_time is now a number of seconds, +# not a timestamp +# - getproperties: added support for VT_BOOL, VT_INT, V_UINT +# - getproperties: filter out null chars from strings +# - getproperties: raise non-fatal defects instead of +# exceptions when properties cannot be parsed properly +# 2013-05-27 PL: - getproperties: improved exception handling +# - _raise_defect: added option to set exception type +# - all non-fatal issues are now recorded, and displayed +# when run as a script +# 2013-07-11 v0.26 PL: - added methods to get modification and creation times +# of a directory entry or a storage/stream +# - fixed parsing of direntry timestamps +# 2013-07-24 PL: - new options in listdir to list storages and/or streams +# 2014-02-04 v0.30 PL: - upgraded code to support Python 3.x by Martin Panter +# - several fixes for Python 2.6 (xrange, MAGIC) +# - reused i32 from Pillow's _binary +# 2014-07-18 v0.31 - preliminary support for 4K sectors +# 2014-07-27 v0.31 PL: - a few improvements in OleFileIO.open (header parsing) +# - Fixed loadfat for large files with 4K sectors (issue #3) +# 2014-07-30 v0.32 PL: - added write_sect to write sectors to disk +# - added write_mode option to OleFileIO.__init__ and open +# 2014-07-31 PL: - fixed padding in write_sect for Python 3, added checks +# - added write_stream to write a stream to disk +# 2014-09-26 v0.40 PL: - renamed OleFileIO_PL to olefile +# 2014-11-09 NE: - added support for Jython (Niko Ehrenfeuchter) +# 2014-11-13 v0.41 PL: - improved isOleFile and OleFileIO.open to support OLE +# data in a string buffer and file-like objects. +# 2014-11-21 PL: - updated comments according to Pillow's commits +# 2015-01-24 v0.42 PL: - changed the default path name encoding from Latin-1 +# to UTF-8 on Python 2.x (Unicode on Python 3.x) +# - added path_encoding option to override the default +# - fixed a bug in _list when a storage is empty + +#----------------------------------------------------------------------------- +# TODO (for version 1.0): +# + get rid of print statements, to simplify Python 2.x and 3.x support +# + add is_stream and is_storage +# + remove leading and trailing slashes where a path is used +# + add functions path_list2str and path_str2list +# + fix how all the methods handle unicode str and/or bytes as arguments +# + add path attrib to _OleDirEntry, set it once and for all in init or +# append_kids (then listdir/_list can be simplified) +# - TESTS with Linux, MacOSX, Python 1.5.2, various files, PIL, ... 
+# - add underscore to each private method, to avoid their display in +# pydoc/epydoc documentation - Remove it for classes to be documented +# - replace all raised exceptions with _raise_defect (at least in OleFileIO) +# - merge code from _OleStream and OleFileIO.getsect to read sectors +# (maybe add a class for FAT and MiniFAT ?) +# - add method to check all streams (follow sectors chains without storing all +# stream in memory, and report anomalies) +# - use _OleDirectoryEntry.kids_dict to improve _find and _list ? +# - fix Unicode names handling (find some way to stay compatible with Py1.5.2) +# => if possible avoid converting names to Latin-1 +# - review DIFAT code: fix handling of DIFSECT blocks in FAT (not stop) +# - rewrite OleFileIO.getproperties +# - improve docstrings to show more sample uses +# - see also original notes and FIXME below +# - remove all obsolete FIXMEs +# - OleMetadata: fix version attrib according to +# http://msdn.microsoft.com/en-us/library/dd945671%28v=office.12%29.aspx + +# IDEAS: +# - in OleFileIO._open and _OleStream, use size=None instead of 0x7FFFFFFF for +# streams with unknown size +# - use arrays of int instead of long integers for FAT/MiniFAT, to improve +# performance and reduce memory usage ? (possible issue with values >2^31) +# - provide tests with unittest (may need write support to create samples) +# - move all debug code (and maybe dump methods) to a separate module, with +# a class which inherits OleFileIO ? +# - fix docstrings to follow epydoc format +# - add support for big endian byte order ? +# - create a simple OLE explorer with wxPython + +# FUTURE EVOLUTIONS to add write support: +# see issue #6 on Bitbucket: +# https://bitbucket.org/decalage/olefileio_pl/issue/6/improve-olefileio_pl-to-write-ole-files + +#----------------------------------------------------------------------------- +# NOTES from PIL 1.1.6: + +# History: +# 1997-01-20 fl Created +# 1997-01-22 fl Fixed 64-bit portability quirk +# 2003-09-09 fl Fixed typo in OleFileIO.loadfat (noted by Daniel Haertle) +# 2004-02-29 fl Changed long hex constants to signed integers +# +# Notes: +# FIXME: sort out sign problem (eliminate long hex constants) +# FIXME: change filename to use "a/b/c" instead of ["a", "b", "c"] +# FIXME: provide a glob mechanism function (using fnmatchcase) +# +# Literature: +# +# "FlashPix Format Specification, Appendix A", Kodak and Microsoft, +# September 1996. 
+# +# Quotes: +# +# "If this document and functionality of the Software conflict, +# the actual functionality of the Software represents the correct +# functionality" -- Microsoft, in the OLE format specification + +#------------------------------------------------------------------------------ + + +import io +import sys +import struct +import array +import os.path +import datetime + +#=== COMPATIBILITY WORKAROUNDS ================================================ + +# [PL] Define explicitly the public API to avoid private objects in pydoc: +#TODO: add more +# __all__ = ['OleFileIO', 'isOleFile', 'MAGIC'] + +# For Python 3.x, need to redefine long as int: +if str is not bytes: + long = int + +# Need to make sure we use xrange both on Python 2 and 3.x: +try: + # on Python 2 we need xrange: + iterrange = xrange +except: + # no xrange, for Python 3 it was renamed as range: + iterrange = range + +# [PL] workaround to fix an issue with array item size on 64 bits systems: +if array.array('L').itemsize == 4: + # on 32 bits platforms, long integers in an array are 32 bits: + UINT32 = 'L' +elif array.array('I').itemsize == 4: + # on 64 bits platforms, integers in an array are 32 bits: + UINT32 = 'I' +elif array.array('i').itemsize == 4: + # On 64 bit Jython, signed integers ('i') are the only way to store our 32 + # bit values in an array in a *somewhat* reasonable way, as the otherwise + # perfectly suited 'H' (unsigned int, 32 bits) results in a completely + # unusable behaviour. This is most likely caused by the fact that Java + # doesn't have unsigned values, and thus Jython's "array" implementation, + # which is based on "jarray", doesn't have them either. + # NOTE: to trick Jython into converting the values it would normally + # interpret as "signed" into "unsigned", a binary-and operation with + # 0xFFFFFFFF can be used. This way it is possible to use the same comparing + # operations on all platforms / implementations. The corresponding code + # lines are flagged with a 'JYTHON-WORKAROUND' tag below. + UINT32 = 'i' +else: + raise ValueError('Need to fix a bug with 32 bit arrays, please contact author...') + + +# [PL] These workarounds were inspired from the Path module +# (see http://www.jorendorff.com/articles/python/path/) +try: + basestring +except NameError: + basestring = str + +# [PL] Experimental setting: if True, OLE filenames will be kept in Unicode +# if False (default PIL behaviour), all filenames are converted to Latin-1. +KEEP_UNICODE_NAMES = True + +if sys.version_info[0] < 3: + # On Python 2.x, the default encoding for path names is UTF-8: + DEFAULT_PATH_ENCODING = 'utf-8' +else: + # On Python 3.x, the default encoding for path names is Unicode (None): + DEFAULT_PATH_ENCODING = None + + +#=== DEBUGGING =============================================================== + +#TODO: replace this by proper logging + +# [PL] DEBUG display mode: False by default, use set_debug_mode() or "-d" on +# command line to change it. +DEBUG_MODE = False + + +def debug_print(msg): + print(msg) + + +def debug_pass(msg): + pass + + +debug = debug_pass + + +def set_debug_mode(debug_mode): + """ + Set debug mode on or off, to control display of debugging messages. 
+ :param mode: True or False + """ + global DEBUG_MODE, debug + DEBUG_MODE = debug_mode + if debug_mode: + debug = debug_print + else: + debug = debug_pass + + +#=== CONSTANTS =============================================================== + +# magic bytes that should be at the beginning of every OLE file: +MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' + +# [PL]: added constants for Sector IDs (from AAF specifications) +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector + +# [PL]: added constants for Directory Entry IDs (from AAF specifications) +MAXREGSID = 0xFFFFFFFA # (-6) maximum directory entry ID +NOSTREAM = 0xFFFFFFFF # (-1) unallocated directory entry + +# [PL] object types in storage (from AAF specifications) +STGTY_EMPTY = 0 # empty directory entry (according to OpenOffice.org doc) +STGTY_STORAGE = 1 # element is a storage object +STGTY_STREAM = 2 # element is a stream object +STGTY_LOCKBYTES = 3 # element is an ILockBytes object +STGTY_PROPERTY = 4 # element is an IPropertyStorage object +STGTY_ROOT = 5 # element is a root storage + + +# +# -------------------------------------------------------------------- +# property types + +VT_EMPTY = 0; VT_NULL = 1; VT_I2 = 2; VT_I4 = 3; VT_R4 = 4; VT_R8 = 5; VT_CY = 6; +VT_DATE = 7; VT_BSTR = 8; VT_DISPATCH = 9; VT_ERROR = 10; VT_BOOL = 11; +VT_VARIANT = 12; VT_UNKNOWN = 13; VT_DECIMAL = 14; VT_I1 = 16; VT_UI1 = 17; +VT_UI2 = 18; VT_UI4 = 19; VT_I8 = 20; VT_UI8 = 21; VT_INT = 22; VT_UINT = 23; +VT_VOID = 24; VT_HRESULT = 25; VT_PTR = 26; VT_SAFEARRAY = 27; VT_CARRAY = 28; +VT_USERDEFINED = 29; VT_LPSTR = 30; VT_LPWSTR = 31; VT_FILETIME = 64; +VT_BLOB = 65; VT_STREAM = 66; VT_STORAGE = 67; VT_STREAMED_OBJECT = 68; +VT_STORED_OBJECT = 69; VT_BLOB_OBJECT = 70; VT_CF = 71; VT_CLSID = 72; +VT_VECTOR = 0x1000; + +# map property id to name (for debugging purposes) + +VT = {} +for keyword, var in list(vars().items()): + if keyword[:3] == "VT_": + VT[var] = keyword + +# +# -------------------------------------------------------------------- +# Some common document types (root.clsid fields) + +WORD_CLSID = "00020900-0000-0000-C000-000000000046" +#TODO: check Excel, PPT, ... + +# [PL]: Defect levels to classify parsing errors - see OleFileIO._raise_defect() +DEFECT_UNSURE = 10 # a case which looks weird, but not sure it's a defect +DEFECT_POTENTIAL = 20 # a potential defect +DEFECT_INCORRECT = 30 # an error according to specifications, but parsing + # can go on +DEFECT_FATAL = 40 # an error which cannot be ignored, parsing is + # impossible + +# Minimal size of an empty OLE file, with 512-bytes sectors = 1536 bytes +# (this is used in isOleFile and OleFile.open) +MINIMAL_OLEFILE_SIZE = 1536 + +# [PL] add useful constants to __all__: +# for key in list(vars().keys()): +# if key.startswith('STGTY_') or key.startswith('DEFECT_'): +# __all__.append(key) + + +#=== FUNCTIONS =============================================================== + +def isOleFile(filename): + """ + Test if a file is an OLE container (according to the magic bytes in its header). + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. 
(bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read and seek methods), + it is parsed as-is. + + :returns: True if OLE, False otherwise. + """ + # check if filename is a string-like or file-like object: + if hasattr(filename, 'read'): + # file-like object: use it directly + header = filename.read(len(MAGIC)) + # just in case, seek back to start of file: + filename.seek(0) + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + header = filename[:len(MAGIC)] + else: + # string-like object: filename of file on disk + header = open(filename, 'rb').read(len(MAGIC)) + if header == MAGIC: + return True + else: + return False + + +if bytes is str: + # version for Python 2.x + def i8(c): + return ord(c) +else: + # version for Python 3.x + def i8(c): + return c if c.__class__ is int else c[0] + + +#TODO: replace i16 and i32 with more readable struct.unpack equivalent? + +def i16(c, o = 0): + """ + Converts a 2-bytes (16 bits) string to an integer. + + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return struct.unpack(" len(fat): + raise IOError('malformed OLE document, stream too large') + # optimization(?): data is first a list of strings, and join() is called + # at the end to concatenate all in one string. + # (this may not be really useful with recent Python versions) + data = [] + # if size is zero, then first sector index should be ENDOFCHAIN: + if size == 0 and sect != ENDOFCHAIN: + debug('size == 0 and sect != ENDOFCHAIN:') + raise IOError('incorrect OLE sector index for empty stream') + # [PL] A fixed-length for loop is used instead of an undefined while + # loop to avoid DoS attacks: + for i in range(nb_sectors): + # Sector index may be ENDOFCHAIN, but only if size was unknown + if sect == ENDOFCHAIN: + if unknown_size: + break + else: + # else this means that the stream is smaller than declared: + debug('sect=ENDOFCHAIN before expected size') + raise IOError('incomplete OLE stream') + # sector index should be within FAT: + if sect < 0 or sect >= len(fat): + debug('sect=%d (%X) / len(fat)=%d' % (sect, sect, len(fat))) + debug('i=%d / nb_sectors=%d' % (i, nb_sectors)) +## tmp_data = b"".join(data) +## f = open('test_debug.bin', 'wb') +## f.write(tmp_data) +## f.close() +## debug('data read so far: %d bytes' % len(tmp_data)) + raise IOError('incorrect OLE FAT, sector index out of range') + #TODO: merge this code with OleFileIO.getsect() ? + #TODO: check if this works with 4K sectors: + try: + fp.seek(offset + sectorsize * sect) + except: + debug('sect=%d, seek=%d, filesize=%d' % + (sect, offset+sectorsize*sect, filesize)) + raise IOError('OLE sector index out of range') + sector_data = fp.read(sectorsize) + # [PL] check if there was enough data: + # Note: if sector is the last of the file, sometimes it is not a + # complete sector (of 512 or 4K), so we may read less than + # sectorsize. 
+ if len(sector_data) != sectorsize and sect != (len(fat)-1): + debug('sect=%d / len(fat)=%d, seek=%d / filesize=%d, len read=%d' % + (sect, len(fat), offset+sectorsize*sect, filesize, len(sector_data))) + debug('seek+len(read)=%d' % (offset+sectorsize*sect+len(sector_data))) + raise IOError('incomplete OLE sector') + data.append(sector_data) + # jump to next sector in the FAT: + try: + sect = fat[sect] & 0xFFFFFFFF # JYTHON-WORKAROUND + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') + data = b"".join(data) + # Data is truncated to the actual stream size: + if len(data) >= size: + data = data[:size] + # actual stream size is stored for future use: + self.size = size + elif unknown_size: + # actual stream size was not known, now we know the size of read + # data: + self.size = len(data) + else: + # read data is less than expected: + debug('len(data)=%d, size=%d' % (len(data), size)) + raise IOError('OLE stream size is less than declared') + # when all data is read in memory, BytesIO constructor is called + io.BytesIO.__init__(self, data) + # Then the _OleStream object can be used as a read-only file object. + + +#--- _OleDirectoryEntry ------------------------------------------------------- + +class _OleDirectoryEntry(object): + + """ + OLE2 Directory Entry + """ + # [PL] parsing code moved from OleFileIO.loaddirectory + + # struct to parse directory entries: + # <: little-endian byte order, standard sizes + # (note: this should guarantee that Q returns a 64 bits int) + # 64s: string containing entry name in unicode (max 31 chars) + null char + # H: uint16, number of bytes used in name buffer, including null = (len+1)*2 + # B: uint8, dir entry type (between 0 and 5) + # B: uint8, color: 0=black, 1=red + # I: uint32, index of left child node in the red-black tree, NOSTREAM if none + # I: uint32, index of right child node in the red-black tree, NOSTREAM if none + # I: uint32, index of child root node if it is a storage, else NOSTREAM + # 16s: CLSID, unique identifier (only used if it is a storage) + # I: uint32, user flags + # Q (was 8s): uint64, creation timestamp or zero + # Q (was 8s): uint64, modification timestamp or zero + # I: uint32, SID of first sector if stream or ministream, SID of 1st sector + # of stream containing ministreams if root entry, 0 otherwise + # I: uint32, total stream size in bytes if stream (low 32 bits), 0 otherwise + # I: uint32, total stream size in bytes if stream (high 32 bits), 0 otherwise + STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' + # size of a directory entry: 128 bytes + DIRENTRY_SIZE = 128 + assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE + + def __init__(self, entry, sid, olefile): + """ + Constructor for an _OleDirectoryEntry object. + Parses a 128-bytes entry from the OLE Directory stream. 
+ + :param entry : string (must be 128 bytes long) + :param sid : index of this directory entry in the OLE file directory + :param olefile: OleFileIO containing this directory entry + """ + self.sid = sid + # ref to olefile is stored for future use + self.olefile = olefile + # kids is a list of children entries, if this entry is a storage: + # (list of _OleDirectoryEntry objects) + self.kids = [] + # kids_dict is a dictionary of children entries, indexed by their + # name in lowercase: used to quickly find an entry, and to detect + # duplicates + self.kids_dict = {} + # flag used to detect if the entry is referenced more than once in + # directory: + self.used = False + # decode DirEntry + ( + name, + namelength, + self.entry_type, + self.color, + self.sid_left, + self.sid_right, + self.sid_child, + clsid, + self.dwUserFlags, + self.createTime, + self.modifyTime, + self.isectStart, + sizeLow, + sizeHigh + ) = struct.unpack(_OleDirectoryEntry.STRUCT_DIRENTRY, entry) + if self.entry_type not in [STGTY_ROOT, STGTY_STORAGE, STGTY_STREAM, STGTY_EMPTY]: + olefile.raise_defect(DEFECT_INCORRECT, 'unhandled OLE storage type') + # only first directory entry can (and should) be root: + if self.entry_type == STGTY_ROOT and sid != 0: + olefile.raise_defect(DEFECT_INCORRECT, 'duplicate OLE root entry') + if sid == 0 and self.entry_type != STGTY_ROOT: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect OLE root entry') + #debug (struct.unpack(fmt_entry, entry[:len_entry])) + # name should be at most 31 unicode characters + null character, + # so 64 bytes in total (31*2 + 2): + if namelength > 64: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect DirEntry name length') + # if exception not raised, namelength is set to the maximum value: + namelength = 64 + # only characters without ending null char are kept: + name = name[:(namelength-2)] + #TODO: check if the name is actually followed by a null unicode character ([MS-CFB] 2.6.1) + #TODO: check if the name does not contain forbidden characters: + # [MS-CFB] 2.6.1: "The following characters are illegal and MUST NOT be part of the name: '/', '\', ':', '!'." 
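+        # A possible implementation of that check (sketch only, not enabled;
+        # it would have to run on the decoded name computed just below):
+        #     if any(c in self.name for c in ('/', '\\', ':', '!')):
+        #         olefile.raise_defect(DEFECT_INCORRECT, 'illegal character in DirEntry name')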
+ # name is converted from UTF-16LE to the path encoding specified in the OleFileIO: + self.name = olefile._decode_utf16_str(name) + + debug('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) + debug(' - type: %d' % self.entry_type) + debug(' - sect: %d' % self.isectStart) + debug(' - SID left: %d, right: %d, child: %d' % (self.sid_left, + self.sid_right, self.sid_child)) + + # sizeHigh is only used for 4K sectors, it should be zero for 512 bytes + # sectors, BUT apparently some implementations set it as 0xFFFFFFFF, 1 + # or some other value so it cannot be raised as a defect in general: + if olefile.sectorsize == 512: + if sizeHigh != 0 and sizeHigh != 0xFFFFFFFF: + debug('sectorsize=%d, sizeLow=%d, sizeHigh=%d (%X)' % + (olefile.sectorsize, sizeLow, sizeHigh, sizeHigh)) + olefile.raise_defect(DEFECT_UNSURE, 'incorrect OLE stream size') + self.size = sizeLow + else: + self.size = sizeLow + (long(sizeHigh) << 32) + debug(' - size: %d (sizeLow=%d, sizeHigh=%d)' % (self.size, sizeLow, sizeHigh)) + + self.clsid = _clsid(clsid) + # a storage should have a null size, BUT some implementations such as + # Word 8 for Mac seem to allow non-null values => Potential defect: + if self.entry_type == STGTY_STORAGE and self.size != 0: + olefile.raise_defect(DEFECT_POTENTIAL, 'OLE storage with size>0') + # check if stream is not already referenced elsewhere: + if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: + if self.size < olefile.minisectorcutoff \ + and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT + # ministream object + minifat = True + else: + minifat = False + olefile._check_duplicate_stream(self.isectStart, minifat) + + def build_storage_tree(self): + """ + Read and build the red-black tree attached to this _OleDirectoryEntry + object, if it is a storage. + Note that this method builds a tree of all subentries, so it should + only be called for the root object once. + """ + debug('build_storage_tree: SID=%d - %s - sid_child=%d' + % (self.sid, repr(self.name), self.sid_child)) + if self.sid_child != NOSTREAM: + # if child SID is not NOSTREAM, then this entry is a storage. + # Let's walk through the tree of children to fill the kids list: + self.append_kids(self.sid_child) + + # Note from OpenOffice documentation: the safest way is to + # recreate the tree because some implementations may store broken + # red-black trees... + + # in the OLE file, entries are sorted on (length, name). + # for convenience, we sort them on name instead: + # (see rich comparison methods in this class) + self.kids.sort() + + def append_kids(self, child_sid): + """ + Walk through red-black tree of children of this directory entry to add + all of them to the kids list. (recursive method) + + :param child_sid : index of child directory entry to use, or None when called + first time for the root. (only used during recursion) + """ + # [PL] this method was added to use simple recursion instead of a complex + # algorithm. 
+ # if this is not a storage or a leaf of the tree, nothing to do: + if child_sid == NOSTREAM: + return + # check if child SID is in the proper range: + if child_sid < 0 or child_sid >= len(self.olefile.direntries): + self.olefile.raise_defect(DEFECT_FATAL, 'OLE DirEntry index out of range') + # get child direntry: + child = self.olefile._load_direntry(child_sid) #direntries[child_sid] + debug('append_kids: child_sid=%d - %s - sid_left=%d, sid_right=%d, sid_child=%d' + % (child.sid, repr(child.name), child.sid_left, child.sid_right, child.sid_child)) + # the directory entries are organized as a red-black tree. + # (cf. Wikipedia for details) + # First walk through left side of the tree: + self.append_kids(child.sid_left) + # Check if its name is not already used (case-insensitive): + name_lower = child.name.lower() + if name_lower in self.kids_dict: + self.olefile.raise_defect(DEFECT_INCORRECT, + "Duplicate filename in OLE storage") + # Then the child_sid _OleDirectoryEntry object is appended to the + # kids list and dictionary: + self.kids.append(child) + self.kids_dict[name_lower] = child + # Check if kid was not already referenced in a storage: + if child.used: + self.olefile.raise_defect(DEFECT_INCORRECT, + 'OLE Entry referenced more than once') + child.used = True + # Finally walk through right side of the tree: + self.append_kids(child.sid_right) + # Afterwards build kid's own tree if it's also a storage: + child.build_storage_tree() + + def __eq__(self, other): + "Compare entries by name" + return self.name == other.name + + def __lt__(self, other): + "Compare entries by name" + return self.name < other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __le__(self, other): + return self.__eq__(other) or self.__lt__(other) + + # Reflected __lt__() and __le__() will be used for __gt__() and __ge__() + + #TODO: replace by the same function as MS implementation ? + # (order by name length first, then case-insensitive order) + + def dump(self, tab = 0): + "Dump this entry, and all its subentries (for debug purposes only)" + TYPES = ["(invalid)", "(storage)", "(stream)", "(lockbytes)", + "(property)", "(root)"] + print(" "*tab + repr(self.name), TYPES[self.entry_type], end=' ') + if self.entry_type in (STGTY_STREAM, STGTY_ROOT): + print(self.size, "bytes", end=' ') + print() + if self.entry_type in (STGTY_STORAGE, STGTY_ROOT) and self.clsid: + print(" "*tab + "{%s}" % self.clsid) + + for kid in self.kids: + kid.dump(tab + 2) + + def getmtime(self): + """ + Return modification time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.modifyTime == 0: + return None + return filetime2datetime(self.modifyTime) + + def getctime(self): + """ + Return creation time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.createTime == 0: + return None + return filetime2datetime(self.createTime) + + +#--- OleFileIO ---------------------------------------------------------------- + +class OleFileIO(object): + """ + OLE container object + + This class encapsulates the interface to an OLE 2 structured + storage file. Use the :py:meth:`~PIL.OleFileIO.OleFileIO.listdir` and + :py:meth:`~PIL.OleFileIO.OleFileIO.openstream` methods to + access the contents of this file. + + Object names are given as a list of strings, one for each subentry + level. 
The root entry should be omitted. For example, the following + code extracts all image streams from a Microsoft Image Composer file:: + + ole = OleFileIO("fan.mic") + + for entry in ole.listdir(): + if entry[1:2] == "Image": + fin = ole.openstream(entry) + fout = open(entry[0:1], "wb") + while True: + s = fin.read(8192) + if not s: + break + fout.write(s) + + You can use the viewer application provided with the Python Imaging + Library to view the resulting files (which happens to be standard + TIFF files). + """ + + def __init__(self, filename=None, raise_defects=DEFECT_FATAL, + write_mode=False, debug=False, path_encoding=DEFAULT_PATH_ENCODING): + """ + Constructor for the OleFileIO class. + + :param filename: file to open. + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param raise_defects: minimal level for defects to be raised as exceptions. + (use DEFECT_FATAL for a typical application, DEFECT_INCORRECT for a + security-oriented application, see source code for details) + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. + + :param debug: bool, set debug mode + + :param path_encoding: None or str, name of the codec to use for path + names (streams and storages), or None for Unicode. + Unicode by default on Python 3+, UTF-8 on Python 2.x. + (new in olefile 0.42, was hardcoded to Latin-1 until olefile v0.41) + """ + set_debug_mode(debug) + # minimal level for defects to be raised as exceptions: + self._raise_defects_level = raise_defects + # list of defects/issues not raised as exceptions: + # tuples of (exception type, message) + self.parsing_issues = [] + self.write_mode = write_mode + self.path_encoding = path_encoding + self._filesize = None + self.fp = None + if filename: + self.open(filename, write_mode=write_mode) + + def raise_defect(self, defect_level, message, exception_type=IOError): + """ + This method should be called for any defect found during file parsing. + It may raise an IOError exception according to the minimal level chosen + for the OleFileIO object. + + :param defect_level: defect level, possible values are: + + - DEFECT_UNSURE : a case which looks weird, but not sure it's a defect + - DEFECT_POTENTIAL : a potential defect + - DEFECT_INCORRECT : an error according to specifications, but parsing can go on + - DEFECT_FATAL : an error which cannot be ignored, parsing is impossible + + :param message: string describing the defect, used with raised exception. + :param exception_type: exception class to be raised, IOError by default + """ + # added by [PL] + if defect_level >= self._raise_defects_level: + raise exception_type(message) + else: + # just record the issue, no exception raised: + self.parsing_issues.append((exception_type, message)) + + def _decode_utf16_str(self, utf16_str, errors='replace'): + """ + Decode a string encoded in UTF-16 LE format, as found in the OLE + directory or in property streams. Return a string encoded + according to the path_encoding specified for the OleFileIO object. 
+ + :param utf16_str: bytes string encoded in UTF-16 LE format + :param errors: str, see python documentation for str.decode() + :return: str, encoded according to path_encoding + """ + unicode_str = utf16_str.decode('UTF-16LE', errors) + if self.path_encoding: + # an encoding has been specified for path names: + return unicode_str.encode(self.path_encoding, errors) + else: + # path_encoding=None, return the Unicode string as-is: + return unicode_str + + def open(self, filename, write_mode=False): + """ + Open an OLE2 file in read-only or read/write mode. + Read and parse the header, FAT and directory. + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. (ignored if filename is not a path) + """ + self.write_mode = write_mode + # [PL] check if filename is a string-like or file-like object: + # (it is better to check for a read() method) + if hasattr(filename, 'read'): + #TODO: also check seek and tell methods? + # file-like object: use it directly + self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + # convert it to BytesIO + self.fp = io.BytesIO(filename) + else: + # string-like object: filename of file on disk + if self.write_mode: + # open file in mode 'read with update, binary' + # According to https://docs.python.org/2/library/functions.html#open + # 'w' would truncate the file, 'a' may only append on some Unixes + mode = 'r+b' + else: + # read-only mode by default + mode = 'rb' + self.fp = open(filename, mode) + # obtain the filesize by using seek and tell, which should work on most + # file-like objects: + #TODO: do it above, using getsize with filename when possible? 
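+        # The TODO above could be addressed roughly like this (sketch only,
+        # valid in the branch where 'filename' is a path on disk, not bytes
+        # or a file-like object):
+        #     filesize = os.path.getsize(filename)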
+ #TODO: fix code to fail with clear exception when filesize cannot be obtained + filesize = 0 + self.fp.seek(0, os.SEEK_END) + try: + filesize = self.fp.tell() + finally: + self.fp.seek(0) + self._filesize = filesize + + # lists of streams in FAT and MiniFAT, to detect duplicate references + # (list of indexes of first sectors of each stream) + self._used_streams_fat = [] + self._used_streams_minifat = [] + + header = self.fp.read(512) + + if len(header) != 512 or header[:8] != MAGIC: + self.raise_defect(DEFECT_FATAL, "not an OLE2 structured storage file") + + # [PL] header structure according to AAF specifications: + ##Header + ##struct StructuredStorageHeader { // [offset from start (bytes), length (bytes)] + ##BYTE _abSig[8]; // [00H,08] {0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, + ## // 0x1a, 0xe1} for current version + ##CLSID _clsid; // [08H,16] reserved must be zero (WriteClassStg/ + ## // GetClassFile uses root directory class id) + ##USHORT _uMinorVersion; // [18H,02] minor version of the format: 33 is + ## // written by reference implementation + ##USHORT _uDllVersion; // [1AH,02] major version of the dll/format: 3 for + ## // 512-byte sectors, 4 for 4 KB sectors + ##USHORT _uByteOrder; // [1CH,02] 0xFFFE: indicates Intel byte-ordering + ##USHORT _uSectorShift; // [1EH,02] size of sectors in power-of-two; + ## // typically 9 indicating 512-byte sectors + ##USHORT _uMiniSectorShift; // [20H,02] size of mini-sectors in power-of-two; + ## // typically 6 indicating 64-byte mini-sectors + ##USHORT _usReserved; // [22H,02] reserved, must be zero + ##ULONG _ulReserved1; // [24H,04] reserved, must be zero + ##FSINDEX _csectDir; // [28H,04] must be zero for 512-byte sectors, + ## // number of SECTs in directory chain for 4 KB + ## // sectors + ##FSINDEX _csectFat; // [2CH,04] number of SECTs in the FAT chain + ##SECT _sectDirStart; // [30H,04] first SECT in the directory chain + ##DFSIGNATURE _signature; // [34H,04] signature used for transactions; must + ## // be zero. The reference implementation + ## // does not support transactions + ##ULONG _ulMiniSectorCutoff; // [38H,04] maximum size for a mini stream; + ## // typically 4096 bytes + ##SECT _sectMiniFatStart; // [3CH,04] first SECT in the MiniFAT chain + ##FSINDEX _csectMiniFat; // [40H,04] number of SECTs in the MiniFAT chain + ##SECT _sectDifStart; // [44H,04] first SECT in the DIFAT chain + ##FSINDEX _csectDif; // [48H,04] number of SECTs in the DIFAT chain + ##SECT _sectFat[109]; // [4CH,436] the SECTs of first 109 FAT sectors + ##}; + + # [PL] header decoding: + # '<' indicates little-endian byte ordering for Intel (cf. 
struct module help) + fmt_header = '<8s16sHHHHHHLLLLLLLLLL' + header_size = struct.calcsize(fmt_header) + debug("fmt_header size = %d, +FAT = %d" % (header_size, header_size + 109*4)) + header1 = header[:header_size] + ( + self.Sig, + self.clsid, + self.MinorVersion, + self.DllVersion, + self.ByteOrder, + self.SectorShift, + self.MiniSectorShift, + self.Reserved, self.Reserved1, + self.csectDir, + self.csectFat, + self.sectDirStart, + self.signature, + self.MiniSectorCutoff, + self.MiniFatStart, + self.csectMiniFat, + self.sectDifStart, + self.csectDif + ) = struct.unpack(fmt_header, header1) + debug(struct.unpack(fmt_header, header1)) + + if self.Sig != MAGIC: + # OLE signature should always be present + self.raise_defect(DEFECT_FATAL, "incorrect OLE signature") + if self.clsid != bytearray(16): + # according to AAF specs, CLSID should always be zero + self.raise_defect(DEFECT_INCORRECT, "incorrect CLSID in OLE header") + debug("MinorVersion = %d" % self.MinorVersion) + debug("DllVersion = %d" % self.DllVersion) + if self.DllVersion not in [3, 4]: + # version 3: usual format, 512 bytes per sector + # version 4: large format, 4K per sector + self.raise_defect(DEFECT_INCORRECT, "incorrect DllVersion in OLE header") + debug("ByteOrder = %X" % self.ByteOrder) + if self.ByteOrder != 0xFFFE: + # For now only common little-endian documents are handled correctly + self.raise_defect(DEFECT_FATAL, "incorrect ByteOrder in OLE header") + # TODO: add big-endian support for documents created on Mac ? + # But according to [MS-CFB] ? v20140502, ByteOrder MUST be 0xFFFE. + self.SectorSize = 2**self.SectorShift + debug("SectorSize = %d" % self.SectorSize) + if self.SectorSize not in [512, 4096]: + self.raise_defect(DEFECT_INCORRECT, "incorrect SectorSize in OLE header") + if (self.DllVersion == 3 and self.SectorSize != 512) \ + or (self.DllVersion == 4 and self.SectorSize != 4096): + self.raise_defect(DEFECT_INCORRECT, "SectorSize does not match DllVersion in OLE header") + self.MiniSectorSize = 2**self.MiniSectorShift + debug("MiniSectorSize = %d" % self.MiniSectorSize) + if self.MiniSectorSize not in [64]: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorSize in OLE header") + if self.Reserved != 0 or self.Reserved1 != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect OLE header (non-null reserved bytes)") + debug("csectDir = %d" % self.csectDir) + # Number of directory sectors (only allowed if DllVersion != 3) + if self.SectorSize == 512 and self.csectDir != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect csectDir in OLE header") + debug("csectFat = %d" % self.csectFat) + # csectFat = number of FAT sectors in the file + debug("sectDirStart = %X" % self.sectDirStart) + # sectDirStart = 1st sector containing the directory + debug("signature = %d" % self.signature) + # Signature should be zero, BUT some implementations do not follow this + # rule => only a potential defect: + # (according to MS-CFB, may be != 0 for applications supporting file + # transactions) + if self.signature != 0: + self.raise_defect(DEFECT_POTENTIAL, "incorrect OLE header (signature>0)") + debug("MiniSectorCutoff = %d" % self.MiniSectorCutoff) + # MS-CFB: This integer field MUST be set to 0x00001000. This field + # specifies the maximum size of a user-defined data stream allocated + # from the mini FAT and mini stream, and that cutoff is 4096 bytes. + # Any user-defined data stream larger than or equal to this cutoff size + # must be allocated as normal sectors from the FAT. 
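+        # Concretely, with the usual 0x1000 (4096 bytes) cutoff, a 4095-byte
+        # stream lives in the mini stream, while a 4096-byte stream is stored
+        # in regular FAT sectors (see the size test in _open below).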
+ if self.MiniSectorCutoff != 0x1000: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorCutoff in OLE header") + debug("MiniFatStart = %X" % self.MiniFatStart) + debug("csectMiniFat = %d" % self.csectMiniFat) + debug("sectDifStart = %X" % self.sectDifStart) + debug("csectDif = %d" % self.csectDif) + + # calculate the number of sectors in the file + # (-1 because header doesn't count) + self.nb_sect = ((filesize + self.SectorSize-1) // self.SectorSize) - 1 + debug("Number of sectors in the file: %d" % self.nb_sect) + #TODO: change this test, because an OLE file MAY contain other data + # after the last sector. + + # file clsid + self.clsid = _clsid(header[8:24]) + + #TODO: remove redundant attributes, and fix the code which uses them? + self.sectorsize = self.SectorSize #1 << i16(header, 30) + self.minisectorsize = self.MiniSectorSize #1 << i16(header, 32) + self.minisectorcutoff = self.MiniSectorCutoff # i32(header, 56) + + # check known streams for duplicate references (these are always in FAT, + # never in MiniFAT): + self._check_duplicate_stream(self.sectDirStart) + # check MiniFAT only if it is not empty: + if self.csectMiniFat: + self._check_duplicate_stream(self.MiniFatStart) + # check DIFAT only if it is not empty: + if self.csectDif: + self._check_duplicate_stream(self.sectDifStart) + + # Load file allocation tables + self.loadfat(header) + # Load directory. This sets both the direntries list (ordered by sid) + # and the root (ordered by hierarchy) members. + self.loaddirectory(self.sectDirStart)#i32(header, 48)) + self.ministream = None + self.minifatsect = self.MiniFatStart #i32(header, 60) + + def close(self): + """ + close the OLE file, to release the file object + """ + self.fp.close() + + def _check_duplicate_stream(self, first_sect, minifat=False): + """ + Checks if a stream has not been already referenced elsewhere. + This method should only be called once for each known stream, and only + if stream size is not null. + + :param first_sect: int, index of first sector of the stream in FAT + :param minifat: bool, if True, stream is located in the MiniFAT, else in the FAT + """ + if minifat: + debug('_check_duplicate_stream: sect=%d in MiniFAT' % first_sect) + used_streams = self._used_streams_minifat + else: + debug('_check_duplicate_stream: sect=%d in FAT' % first_sect) + # some values can be safely ignored (not a real stream): + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): + return + used_streams = self._used_streams_fat + #TODO: would it be more efficient using a dict or hash values, instead + # of a list of long ? + if first_sect in used_streams: + self.raise_defect(DEFECT_INCORRECT, 'Stream referenced twice') + else: + used_streams.append(first_sect) + + def dumpfat(self, fat, firstindex=0): + "Displays a part of FAT in human-readable form for debugging purpose" + # [PL] added only for debug + if not DEBUG_MODE: + return + # dictionary to convert special FAT values in human-readable strings + VPL = 8 # values per line (8+1 * 8+1 = 81) + fatnames = { + FREESECT: "..free..", + ENDOFCHAIN: "[ END. 
]", + FATSECT: "FATSECT ", + DIFSECT: "DIFSECT " + } + nbsect = len(fat) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = fat[i] + aux = sect & 0xFFFFFFFF # JYTHON-WORKAROUND + if aux in fatnames: + name = fatnames[aux] + else: + if sect == i+1: + name = " --->" + else: + name = "%8X" % sect + print(name, end=" ") + print() + + def dumpsect(self, sector, firstindex=0): + "Displays a sector in a human-readable form, for debugging purpose." + if not DEBUG_MODE: + return + VPL = 8 # number of values per line (8+1 * 8+1 = 81) + tab = array.array(UINT32, sector) + if sys.byteorder == 'big': + tab.byteswap() + nbsect = len(tab) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = tab[i] + name = "%8X" % sect + print(name, end=" ") + print() + + def sect2array(self, sect): + """ + convert a sector to an array of 32 bits unsigned integers, + swapping bytes on big endian CPUs such as PowerPC (old Macs) + """ + a = array.array(UINT32, sect) + # if CPU is big endian, swap bytes: + if sys.byteorder == 'big': + a.byteswap() + return a + + def loadfat_sect(self, sect): + """ + Adds the indexes of the given sector to the FAT + + :param sect: string containing the first FAT sector, or array of long integers + :returns: index of last FAT sector. + """ + # a FAT sector is an array of ulong integers. + if isinstance(sect, array.array): + # if sect is already an array it is directly used + fat1 = sect + else: + # if it's a raw sector, it is parsed in an array + fat1 = self.sect2array(sect) + self.dumpsect(sect) + # The FAT is a sector chain starting at the first index of itself. + for isect in fat1: + isect = isect & 0xFFFFFFFF # JYTHON-WORKAROUND + debug("isect = %X" % isect) + if isect == ENDOFCHAIN or isect == FREESECT: + # the end of the sector chain has been reached + debug("found end of sector chain") + break + # read the FAT sector + s = self.getsect(isect) + # parse it as an array of 32 bits integers, and add it to the + # global FAT array + nextfat = self.sect2array(s) + self.fat = self.fat + nextfat + return isect + + def loadfat(self, header): + """ + Load the FAT table. + """ + # The 1st sector of the file contains sector numbers for the first 109 + # FAT sectors, right after the header which is 76 bytes long. + # (always 109, whatever the sector size: 512 bytes = 76+4*109) + # Additional sectors are described by DIF blocks + + sect = header[76:512] + debug("len(sect)=%d, so %d integers" % (len(sect), len(sect)//4)) + #fat = [] + # [PL] FAT is an array of 32 bits unsigned ints, it's more effective + # to use an array than a list in Python. 
+ # It's initialized as empty first: + self.fat = array.array(UINT32) + self.loadfat_sect(sect) + #self.dumpfat(self.fat) +## for i in range(0, len(sect), 4): +## ix = i32(sect, i) +## # [PL] if ix == -2 or ix == -1: # ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## if ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## break +## s = self.getsect(ix) +## #fat = fat + [i32(s, i) for i in range(0, len(s), 4)] +## fat = fat + array.array(UINT32, s) + if self.csectDif != 0: + # [PL] There's a DIFAT because file is larger than 6.8MB + # some checks just in case: + if self.csectFat <= 109: + # there must be at least 109 blocks in header and the rest in + # DIFAT, so number of sectors must be >109. + self.raise_defect(DEFECT_INCORRECT, 'incorrect DIFAT, not enough sectors') + if self.sectDifStart >= self.nb_sect: + # initial DIFAT block index must be valid + self.raise_defect(DEFECT_FATAL, 'incorrect DIFAT, first index out of range') + debug("DIFAT analysis...") + # We compute the necessary number of DIFAT sectors : + # Number of pointers per DIFAT sector = (sectorsize/4)-1 + # (-1 because the last pointer is the next DIFAT sector number) + nb_difat_sectors = (self.sectorsize//4)-1 + # (if 512 bytes: each DIFAT sector = 127 pointers + 1 towards next DIFAT sector) + nb_difat = (self.csectFat-109 + nb_difat_sectors-1)//nb_difat_sectors + debug("nb_difat = %d" % nb_difat) + if self.csectDif != nb_difat: + raise IOError('incorrect DIFAT') + isect_difat = self.sectDifStart + for i in iterrange(nb_difat): + debug("DIFAT block %d, sector %X" % (i, isect_difat)) + #TODO: check if corresponding FAT SID = DIFSECT + sector_difat = self.getsect(isect_difat) + difat = self.sect2array(sector_difat) + self.dumpsect(sector_difat) + self.loadfat_sect(difat[:nb_difat_sectors]) + # last DIFAT pointer is next DIFAT sector: + isect_difat = difat[nb_difat_sectors] + debug("next DIFAT sector: %X" % isect_difat) + # checks: + if isect_difat not in [ENDOFCHAIN, FREESECT]: + # last DIFAT pointer value must be ENDOFCHAIN or FREESECT + raise IOError('incorrect end of DIFAT') +## if len(self.fat) != self.csectFat: +## # FAT should contain csectFat blocks +## print("FAT length: %d instead of %d" % (len(self.fat), self.csectFat)) +## raise IOError('incorrect DIFAT') + # since FAT is read from fixed-size sectors, it may contain more values + # than the actual number of sectors in the file. + # Keep only the relevant sector indexes: + if len(self.fat) > self.nb_sect: + debug('len(fat)=%d, shrunk to nb_sect=%d' % (len(self.fat), self.nb_sect)) + self.fat = self.fat[:self.nb_sect] + debug('\nFAT:') + self.dumpfat(self.fat) + + def loadminifat(self): + """ + Load the MiniFAT table. + """ + # MiniFAT is stored in a standard sub-stream, pointed to by a header + # field. + # NOTE: there are two sizes to take into account for this stream: + # 1) Stream size is calculated according to the number of sectors + # declared in the OLE header. This allocated stream may be more than + # needed to store the actual sector indexes. 
+ # (self.csectMiniFat is the number of sectors of size self.SectorSize) + stream_size = self.csectMiniFat * self.SectorSize + # 2) Actually used size is calculated by dividing the MiniStream size + # (given by root entry size) by the size of mini sectors, *4 for + # 32 bits indexes: + nb_minisectors = (self.root.size + self.MiniSectorSize-1) // self.MiniSectorSize + used_size = nb_minisectors * 4 + debug('loadminifat(): minifatsect=%d, nb FAT sectors=%d, used_size=%d, stream_size=%d, nb MiniSectors=%d' % + (self.minifatsect, self.csectMiniFat, used_size, stream_size, nb_minisectors)) + if used_size > stream_size: + # This is not really a problem, but may indicate a wrong implementation: + self.raise_defect(DEFECT_INCORRECT, 'OLE MiniStream is larger than MiniFAT') + # In any case, first read stream_size: + s = self._open(self.minifatsect, stream_size, force_FAT=True).read() + # [PL] Old code replaced by an array: + # self.minifat = [i32(s, i) for i in range(0, len(s), 4)] + self.minifat = self.sect2array(s) + # Then shrink the array to used size, to avoid indexes out of MiniStream: + debug('MiniFAT shrunk from %d to %d sectors' % (len(self.minifat), nb_minisectors)) + self.minifat = self.minifat[:nb_minisectors] + debug('loadminifat(): len=%d' % len(self.minifat)) + debug('\nMiniFAT:') + self.dumpfat(self.minifat) + + def getsect(self, sect): + """ + Read given sector from file on disk. + + :param sect: int, sector index + :returns: a string containing the sector data. + """ + # From [MS-CFB]: A sector number can be converted into a byte offset + # into the file by using the following formula: + # (sector number + 1) x Sector Size. + # This implies that sector #0 of the file begins at byte offset Sector + # Size, not at 0. + + # [PL] the original code in PIL was wrong when sectors are 4KB instead of + # 512 bytes: + # self.fp.seek(512 + self.sectorsize * sect) + # [PL]: added safety checks: + # print("getsect(%X)" % sect) + try: + self.fp.seek(self.sectorsize * (sect+1)) + except: + debug('getsect(): sect=%X, seek=%d, filesize=%d' % + (sect, self.sectorsize*(sect+1), self._filesize)) + self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range') + sector = self.fp.read(self.sectorsize) + if len(sector) != self.sectorsize: + debug('getsect(): sect=%X, read=%d, sectorsize=%d' % + (sect, len(sector), self.sectorsize)) + self.raise_defect(DEFECT_FATAL, 'incomplete OLE sector') + return sector + + def write_sect(self, sect, data, padding=b'\x00'): + """ + Write given sector to file on disk. + + :param sect: int, sector index + :param data: bytes, sector data + :param padding: single byte, padding character if data < sector size + """ + if not isinstance(data, bytes): + raise TypeError("write_sect: data must be a bytes string") + if not isinstance(padding, bytes) or len(padding) != 1: + raise TypeError("write_sect: padding must be a bytes string of 1 char") + #TODO: we could allow padding=None for no padding at all + try: + self.fp.seek(self.sectorsize * (sect+1)) + except: + debug('write_sect(): sect=%X, seek=%d, filesize=%d' % + (sect, self.sectorsize*(sect+1), self._filesize)) + self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range') + if len(data) < self.sectorsize: + # add padding + data += padding * (self.sectorsize - len(data)) + elif len(data) < self.sectorsize: + raise ValueError("Data is larger than sector size") + self.fp.write(data) + + def loaddirectory(self, sect): + """ + Load the directory. + + :param sect: sector index of directory stream. 
+ """ + # The directory is stored in a standard + # substream, independent of its size. + + # open directory stream as a read-only file: + # (stream size is not known in advance) + self.directory_fp = self._open(sect) + + # [PL] to detect malformed documents and avoid DoS attacks, the maximum + # number of directory entries can be calculated: + max_entries = self.directory_fp.size // 128 + debug('loaddirectory: size=%d, max_entries=%d' % + (self.directory_fp.size, max_entries)) + + # Create list of directory entries + # self.direntries = [] + # We start with a list of "None" object + self.direntries = [None] * max_entries +## for sid in iterrange(max_entries): +## entry = fp.read(128) +## if not entry: +## break +## self.direntries.append(_OleDirectoryEntry(entry, sid, self)) + # load root entry: + root_entry = self._load_direntry(0) + # Root entry is the first entry: + self.root = self.direntries[0] + # read and build all storage trees, starting from the root: + self.root.build_storage_tree() + + def _load_direntry(self, sid): + """ + Load a directory entry from the directory. + This method should only be called once for each storage/stream when + loading the directory. + + :param sid: index of storage/stream in the directory. + :returns: a _OleDirectoryEntry object + + :exception IOError: if the entry has always been referenced. + """ + # check if SID is OK: + if sid < 0 or sid >= len(self.direntries): + self.raise_defect(DEFECT_FATAL, "OLE directory index out of range") + # check if entry was already referenced: + if self.direntries[sid] is not None: + self.raise_defect(DEFECT_INCORRECT, + "double reference for OLE stream/storage") + # if exception not raised, return the object + return self.direntries[sid] + self.directory_fp.seek(sid * 128) + entry = self.directory_fp.read(128) + self.direntries[sid] = _OleDirectoryEntry(entry, sid, self) + return self.direntries[sid] + + def dumpdirectory(self): + """ + Dump directory (for debugging only) + """ + self.root.dump() + + def _open(self, start, size = 0x7FFFFFFF, force_FAT=False): + """ + Open a stream, either in FAT or MiniFAT according to its size. + (openstream helper) + + :param start: index of first sector + :param size: size of stream (or nothing if size is unknown) + :param force_FAT: if False (default), stream will be opened in FAT or MiniFAT + according to size. If True, it will always be opened in FAT. 
+ """ + debug('OleFileIO.open(): sect=%d, size=%d, force_FAT=%s' % + (start, size, str(force_FAT))) + # stream size is compared to the MiniSectorCutoff threshold: + if size < self.minisectorcutoff and not force_FAT: + # ministream object + if not self.ministream: + # load MiniFAT if it wasn't already done: + self.loadminifat() + # The first sector index of the miniFAT stream is stored in the + # root directory entry: + size_ministream = self.root.size + debug('Opening MiniStream: sect=%d, size=%d' % + (self.root.isectStart, size_ministream)) + self.ministream = self._open(self.root.isectStart, + size_ministream, force_FAT=True) + return _OleStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) + else: + # standard stream + return _OleStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, + sectorsize=self.sectorsize, fat=self.fat, + filesize=self._filesize) + + def _list(self, files, prefix, node, streams=True, storages=False): + """ + listdir helper + + :param files: list of files to fill in + :param prefix: current location in storage tree (list of names) + :param node: current node (_OleDirectoryEntry object) + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + """ + prefix = prefix + [node.name] + for entry in node.kids: + if entry.entry_type == STGTY_STORAGE: + # this is a storage + if storages: + # add it to the list + files.append(prefix[1:] + [entry.name]) + # check its kids + self._list(files, prefix, entry, streams, storages) + elif entry.entry_type == STGTY_STREAM: + # this is a stream + if streams: + # add it to the list + files.append(prefix[1:] + [entry.name]) + else: + self.raise_defect(DEFECT_INCORRECT, 'The directory tree contains an entry which is not a stream nor a storage.') + + def listdir(self, streams=True, storages=False): + """ + Return a list of streams and/or storages stored in this file + + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + :returns: list of stream and/or storage paths + """ + files = [] + self._list(files, [], self.root, streams, storages) + return files + + def _find(self, filename): + """ + Returns directory entry of given filename. (openstream helper) + Note: this method is case-insensitive. + + :param filename: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :returns: sid of requested filename + :exception IOError: if file not found + """ + + # if filename is a string instead of a list, split it on slashes to + # convert to a list: + if isinstance(filename, basestring): + filename = filename.split('/') + # walk across storage tree, following given path: + node = self.root + for name in filename: + for kid in node.kids: + if kid.name.lower() == name.lower(): + break + else: + raise IOError("file not found") + node = kid + return node.sid + + def openstream(self, filename): + """ + Open a stream as a read-only file object (BytesIO). + Note: filename is case-insensitive. 
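+
+        For example (illustrative; assumes 'ole' is an open OleFileIO instance
+        and the file contains a 'WordDocument' stream)::
+
+            data = ole.openstream('WordDocument').read()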
+ + :param filename: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :returns: file object (read-only) + :exception IOError: if filename not found, or if this is not a stream. + """ + sid = self._find(filename) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + raise IOError("this file is not a stream") + return self._open(entry.isectStart, entry.size) + + def write_stream(self, stream_name, data): + """ + Write a stream to disk. For now, it is only possible to replace an + existing stream by data of the same size. + + :param stream_name: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :param data: bytes, data to be written, must be the same size as the original + stream. + """ + if not isinstance(data, bytes): + raise TypeError("write_stream: data must be a bytes string") + sid = self._find(stream_name) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + raise IOError("this is not a stream") + size = entry.size + if size != len(data): + raise ValueError("write_stream: data must be the same size as the existing stream") + if size < self.minisectorcutoff: + raise NotImplementedError("Writing a stream in MiniFAT is not implemented yet") + sect = entry.isectStart + # number of sectors to write + nb_sectors = (size + (self.sectorsize-1)) // self.sectorsize + debug('nb_sectors = %d' % nb_sectors) + for i in range(nb_sectors): + # try: + # self.fp.seek(offset + self.sectorsize * sect) + # except: + # debug('sect=%d, seek=%d' % + # (sect, offset+self.sectorsize*sect)) + # raise IOError('OLE sector index out of range') + # extract one sector from data, the last one being smaller: + if i < (nb_sectors-1): + data_sector = data[i*self.sectorsize:(i+1)*self.sectorsize] + #TODO: comment this if it works + assert(len(data_sector) == self.sectorsize) + else: + data_sector = data[i*self.sectorsize:] + # TODO: comment this if it works + debug('write_stream: size=%d sectorsize=%d data_sector=%d size%%sectorsize=%d' + % (size, self.sectorsize, len(data_sector), size % self.sectorsize)) + assert(len(data_sector) % self.sectorsize == size % self.sectorsize) + self.write_sect(sect, data_sector) +# self.fp.write(data_sector) + # jump to next sector in the FAT: + try: + sect = self.fat[sect] + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') + + def get_type(self, filename): + """ + Test if given filename exists as a stream or a storage in the OLE + container, and return its type. + + :param filename: path of stream in storage tree. 
(see openstream for syntax) + :returns: False if object does not exist, its entry type (>0) otherwise: + + - STGTY_STREAM: a stream + - STGTY_STORAGE: a storage + - STGTY_ROOT: the root entry + """ + try: + sid = self._find(filename) + entry = self.direntries[sid] + return entry.entry_type + except: + return False + + def getmtime(self, filename): + """ + Return modification time of a stream/storage. + + :param filename: path of stream/storage in storage tree. (see openstream for + syntax) + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + sid = self._find(filename) + entry = self.direntries[sid] + return entry.getmtime() + + def getctime(self, filename): + """ + Return creation time of a stream/storage. + + :param filename: path of stream/storage in storage tree. (see openstream for + syntax) + :returns: None if creation time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + sid = self._find(filename) + entry = self.direntries[sid] + return entry.getctime() + + def exists(self, filename): + """ + Test if given filename exists as a stream or a storage in the OLE + container. + Note: filename is case-insensitive. + + :param filename: path of stream in storage tree. (see openstream for syntax) + :returns: True if object exist, else False. + """ + try: + sid = self._find(filename) + return True + except: + return False + + def get_size(self, filename): + """ + Return size of a stream in the OLE container, in bytes. + + :param filename: path of stream in storage tree (see openstream for syntax) + :returns: size in bytes (long integer) + :exception IOError: if file not found + :exception TypeError: if this is not a stream. + """ + sid = self._find(filename) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + #TODO: Should it return zero instead of raising an exception ? + raise TypeError('object is not an OLE stream') + return entry.size + + def get_rootentry_name(self): + """ + Return root entry name. Should usually be 'Root Entry' or 'R' in most + implementations. + """ + return self.root.name + + def getproperties(self, filename, convert_time=False, no_conversion=None): + """ + Return properties described in substream. 
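+
+        For example (illustrative; assumes 'ole' is an open OleFileIO instance
+        and 'streamname' is a property stream path returned by listdir(), such
+        as the SummaryInformation stream)::
+
+            props = ole.getproperties(streamname, convert_time=True)
+            # props maps each property id (int) to its decoded value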
+ + :param filename: path of stream in storage tree (see openstream for syntax) + :param convert_time: bool, if True timestamps will be converted to Python datetime + :param no_conversion: None or list of int, timestamps not to be converted + (for example total editing time is not a real timestamp) + + :returns: a dictionary of values indexed by id (integer) + """ + # REFERENCE: [MS-OLEPS] https://msdn.microsoft.com/en-us/library/dd942421.aspx + # make sure no_conversion is a list, just to simplify code below: + if no_conversion is None: + no_conversion = [] + # stream path as a string to report exceptions: + streampath = filename + if not isinstance(streampath, str): + streampath = '/'.join(streampath) + + fp = self.openstream(filename) + + data = {} + + try: + # header + s = fp.read(28) + clsid = _clsid(s[8:24]) + + # format id + s = fp.read(20) + fmtid = _clsid(s[:16]) + fp.seek(i32(s, 16)) + + # get section + s = b"****" + fp.read(i32(fp.read(4))-4) + # number of properties: + num_props = i32(s, 4) + except BaseException as exc: + # catch exception while parsing property header, and only raise + # a DEFECT_INCORRECT then return an empty dict, because this is not + # a fatal error when parsing the whole file + msg = 'Error while parsing properties header in stream %s: %s' % ( + repr(streampath), exc) + self.raise_defect(DEFECT_INCORRECT, msg, type(exc)) + return data + + for i in range(num_props): + try: + id = 0 # just in case of an exception + id = i32(s, 8+i*8) + offset = i32(s, 12+i*8) + type = i32(s, offset) + + debug('property id=%d: type=%d offset=%X' % (id, type, offset)) + + # test for common types first (should perhaps use + # a dictionary instead?) + + if type == VT_I2: # 16-bit signed integer + value = i16(s, offset+4) + if value >= 32768: + value = value - 65536 + elif type == VT_UI2: # 2-byte unsigned integer + value = i16(s, offset+4) + elif type in (VT_I4, VT_INT, VT_ERROR): + # VT_I4: 32-bit signed integer + # VT_ERROR: HRESULT, similar to 32-bit signed integer, + # see http://msdn.microsoft.com/en-us/library/cc230330.aspx + value = i32(s, offset+4) + elif type in (VT_UI4, VT_UINT): # 4-byte unsigned integer + value = i32(s, offset+4) # FIXME + elif type in (VT_BSTR, VT_LPSTR): + # CodePageString, see http://msdn.microsoft.com/en-us/library/dd942354.aspx + # size is a 32 bits integer, including the null terminator, and + # possibly trailing or embedded null chars + #TODO: if codepage is unicode, the string should be converted as such + count = i32(s, offset+4) + value = s[offset+8:offset+8+count-1] + # remove all null chars: + value = value.replace(b'\x00', b'') + elif type == VT_BLOB: + # binary large object (BLOB) + # see http://msdn.microsoft.com/en-us/library/dd942282.aspx + count = i32(s, offset+4) + value = s[offset+8:offset+8+count] + elif type == VT_LPWSTR: + # UnicodeString + # see http://msdn.microsoft.com/en-us/library/dd942313.aspx + # "the string should NOT contain embedded or additional trailing + # null characters." + count = i32(s, offset+4) + value = self._decode_utf16_str(s[offset+8:offset+8+count*2]) + elif type == VT_FILETIME: + value = long(i32(s, offset+4)) + (long(i32(s, offset+8)) << 32) + # FILETIME is a 64-bit int: "number of 100ns periods + # since Jan 1,1601". 
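+                        # For instance, 116444736000000000 such 100ns ticks
+                        # (11644473600 seconds) correspond to
+                        # 1970-01-01 00:00:00 UTC.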
+ if convert_time and id not in no_conversion: + debug('Converting property #%d to python datetime, value=%d=%fs' + % (id, value, float(value) / 10000000)) + # convert FILETIME to Python datetime.datetime + # inspired from http://code.activestate.com/recipes/511425-filetime-to-datetime/ + _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0) + debug('timedelta days=%d' % (value//(10*1000000*3600*24))) + value = _FILETIME_null_date + datetime.timedelta(microseconds=value//10) + else: + # legacy code kept for backward compatibility: returns a + # number of seconds since Jan 1,1601 + value = value // 10000000 # seconds + elif type == VT_UI1: # 1-byte unsigned integer + value = i8(s[offset+4]) + elif type == VT_CLSID: + value = _clsid(s[offset+4:offset+20]) + elif type == VT_CF: + # PropertyIdentifier or ClipboardData?? + # see http://msdn.microsoft.com/en-us/library/dd941945.aspx + count = i32(s, offset+4) + value = s[offset+8:offset+8+count] + elif type == VT_BOOL: + # VARIANT_BOOL, 16 bits bool, 0x0000=Fals, 0xFFFF=True + # see http://msdn.microsoft.com/en-us/library/cc237864.aspx + value = bool(i16(s, offset+4)) + else: + value = None # everything else yields "None" + debug('property id=%d: type=%d not implemented in parser yet' % (id, type)) + + # missing: VT_EMPTY, VT_NULL, VT_R4, VT_R8, VT_CY, VT_DATE, + # VT_DECIMAL, VT_I1, VT_I8, VT_UI8, + # see http://msdn.microsoft.com/en-us/library/dd942033.aspx + + # FIXME: add support for VT_VECTOR + # VT_VECTOR is a 32 uint giving the number of items, followed by + # the items in sequence. The VT_VECTOR value is combined with the + # type of items, e.g. VT_VECTOR|VT_BSTR + # see http://msdn.microsoft.com/en-us/library/dd942011.aspx + + # print("%08x" % id, repr(value), end=" ") + # print("(%s)" % VT[i32(s, offset) & 0xFFF]) + + data[id] = value + except BaseException as exc: + # catch exception while parsing each property, and only raise + # a DEFECT_INCORRECT, because parsing can go on + msg = 'Error while parsing property id %d in stream %s: %s' % ( + id, repr(streampath), exc) + self.raise_defect(DEFECT_INCORRECT, msg, type(exc)) + + return data + + def get_metadata(self): + """ + Parse standard properties streams, return an OleMetadata object + containing all the available metadata. + (also stored in the metadata attribute of the OleFileIO object) + + new in version 0.25 + """ + self.metadata = OleMetadata() + self.metadata.parse_properties(self) + return self.metadata + +# +# -------------------------------------------------------------------- +# This script can be used to dump the directory of any OLE2 structured +# storage file. + +if __name__ == "__main__": + + # [PL] display quick usage info if launched from command-line + if len(sys.argv) <= 1: + print('olefile version %s %s - %s' % (__version__, __date__, __author__)) + print( +""" +Launched from the command line, this script parses OLE files and prints info. + +Usage: olefile.py [-d] [-c] [file2 ...] 
+ +Options: +-d : debug mode (displays a lot of debug information, for developers only) +-c : check all streams (for debugging purposes) + +For more information, see http://www.decalage.info/olefile +""") + sys.exit() + + check_streams = False + for filename in sys.argv[1:]: + # try: + # OPTIONS: + if filename == '-d': + # option to switch debug mode on: + set_debug_mode(True) + continue + if filename == '-c': + # option to switch check streams mode on: + check_streams = True + continue + + ole = OleFileIO(filename)#, raise_defects=DEFECT_INCORRECT) + print("-" * 68) + print(filename) + print("-" * 68) + ole.dumpdirectory() + for streamname in ole.listdir(): + if streamname[-1][0] == "\005": + print(streamname, ": properties") + props = ole.getproperties(streamname, convert_time=True) + props = sorted(props.items()) + for k, v in props: + # [PL]: avoid to display too large or binary values: + if isinstance(v, (basestring, bytes)): + if len(v) > 50: + v = v[:50] + if isinstance(v, bytes): + # quick and dirty binary check: + for c in (1, 2, 3, 4, 5, 6, 7, 11, 12, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31): + if c in bytearray(v): + v = '(binary data)' + break + print(" ", k, v) + + if check_streams: + # Read all streams to check if there are errors: + print('\nChecking streams...') + for streamname in ole.listdir(): + # print name using repr() to convert binary chars to \xNN: + print('-', repr('/'.join(streamname)), '-', end=' ') + st_type = ole.get_type(streamname) + if st_type == STGTY_STREAM: + print('size %d' % ole.get_size(streamname)) + # just try to read stream in memory: + ole.openstream(streamname) + else: + print('NOT a stream : type=%d' % st_type) + print() + +# for streamname in ole.listdir(): +# # print name using repr() to convert binary chars to \xNN: +# print('-', repr('/'.join(streamname)),'-', end=' ') +# print(ole.getmtime(streamname)) +# print() + + print('Modification/Creation times of all directory entries:') + for entry in ole.direntries: + if entry is not None: + print('- %s: mtime=%s ctime=%s' % (entry.name, + entry.getmtime(), entry.getctime())) + print() + + # parse and display metadata: + meta = ole.get_metadata() + meta.dump() + print() + # [PL] Test a few new methods: + root = ole.get_rootentry_name() + print('Root entry name: "%s"' % root) + if ole.exists('worddocument'): + print("This is a Word document.") + print("type of stream 'WordDocument':", ole.get_type('worddocument')) + print("size :", ole.get_size('worddocument')) + if ole.exists('macros/vba'): + print("This document may contain VBA macros.") + + # print parsing issues: + print('\nNon-fatal issues raised during parsing:') + if ole.parsing_issues: + for exctype, msg in ole.parsing_issues: + print('- %s: %s' % (exctype.__name__, msg)) + else: + print('None') +## except IOError as v: +## print("***", "cannot read", file, "-", v) + +# this code was developed while listening to The Wedding Present "Sea Monsters" diff --git a/server/www/packages/packages-darwin/x64/PIL/PSDraw.py b/server/www/packages/packages-darwin/x64/PIL/PSDraw.py new file mode 100644 index 0000000..d4e7b18 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PSDraw.py @@ -0,0 +1,235 @@ +# +# The Python Imaging Library +# $Id$ +# +# simple postscript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. 
All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import EpsImagePlugin +import sys + +## +# Simple Postscript graphics interface. + + +class PSDraw(object): + """ + Sets up printing to the given file. If **file** is omitted, + :py:attr:`sys.stdout` is assumed. + """ + + def __init__(self, fp=None): + if not fp: + fp = sys.stdout + self.fp = fp + + def _fp_write(self, to_write): + if bytes is str or self.fp == sys.stdout: + self.fp.write(to_write) + else: + self.fp.write(bytes(to_write, 'UTF-8')) + + def begin_document(self, id=None): + """Set up printing of a document. (Write Postscript DSC header.)""" + # FIXME: incomplete + self._fp_write("%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n") + # self._fp_write(ERROR_PS) # debugging! + self._fp_write(EDROFF_PS) + self._fp_write(VDI_PS) + self._fp_write("%%EndProlog\n") + self.isofont = {} + + def end_document(self): + """Ends printing. (Write Postscript DSC footer.)""" + self._fp_write("%%EndDocument\n" + "restore showpage\n" + "%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font, size): + """ + Selects which font to use. + + :param font: A Postscript font name + :param size: Size in points. + """ + if font not in self.isofont: + # reencode font + self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" % + (font, font)) + self.isofont[font] = 1 + # rough + self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font)) + + def line(self, xy0, xy1): + """ + Draws a line between the two points. Coordinates are given in + Postscript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + xy = xy0 + xy1 + self._fp_write("%d %d %d %d Vl\n" % xy) + + def rectangle(self, box): + """ + Draws a rectangle. + + :param box: A 4-tuple of integers whose order and function is currently + undocumented. + + Hint: the tuple is passed into this format string: + + .. code-block:: python + + %d %d M %d %d 0 Vr\n + """ + self._fp_write("%d %d M %d %d 0 Vr\n" % box) + + def text(self, xy, text): + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. + """ + text = "\\(".join(text.split("(")) + text = "\\)".join(text.split(")")) + xy = xy + (text,) + self._fp_write("%d %d M (%s) S\n" % xy) + + def image(self, box, im, dpi=None): + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # greyscale + # image size (on paper) + x = float(im.size[0] * 72) / dpi + y = float(im.size[1] * 72) / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self._fp_write("gsave\n%f %f translate\n" % (dx, dy)) + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self._fp_write("%f %f scale\n" % (sx, sy)) + EpsImagePlugin._save(im, self.fp, None, 0) + self._fp_write("\ngrestore\n") + +# -------------------------------------------------------------------- +# Postscript driver + +# +# EDROFF.PS -- Postscript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
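+#
+# Editor's note -- a rough usage sketch of the PSDraw class above (not part
+# of upstream PIL; file and image names are made up):
+#
+#   from PIL import Image, PSDraw
+#   with open("out.ps", "wb") as fp:
+#       ps = PSDraw.PSDraw(fp)
+#       ps.begin_document()
+#       ps.setfont("Helvetica", 12)
+#       ps.text((72, 720), "hello")
+#       ps.image((72, 72, 540, 648), Image.open("photo.jpg"))
+#       ps.end_document()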
+# + +EDROFF_PS = """\ +/S { show } bind def +/P { moveto show } bind def +/M { moveto } bind def +/X { 0 rmoveto } bind def +/Y { 0 exch rmoveto } bind def +/E { findfont + dup maxlength dict begin + { + 1 index /FID ne { def } { pop pop } ifelse + } forall + /Encoding exch def + dup /FontName exch def + currentdict end definefont pop +} bind def +/F { findfont exch scalefont dup setfont + [ exch /setfont cvx ] cvx bind def +} bind def +""" + +# +# VDI.PS -- Postscript driver for VDI meta commands +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. +# + +VDI_PS = """\ +/Vm { moveto } bind def +/Va { newpath arcn stroke } bind def +/Vl { moveto lineto stroke } bind def +/Vc { newpath 0 360 arc closepath } bind def +/Vr { exch dup 0 rlineto + exch dup neg 0 exch rlineto + exch neg 0 rlineto + 0 exch rlineto + 100 div setgray fill 0 setgray } bind def +/Tm matrix def +/Ve { Tm currentmatrix pop + translate scale newpath 0 0 .5 0 360 arc closepath + Tm setmatrix +} bind def +/Vf { currentgray exch setgray fill setgray } bind def +""" + +# +# ERROR.PS -- Error handler +# +# History: +# 89-11-21 fl: created (pslist 1.10) +# + +ERROR_PS = """\ +/landscape false def +/errorBUF 200 string def +/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def +errordict begin /handleerror { + initmatrix /Courier findfont 10 scalefont setfont + newpath 72 720 moveto $error begin /newerror false def + (PostScript Error) show errorNL errorNL + (Error: ) show + /errorname load errorBUF cvs show errorNL errorNL + (Command: ) show + /command load dup type /stringtype ne { errorBUF cvs } if show + errorNL errorNL + (VMstatus: ) show + vmstatus errorBUF cvs show ( bytes available, ) show + errorBUF cvs show ( bytes used at level ) show + errorBUF cvs show errorNL errorNL + (Operand stargck: ) show errorNL /ostargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall errorNL + (Execution stargck: ) show errorNL /estargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall + end showpage +} def end +""" diff --git a/server/www/packages/packages-darwin/x64/PIL/PaletteFile.py b/server/www/packages/packages-darwin/x64/PIL/PaletteFile.py new file mode 100644 index 0000000..ef50fee --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PaletteFile.py @@ -0,0 +1,55 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read simple, teragon-style palette files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from PIL._binary import o8 + + +## +# File handler for Teragon-style palette files. 
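+#
+# Editor's note -- illustrative only (not part of upstream PIL); a palette
+# file is plain text with one "index r g b" (or "index v") line per entry:
+#
+#   with open("my.pal", "rb") as fp:
+#       data, rawmode = PaletteFile(fp).getpalette()
+#   im.putpalette(data, rawmode)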
+ +class PaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [(i, i, i) for i in range(256)] + + while True: + + s = fp.readline() + + if not s: + break + if s[0:1] == b"#": + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = [int(x) for x in s.split()] + try: + [i, r, g, b] = v + except ValueError: + [i, r] = v + g = b = r + + if 0 <= i <= 255: + self.palette[i] = o8(r) + o8(g) + o8(b) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/server/www/packages/packages-darwin/x64/PIL/PalmImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/PalmImagePlugin.py new file mode 100644 index 0000000..4f415ff --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PalmImagePlugin.py @@ -0,0 +1,241 @@ +# +# The Python Imaging Library. +# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## + +from PIL import Image, ImageFile, _binary + +__version__ = "1.0" + +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + (153, 
153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) + + +# so build a prototype image to be used for palette resampling +def build_prototype_image(): + image = Image.new("L", (1, len(_Palm8BitColormapValues),)) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata = () + for i in range(len(_Palm8BitColormapValues)): + palettedata = palettedata + _Palm8BitColormapValues[i] + for i in range(256 - len(_Palm8BitColormapValues)): + palettedata = palettedata + (0, 0, 0) + image.putpalette(palettedata) + return image + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = { + "custom-colormap": 0x4000, + "is-compressed": 0x8000, + "has-transparent": 0x2000, + } + +_COMPRESSION_TYPES = { + "none": 0xFF, + "rle": 0x01, + "scanline": 0x00, + } + +o8 = _binary.o8 +o16b = _binary.o16be + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + +def _save(im, fp, filename, check=0): + + if im.mode == "P": + + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif (im.mode == "L" and + "bpp" in im.encoderinfo and + im.encoderinfo["bpp"] in (1, 2, 4)): + + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4): + + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
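+        # Editor's note: e.g. with bpp=2 the point() call below maps a pixel
+        # value of 1 to (1 << 2) - 1 - (1 & 3) == 2 (Palm greyscale counts
+        # up from white at 0).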
+ bpp = im.info["bpp"] + im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "1": + + # monochrome -- write it inverted, as is the Palm standard + rawmode = "1;I" + bpp = 1 + version = 0 + + else: + + raise IOError("cannot write mode %s as Palm" % im.mode) + + if check: + return check + + # + # make sure image data is available + im.load() + + # write header + + cols = im.size[0] + rows = im.size[1] + + rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2 + transparent_index = 0 + compression_type = _COMPRESSION_TYPES["none"] + + flags = 0 + if im.mode == "P" and "custom-colormap" in im.info: + flags = flags & _FLAGS["custom-colormap"] + colormapsize = 4 * 256 + 2 + colormapmode = im.palette.mode + colormap = im.getdata().getpalette() + else: + colormapsize = 0 + + if "offset" in im.info: + offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 + else: + offset = 0 + + fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) + fp.write(o8(bpp)) + fp.write(o8(version)) + fp.write(o16b(offset)) + fp.write(o8(transparent_index)) + fp.write(o8(compression_type)) + fp.write(o16b(0)) # reserved by Palm + + # now write colormap if necessary + + if colormapsize > 0: + fp.write(o16b(256)) + for i in range(256): + fp.write(o8(i)) + if colormapmode == 'RGB': + fp.write( + o8(colormap[3 * i]) + + o8(colormap[3 * i + 1]) + + o8(colormap[3 * i + 2])) + elif colormapmode == 'RGBA': + fp.write( + o8(colormap[4 * i]) + + o8(colormap[4 * i + 1]) + + o8(colormap[4 * i + 2])) + + # now convert data to raw form + ImageFile._save( + im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))]) + + if hasattr(fp, "flush"): + fp.flush() + + +# +# -------------------------------------------------------------------- + +Image.register_save("Palm", _save) + +Image.register_extension("Palm", ".palm") + +Image.register_mime("Palm", "image/palm") diff --git a/server/www/packages/packages-darwin/x64/PIL/PcdImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/PcdImagePlugin.py new file mode 100644 index 0000000..b53635a --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PcdImagePlugin.py @@ -0,0 +1,59 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCD file handling +# +# History: +# 96-05-10 fl Created +# 96-05-27 fl Added draft mode (128x192, 256x384) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + +i8 = _binary.i8 + + +## +# Image plugin for PhotoCD images. This plugin only reads the 768x512 +# image from the file; higher resolutions are encoded in a proprietary +# encoding. + +class PcdImageFile(ImageFile.ImageFile): + + format = "PCD" + format_description = "Kodak PhotoCD" + + def _open(self): + + # rough + self.fp.seek(2048) + s = self.fp.read(2048) + + if s[:4] != b"PCD_": + raise SyntaxError("not a PCD file") + + orientation = i8(s[1538]) & 3 + if orientation == 1: + self.tile_post_rotate = 90 # hack + elif orientation == 3: + self.tile_post_rotate = -90 + + self.mode = "RGB" + self.size = 768, 512 # FIXME: not correct for rotated images! 
+ self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)] + +# +# registry + +Image.register_open(PcdImageFile.format, PcdImageFile) + +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/server/www/packages/packages-darwin/x64/PIL/PcfFontFile.py b/server/www/packages/packages-darwin/x64/PIL/PcfFontFile.py new file mode 100644 index 0000000..c200690 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PcfFontFile.py @@ -0,0 +1,252 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library +# $Id$ +# +# portable compiled font file parser +# +# history: +# 1997-08-19 fl created +# 2003-09-13 fl fixed loading of unicode fonts +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import FontFile +from PIL import _binary + +# -------------------------------------------------------------------- +# declarations + +PCF_MAGIC = 0x70636601 # "\x01fcp" + +PCF_PROPERTIES = (1 << 0) +PCF_ACCELERATORS = (1 << 1) +PCF_METRICS = (1 << 2) +PCF_BITMAPS = (1 << 3) +PCF_INK_METRICS = (1 << 4) +PCF_BDF_ENCODINGS = (1 << 5) +PCF_SWIDTHS = (1 << 6) +PCF_GLYPH_NAMES = (1 << 7) +PCF_BDF_ACCELERATORS = (1 << 8) + +BYTES_PER_ROW = [ + lambda bits: ((bits+7) >> 3), + lambda bits: ((bits+15) >> 3) & ~1, + lambda bits: ((bits+31) >> 3) & ~3, + lambda bits: ((bits+63) >> 3) & ~7, +] + +i8 = _binary.i8 +l16 = _binary.i16le +l32 = _binary.i32le +b16 = _binary.i16be +b32 = _binary.i32be + + +def sz(s, o): + return s[o:s.index(b"\0", o)] + + +## +# Font file plugin for the X11 PCF format. + +class PcfFontFile(FontFile.FontFile): + + name = "name" + + def __init__(self, fp): + + magic = l32(fp.read(4)) + if magic != PCF_MAGIC: + raise SyntaxError("not a PCF file") + + FontFile.FontFile.__init__(self) + + count = l32(fp.read(4)) + self.toc = {} + for i in range(count): + type = l32(fp.read(4)) + self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) + + self.fp = fp + + self.info = self._load_properties() + + metrics = self._load_metrics() + bitmaps = self._load_bitmaps(metrics) + encoding = self._load_encoding() + + # + # create glyph structure + + for ch in range(256): + ix = encoding[ch] + if ix is not None: + x, y, l, r, w, a, d, f = metrics[ix] + glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix] + self.glyph[ch] = glyph + + def _getformat(self, tag): + + format, size, offset = self.toc[tag] + + fp = self.fp + fp.seek(offset) + + format = l32(fp.read(4)) + + if format & 4: + i16, i32 = b16, b32 + else: + i16, i32 = l16, l32 + + return fp, format, i16, i32 + + def _load_properties(self): + + # + # font properties + + properties = {} + + fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) + + nprops = i32(fp.read(4)) + + # read property description + p = [] + for i in range(nprops): + p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) + if nprops & 3: + fp.seek(4 - (nprops & 3), 1) # pad + + data = fp.read(i32(fp.read(4))) + + for k, s, v in p: + k = sz(data, k) + if s: + v = sz(data, v) + properties[k] = v + + return properties + + def _load_metrics(self): + + # + # font metrics + + metrics = [] + + fp, format, i16, i32 = self._getformat(PCF_METRICS) + + append = metrics.append + + if (format & 0xff00) == 0x100: + + # "compressed" metrics + for i in range(i16(fp.read(2))): + left = i8(fp.read(1)) - 128 + right = i8(fp.read(1)) - 128 + width = i8(fp.read(1)) - 128 + ascent = i8(fp.read(1)) - 128 + descent = 
i8(fp.read(1)) - 128 + xsize = right - left + ysize = ascent + descent + append( + (xsize, ysize, left, right, width, + ascent, descent, 0) + ) + + else: + + # "jumbo" metrics + for i in range(i32(fp.read(4))): + left = i16(fp.read(2)) + right = i16(fp.read(2)) + width = i16(fp.read(2)) + ascent = i16(fp.read(2)) + descent = i16(fp.read(2)) + attributes = i16(fp.read(2)) + xsize = right - left + ysize = ascent + descent + append( + (xsize, ysize, left, right, width, + ascent, descent, attributes) + ) + + return metrics + + def _load_bitmaps(self, metrics): + + # + # bitmap data + + bitmaps = [] + + fp, format, i16, i32 = self._getformat(PCF_BITMAPS) + + nbitmaps = i32(fp.read(4)) + + if nbitmaps != len(metrics): + raise IOError("Wrong number of bitmaps") + + offsets = [] + for i in range(nbitmaps): + offsets.append(i32(fp.read(4))) + + bitmapSizes = [] + for i in range(4): + bitmapSizes.append(i32(fp.read(4))) + + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 + + bitmapsize = bitmapSizes[padindex] + offsets.append(bitmapsize) + + data = fp.read(bitmapsize) + + pad = BYTES_PER_ROW[padindex] + mode = "1;R" + if bitorder: + mode = "1" + + for i in range(nbitmaps): + x, y, l, r, w, a, d, f = metrics[i] + b, e = offsets[i], offsets[i+1] + bitmaps.append( + Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x)) + ) + + return bitmaps + + def _load_encoding(self): + + # map character code to bitmap index + encoding = [None] * 256 + + fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) + + firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2)) + firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2)) + + default = i16(fp.read(2)) + + nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1) + + for i in range(nencoding): + encodingOffset = i16(fp.read(2)) + if encodingOffset != 0xFFFF: + try: + encoding[i+firstCol] = encodingOffset + except IndexError: + break # only load ISO-8859-1 glyphs + + return encoding diff --git a/server/www/packages/packages-darwin/x64/PIL/PcxImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/PcxImagePlugin.py new file mode 100644 index 0000000..9440d53 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PcxImagePlugin.py @@ -0,0 +1,187 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCX file handling +# +# This format was originally used by ZSoft's popular PaintBrush +# program for the IBM PC. It is also supported by many MS-DOS and +# Windows applications, including the Windows PaintBrush program in +# Windows 3. +# +# history: +# 1995-09-01 fl Created +# 1996-05-20 fl Fixed RGB support +# 1997-01-03 fl Fixed 2-bit and 4-bit support +# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) +# 1999-02-07 fl Added write support +# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust +# 2002-07-30 fl Seek from to current position, not beginning of file +# 2003-06-03 fl Extract DPI settings (info["dpi"]) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +from PIL import Image, ImageFile, ImagePalette, _binary + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 + +__version__ = "0.6" + + +def _accept(prefix): + return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5] + + +## +# Image plugin for Paintbrush images. 
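+#
+# Editor's note -- the plugin is used implicitly through Image.open(); a
+# hypothetical example:
+#
+#   im = Image.open("drawing.pcx")
+#   print(im.mode, im.size, im.info.get("dpi"))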
+ +class PcxImageFile(ImageFile.ImageFile): + + format = "PCX" + format_description = "Paintbrush" + + def _open(self): + + # header + s = self.fp.read(128) + if not _accept(s): + raise SyntaxError("not a PCX file") + + # image + bbox = i16(s, 4), i16(s, 6), i16(s, 8)+1, i16(s, 10)+1 + if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: + raise SyntaxError("bad PCX image size") + logger.debug("BBox: %s %s %s %s", *bbox) + + # format + version = i8(s[1]) + bits = i8(s[3]) + planes = i8(s[65]) + stride = i16(s, 66) + logger.debug("PCX version %s, bits %s, planes %s, stride %s", + version, bits, planes, stride) + + self.info["dpi"] = i16(s, 12), i16(s, 14) + + if bits == 1 and planes == 1: + mode = rawmode = "1" + + elif bits == 1 and planes in (2, 4): + mode = "P" + rawmode = "P;%dL" % planes + self.palette = ImagePalette.raw("RGB", s[16:64]) + + elif version == 5 and bits == 8 and planes == 1: + mode = rawmode = "L" + # FIXME: hey, this doesn't work with the incremental loader !!! + self.fp.seek(-769, 2) + s = self.fp.read(769) + if len(s) == 769 and i8(s[0]) == 12: + # check if the palette is linear greyscale + for i in range(256): + if s[i*3+1:i*3+4] != o8(i)*3: + mode = rawmode = "P" + break + if mode == "P": + self.palette = ImagePalette.raw("RGB", s[1:]) + self.fp.seek(128) + + elif version == 5 and bits == 8 and planes == 3: + mode = "RGB" + rawmode = "RGB;L" + + else: + raise IOError("unknown PCX mode") + + self.mode = mode + self.size = bbox[2]-bbox[0], bbox[3]-bbox[1] + + bbox = (0, 0) + self.size + logger.debug("size: %sx%s", *self.size) + + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + +# -------------------------------------------------------------------- +# save PCX files + +SAVE = { + # mode: (version, bits, planes, raw mode) + "1": (2, 1, 1, "1"), + "L": (5, 8, 1, "L"), + "P": (5, 8, 1, "P"), + "RGB": (5, 8, 3, "RGB;L"), +} + +o16 = _binary.o16le + + +def _save(im, fp, filename, check=0): + + try: + version, bits, planes, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as PCX" % im.mode) + + if check: + return check + + # bytes per plane + stride = (im.size[0] * bits + 7) // 8 + # stride should be even + stride += stride % 2 + # Stride needs to be kept in sync with the PcxEncode.c version. + # Ideally it should be passed in in the state, but the bytes value + # gets overwritten. + + logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], bits, stride) + + # under windows, we could determine the current screen size with + # "Image.core.display_mode()[1]", but I think that's overkill... 
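+    # Editor's note (worked example): a 100x50 "L" image gives bits=8,
+    # planes=1, so stride = (100*8 + 7)//8 = 100, already even, and the
+    # header written below stores o16(stride) == 100 bytes per plane.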
+ + screen = im.size + + dpi = 100, 100 + + # PCX header + fp.write( + o8(10) + o8(version) + o8(1) + o8(bits) + o16(0) + + o16(0) + o16(im.size[0]-1) + o16(im.size[1]-1) + o16(dpi[0]) + + o16(dpi[1]) + b"\0"*24 + b"\xFF"*24 + b"\0" + o8(planes) + + o16(stride) + o16(1) + o16(screen[0]) + o16(screen[1]) + + b"\0"*54 + ) + + assert fp.tell() == 128 + + ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0, + (rawmode, bits*planes))]) + + if im.mode == "P": + # colour palette + fp.write(o8(12)) + fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes + elif im.mode == "L": + # greyscale palette + fp.write(o8(12)) + for i in range(256): + fp.write(o8(i)*3) + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) + +Image.register_extension(PcxImageFile.format, ".pcx") diff --git a/server/www/packages/packages-darwin/x64/PIL/PdfImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/PdfImagePlugin.py new file mode 100644 index 0000000..7decf0e --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PdfImagePlugin.py @@ -0,0 +1,258 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PDF (Acrobat) file handling +# +# History: +# 1996-07-16 fl Created +# 1997-01-18 fl Fixed header +# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. +# 2004-02-24 fl Fixes for 1 and P images. +# +# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996-1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +# Image plugin for PDF images (output only). +## + +from PIL import Image, ImageFile +from PIL._binary import i8 +import io + +__version__ = "0.4" + + +# +# -------------------------------------------------------------------- + +# object ids: +# 1. catalogue +# 2. pages +# 3. image +# 4. page +# 5. page contents + +def _obj(fp, obj, **dict): + fp.write("%d 0 obj\n" % obj) + if dict: + fp.write("<<\n") + for k, v in dict.items(): + if v is not None: + fp.write("/%s %s\n" % (k, v)) + fp.write(">>\n") + + +def _endobj(fp): + fp.write("endobj\n") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +## +# (Internal) Image save plugin for the PDF format. + +def _save(im, fp, filename, save_all=False): + resolution = im.encoderinfo.get("resolution", 72.0) + + # + # make sure image data is available + im.load() + + xref = [0] + + class TextWriter(object): + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, name): + return getattr(self.fp, name) + + def write(self, value): + self.fp.write(value.encode('latin-1')) + + fp = TextWriter(fp) + + fp.write("%PDF-1.2\n") + fp.write("% created by PIL PDF driver " + __version__ + "\n") + + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits) + # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports + # Flatedecode (zip compression). 
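+    # Editor's note: with the mapping below, "L", "RGB" and "CMYK" sources
+    # are embedded as /DCTDecode (JPEG) streams, while "1" and "P" images
+    # fall back to /ASCIIHexDecode.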
+ + bits = 8 + params = None + + if im.mode == "1": + filter = "/ASCIIHexDecode" + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + bits = 1 + elif im.mode == "L": + filter = "/DCTDecode" + # params = "<< /Predictor 15 /Columns %d >>" % (width-2) + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + elif im.mode == "P": + filter = "/ASCIIHexDecode" + colorspace = "[ /Indexed /DeviceRGB 255 <" + palette = im.im.getpalette("RGB") + for i in range(256): + r = i8(palette[i*3]) + g = i8(palette[i*3+1]) + b = i8(palette[i*3+2]) + colorspace += "%02x%02x%02x " % (r, g, b) + colorspace += "> ]" + procset = "/ImageI" # indexed color + elif im.mode == "RGB": + filter = "/DCTDecode" + colorspace = "/DeviceRGB" + procset = "/ImageC" # color images + elif im.mode == "CMYK": + filter = "/DCTDecode" + colorspace = "/DeviceCMYK" + procset = "/ImageC" # color images + else: + raise ValueError("cannot save mode %s" % im.mode) + + # + # catalogue + + xref.append(fp.tell()) + _obj( + fp, 1, + Type="/Catalog", + Pages="2 0 R") + _endobj(fp) + + # + # pages + numberOfPages = 1 + if save_all: + try: + numberOfPages = im.n_frames + except AttributeError: + # Image format does not have n_frames. It is a single frame image + pass + pages = [str(pageNumber*3+4)+" 0 R" + for pageNumber in range(0, numberOfPages)] + + xref.append(fp.tell()) + _obj( + fp, 2, + Type="/Pages", + Count=len(pages), + Kids="["+"\n".join(pages)+"]") + _endobj(fp) + + for pageNumber in range(0, numberOfPages): + im.seek(pageNumber) + + # + # image + + op = io.BytesIO() + + if filter == "/ASCIIHexDecode": + if bits == 1: + # FIXME: the hex encoder doesn't support packed 1-bit + # images; do things the hard way... + data = im.tobytes("raw", "1") + im = Image.new("L", (len(data), 1), None) + im.putdata(data) + ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "/FlateDecode": + ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/RunLengthDecode": + ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)]) + else: + raise ValueError("unsupported PDF filter (%s)" % filter) + + # + # Get image characteristics + + width, height = im.size + + xref.append(fp.tell()) + _obj( + fp, pageNumber*3+3, + Type="/XObject", + Subtype="/Image", + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Length=len(op.getvalue()), + Filter=filter, + BitsPerComponent=bits, + DecodeParams=params, + ColorSpace=colorspace) + + fp.write("stream\n") + fp.fp.write(op.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # page + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+4) + fp.write( + "<<\n/Type /Page\n/Parent 2 0 R\n" + "/Resources <<\n/ProcSet [ /PDF %s ]\n" + "/XObject << /image %d 0 R >>\n>>\n" + "/MediaBox [ 0 0 %d %d ]\n/Contents %d 0 R\n>>\n" % ( + procset, + pageNumber*3+3, + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + pageNumber*3+5)) + _endobj(fp) + + # + # page contents + + op = TextWriter(io.BytesIO()) + + op.write( + "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( + int(width * 72.0 / resolution), + int(height * 72.0 / resolution))) + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+5, Length=len(op.fp.getvalue())) + + fp.write("stream\n") + fp.fp.write(op.fp.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # trailer + startxref = fp.tell() + fp.write("xref\n0 %d\n0000000000 65535 f \n" % len(xref)) + for x in xref[1:]: + 
fp.write("%010d 00000 n \n" % x) + fp.write("trailer\n<<\n/Size %d\n/Root 1 0 R\n>>\n" % len(xref)) + fp.write("startxref\n%d\n%%%%EOF\n" % startxref) + if hasattr(fp, "flush"): + fp.flush() + +# +# -------------------------------------------------------------------- + +Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) + +Image.register_extension("PDF", ".pdf") + +Image.register_mime("PDF", "application/pdf") diff --git a/server/www/packages/packages-darwin/x64/PIL/PixarImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/PixarImagePlugin.py new file mode 100644 index 0000000..db2ee55 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PixarImagePlugin.py @@ -0,0 +1,68 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + +# +# helpers + +i16 = _binary.i16le + + +## +# Image plugin for PIXAR raster images. + +class PixarImageFile(ImageFile.ImageFile): + + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self): + + # assuming a 4-byte magic label (FIXME: add "_accept" hook) + s = self.fp.read(4) + if s != b"\200\350\000\000": + raise SyntaxError("not a PIXAR file") + + # read rest of header + s = s + self.fp.read(508) + + self.size = i16(s[418:420]), i16(s[416:418]) + + # get channel/depth descriptions + mode = i16(s[424:426]), i16(s[426:428]) + + if mode == (14, 2): + self.mode = "RGB" + # FIXME: to be continued... + + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))] + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile) + +# +# FIXME: what's the standard extension? diff --git a/server/www/packages/packages-darwin/x64/PIL/PngImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/PngImagePlugin.py new file mode 100644 index 0000000..d677882 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PngImagePlugin.py @@ -0,0 +1,809 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). 
+# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +import re +import zlib + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.9" + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + +is_cid = re.compile(b"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b'^\xff+\x00\xff*$') +_null_palette = re.compile(b'^\x00*$') + +# Maximum decompressed size for a iTXt or zTXt chunk. +# Eliminates decompression bombs where compressed chunks can expand 1000x +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +# Set the maximum total text chunk size. +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + +class ChunkStream(object): + + def __init__(self, fp): + + self.fp = fp + self.queue = [] + + if not hasattr(Image.core, "crc32"): + self.crc = self.crc_skip + + def read(self): + "Fetch a new chunk. Returns header information." 
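+        # Editor's note (worked example): for a standard PNG the first call
+        # returns (b'IHDR', 16, 13) -- the chunk id, the file offset of the
+        # chunk data, and the data length.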
+ + if self.queue: + cid, pos, length = self.queue[-1] + del self.queue[-1] + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + raise SyntaxError("broken PNG file (chunk %s)" % repr(cid)) + + return cid, pos, length + + def close(self): + self.queue = self.crc = self.fp = None + + def push(self, cid, pos, length): + + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + "Call the appropriate chunk handler" + + logger.debug("STREAM %s %s %s", cid, pos, length) + return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length) + + def crc(self, cid, data): + "Read and verify checksum" + + crc1 = Image.core.crc32(data, Image.core.crc32(cid)) + crc2 = i16(self.fp.read(2)), i16(self.fp.read(2)) + if crc1 != crc2: + raise SyntaxError("broken PNG file" + "(bad header checksum in %s)" % cid) + + def crc_skip(self, cid, data): + "Read checksum. Used if the C module is not present" + + self.fp.read(4) + + def verify(self, endchunk=b"IEND"): + + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. + + cids = [] + + while True: + cid, pos, length = self.read() + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + @staticmethod + def __new__(cls, text, lang, tkey): + """ + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo(object): + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self): + self.chunks = [] + + def add(self, cid, data): + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + + """ + + self.chunks.append((cid, data)) + + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + + zlib.compress(value)) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + + value) + + def add_text(self, key, value, zip=0): + """Appends a text chunk. 
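+
+        A short usage sketch (editor's addition; key and value are
+        arbitrary examples):
+
+            info = PngInfo()
+            info.add_text("Author", "someone")
+            im.save("out.png", pnginfo=info)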
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, bool(zip)) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode('latin-1', 'strict') + except UnicodeError: + return self.add_itxt(key, value, zip=bool(zip)) + + if not isinstance(key, bytes): + key = key.encode('latin-1', 'strict') + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + +class PngStream(ChunkStream): + + def __init__(self, fp): + + ChunkStream.__init__(self, fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError("Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" % + self.text_memory) + + def chunk_iCCP(self, pos, length): + + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %s", s[:i]) + logger.debug("Compression method %s", i8(s[i])) + comp_method = i8(s[i]) + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in iCCP chunk" % + comp_method) + try: + icc_profile = _safe_zlib_decompress(s[i+2:]) + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos, length): + + # image header + s = ImageFile._safe_read(self.fp, length) + self.im_size = i32(s), i32(s[4:]) + try: + self.im_mode, self.im_rawmode = _MODES[(i8(s[8]), i8(s[9]))] + except: + pass + if i8(s[12]): + self.im_info["interlace"] = 1 + if i8(s[11]): + raise SyntaxError("unknown filter category") + return s + + def chunk_IDAT(self, pos, length): + + # image data + self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)] + self.im_idat = length + raise EOFError + + def chunk_IEND(self, pos, length): + + # end of PNG image + raise EOFError + + def chunk_PLTE(self, pos, length): + + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos, length): + + # transparency + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + if _simple_palette.match(s): + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + elif _null_palette.match(s): + self.im_info["transparency"] = 0 + else: + self.im_info["transparency"] = s + elif self.im_mode == "L": + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:]) + return s + + def chunk_gAMA(self, pos, length): + + # gamma setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_pHYs(self, pos, length): + + # pixels per unit + s = 
ImageFile._safe_read(self.fp, length) + px, py = i32(s), i32(s[4:]) + unit = i8(s[8]) + if unit == 1: # meter + dpi = int(px * 0.0254 + 0.5), int(py * 0.0254 + 0.5) + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos, length): + + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_zTXt(self, pos, length): + + # compressed text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = i8(v[0]) + else: + comp_method = 0 + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in zTXt chunk" % + comp_method) + try: + v = _safe_zlib_decompress(v[1:]) + except zlib.error: + v = b"" + + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = i8(r[0]), i8(r[1]), r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except zlib.error: + return s + else: + return s + if bytes is not str: + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + +# -------------------------------------------------------------------- +# PNG reader + +def _accept(prefix): + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. + +class PngImageFile(ImageFile.ImageFile): + + format = "PNG" + format_description = "Portable network graphics" + + def _open(self): + + if self.fp.read(8) != _MAGIC: + raise SyntaxError("not a PNG file") + + # + # Parse headers up to the first IDAT chunk + + self.png = PngStream(self.fp) + + while True: + + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%s %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... 
+ # (believe me, I've tried ;-) + + self.mode = self.png.im_mode + self.size = self.png.im_size + self.info = self.png.im_info + self.text = self.png.im_text # experimental + self.tile = self.png.im_tile + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + self.__idat = length # used by load_read() + + def verify(self): + "Verify PNG file" + + if self.fp is None: + raise RuntimeError("verify must be called directly after open") + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + self.png.verify() + self.png.close() + + self.fp = None + + def load_prepare(self): + "internal: prepare to read PNG file" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes): + "internal: read more image data" + + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT"]: + self.png.push(cid, pos, length) + return b"" + + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self): + "internal: finished reading image data" + + self.png.close() + self.png = None + + +# -------------------------------------------------------------------- +# PNG writer + +o8 = _binary.o8 +o16 = _binary.o16be +o32 = _binary.o32be + +_OUTMODES = { + # supported PIL modes, and corresponding rawmodes/bits/color combinations + "1": ("1", b'\x01\x00'), + "L;1": ("L;1", b'\x01\x00'), + "L;2": ("L;2", b'\x02\x00'), + "L;4": ("L;4", b'\x04\x00'), + "L": ("L", b'\x08\x00'), + "LA": ("LA", b'\x08\x04'), + "I": ("I;16B", b'\x10\x00'), + "P;1": ("P;1", b'\x01\x03'), + "P;2": ("P;2", b'\x02\x03'), + "P;4": ("P;4", b'\x04\x03'), + "P": ("P", b'\x08\x03'), + "RGB": ("RGB", b'\x08\x02'), + "RGBA": ("RGBA", b'\x08\x06'), +} + + +def putchunk(fp, cid, *data): + "Write a PNG chunk (including CRC field)" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + fp.write(o16(hi) + o16(lo)) + + +class _idat(object): + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data): + self.chunk(self.fp, b"IDAT", data) + + +def _save(im, fp, filename, chunk=putchunk, check=0): + # save an image to disk (called by the save method) + + mode = im.mode + + if mode == "P": + + # + # attempt to minimize storage requirements for palette images + if "bits" in im.encoderinfo: + # number of bits specified by user + colors = 1 << im.encoderinfo["bits"] + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1])//3, 256), 2) + else: + colors = 256 + + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + elif colors <= 16: + bits = 4 + else: + bits = 8 + if bits != 8: + mode = "%s;%d" % (mode, bits) + + # encoder options + if "dictionary" in im.encoderinfo: + dictionary = im.encoderinfo["dictionary"] + else: + dictionary = b"" + + im.encoderconfig = ("optimize" in im.encoderinfo, + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + dictionary) + + # get the corresponding PNG mode + try: + rawmode, 
mode = _OUTMODES[mode] + except KeyError: + raise IOError("cannot write mode %s as PNG" % mode) + + if check: + return check + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk(fp, b"IHDR", + o32(im.size[0]), o32(im.size[1]), # 0: size + mode, # 8: depth/type + b'\0', # 10: compression + b'\0', # 11: filter category + b'\0') # 12: interlace flag + + if im.mode == "P": + palette_byte_number = (2 ** bits) * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b'\0' + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get('transparency', + im.info.get('transparency', None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = 2**bits + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b'\xFF' * transparency + b'\0' + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode == "L": + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. + raise IOError("cannot use transparency for this mode") + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = 2**bits + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk(fp, b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b'\x01') + + info = im.encoderinfo.get("pnginfo") + if info: + for cid, data in info.chunks: + chunk(fp, cid, data) + + # ICC profile writing support -- 2008-06-06 Florian Hoech + if im.info.get("icc_profile"): + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(im.info["icc_profile"]) + chunk(fp, b"iCCP", data) + + ImageFile._save(im, _idat(fp, chunk), + [("zip", (0, 0)+im.size, 0, rawmode)]) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector(object): + data = [] + + def write(self, data): + pass + + def append(self, chunk): + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + crc = o16(hi) + o16(lo) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) + +Image.register_extension(PngImageFile.format, ".png") + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/server/www/packages/packages-darwin/x64/PIL/PpmImagePlugin.py 
b/server/www/packages/packages-darwin/x64/PIL/PpmImagePlugin.py new file mode 100644 index 0000000..68073ca --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PpmImagePlugin.py @@ -0,0 +1,174 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +import string + +from PIL import Image, ImageFile + +__version__ = "0.2" + +# +# -------------------------------------------------------------------- + +b_whitespace = string.whitespace +try: + import locale + locale_lang, locale_enc = locale.getlocale() + if locale_enc is None: + locale_lang, locale_enc = locale.getdefaultlocale() + b_whitespace = b_whitespace.decode(locale_enc) +except: + pass +b_whitespace = b_whitespace.encode('ascii', 'ignore') + +MODES = { + # standard + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK" +} + + +def _accept(prefix): + return prefix[0:1] == b"P" and prefix[1] in b"0456y" + + +## +# Image plugin for PBM, PGM, and PPM images. + +class PpmImageFile(ImageFile.ImageFile): + + format = "PPM" + format_description = "Pbmplus image" + + def _token(self, s=b""): + while True: # read until next whitespace + c = self.fp.read(1) + if not c or c in b_whitespace: + break + if c > b'\x79': + raise ValueError("Expected ASCII value, found binary") + s = s + c + if (len(s) > 9): + raise ValueError("Expected int, got > 9 digits") + return s + + def _open(self): + + # check magic + s = self.fp.read(1) + if s != b"P": + raise SyntaxError("not a PPM file") + mode = MODES[self._token(s)] + + if mode == "1": + self.mode = "1" + rawmode = "1;I" + else: + self.mode = rawmode = mode + + for ix in range(3): + while True: + while True: + s = self.fp.read(1) + if s not in b_whitespace: + break + if s == b"": + raise ValueError("File does not extend beyond magic number") + if s != b"#": + break + s = self.fp.readline() + s = int(self._token(s)) + if ix == 0: + xsize = s + elif ix == 1: + ysize = s + if mode == "1": + break + elif ix == 2: + # maxgrey + if s > 255: + if not mode == 'L': + raise ValueError("Too many colors for band: %s" % s) + if s < 2**16: + self.mode = 'I' + rawmode = 'I;16B' + else: + self.mode = 'I' + rawmode = 'I;32B' + + self.size = xsize, ysize + self.tile = [("raw", + (0, 0, xsize, ysize), + self.fp.tell(), + (rawmode, 0, 1))] + + # ALTERNATIVE: load via builtin debug function + # self.im = Image.core.open_ppm(self.filename) + # self.mode = self.im.mode + # self.size = self.im.size + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename): + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + if im.getextrema()[1] < 2**16: + rawmode, head = "I;16B", b"P5" + else: + rawmode, head = "I;32B", b"P5" + elif im.mode == "RGB": + rawmode, head = "RGB", b"P6" + elif im.mode == "RGBA": + rawmode, head = "RGB", b"P6" + else: + raise IOError("cannot write mode %s as PPM" % im.mode) + fp.write(head + ("\n%d %d\n" % im.size).encode('ascii')) + if head == b"P6": + fp.write(b"255\n") + if head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + elif rawmode == "I;16B": + fp.write(b"65535\n") + elif rawmode == 
"I;32B": + fp.write(b"2147483648\n") + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + # ALTERNATIVE: save via builtin debug function + # im._dump(filename) + +# +# -------------------------------------------------------------------- + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_extension(PpmImageFile.format, ".pbm") +Image.register_extension(PpmImageFile.format, ".pgm") +Image.register_extension(PpmImageFile.format, ".ppm") diff --git a/server/www/packages/packages-darwin/x64/PIL/PsdImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/PsdImagePlugin.py new file mode 100644 index 0000000..d06e320 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PsdImagePlugin.py @@ -0,0 +1,312 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +__version__ = "0.4" + +from PIL import Image, ImageFile, ImagePalette, _binary + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3) +} + +# +# helpers + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + + +# --------------------------------------------------------------------. +# read PSD images + +def _accept(prefix): + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. 
+ +class PsdImageFile(ImageFile.ImageFile): + + format = "PSD" + format_description = "Adobe Photoshop" + + def _open(self): + + read = self.fp.read + + # + # header + + s = read(26) + if s[:4] != b"8BPS" or i16(s[4:]) != 1: + raise SyntaxError("not a PSD file") + + psd_bits = i16(s[22:]) + psd_channels = i16(s[12:]) + psd_mode = i16(s[24:]) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + raise IOError("not enough channels") + + self.mode = mode + self.size = i32(s[18:]), i32(s[14:]) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + signature = read(4) + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if (len(data) & 1): + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self.layers = [] + + size = i32(read(4)) + if size: + end = self.fp.tell() + size + size = i32(read(4)) + if size: + self.layers = _layerinfo(self.fp) + self.fp.seek(end) + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self._fp = self.fp + self.frame = 0 + + @property + def n_frames(self): + return len(self.layers) + + @property + def is_animated(self): + return len(self.layers) > 1 + + def seek(self, layer): + # seek to given layer (1..max) + if layer == self.frame: + return + try: + if layer <= 0: + raise IndexError + name, mode, bbox, tile = self.layers[layer-1] + self.mode = mode + self.tile = tile + self.frame = layer + self.fp = self._fp + return name, bbox + except IndexError: + raise EOFError("no such layer") + + def tell(self): + # return layer number (0=image, 1..max=layers) + return self.frame + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.fill(self.mode, self.size, 0) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + +def _layerinfo(file): + # read layerinfo block + layers = [] + read = file.read + for i in range(abs(i16(read(2)))): + + # bounding box + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) + + # image info + info = [] + mode = [] + types = list(range(i16(read(2)))) + if len(types) > 4: + continue + + for i in types: + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + size = i32(read(4)) + info.append((m, size)) + + # figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + filler = read(12) + name = "" + size = i32(read(4)) + combined = 0 + if size: + length = i32(read(4)) + if length: + mask_y = i32(read(4)) + mask_x = i32(read(4)) + mask_h = i32(read(4)) - mask_y + mask_w = i32(read(4)) - mask_x + file.seek(length - 16, 1) + combined += length + 4 + + length = i32(read(4)) + if length: + file.seek(length, 1) + combined += length + 4 + + length = i8(read(1)) + if length: + # Don't know the proper 
encoding, + # Latin-1 should be a good guess + name = read(length).decode('latin-1', 'replace') + combined += length + 1 + + file.seek(size - combined, 1) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + i = 0 + for name, mode, bbox in layers: + tile = [] + for m in mode: + t = _maketile(file, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + i += 1 + + return layers + + +def _maketile(file, mode, bbox, channels): + + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize*ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append( + ("packbits", bbox, offset, layer) + ) + for y in range(ysize): + offset = offset + i16(bytecount[i:i+2]) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") diff --git a/server/www/packages/packages-darwin/x64/PIL/PyAccess.py b/server/www/packages/packages-darwin/x64/PIL/PyAccess.py new file mode 100644 index 0000000..faa868c --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/PyAccess.py @@ -0,0 +1,317 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access. +# * Does not implement the line functions, as they don't appear to be used +# * Taking only the tuple form, which is used from python. +# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# + +from __future__ import print_function + +import logging +import sys + +from cffi import FFI + + +logger = logging.getLogger(__name__) + + +defs = """ +struct Pixel_RGBA { + unsigned char r,g,b,a; +}; +struct Pixel_I16 { + unsigned char l,r; +}; +""" +ffi = FFI() +ffi.cdef(defs) + + +class PyAccess(object): + + def __init__(self, img, readonly=False): + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast('unsigned char **', vals['image8']) + self.image32 = ffi.cast('int **', vals['image32']) + self.image = ffi.cast('unsigned char **', vals['image']) + self.xsize = vals['xsize'] + self.ysize = vals['ysize'] + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self): + pass + + def __setitem__(self, xy, color): + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images + + :param xy: The pixel coordinate, given as (x, y). + :param value: The pixel value. 
+ """ + if self.readonly: + raise ValueError('Attempt to putpixel a read only image') + (x, y) = self.check_xy(xy) + return self.set_pixel(x, y, color) + + def __getitem__(self, xy): + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. + """ + + (x, y) = self.check_xy(xy) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy): + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + raise ValueError('pixel location out of range') + return xy + + +class _PyAccess32_2(PyAccess): + """ PA, LA, stored in first and last bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """ RGB and friends, stored in the first three bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + + +class _PyAccess32_4(PyAccess): + """ RGBA etc, all 4 bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """ 1, L, P, 8 bit images stored as uint8 """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """ I;16 access, native bitendian without conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('unsigned short **', self.image) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """ I;16L access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """ I;16B access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = 
ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except: + color = min(color[0], 65535) + + pixel.l = color >> 8 + pixel.r = color & 0xFF + + +class _PyAccessI32_N(PyAccess): + """ Signed Int32 access, native endian """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """ I;32L/B access, with byteswapping conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new('int *', i) + chars = ffi.cast('unsigned char *', orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \ + chars[1], chars[0] + return ffi.cast('int *', chars)[0] + + def get_pixel(self, x, y): + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """ 32 bit float access """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('float **', self.image32) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = {'1': _PyAccess8, + 'L': _PyAccess8, + 'P': _PyAccess8, + 'LA': _PyAccess32_2, + 'PA': _PyAccess32_2, + 'RGB': _PyAccess32_3, + 'LAB': _PyAccess32_3, + 'HSV': _PyAccess32_3, + 'YCbCr': _PyAccess32_3, + 'RGBA': _PyAccess32_4, + 'RGBa': _PyAccess32_4, + 'RGBX': _PyAccess32_4, + 'CMYK': _PyAccess32_4, + 'F': _PyAccessF, + 'I': _PyAccessI32_N, + } + +if sys.byteorder == 'little': + mode_map['I;16'] = _PyAccessI16_N + mode_map['I;16L'] = _PyAccessI16_N + mode_map['I;16B'] = _PyAccessI16_B + + mode_map['I;32L'] = _PyAccessI32_N + mode_map['I;32B'] = _PyAccessI32_Swap +else: + mode_map['I;16'] = _PyAccessI16_L + mode_map['I;16L'] = _PyAccessI16_L + mode_map['I;16B'] = _PyAccessI16_N + + mode_map['I;32L'] = _PyAccessI32_Swap + mode_map['I;32B'] = _PyAccessI32_N + + +def new(img, readonly=False): + access_type = mode_map.get(img.mode, None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/SgiImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/SgiImagePlugin.py new file mode 100644 index 0000000..f890c7e --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/SgiImagePlugin.py @@ -0,0 +1,89 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. +# +# +# History: +# 1995-09-10 fl Created +# +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.2" + +i8 = _binary.i8 +i16 = _binary.i16be + + +def _accept(prefix): + return len(prefix) >= 2 and i16(prefix) == 474 + + +## +# Image plugin for SGI images. 
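A comparable standalone sketch of the check encoded by _accept() above for SGI rasters: the first two bytes, read as a big-endian 16-bit integer, must equal 474 (0x01DA). The path is a placeholder.

import struct

def looks_like_sgi(path):
    # equivalent to i16(prefix) == 474 with the big-endian helper used above
    with open(path, "rb") as fh:
        head = fh.read(2)
    return len(head) == 2 and struct.unpack(">H", head)[0] == 474

print(looks_like_sgi("example.rgb"))  # hypothetical input file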
+ +class SgiImageFile(ImageFile.ImageFile): + + format = "SGI" + format_description = "SGI Image File Format" + + def _open(self): + + # HEAD + s = self.fp.read(512) + if i16(s) != 474: + raise ValueError("Not an SGI image file") + + # relevant header entries + compression = i8(s[2]) + + # bytes, dimension, zsize + layout = i8(s[3]), i16(s[4:]), i16(s[10:]) + + # determine mode from bytes/zsize + if layout == (1, 2, 1) or layout == (1, 1, 1): + self.mode = "L" + elif layout == (1, 3, 3): + self.mode = "RGB" + elif layout == (1, 3, 4): + self.mode = "RGBA" + else: + raise ValueError("Unsupported SGI image mode") + + # size + self.size = i16(s[6:]), i16(s[8:]) + + # decoder info + if compression == 0: + offset = 512 + pagesize = self.size[0]*self.size[1]*layout[0] + self.tile = [] + for layer in self.mode: + self.tile.append( + ("raw", (0, 0)+self.size, offset, (layer, 0, -1))) + offset = offset + pagesize + elif compression == 1: + raise ValueError("SGI RLE encoding not supported") + +# +# registry + +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) + +Image.register_extension(SgiImageFile.format, ".bw") +Image.register_extension(SgiImageFile.format, ".rgb") +Image.register_extension(SgiImageFile.format, ".rgba") +Image.register_extension(SgiImageFile.format, ".sgi") + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/SpiderImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/SpiderImagePlugin.py new file mode 100644 index 0000000..d545789 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/SpiderImagePlugin.py @@ -0,0 +1,322 @@ +# +# The Python Imaging Library. +# +# SPIDER image file handling +# +# History: +# 2004-08-02 Created BB +# 2006-03-02 added save method +# 2006-03-13 added support for stack images +# +# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. +# Copyright (c) 2004 by William Baxter. +# Copyright (c) 2004 by Secret Labs AB. +# Copyright (c) 2004 by Fredrik Lundh. +# + +## +# Image plugin for the Spider image format. This format is is used +# by the SPIDER software, in processing image data from electron +# microscopy and tomography. +## + +# +# SpiderImagePlugin.py +# +# The Spider image format is used by SPIDER software, in processing +# image data from electron microscopy and tomography. +# +# Spider home page: +# http://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# http://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# + +from __future__ import print_function + +from PIL import Image, ImageFile +import os +import struct +import sys + + +def isInt(f): + try: + i = int(f) + if f-i == 0: + return 1 + else: + return 0 + except ValueError: + return 0 + except OverflowError: + return 0 + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no.of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + +def isSpiderHeader(t): + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. 
of bytes in header + lenbyt = int(h[23]) # record length in bytes + # print "labrec = %d, labbyt = %d, lenbyt = %d" % (labrec,labbyt,lenbyt) + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename): + fp = open(filename, 'rb') + f = fp.read(92) # read 23 * 4 bytes + fp.close() + t = struct.unpack('>23f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack('<23f', f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + + format = "SPIDER" + format_description = "Spider 2D image" + + def _open(self): + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack('>27f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack('<27f', f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + raise SyntaxError("not a valid Spider file") + except struct.error: + raise SyntaxError("not a valid Spider file") + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + raise SyntaxError("not a Spider 2D image") + + self.size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + raise SyntaxError("inconsistent stack header values") + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self.mode = "F" + + self.tile = [ + ("raw", (0, 0) + self.size, offset, + (self.rawmode, 0, 1))] + self.__fp = self.fp # FIXME: hack + + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self): + if self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame): + if self.istack == 0: + return + if frame >= self._nimages: + raise EOFError("attempt to seek past end of file") + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self.__fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth=255): + (minimum, maximum) = self.getextrema() + m = 1 + if maximum != minimum: + m = depth / (maximum-minimum) + b = -m * minimum + return self.point(lambda i, m=m, b=b: i * m + b).convert("L") + + # returns a ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self): + from PIL import ImageTk + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + +# -------------------------------------------------------------------- +# Image series + +# given a list of filenames, return a list of images +def loadImageSeries(filelist=None): + " create a list of Image.images for use in montage " + if filelist is None or 
len(filelist) < 1: + return + + imglist = [] + for img in filelist: + if not os.path.exists(img): + print("unable to find %s" % img) + continue + try: + im = Image.open(img).convert2byte() + except: + if not isSpiderImage(img): + print(img + " is not a Spider image file") + continue + im.info['filename'] = img + imglist.append(im) + return imglist + + +# -------------------------------------------------------------------- +# For saving images in Spider format + +def makeSpiderHeader(im): + nsam, nrow = im.size + lenbyt = nsam * 4 # There are labrec records in the header + labrec = 1024 / lenbyt + if 1024 % lenbyt != 0: + labrec += 1 + labbyt = labrec * lenbyt + hdr = [] + nvalues = int(labbyt / 4) + for i in range(nvalues): + hdr.append(0.0) + + if len(hdr) < 23: + return [] + + # NB these are Fortran indices + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line + hdr[13] = float(labrec) # number of records in file header + hdr[22] = float(labbyt) # total number of bytes in header + hdr[23] = float(lenbyt) # record length in bytes + + # adjust for Fortran indexing + hdr = hdr[1:] + hdr.append(0.0) + # pack binary data into a string + hdrstr = [] + for v in hdr: + hdrstr.append(struct.pack('f', v)) + return hdrstr + + +def _save(im, fp, filename): + if im.mode[0] != "F": + im = im.convert('F') + + hdr = makeSpiderHeader(im) + if len(hdr) < 256: + raise IOError("Error creating Spider header") + + # write the SPIDER header + try: + fp = open(filename, 'wb') + except: + raise IOError("Unable to open %s for writing" % filename) + fp.writelines(hdr) + + rawmode = "F;32NF" # 32-bit native floating point + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + fp.close() + + +def _save_spider(im, fp, filename): + # get the filename extension and register it with Image + ext = os.path.splitext(filename)[1] + Image.register_extension("SPIDER", ext) + _save(im, fp, filename) + +# -------------------------------------------------------------------- + +Image.register_open(SpiderImageFile.format, SpiderImageFile) +Image.register_save(SpiderImageFile.format, _save_spider) + +if __name__ == "__main__": + + if not sys.argv[1:]: + print("Syntax: python SpiderImagePlugin.py Spiderimage [outfile]") + sys.exit() + + filename = sys.argv[1] + if not isSpiderImage(filename): + print("input image must be in Spider format") + sys.exit() + + outfile = "" + if len(sys.argv[1:]) > 1: + outfile = sys.argv[2] + + im = Image.open(filename) + print("image: " + str(im)) + print("format: " + str(im.format)) + print("size: " + str(im.size)) + print("mode: " + str(im.mode)) + print("max, min: ", end=' ') + print(im.getextrema()) + + if outfile != "": + # perform some image operation + im = im.transpose(Image.FLIP_LEFT_RIGHT) + print( + "saving a flipped version of %s as %s " % + (os.path.basename(filename), outfile)) + im.save(outfile, "SPIDER") diff --git a/server/www/packages/packages-darwin/x64/PIL/SunImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/SunImagePlugin.py new file mode 100644 index 0000000..af63144 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/SunImagePlugin.py @@ -0,0 +1,81 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + +i32 = _binary.i32be + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == 0x59a66a95 + + +## +# Image plugin for Sun raster files. + +class SunImageFile(ImageFile.ImageFile): + + format = "SUN" + format_description = "Sun Raster File" + + def _open(self): + + # HEAD + s = self.fp.read(32) + if i32(s) != 0x59a66a95: + raise SyntaxError("not an SUN raster file") + + offset = 32 + + self.size = i32(s[4:8]), i32(s[8:12]) + + depth = i32(s[12:16]) + if depth == 1: + self.mode, rawmode = "1", "1;I" + elif depth == 8: + self.mode = rawmode = "L" + elif depth == 24: + self.mode, rawmode = "RGB", "BGR" + else: + raise SyntaxError("unsupported mode") + + compression = i32(s[20:24]) + + if i32(s[24:28]) != 0: + length = i32(s[28:32]) + offset = offset + length + self.palette = ImagePalette.raw("RGB;L", self.fp.read(length)) + if self.mode == "L": + self.mode = rawmode = "P" + + stride = (((self.size[0] * depth + 7) // 8) + 3) & (~3) + + if compression == 1: + self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))] + elif compression == 2: + self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)] + +# +# registry + +Image.register_open(SunImageFile.format, SunImageFile, _accept) + +Image.register_extension(SunImageFile.format, ".ras") diff --git a/server/www/packages/packages-darwin/x64/PIL/TarIO.py b/server/www/packages/packages-darwin/x64/PIL/TarIO.py new file mode 100644 index 0000000..4e5115b --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/TarIO.py @@ -0,0 +1,57 @@ +# +# The Python Imaging Library. +# $Id$ +# +# read files from within a tar file +# +# History: +# 95-06-18 fl Created +# 96-05-28 fl Open files in binary mode +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-96. +# +# See the README file for information on usage and redistribution. +# + +from PIL import ContainerIO + + +## +# A file object that provides read access to a given member of a TAR +# file. + +class TarIO(ContainerIO.ContainerIO): + + ## + # Create file object. + # + # @param tarfile Name of TAR file. + # @param file Name of member file. + + def __init__(self, tarfile, file): + + fh = open(tarfile, "rb") + + while True: + + s = fh.read(512) + if len(s) != 512: + raise IOError("unexpected end of tar file") + + name = s[:100].decode('utf-8') + i = name.find('\0') + if i == 0: + raise IOError("cannot find subfile") + if i > 0: + name = name[:i] + + size = int(s[124:135], 8) + + if file == name: + break + + fh.seek((size + 511) & (~511), 1) + + # Open region + ContainerIO.ContainerIO.__init__(self, fh, fh.tell(), size) diff --git a/server/www/packages/packages-darwin/x64/PIL/TgaImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/TgaImagePlugin.py new file mode 100644 index 0000000..a75ce29 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/TgaImagePlugin.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# TGA file handling +# +# History: +# 95-09-01 fl created (reads 24-bit files only) +# 97-01-04 fl support more TGA versions, including compressed images +# 98-07-04 fl fixed orientation and alpha layer bugs +# 98-09-11 fl fixed orientation for runlength decoder +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + + +# +# -------------------------------------------------------------------- +# Read RGA file + +i8 = _binary.i8 +i16 = _binary.i16le + + +MODES = { + # map imagetype/depth to rawmode + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", + (2, 16): "BGR;5", + (2, 24): "BGR", + (2, 32): "BGRA", +} + + +## +# Image plugin for Targa files. + +class TgaImageFile(ImageFile.ImageFile): + + format = "TGA" + format_description = "Targa" + + def _open(self): + + # process header + s = self.fp.read(18) + + idlen = i8(s[0]) + + colormaptype = i8(s[1]) + imagetype = i8(s[2]) + + depth = i8(s[16]) + + flags = i8(s[17]) + + self.size = i16(s[12:]), i16(s[14:]) + + # validate header fields + if colormaptype not in (0, 1) or\ + self.size[0] <= 0 or self.size[1] <= 0 or\ + depth not in (1, 8, 16, 24, 32): + raise SyntaxError("not a TGA file") + + # image mode + if imagetype in (3, 11): + self.mode = "L" + if depth == 1: + self.mode = "1" # ??? + elif imagetype in (1, 9): + self.mode = "P" + elif imagetype in (2, 10): + self.mode = "RGB" + if depth == 32: + self.mode = "RGBA" + else: + raise SyntaxError("unknown TGA mode") + + # orientation + orientation = flags & 0x30 + if orientation == 0x20: + orientation = 1 + elif not orientation: + orientation = -1 + else: + raise SyntaxError("unknown TGA orientation") + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if idlen: + self.info["id_section"] = self.fp.read(idlen) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:]) + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGR;16", b"\0"*2*start + self.fp.read(2*size)) + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", b"\0"*3*start + self.fp.read(3*size)) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", b"\0"*4*start + self.fp.read(4*size)) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [("tga_rle", (0, 0)+self.size, + self.fp.tell(), (rawmode, orientation, depth))] + else: + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), (rawmode, 0, orientation))] + except KeyError: + pass # cannot decode + +# +# -------------------------------------------------------------------- +# Write TGA file + +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im, fp, filename, check=0): + + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TGA" % im.mode) + + if check: + return check + + if colormaptype: + colormapfirst, colormaplength, colormapentry = 0, 256, 24 + else: + colormapfirst, colormaplength, colormapentry = 0, 0, 0 + + if im.mode == "RGBA": + flags = 8 + else: + flags = 0 + + orientation = im.info.get("orientation", -1) + if orientation > 0: + flags = flags | 
0x20 + + fp.write(b"\000" + + o8(colormaptype) + + o8(imagetype) + + o16(colormapfirst) + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags)) + + if colormaptype: + fp.write(im.im.getpalette("RGB", "BGR")) + + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extension(TgaImageFile.format, ".tga") diff --git a/server/www/packages/packages-darwin/x64/PIL/TiffImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..096be6f --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/TiffImagePlugin.py @@ -0,0 +1,1476 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import division, print_function + +from PIL import Image, ImageFile +from PIL import ImagePalette +from PIL import _binary +from PIL import TiffTags + +import collections +from fractions import Fraction +from numbers import Number, Rational + +import io +import itertools +import os +import struct +import sys +import warnings + +from .TiffTags import TYPES, TagInfo + + +__version__ = "1.3.5" +DEBUG = False # Needs to be merged with the new logging approach. + +# Set these to true to force use of libtiff for reading or writing. 
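A brief usage sketch, assuming the module is importable as PIL.TiffImagePlugin: the two module-level flags declared just below can be flipped by calling code to force the libtiff-based reader and writer instead of the pure-Python code paths. File names and the compression choice are illustrative only.

from PIL import Image, TiffImagePlugin

TiffImagePlugin.READ_LIBTIFF = True    # force libtiff when decoding TIFF files
TiffImagePlugin.WRITE_LIBTIFF = True   # force libtiff when encoding TIFF files

im = Image.open("input.tif")                        # hypothetical input file
im.save("output.tif", compression="tiff_deflate")   # value from COMPRESSION_INFO below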
+READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +i8 = _binary.i8 +o8 = _binary.o8 + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEOFFSETS = 324 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", +} + +COMPRESSION_INFO_REV = dict([(v, k) for (k, v) in COMPRESSION_INFO.items()]) + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + # ? 
+ (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (2,), 1, (16,), ()): ("I;16S", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I;16BS", "I;16BS"), + (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I;32BS", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (II, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +PREFIXES = [b"MM\000\052", b"II\052\000", b"II\xBC\000"] + + +def _accept(prefix): + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + +class IFDRational(Rational): + """ Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). 
Delegate as appropriate + + """ + + __slots__ = ('_numerator', '_denominator', '_val') + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + self._denominator = denominator + self._numerator = value + self._val = float(1) + + if type(value) == Fraction: + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value + + if type(value) == IFDRational: + self._denominator = value.denominator + self._numerator = value.numerator + self._val = value._val + return + + if denominator == 0: + self._val = float('nan') + return + + + elif denominator == 1: + if sys.hexversion < 0x2070000 and type(value) == float: + # python 2.6 is different. + self._val = Fraction.from_float(value) + else: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(a): + return a._numerator + + @property + def denominator(a): + return a._denominator + + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return (self.numerator, self.denominator) + + f = self._val.limit_denominator(max_denominator) + return (f.numerator, f.denominator) + + def __repr__(self): + return str(float(self._val)) + + def __hash__(self): + return self._val.__hash__() + + def __eq__(self,other): + return self._val == other + + def _delegate(op): + def delegate(self, *args): + return getattr(self._val,op)(*args) + return delegate + + """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', + 'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero', + 'ceil', 'floor', 'round'] + print "\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a) + """ + + __add__ = _delegate('__add__') + __radd__ = _delegate('__radd__') + __sub__ = _delegate('__sub__') + __rsub__ = _delegate('__rsub__') + __div__ = _delegate('__div__') + __rdiv__ = _delegate('__rdiv__') + __mul__ = _delegate('__mul__') + __rmul__ = _delegate('__rmul__') + __truediv__ = _delegate('__truediv__') + __rtruediv__ = _delegate('__rtruediv__') + __floordiv__ = _delegate('__floordiv__') + __rfloordiv__ = _delegate('__rfloordiv__') + __mod__ = _delegate('__mod__') + __rmod__ = _delegate('__rmod__') + __pow__ = _delegate('__pow__') + __rpow__ = _delegate('__rpow__') + __pos__ = _delegate('__pos__') + __neg__ = _delegate('__neg__') + __abs__ = _delegate('__abs__') + __trunc__ = _delegate('__trunc__') + __lt__ = _delegate('__lt__') + __gt__ = _delegate('__gt__') + __le__ = _delegate('__le__') + __ge__ = _delegate('__ge__') + __nonzero__ = _delegate('__nonzero__') + __ceil__ = _delegate('__ceil__') + __floor__ = _delegate('__floor__') + __round__ = _delegate('__round__') + + + +class ImageFileDirectory_v2(collections.MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. 
+ + The tiff metadata type of each item is stored in a dictionary of + tag types in + `~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * self.tagtype = {} + + * Key: numerical tiff tag number + * Value: integer corresponding to the data type from `~PIL.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + """ + """ + Documentation: + + 'internal' data structures: + * self._tags_v2 = {} Key: numerical tiff tag number + Value: decoded data, as tuple for multiple values + * self._tagdata = {} Key: numerical tiff tag number + Value: undecoded byte string from file + * self._tags_v1 = {} Key: numerical tiff tag number + Value: decoded data in the v1 format + + Tags will be found in the private attributes self._tagdata, and in + self._tags_v2 once decoded. + + Self.legacy_api is a value for internal use, and shouldn't be + changed from outside code. In cooperation with the + ImageFileDirectory_v1 class, if legacy_api is true, then decoded + tags will be populated into both _tags_v1 and _tags_v2. _Tags_v2 + will be used if this IFD is used in the TIFF save routine. Tags + should be read from tags_v1 if legacy_api == true. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. + """ + if ifh[:4] not in PREFIXES: + raise SyntaxError("not a TIFF file (header %r not valid)" % ifh) + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self.reset() + self.next, = self._unpack("L", ifh[4:]) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def as_dict(self): + """Return a dictionary of the image's tags. + + use `dict(ifd)` instead. + + .. deprecated:: 3.0.0 + """ + # FIXME Deprecate: use dict(self) + return dict(self) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. 
+ """ + return dict((TiffTags.lookup(code).name, value) + for code, value in self.items()) + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = val, + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + if bytes is str: + def has_key(self, tag): + return tag in self + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + if bytes is str: + basetypes += unicode, + + info = TiffTags.lookup(tag) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = 7 + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = 5 + elif all(isinstance(v, int) for v in values): + if all(v < 2 ** 16 for v in values): + self.tagtype[tag] = 3 + else: + self.tagtype[tag] = 4 + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = 12 + else: + if bytes is str: + # Never treat data as binary by default on Python 2. + self.tagtype[tag] = 2 + else: + if all(isinstance(v, str) for v in values): + self.tagtype[tag] = 2 + + if self.tagtype[tag] == 7 and bytes is not str: + values = [value.encode("ascii", 'replace') if isinstance(value, str) else value + for value in values] + + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + if info.length == 1: + if legacy_api and self.tagtype[tag] in [5, 10]: + values = values, + dest[tag], = values + else: + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from PIL.TiffTags import TYPES + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func + return func + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func + return func + return decorator + + def _register_basic(idx_fmt_name): + from PIL.TiffTags import TYPES + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = size, lambda self, data, legacy_api=True: ( + self._unpack("{0}{1}".format(len(data) // size, fmt), data)) + _write_dispatch[idx] = lambda self, *values: ( + b"".join(self._pack(fmt, value) for value in values)) + + list(map(_register_basic, + [(3, "H", "short"), (4, "L", "long"), + (6, "b", "signed byte"), (8, "h", "signed short"), + (9, "l", "signed long"), (11, "f", "float"), (12, "d", "double")])) + + @_register_loader(1, 1) # Basic type, except for the legacy API. 
+ def load_byte(self, data, legacy_api=True): + return (data if legacy_api else + tuple(map(ord, data) if bytes is str else data)) + + @_register_writer(1) # Basic type, except for the legacy API. + def write_byte(self, data): + return data + + @_register_loader(2, 1) + def load_string(self, data, legacy_api=True): + if data.endswith(b"\0"): + data = data[:-1] + return data.decode("latin-1", "replace") + + @_register_writer(2) + def write_string(self, value): + # remerge of https://github.com/python-pillow/Pillow/pull/1416 + if sys.version_info[0] == 2: + value = value.decode('ascii', 'replace') + return b"" + value.encode('ascii', 'replace') + b"\0" + + @_register_loader(5, 8) + def load_rational(self, data, legacy_api=True): + vals = self._unpack("{0}L".format(len(data) // 4), data) + combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b) + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(5) + def write_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31)) + for frac in values) + + @_register_loader(7, 1) + def load_undefined(self, data, legacy_api=True): + return data + + @_register_writer(7) + def write_undefined(self, value): + return value + + @_register_loader(10, 8) + def load_signed_rational(self, data, legacy_api=True): + vals = self._unpack("{0}l".format(len(data) // 4), data) + combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b) + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(10) + def write_signed_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30)) + for frac in values) + + def _ensure_read(self, fp, size): + ret = fp.read(size) + if len(ret) != size: + raise IOError("Corrupt EXIF data. " + + "Expecting to read %d bytes but only got %d. " % + (size, len(ret))) + return ret + + def load(self, fp): + + self.reset() + self._offset = fp.tell() + + try: + for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]): + tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12)) + if DEBUG: + tagname = TiffTags.lookup(tag).name + typname = TYPES.get(typ, "unknown") + print("tag: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + + try: + unit_size, handler = self._load_dispatch[typ] + except KeyError: + if DEBUG: + print("- unsupported type", typ) + continue # ignore unsupported type + size = count * unit_size + if size > 4: + here = fp.tell() + offset, = self._unpack("L", data) + if DEBUG: + print("Tag Location: %s - Data Location: %s" % + (here, offset), end=" ") + fp.seek(offset) + data = ImageFile._safe_read(fp, size) + fp.seek(here) + else: + data = data[:size] + + if len(data) != size: + warnings.warn("Possibly corrupt EXIF data. " + "Expecting to read %d bytes but only got %d. " + "Skipping tag %s" % (size, len(data), tag)) + continue + + self._tagdata[tag] = data + self.tagtype[tag] = typ + + if DEBUG: + if size > 32: + print("- value: " % size) + else: + print("- value:", self[tag]) + + self.next, = self._unpack("L", self._ensure_read(fp, 4)) + except IOError as msg: + warnings.warn(str(msg)) + return + + def save(self, fp): + + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + # FIXME What about tagdata? 
+ fp.write(self._pack("H", len(self._tags_v2))) + + entries = [] + offset = fp.tell() + len(self._tags_v2) * 12 + 4 + stripoffsets = None + + # pass 1: convert tags to binary format + # always write tags in ascending order + for tag, value in sorted(self._tags_v2.items()): + if tag == STRIPOFFSETS: + stripoffsets = len(entries) + typ = self.tagtype.get(tag) + if DEBUG: + print("Tag %s, Type: %s, Value: %s" % (tag, typ, value)) + values = value if isinstance(value, tuple) else (value,) + data = self._write_dispatch[typ](self, *values) + if DEBUG: + tagname = TiffTags.lookup(tag).name + typname = TYPES.get(typ, "unknown") + print("save: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + if len(data) >= 16: + print("- value: " % len(data)) + else: + print("- value:", values) + + # count is sum of lengths for string and arbitrary data + count = len(data) if typ in [2, 7] else len(values) + # figure out if data fits into the entry + if len(data) <= 4: + entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) + else: + entries.append((tag, typ, count, self._pack("L", offset), data)) + offset += (len(data) + 1) // 2 * 2 # pad to word + + # update strip offset data to point beyond auxiliary data + if stripoffsets is not None: + tag, typ, count, value, data = entries[stripoffsets] + if data: + raise NotImplementedError( + "multistrip support not yet implemented") + value = self._pack("L", self._unpack("L", value)[0] + offset) + entries[stripoffsets] = tag, typ, count, value, data + + # pass 2: write entries to file + for tag, typ, count, value, data in entries: + if DEBUG > 1: + print(tag, typ, count, repr(value), repr(data)) + fp.write(self._pack("HHL4s", tag, typ, count, value)) + + # -- overwrite here for multi-page -- + fp.write(b"\0\0\0\0") # end of entries + + # pass 3: write auxiliary data to file + for tag, typ, count, value, data in entries: + fp.write(data) + if len(data) & 1: + fp.write(b"\0") + + return offset + +ImageFileDirectory_v2._load_dispatch = _load_dispatch +ImageFileDirectory_v2._write_dispatch = _write_dispatch +for idx, name in TYPES.items(): + name = name.replace(" ", "_") + setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1]) + setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx]) +del _load_dispatch, _write_dispatch, idx, name + + +# Legacy ImageFileDirectory support. +class ImageFileDirectory_v1(ImageFileDirectory_v2): + """This class represents the **legacy** interface to a TIFF tag directory. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v1() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print ifd[key] + ('Some Data',) + + Also contains a dictionary of tag types as read from the tiff image file, + `~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. + + Values are returned as a tuple. + + .. deprecated:: 3.0.0 + """ + def __init__(self, *args, **kwargs): + ImageFileDirectory_v2.__init__(self, *args, **kwargs) + self._legacy_api = True + + tags = property(lambda self: self._tags_v1) + tagdata = property(lambda self: self._tagdata) + + @classmethod + def from_v2(cls, original): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance. 
+ + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = val, + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. + +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + + def _open(self): + "Open the first image in a TIFF file" + + # Header + ifh = self.fp.read(8) + + # image file directory (tag dictionary) + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy tag/ifd entries will be filled in later + self.tag = self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self.__fp = self.fp + self._frame_pos = [] + self._n_frames = None + self._is_animated = None + + if DEBUG: + print("*** TiffImageFile._open ***") + print("- __first:", self.__first) + print("- ifh: ", ifh) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + "Select a given frame as current image" + self._seek(max(frame, 0)) # Questionable backwards compatibility. + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. 
+ Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self.__fp + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + if DEBUG: + print("Seeking to frame %s, on frame %s, " + "__next %s, location: %s" % + (frame, self.__frame, self.__next, self.fp.tell())) + # reset python3 buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + if DEBUG: + print("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + "Return the current frame number" + return self.__frame + + def _decoder(self, rawmode, layer, tile=None): + "Setup decoder contexts" + + args = None + if rawmode == "RGB" and self._planar_configuration == 2: + rawmode = rawmode[layer] + compression = self._compression + if compression == "raw": + args = (rawmode, 0, 1) + elif compression == "jpeg": + args = rawmode, "" + if JPEGTABLES in self.tag_v2: + # Hack to handle abbreviated JPEG headers + # FIXME This will fail with more than one value + self.tile_prefix, = self.tag_v2[JPEGTABLES] + elif compression == "packbits": + args = rawmode + elif compression == "tiff_lzw": + args = rawmode + if PREDICTOR in self.tag_v2: + # Section 14: Differencing Predictor + self.decoderconfig = (self.tag_v2[PREDICTOR],) + + if ICCPROFILE in self.tag_v2: + self.info['icc_profile'] = self.tag_v2[ICCPROFILE] + + return args + + def _load_libtiff(self): + """ Overload method triggered when we detect a compressed tiff + Calls out to libtiff """ + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.load_prepare() + + if not len(self.tile) == 1: + raise IOError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = self.tile[0][3] + (self.tag_v2.offset,) + decoder = Image._getdecoder(self.mode, 'libtiff', args, + self.decoderconfig) + try: + decoder.setimage(self.im, extents) + except ValueError: + raise IOError("Couldn't set the image") + + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an IOError if there's no underlying fp. Easier to + # deal with here by reordering. + if DEBUG: + print("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif hasattr(self.fp, "fileno"): + # we've got a actual file on disk, pass in the fp. + if DEBUG: + print("have fileno, calling fileno version of the decoder.") + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + if DEBUG: + print("don't have fileno or getvalue. 
just reading") + # UNDONE -- so much for that buffer size thing. + n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + # libtiff closed the fp in a, we need to close self.fp, if possible + if hasattr(self.fp, 'close'): + if not self.__next: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise IOError(err) + + self.load_end() + + return Image.Image.load(self) + + def _setup(self): + "Setup this image object based on current tags" + + if 0xBC01 in self.tag_v2: + raise IOError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + fillorder = self.tag_v2.get(FILLORDER, 1) + + if DEBUG: + print("*** Summary ***") + print("- compression:", self._compression) + print("- photometric_interpretation:", photo) + print("- planar_configuration:", self._planar_configuration) + print("- fill_order:", fillorder) + + # size + xsize = self.tag_v2.get(IMAGEWIDTH) + ysize = self.tag_v2.get(IMAGELENGTH) + self.size = xsize, ysize + + if DEBUG: + print("- size:", self.size) + + format = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if len(format) > 1 and max(format) == min(format) == 1: + # SAMPLEFORMAT is properly per band, so an RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + format = (1,) + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, photo, format, fillorder, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError: + if DEBUG: + print("- unsupported format") + raise SyntaxError("unknown pixel mode") + + if DEBUG: + print("- raw mode:", rawmode) + print("- pil mode:", self.mode) + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION,1) + yres = self.tag_v2.get(Y_RESOLUTION,1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT, 1) + if resunit == 2: # dots per inch + self.info["dpi"] = xres, yres + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = xres * 2.54, yres * 2.54 + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = l = 0 + self.tile = [] + if STRIPOFFSETS in self.tag_v2: + # striped image + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + if READ_LIBTIFF or self._compression in ["tiff_ccitt", "group3", + "group4", "tiff_jpeg", + "tiff_adobe_deflate", + "tiff_thunderscan", + "tiff_deflate", + "tiff_sgilog", + "tiff_sgilog24", + "tiff_raw_16"]: + # if DEBUG: + # print "Activating g4 compression for whole file" + + # Decoder expects entire file as one tile. + # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # replace the existing load function with our + # _load_libtiff function. 
+ + self.load = self._load_libtiff + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + + # libtiff closes the file descriptor, so pass in a dup. + try: + fp = hasattr(self.fp, "fileno") and \ + os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell for py3 + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except IOError: + # io.BytesIO have a fileno, but returns an IOError if + # it doesn't use a file descriptor. + fp = False + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + key = ( + self.tag_v2.prefix, photo, format, 1, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if self.mode in ('I;16B', 'I;16') and 'I;16' in rawmode: + rawmode = 'I;16N' + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, fp) + self.tile.append( + (self._compression, + (0, 0, w, ysize), + 0, a)) + a = None + + else: + for i in range(len(offsets)): + a = self._decoder(rawmode, l, i) + self.tile.append( + (self._compression, + (0, min(y, ysize), w, min(y+h, ysize)), + offsets[i], a)) + if DEBUG: + print("tiles: ", self.tile) + y = y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + elif TILEOFFSETS in self.tag_v2: + # tiled image + w = self.tag_v2.get(322) + h = self.tag_v2.get(323) + a = None + for o in self.tag_v2[TILEOFFSETS]: + if not a: + a = self._decoder(rawmode, l) + # FIXME: this doesn't work if the image size + # is not a multiple of the tile size... 
+ self.tile.append( + (self._compression, + (x, y, x+w, y+h), + o, a)) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + else: + if DEBUG: + print("- unsupported data organization") + raise SyntaxError("unknown data organization") + + # fixup palette descriptor + + if self.mode == "P": + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TIFF" % im.mode) + + ifd = ImageFileDirectory_v2(prefix=prefix) + + compression = im.encoderinfo.get('compression', + im.info.get('compression', 'raw')) + + libtiff = WRITE_LIBTIFF or compression != 'raw' + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + info = im.encoderinfo.get("tiffinfo", {}) + if DEBUG: + print("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except: + pass # might not be an IFD, Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, 'tag_v2'): + # preserve tags from original TIFF image file + for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, + IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype.get(key, None) + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + if "icc_profile" in im.info: + ifd[ICCPROFILE] = im.info["icc_profile"] + + for key, name in [(IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright")]: + name_with_spaces = name.replace("_", " ") + if "_" in name and 
name_with_spaces in im.encoderinfo: + warnings.warn("%r is deprecated; use %r instead" % + (name_with_spaces, name), DeprecationWarning) + ifd[key] = im.encoderinfo[name.replace("_", " ")] + if name in im.encoderinfo: + ifd[key] = im.encoderinfo[name] + + dpi = im.encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + ifd[PHOTOMETRIC_INTERPRETATION] = photo + + if im.mode == "P": + lut = im.im.getpalette("RGB", "RGB;L") + ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) + + # data orientation + stride = len(bits) * ((im.size[0]*bits[0]+7)//8) + ifd[ROWSPERSTRIP] = im.size[1] + ifd[STRIPBYTECOUNTS] = stride * im.size[1] + ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if libtiff: + if DEBUG: + print("Saving using libtiff encoder") + print("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS] + atts = {} + # bits per sample is a single short in the tiff directory, not a list. + atts[BITSPERSAMPLE] = bits[0] + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + legacy_ifd = {} + if hasattr(im, 'tag'): + legacy_ifd = im.tag.to_v2() + for tag, value in itertools.chain(ifd.items(), + getattr(im, 'tag_v2', {}).items(), + legacy_ifd.items()): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. It will segfault if it attempts + # to add a custom tag without the dictionary entry + # + # UNDONE -- add code for the custom dictionary + if tag not in TiffTags.LIBTIFF_CORE: continue + if tag not in atts and tag not in blocklist: + if isinstance(value, unicode if bytes is str else str): + atts[tag] = value.encode('ascii', 'replace') + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + if DEBUG: + print("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. 
+ if im.mode in ('I;16B', 'I;16'): + rawmode = 'I;16N' + + a = (rawmode, compression, _fp, filename, atts) + # print(im.mode, compression, a, im.encoderconfig) + e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) + e.setimage(im.im, (0, 0)+im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16*1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + + else: + offset = ifd.save(fp) + + ImageFile._save(im, fp, [ + ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1)) + ]) + + # -- helper for multi-page save -- + if "_debug_multipage" in im.encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) + +Image.register_extension(TiffImageFile.format, ".tif") +Image.register_extension(TiffImageFile.format, ".tiff") + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/server/www/packages/packages-darwin/x64/PIL/TiffTags.py b/server/www/packages/packages-darwin/x64/PIL/TiffTags.py new file mode 100644 index 0000000..07d594e --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/TiffTags.py @@ -0,0 +1,402 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. +## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", type=None, length=0, enum=None): + return super(TagInfo, cls).__new__( + cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + return self.enum.get(value, value) + +def lookup(tag): + """ + :param tag: Integer tag number + :returns: Taginfo namedtuple, From the TAGS_V2 info if possible, + otherwise just populating the value and name from TAGS. + If the tag is not recognized, "unknown" is returned for the name + + """ + + return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, 'unknown'))) + + +## +# Map tag numbers to tag info. 
+# +# id: (Name, Type, Length, enum_values) +# + +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 + +TAGS_V2 = { + + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ("Compression", SHORT, 1, + {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4, + "LZW": 5, "JPEG": 6, "PackBits": 32773}), + + 262: ("PhotometricInterpretation", SHORT, 1, + {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RBG Palette": 3, + "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892}), # Adobe DNG + 263: ("Thresholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellHeight", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + + 280: ("MinSampleValue", LONG, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contigous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"inch": 1, "cm": 2}), + 297: ("PageNumber", SHORT, 2), + + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticies", SHORT, 6), + + 320: ("ColorMap", SHORT, 0), + 321: ("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + + 340: ("SMinSampleValue", 12, 0), + 341: ("SMaxSampleValue", 12, 0), + 342: ("TransferRange", SHORT, 6), + + # obsolete JPEG tags + 512: ("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", LONG, 0), + + 33432: ("Copyright", ASCII, 1), + + # FIXME add more tags here + 34665: ("ExifIFD", SHORT, 1), + 34675: ('ICCProfile', 7, 0), + 34853: ('GPSInfoIFD', 1, 1), + + # MPInfo + 45056: ("MPFVersion", 7, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", 7, 1), + 45059: ("ImageUIDList", 7, 0), + 45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: 
("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", 10, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 45575: ("VerticalDivergence", 10, 1), + 45576: ("AxisDistance_X", 10, 1), + 45577: ("AxisDistance_Y", 10, 1), + 45578: ("AxisDistance_Z", 10, 1), + 45579: ("YawAngle", 10, 1), + 45580: ("PitchAngle", 10, 1), + 45581: ("RollAngle", 10, 1), + + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 1), + 50839: ("ImageJMetaData", 7, 1) +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = {347: 'JPEGTables', + 700: 'XMP', + + # Additional Exif Info + 33434: 'ExposureTime', + 33437: 'FNumber', + 33723: 'IptcNaaInfo', + 34377: 'PhotoshopInfo', + 34850: 'ExposureProgram', + 34852: 'SpectralSensitivity', + 34855: 'ISOSpeedRatings', + 34856: 'OECF', + 34864: 'SensitivityType', + 34865: 'StandardOutputSensitivity', + 34866: 'RecommendedExposureIndex', + 34867: 'ISOSpeed', + 34868: 'ISOSpeedLatitudeyyy', + 34869: 'ISOSpeedLatitudezzz', + 36864: 'ExifVersion', + 36867: 'DateTimeOriginal', + 36868: 'DateTImeDigitized', + 37121: 'ComponentsConfiguration', + 37122: 'CompressedBitsPerPixel', + 37377: 'ShutterSpeedValue', + 37378: 'ApertureValue', + 37379: 'BrightnessValue', + 37380: 'ExposureBiasValue', + 37381: 'MaxApertureValue', + 37382: 'SubjectDistance', + 37383: 'MeteringMode', + 37384: 'LightSource', + 37385: 'Flash', + 37386: 'FocalLength', + 37396: 'SubjectArea', + 37500: 'MakerNote', + 37510: 'UserComment', + 37520: 'SubSec', + 37521: 'SubSecTimeOriginal', + 37522: 'SubsecTimeDigitized', + 40960: 'FlashPixVersion', + 40961: 'ColorSpace', + 40962: 'PixelXDimension', + 40963: 'PixelYDimension', + 40964: 'RelatedSoundFile', + 40965: 'InteroperabilityIFD', + 41483: 'FlashEnergy', + 41484: 'SpatialFrequencyResponse', + 41486: 'FocalPlaneXResolution', + 41487: 'FocalPlaneYResolution', + 41488: 'FocalPlaneResolutionUnit', + 41492: 'SubjectLocation', + 41493: 'ExposureIndex', + 41495: 'SensingMethod', + 41728: 'FileSource', + 41729: 'SceneType', + 41730: 'CFAPattern', + 41985: 'CustomRendered', + 41986: 'ExposureMode', + 41987: 'WhiteBalance', + 41988: 'DigitalZoomRatio', + 41989: 'FocalLengthIn35mmFilm', + 41990: 'SceneCaptureType', + 41991: 'GainControl', + 41992: 'Contrast', + 41993: 'Saturation', + 41994: 'Sharpness', + 41995: 'DeviceSettingDescription', + 41996: 'SubjectDistanceRange', + 42016: 'ImageUniqueID', + 42032: 'CameraOwnerName', + 42033: 'BodySerialNumber', + 42034: 'LensSpecification', + 42035: 'LensMake', + 42036: 'LensModel', + 42037: 'LensSerialNumber', + 42240: 'Gamma', + + # Adobe DNG + 50706: 'DNGVersion', + 50707: 'DNGBackwardVersion', + 50708: 'UniqueCameraModel', + 50709: 'LocalizedCameraModel', + 50710: 'CFAPlaneColor', + 50711: 'CFALayout', + 50712: 'LinearizationTable', + 50713: 'BlackLevelRepeatDim', + 50714: 'BlackLevel', + 50715: 'BlackLevelDeltaH', + 50716: 'BlackLevelDeltaV', + 50717: 'WhiteLevel', + 50718: 'DefaultScale', + 50719: 'DefaultCropOrigin', + 50720: 'DefaultCropSize', + 50721: 'ColorMatrix1', + 50722: 'ColorMatrix2', + 50723: 'CameraCalibration1', + 50724: 'CameraCalibration2', + 50725: 'ReductionMatrix1', + 50726: 'ReductionMatrix2', + 50727: 'AnalogBalance', + 50728: 'AsShotNeutral', + 50729: 'AsShotWhiteXY', + 50730: 'BaselineExposure', + 50731: 'BaselineNoise', + 50732: 'BaselineSharpness', + 50733: 
'BayerGreenSplit', + 50734: 'LinearResponseLimit', + 50735: 'CameraSerialNumber', + 50736: 'LensInfo', + 50737: 'ChromaBlurRadius', + 50738: 'AntiAliasStrength', + 50740: 'DNGPrivateData', + 50778: 'CalibrationIlluminant1', + 50779: 'CalibrationIlluminant2', + } + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. +# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +LIBTIFF_CORE = set ([255, 256, 257, 258, 259, 262, 263, 266, 274, 277, + 278, 280, 281, 340, 341, 282, 283, 284, 286, 287, + 296, 297, 321, 320, 338, 32995, 322, 323, 32998, + 32996, 339, 32997, 330, 531, 530, 301, 532, 333, + # as above + 269 # this has been in our tests forever, and works + ]) + +LIBTIFF_CORE.remove(320) # Array of short, crashes +LIBTIFF_CORE.remove(301) # Array of short, crashes +LIBTIFF_CORE.remove(532) # Array of long, crashes + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for tiled images in libtiff +LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. 
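The two modules above work together: when a tag is assigned on an ImageFileDirectory_v2, it asks TiffTags.lookup() for the tag's declared element type and, for unregistered tags, falls back to guessing a type from the Python value (rational, short/long, double, or ASCII). A minimal sketch of that behaviour, assuming the vendored package is importable as PIL; the tag numbers used are standard TIFF tags except 60000, which is only an illustrative private tag:

    from PIL import TiffTags
    from PIL.TiffImagePlugin import ImageFileDirectory_v2

    ifd = ImageFileDirectory_v2()      # default header is little-endian ("II")

    # Known tags take their element type from TiffTags.TAGS_V2.
    ifd[270] = "sample description"    # 270 = ImageDescription
    ifd[282] = 72.0                    # 282 = XResolution
    print(ifd.tagtype[270])            # 2 (ASCII)
    print(ifd.tagtype[282])            # 5 (RATIONAL)

    # Unregistered tags get a type guessed from the value instead.
    ifd[60000] = 4                     # hypothetical private tag number
    print(ifd.tagtype[60000])          # 3 (SHORT), guessed from the small integer
    print(ifd[60000])                  # (4,)

    # TagInfo also maps clear-text enum names to their numeric values.
    info = TiffTags.lookup(262)        # 262 = PhotometricInterpretation
    print(info.name, info.type)        # PhotometricInterpretation 3
    print(info.cvt_enum("RGB"))        # 2

When such a directory reaches the save path, each tagtype entry selects the matching write_* handler registered through _register_writer/_register_basic, so the type recorded here determines how the tag is serialized.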
diff --git a/server/www/packages/packages-darwin/x64/PIL/WalImageFile.py b/server/www/packages/packages-darwin/x64/PIL/WalImageFile.py new file mode 100644 index 0000000..0cbd1ca --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/WalImageFile.py @@ -0,0 +1,128 @@ +# encoding: utf-8 +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# NOTE: This format cannot be automatically recognized, so the reader +# is not registered for use with Image.open(). To open a WAL file, use +# the WalImageFile.open() function instead. + +# This reader is based on the specification available from: +# http://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +# and has been tested with a few sample files found using google. + +from __future__ import print_function + +from PIL import Image, _binary + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +i32 = _binary.i32le + + +## +# Load texture from a Quake2 WAL texture file. +#
+# By default, a Quake2 standard palette is attached to the texture. +# To override the palette, use the putpalette method. +# +# @param filename WAL file name, or an opened file handle. +# @return An image instance. + +def open(filename): + # FIXME: modify to return a WalImageFile instance instead of + # plain Image object ? + + if hasattr(filename, "read"): + fp = filename + else: + fp = builtins.open(filename, "rb") + + # read header fields + header = fp.read(32+24+32+12) + size = i32(header, 32), i32(header, 36) + offset = i32(header, 40) + + # load pixel data + fp.seek(offset) + + im = Image.frombytes("P", size, fp.read(size[0] * size[1])) + im.putpalette(quake2palette) + + im.format = "WAL" + im.format_description = "Quake2 Texture" + + # strings are null-terminated + im.info["name"] = header[:32].split(b"\0", 1)[0] + next_name = header[56:56+32].split(b"\0", 1)[0] + if next_name: + im.info["next_name"] = next_name + + return im + + +quake2palette = ( + # default palette taken from piffo 0.93 by Hans Häggström + b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" + b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" + b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" + b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" + b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" + b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" + b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" + b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" + b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" + b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" + b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" + b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" + b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" + b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" + b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" + b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" + b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" + b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" + b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" + b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" + b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" + b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" + b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" + b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" + b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" + b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" + b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" + b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" + b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" + b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" + b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" + b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" + b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" + b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" + b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" + b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + 
b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/server/www/packages/packages-darwin/x64/PIL/WebPImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/WebPImagePlugin.py new file mode 100644 index 0000000..6837b53 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/WebPImagePlugin.py @@ -0,0 +1,80 @@ +from PIL import Image +from PIL import ImageFile +from io import BytesIO +from PIL import _webp + + +_VALID_WEBP_MODES = { + "RGB": True, + "RGBA": True, + } + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless + } + + +def _accept(prefix): + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + return is_riff_file_format and is_webp_file and is_valid_vp8_mode + + +class WebPImageFile(ImageFile.ImageFile): + + format = "WEBP" + format_description = "WebP image" + + def _open(self): + data, width, height, self.mode, icc_profile, exif = \ + _webp.WebPDecode(self.fp.read()) + + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + + self.size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + + def _getexif(self): + from PIL.JpegImagePlugin import _getexif + return _getexif(self) + + +def _save(im, fp, filename): + image_mode = im.mode + if im.mode not in _VALID_WEBP_MODES: + raise IOError("cannot write mode %s as WEBP" % image_mode) + + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + icc_profile = im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + float(quality), + im.mode, + icc_profile, + exif + ) + if data is None: + raise IOError("cannot write file as WEBP (encoder returned None)") + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +Image.register_save(WebPImageFile.format, _save) + +Image.register_extension(WebPImageFile.format, ".webp") +Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/server/www/packages/packages-darwin/x64/PIL/WmfImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/WmfImagePlugin.py new file mode 100644 index 0000000..3163210 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/WmfImagePlugin.py @@ -0,0 +1,173 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1996. 
+# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, _binary + +__version__ = "0.2" + +_handler = None + +if str != bytes: + long = int + + +## +# Install application-specific WMF image handler. +# +# @param handler Handler object. + +def register_handler(handler): + global _handler + _handler = handler + +if hasattr(Image.core, "drawwmf"): + # install default handler (windows only) + + class WmfHandler(object): + + def open(self, im): + im.mode = "RGB" + self.bbox = im.info["wmf_bbox"] + + def load(self, im): + im.fp.seek(0) # rewind + return Image.frombytes( + "RGB", im.size, + Image.core.drawwmf(im.fp.read(), im.size, self.bbox), + "raw", "BGR", (im.size[0]*3 + 3) & -4, -1 + ) + + register_handler(WmfHandler()) + +# -------------------------------------------------------------------- + +word = _binary.i16le + + +def short(c, o=0): + v = word(c, o) + if v >= 32768: + v -= 65536 + return v + +dword = _binary.i32le + + +# +# -------------------------------------------------------------------- +# Read WMF file + +def _accept(prefix): + return ( + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or + prefix[:4] == b"\x01\x00\x00\x00" + ) + + +## +# Image plugin for Windows metafiles. + +class WmfStubImageFile(ImageFile.StubImageFile): + + format = "WMF" + format_description = "Windows Metafile" + + def _open(self): + + # check placable header + s = self.fp.read(80) + + if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": + + # placeable windows metafile + + # get units per inch + inch = word(s, 14) + + # get bounding box + x0 = short(s, 6) + y0 = short(s, 8) + x1 = short(s, 10) + y1 = short(s, 12) + + # normalize size to 72 dots per inch + size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + self.info["dpi"] = 72 + + # print self.mode, self.size, self.info + + # sanity check (standard metafile header) + if s[22:26] != b"\x01\x00\t\x00": + raise SyntaxError("Unsupported WMF file format") + + elif dword(s) == 1 and s[40:44] == b" EMF": + # enhanced metafile + + # get bounding box + x0 = dword(s, 8) + y0 = dword(s, 12) + x1 = dword(s, 16) + y1 = dword(s, 20) + + # get frame (in 0.01 millimeter units) + frame = dword(s, 24), dword(s, 28), dword(s, 32), dword(s, 36) + + # normalize size to 72 dots per inch + size = x1 - x0, y1 - y0 + + # calculate dots per inch from bbox and frame + xdpi = 2540 * (x1 - y0) // (frame[2] - frame[0]) + ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1]) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + if xdpi == ydpi: + self.info["dpi"] = xdpi + else: + self.info["dpi"] = xdpi, ydpi + + else: + raise SyntaxError("Unsupported file format") + + self.mode = "RGB" + self.size = size + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("WMF save handler not installed") + _handler.save(im, fp, filename) + +# +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) +Image.register_save(WmfStubImageFile.format, _save) + +Image.register_extension(WmfStubImageFile.format, ".wmf") +Image.register_extension(WmfStubImageFile.format, ".emf") diff --git a/server/www/packages/packages-darwin/x64/PIL/XVThumbImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/XVThumbImagePlugin.py new file mode 100644 index 0000000..311e65d --- /dev/null 
+++ b/server/www/packages/packages-darwin/x64/PIL/XVThumbImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XV Thumbnail file handler by Charles E. "Gene" Cash +# (gcash@magicnet.net) +# +# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, +# available from ftp://ftp.cis.upenn.edu/pub/xv/ +# +# history: +# 98-08-15 cec created (b/w only) +# 98-12-09 cec added color palette +# 98-12-28 fl added to PIL (with only a few very minor modifications) +# +# To do: +# FIXME: make save work (this requires quantization support) +# + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.1" + +o8 = _binary.o8 + +# standard color palette for thumbnails (RGB332) +PALETTE = b"" +for r in range(8): + for g in range(8): + for b in range(4): + PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3)) + + +## +# Image plugin for XV thumbnail images. + +class XVThumbImageFile(ImageFile.ImageFile): + + format = "XVThumb" + format_description = "XV thumbnail image" + + def _open(self): + + # check magic + s = self.fp.read(6) + if s != b"P7 332": + raise SyntaxError("not an XV thumbnail file") + + # Skip to beginning of next line + self.fp.readline() + + # skip info comments + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("Unexpected EOF reading XV thumbnail file") + if s[0] != b'#': + break + + # parse header line (already read) + s = s.strip().split() + + self.mode = "P" + self.size = int(s[0:1]), int(s[1:2]) + + self.palette = ImagePalette.raw("RGB", PALETTE) + + self.tile = [ + ("raw", (0, 0)+self.size, + self.fp.tell(), (self.mode, 0, 1) + )] + +# -------------------------------------------------------------------- + +Image.register_open(XVThumbImageFile.format, XVThumbImageFile) diff --git a/server/www/packages/packages-darwin/x64/PIL/XbmImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/XbmImagePlugin.py new file mode 100644 index 0000000..bca8828 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/XbmImagePlugin.py @@ -0,0 +1,96 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XBM File handling +# +# History: +# 1995-09-08 fl Created +# 1996-11-01 fl Added save support +# 1997-07-07 fl Made header parser more tolerant +# 1997-07-22 fl Fixed yet another parser bug +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) +# 2004-02-24 fl Allow some whitespace before first #define +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re +from PIL import Image, ImageFile + +__version__ = "0.6" + +# XBM header +xbm_head = re.compile( + b"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+" + b"(?P" + b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+" + b")?" + b"[\\000-\\377]*_bits\\[\\]" +) + + +def _accept(prefix): + return prefix.lstrip()[:7] == b"#define" + + +## +# Image plugin for X11 bitmaps. 
+ +class XbmImageFile(ImageFile.ImageFile): + + format = "XBM" + format_description = "X11 Bitmap" + + def _open(self): + + m = xbm_head.match(self.fp.read(512)) + + if m: + + xsize = int(m.group("width")) + ysize = int(m.group("height")) + + if m.group("hotspot"): + self.info["hotspot"] = ( + int(m.group("xhot")), int(m.group("yhot")) + ) + + self.mode = "1" + self.size = xsize, ysize + + self.tile = [("xbm", (0, 0)+self.size, m.end(), None)] + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as XBM" % im.mode) + + fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii')) + fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii')) + + hotspot = im.encoderinfo.get("hotspot") + if hotspot: + fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii')) + fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii')) + + fp.write(b"static char im_bits[] = {\n") + + ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)]) + + fp.write(b"};\n") + + +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) + +Image.register_extension(XbmImageFile.format, ".xbm") + +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/server/www/packages/packages-darwin/x64/PIL/XpmImagePlugin.py b/server/www/packages/packages-darwin/x64/PIL/XpmImagePlugin.py new file mode 100644 index 0000000..556adb8 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/XpmImagePlugin.py @@ -0,0 +1,130 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XPM File handling +# +# History: +# 1996-12-29 fl Created +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re +from PIL import Image, ImageFile, ImagePalette +from PIL._binary import i8, o8 + +__version__ = "0.2" + +# XPM header +xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") + + +def _accept(prefix): + return prefix[:9] == b"/* XPM */" + + +## +# Image plugin for X11 pixel maps. 
+ +class XpmImageFile(ImageFile.ImageFile): + + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self): + + if not _accept(self.fp.read(9)): + raise SyntaxError("not an XPM file") + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("broken XPM file") + m = xpm_head.match(s) + if m: + break + + self.size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + raise ValueError("cannot read this XPM file") + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for i in range(pal): + + s = self.fp.readline() + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] in b'\r\n': + s = s[:-1] + + c = i8(s[1]) + s = s[2:-2].split() + + for i in range(0, len(s), 2): + + if s[i] == b"c": + + # process colour key + rgb = s[i+1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[0:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = (o8((rgb >> 16) & 255) + + o8((rgb >> 8) & 255) + + o8(rgb & 255)) + else: + # unknown colour + raise ValueError("cannot read this XPM file") + break + + else: + + # missing colour key + raise ValueError("cannot read this XPM file") + + self.mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, bytes): + + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [None] * ysize + + for i in range(ysize): + s[i] = self.fp.readline()[1:xsize+1].ljust(xsize) + + self.fp = None + + return b"".join(s) + +# +# Registry + +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/server/www/packages/packages-darwin/x64/PIL/__init__.py b/server/www/packages/packages-darwin/x64/PIL/__init__.py new file mode 100644 index 0000000..e251cfa --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/__init__.py @@ -0,0 +1,58 @@ +# +# The Python Imaging Library. +# $Id$ +# +# package placeholder +# +# Copyright (c) 1999 by Secret Labs AB. +# +# See the README file for information on usage and redistribution. 
+# + +# ;-) + +VERSION = '1.1.7' # PIL version +PILLOW_VERSION = '3.1.0' # Pillow + +_plugins = ['BmpImagePlugin', + 'BufrStubImagePlugin', + 'CurImagePlugin', + 'DcxImagePlugin', + 'EpsImagePlugin', + 'FitsStubImagePlugin', + 'FliImagePlugin', + 'FpxImagePlugin', + 'GbrImagePlugin', + 'GifImagePlugin', + 'GribStubImagePlugin', + 'Hdf5StubImagePlugin', + 'IcnsImagePlugin', + 'IcoImagePlugin', + 'ImImagePlugin', + 'ImtImagePlugin', + 'IptcImagePlugin', + 'JpegImagePlugin', + 'Jpeg2KImagePlugin', + 'McIdasImagePlugin', + 'MicImagePlugin', + 'MpegImagePlugin', + 'MpoImagePlugin', + 'MspImagePlugin', + 'PalmImagePlugin', + 'PcdImagePlugin', + 'PcxImagePlugin', + 'PdfImagePlugin', + 'PixarImagePlugin', + 'PngImagePlugin', + 'PpmImagePlugin', + 'PsdImagePlugin', + 'SgiImagePlugin', + 'SpiderImagePlugin', + 'SunImagePlugin', + 'TgaImagePlugin', + 'TiffImagePlugin', + 'WebPImagePlugin', + 'WmfImagePlugin', + 'XbmImagePlugin', + 'XpmImagePlugin', + 'XVThumbImagePlugin'] diff --git a/server/www/packages/packages-darwin/x64/PIL/_binary.py b/server/www/packages/packages-darwin/x64/PIL/_binary.py new file mode 100644 index 0000000..2f5e8ff --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/_binary.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. +# + +from struct import unpack, pack + +if bytes is str: + def i8(c): + return ord(c) + + def o8(i): + return chr(i & 255) +else: + def i8(c): + return c if c.__class__ is int else c[0] + + def o8(i): + return bytes((i & 255,)) + + +# Input, le = little endian, be = big endian +# TODO: replace with more readable struct.unpack equivalent +def i16le(c, o=0): + """ + Converts a 2-bytes (16 bits) string to an integer. 
+ + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return unpack("H", c[o:o+2])[0] + + +def i32be(c, o=0): + return unpack(">I", c[o:o+4])[0] + + +# Output, le = little endian, be = big endian +def o16le(i): + return pack("H", i) + + +def o32be(i): + return pack(">I", i) + +# End of file diff --git a/server/www/packages/packages-darwin/x64/PIL/_imaging.cpython-35m-darwin.so b/server/www/packages/packages-darwin/x64/PIL/_imaging.cpython-35m-darwin.so new file mode 100644 index 0000000..1ea9b05 Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/_imaging.cpython-35m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/PIL/_imagingcms.cpython-35m-darwin.so b/server/www/packages/packages-darwin/x64/PIL/_imagingcms.cpython-35m-darwin.so new file mode 100644 index 0000000..cd455b2 Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/_imagingcms.cpython-35m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/PIL/_imagingft.cpython-35m-darwin.so b/server/www/packages/packages-darwin/x64/PIL/_imagingft.cpython-35m-darwin.so new file mode 100644 index 0000000..00ece7f Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/_imagingft.cpython-35m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/PIL/_imagingmath.cpython-35m-darwin.so b/server/www/packages/packages-darwin/x64/PIL/_imagingmath.cpython-35m-darwin.so new file mode 100644 index 0000000..099e286 Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/_imagingmath.cpython-35m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/PIL/_imagingmorph.cpython-35m-darwin.so b/server/www/packages/packages-darwin/x64/PIL/_imagingmorph.cpython-35m-darwin.so new file mode 100644 index 0000000..30b50dc Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/_imagingmorph.cpython-35m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/PIL/_imagingtk.cpython-35m-darwin.so b/server/www/packages/packages-darwin/x64/PIL/_imagingtk.cpython-35m-darwin.so new file mode 100644 index 0000000..d7943ff Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/_imagingtk.cpython-35m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/PIL/_util.py b/server/www/packages/packages-darwin/x64/PIL/_util.py new file mode 100644 index 0000000..51c6f68 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/_util.py @@ -0,0 +1,27 @@ +import os + +if bytes is str: + def isStringType(t): + return isinstance(t, basestring) + + def isPath(f): + return isinstance(f, basestring) +else: + def isStringType(t): + return isinstance(t, str) + + def isPath(f): + return isinstance(f, (bytes, str)) + + +# Checks if an object is a string, and that it points to a directory. 
+def isDirectory(f): + return isPath(f) and os.path.isdir(f) + + +class deferred_error(object): + def __init__(self, ex): + self.ex = ex + + def __getattr__(self, elt): + raise self.ex diff --git a/server/www/packages/packages-darwin/x64/PIL/_webp.cpython-35m-darwin.so b/server/www/packages/packages-darwin/x64/PIL/_webp.cpython-35m-darwin.so new file mode 100644 index 0000000..6ff1bf5 Binary files /dev/null and b/server/www/packages/packages-darwin/x64/PIL/_webp.cpython-35m-darwin.so differ diff --git a/server/www/packages/packages-darwin/x64/PIL/features.py b/server/www/packages/packages-darwin/x64/PIL/features.py new file mode 100644 index 0000000..fd87f09 --- /dev/null +++ b/server/www/packages/packages-darwin/x64/PIL/features.py @@ -0,0 +1,67 @@ +from PIL import Image + +modules = { + "pil": "PIL._imaging", + "tkinter": "PIL._imagingtk", + "freetype2": "PIL._imagingft", + "littlecms2": "PIL._imagingcms", + "webp": "PIL._webp", + "transp_webp": ("WEBP", "WebPDecoderBuggyAlpha") +} + + +def check_module(feature): + if feature not in modules: + raise ValueError("Unknown module %s" % feature) + + module = modules[feature] + + method_to_call = None + if type(module) is tuple: + module, method_to_call = module + + try: + imported_module = __import__(module) + except ImportError: + # If a method is being checked, None means that + # rather than the method failing, the module required for the method + # failed to be imported first + return None if method_to_call else False + + if method_to_call: + method = getattr(imported_module, method_to_call) + return method() is True + else: + return True + + +def get_supported_modules(): + supported_modules = [] + for feature in modules: + if check_module(feature): + supported_modules.append(feature) + return supported_modules + +codecs = { + "jpg": "jpeg", + "jpg_2000": "jpeg2k", + "zlib": "zip", + "libtiff": "libtiff" +} + + +def check_codec(feature): + if feature not in codecs: + raise ValueError("Unknown codec %s" % feature) + + codec = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def get_supported_codecs(): + supported_codecs = [] + for feature in codecs: + if check_codec(feature): + supported_codecs.append(feature) + return supported_codecs diff --git a/server/www/teleport/.idea/teleport.iml b/server/www/teleport/.idea/teleport.iml index 0eaa135..6253974 100644 --- a/server/www/teleport/.idea/teleport.iml +++ b/server/www/teleport/.idea/teleport.iml @@ -1,28 +1,14 @@ - - - - - - - + - + -
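A short usage sketch (not part of the diff) for the features module added above. It only calls the helpers defined in that file and assumes the Pillow 3.1.0 build from this package tree is importable.

from PIL import features

# check_module() probes an extension module; it returns None when the entry
# is a (module, attribute) probe and the underlying module cannot be imported.
print(features.check_module("freetype2"))
print(features.check_module("webp"))

# check_codec() looks for the codec name plus "_encoder" on Image.core.
print(features.check_codec("jpg"))

print(features.get_supported_modules())
print(features.get_supported_codecs())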


导出数据时发生错误:{}'.format(sql)) - - self.finish() - - -class ImportDatabaseHandler(TPBaseAdminAuthHandler): - # TODO: 导入操作可能会比较耗时,应该分离导入和获取导入状态两个过程,在页面上可以呈现导入进度,并列出导出成功/失败的项 - - @tornado.gen.coroutine - def post(self): - """ - sql导入规则: - 以事务方式执行sql语句 - """ - ret = dict() - ret['code'] = 0 - ret['message'] = '' - - sql_filename = '' - - try: - upload_path = os.path.join(cfg.data_path, 'tmp') # 文件的暂存路径 - if not os.path.exists(upload_path): - os.mkdir(upload_path) - file_metas = self.request.files['sqlfile'] # 提取表单中‘name’为‘file’的文件元数据 - for meta in file_metas: - now = time.localtime(time.time()) - tmp_name = 'upload-{:04d}{:02d}{:02d}{:02d}{:02d}{:02d}.sql'.format(now.tm_year, now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min, now.tm_sec) - sql_filename = os.path.join(upload_path, tmp_name) - with open(sql_filename, 'wb') as f: - f.write(meta['body']) - - # file encode maybe utf8 or gbk... check it out. - file_encode = None - with open(sql_filename, encoding='utf8') as f: - try: - f.readlines() - file_encode = 'utf8' - except: - pass - - if file_encode is None: - os.remove(sql_filename) - log.e('file `{}` unknown encode, neither GBK nor UTF8.\n'.format(sql_filename)) - ret['code'] = -2 - ret['message'] = 'upload sql file is not utf8 encode.' - return self.write(json.dumps(ret).encode('utf8')) - - db_ver_checked = False - with open(sql_filename, encoding=file_encode) as f: - db = get_db() - sql = [] - lines = f.readlines() - for line in lines: - line = line.strip('\r\n') - if line.startswith('-- DATABASE VERSION '): - x = line.split(' ') - if len(x) != 4: - ret['code'] = -1 - ret['message'] = 'SQL文件格式错误,无法解析数据库版本' - return self.write(json.dumps(ret).encode('utf8')) - db_ver_sql = int(x[3].strip()) - if db.DB_VERSION != db_ver_sql: - ret['code'] = -1 - ret['message'] = 'SQL文件数据库版本为 {},当前数据版本为 {},不允许导入!'.format(db_ver_sql, db.DB_VERSION) - return self.write(json.dumps(ret).encode('utf8')) - db_ver_checked = True - continue - - if not db_ver_checked: - continue - - if line .startswith('TRUNCATE TABLE '): - x = line.split(' ', 2) - _table_name = '`{}{}`'.format(db.table_prefix, x[2][1:-2]) - if db.db_type == db.DB_TYPE_MYSQL: - x[2] = _table_name - line = ' '.join(x) - line += ';' - sql.append(line) - elif db.db_type == db.DB_TYPE_SQLITE: - sql.append('DELETE FROM {};'.format(_table_name)) - sql.append('UPDATE `sqlite_sequence` SET `seq`=0 WHERE `name`="{}";'.format(_table_name[1:-1])) - - if line.startswith('INSERT INTO '): - x = line.split(' ', 3) - _table_name = '`{}{}`'.format(db.table_prefix, x[2][1:-1]) - x[2] = _table_name - line = ' '.join(x) - sql.append(line) - - if not db_ver_checked: - ret['code'] = -1 - ret['message'] = 'SQL文件格式错误,未能确定数据库版本' - return self.write(json.dumps(ret).encode('utf8')) - - db_ret = db.transaction(sql) - if not db_ret: - ret['code'] = -1 - ret['message'] = 'SQL语句执行出错' - return self.write(json.dumps(ret).encode('utf8')) - - ret['code'] = 0 - return self.write(json.dumps(ret).encode('utf8')) - except: - log.e('error\n') - ret['code'] = -6 - ret['message'] = '发生异常.' 
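The removed ImportDatabaseHandler above rewrites each TRUNCATE/INSERT statement so that bare table names pick up the configured table prefix before the whole file is executed as a single transaction. A standalone sketch (not part of the diff) of just that rewriting step; the prefix and sample statements are invented, and only the MySQL branch is mirrored.

TABLE_PREFIX = 'ts_'    # stands in for db.table_prefix

def rewrite(line):
    if line.startswith('TRUNCATE TABLE '):
        x = line.split(' ', 2)
        x[2] = '`{}{}`'.format(TABLE_PREFIX, x[2][1:-2])
        return ' '.join(x) + ';'
    if line.startswith('INSERT INTO '):
        x = line.split(' ', 3)
        x[2] = '`{}{}`'.format(TABLE_PREFIX, x[2][1:-1])
        return ' '.join(x)
    return line

print(rewrite('TRUNCATE TABLE `account`;'))
# TRUNCATE TABLE `ts_account`;
print(rewrite("INSERT INTO `account` VALUES (1,'admin');"))
# INSERT INTO `ts_account` VALUES (1,'admin');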
- return self.write(json.dumps(ret).encode('utf8')) - - finally: - if os.path.exists(sql_filename): - os.remove(sql_filename) diff --git a/server/www/teleport/app/eom_app/controller/dashboard.py b/server/www/teleport/app/eom_app/controller/dashboard.py deleted file mode 100644 index 5f6030d..0000000 --- a/server/www/teleport/app/eom_app/controller/dashboard.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- - -import tornado.ioloop -from .base import TPBaseAdminAuthHandler - - -class IndexHandler(TPBaseAdminAuthHandler): - def get(self): - self.render('dashboard/index.mako') diff --git a/server/www/teleport/app/eom_app/controller/group.py b/server/www/teleport/app/eom_app/controller/group.py deleted file mode 100644 index 5615638..0000000 --- a/server/www/teleport/app/eom_app/controller/group.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- - -from eom_app.app.configs import app_cfg -from eom_app.module import host -from .base import TPBaseAdminAuthHandler, TPBaseAdminAuthJsonHandler - -cfg = app_cfg() - - -class IndexHandler(TPBaseAdminAuthHandler): - def get(self): - self.render('group/index.mako') - - -class GetListHandler(TPBaseAdminAuthJsonHandler): - def post(self): - group_list = host.get_group_list() - ret = dict() - ret['page_index'] = 10 - ret['total'] = len(group_list) - ret['data'] = group_list - self.write_json(0, data=ret) diff --git a/server/www/teleport/app/eom_app/controller/host.py b/server/www/teleport/app/eom_app/controller/host.py deleted file mode 100644 index 7d41ce8..0000000 --- a/server/www/teleport/app/eom_app/controller/host.py +++ /dev/null @@ -1,928 +0,0 @@ -# -*- coding: utf-8 -*- - -import time -import csv -import os -import json -import threading -import tornado.gen -import tornado.httpclient - -from eom_app.app.configs import app_cfg -from eom_app.app.util import * -from eom_app.module import host -from eom_common.eomcore.logger import * -from eom_app.app.session import web_session -from .base import TPBaseUserAuthHandler, TPBaseAdminAuthHandler, TPBaseUserAuthJsonHandler, TPBaseAdminAuthJsonHandler - -cfg = app_cfg() - -# 临时认证ID的基数,每次使用时均递减 -tmp_auth_id_base = -1 -tmp_auth_id_lock = threading.RLock() - - -class IndexHandler(TPBaseUserAuthHandler): - def get(self): - _user = self.get_session('user') - if _user is None: - return self.write(-1) - - param = dict() - - if cfg.core.detected: - param['core'] = { - 'ssh_port': cfg.core.ssh.port, - 'rdp_port': cfg.core.rdp.port, - 'telnet_port': cfg.core.telnet.port - } - else: - param['core'] = { - 'ssh_port': 0, - 'rdp_port': 0, - 'telnet_port': 0 - } - - param['group_list'] = host.get_group_list() - - if _user['type'] >= 100: - param['cert_list'] = host.get_cert_list() - self.render('host/admin_index.mako', page_param=json.dumps(param)) - else: - self.render('host/user_index.mako', page_param=json.dumps(param)) - - -class UploadAndImportHandler(TPBaseAdminAuthHandler): - # TODO: 导入操作可能会比较耗时,应该分离导入和获取导入状态两个过程,在页面上可以呈现导入进度,并列出导出成功/失败的项 - - @tornado.gen.coroutine - def post(self): - """ - csv导入规则: - 每一行的数据格式: 分组ID,操作系统,IP地址,端口,系统用户,系统密码,协议,密钥ID,状态,认证类型,描述 - 因为主机的唯一性在于 `IP地址 + 端口`,且允许一个 `IP地址 + 端口` 对应多个系统用户,因此,每一行的数据几乎没有限制。 - 在导入时: - 1. 对每一个第一次遇到的 `IP地址 + 端口` 组合,就在 ts_host_info 表中加一个条目,并在 ts_auth_info 表中加入一个用户。 - 2. 对于非第一次遇到的 `IP地址 + 端口` 组合,则仅仅在 ts_auth_info 表中加一个用户,不更改 ts_host_info 表中的现有数据。 - 3. `IP地址 + 端口 + 用户` 的组合不能重复。 - 4. 
空行跳过,数据格式不正确的跳过。 - """ - ret = dict() - ret['code'] = 0 - ret['message'] = '' - ret['data'] = {} - ret['data']['msg'] = list() # 记录跳过的行(格式不正确,或者数据重复等) - csv_filename = '' - - try: - upload_path = os.path.join(cfg.data_path, 'tmp') # 文件的暂存路径 - if not os.path.exists(upload_path): - os.mkdir(upload_path) - file_metas = self.request.files['csvfile'] # 提取表单中‘name’为‘file’的文件元数据 - for meta in file_metas: - now = time.localtime(time.time()) - tmp_name = 'upload-{:04d}{:02d}{:02d}{:02d}{:02d}{:02d}.csv'.format(now.tm_year, now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min, now.tm_sec) - csv_filename = os.path.join(upload_path, tmp_name) - with open(csv_filename, 'wb') as f: - f.write(meta['body']) - - # file encode maybe utf8 or gbk... check it out. - file_encode = None - with open(csv_filename, encoding='gbk') as f: - try: - f.readlines() - file_encode = 'gbk' - except: - pass - - if file_encode is None: - with open(csv_filename, encoding='utf8') as f: - try: - f.readlines() - file_encode = 'utf8' - except: - pass - - if file_encode is None: - os.remove(csv_filename) - log.e('file `{}` unknown encode, neither GBK nor UTF8.\n'.format(csv_filename)) - ret['code'] = -2 - ret['message'] = 'upload csv file is neither gbk nor utf8 encode.' - return self.write(json.dumps(ret).encode('utf8')) - - with open(csv_filename, encoding=file_encode) as f: - csv_reader = csv.reader(f) - is_first_line = True - for csv_recorder in csv_reader: - # 跳过第一行,那是格式说明 - if is_first_line: - is_first_line = False - continue - - # 空行则忽略 - if len(csv_recorder) <= 1: - continue - - # 格式错误则记录在案,然后继续 - if len(csv_recorder) != 13: - ret['msg'].append({'reason': '格式错误', 'line': ', '.join(csv_recorder)}) - continue - - host_args = dict() - user_args = dict() - # 分组ID, 操作系统, IP地址, 端口, 协议, 状态, 描述, 系统用户, 系统密码, 是否加密,附加参数, 密钥ID, 认证类型 - - host_args['group_id'] = int(csv_recorder[0]) - host_args['host_sys_type'] = int(csv_recorder[1]) - host_args['host_ip'] = csv_recorder[2] - host_args['host_port'] = csv_recorder[3] - host_args['protocol'] = csv_recorder[4] - host_args['host_lock'] = csv_recorder[5] - host_args['host_desc'] = csv_recorder[6] - # 加入一个主机(如果已经存在,则直接返回已存在的条目的host_id) - host_id = host.add_host(host_args, must_not_exists=False) - if host_id < 0: - ret['msg'].append({'reason': '添加主机失败,操作数据库失败', 'line': ', '.join(csv_recorder)}) - continue - - user_args['host_id'] = host_id - user_args['user_name'] = csv_recorder[7] - user_pswd = csv_recorder[8] - is_encrypt = int(csv_recorder[9]) - user_args['user_param'] = csv_recorder[10].replace('\\n', '\n') - user_args['cert_id'] = int(csv_recorder[11]) - auth_mode = int(csv_recorder[12]) - user_args['auth_mode'] = auth_mode - user_args['user_pswd'] = '' - if auth_mode == 0: - pass - elif auth_mode == 1: - try: - if is_encrypt == 0: - _yr = async_enc(user_pswd) - return_data = yield _yr - if return_data is None: - ret['code'] = -3 - ret['message'] = 'can not encrypt by core server.' - return self.write(json.dumps(ret).encode('utf8')) - - if 'code' not in return_data or return_data['code'] != 0: - ret['code'] = -4 - ret['message'] = 'invalid result from encrypt by core server.' 
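A standalone sketch (not part of the diff) of how one 13-field CSV row maps onto the host/user dictionaries built by the removed import handler above, in the column order its docstring describes. The sample row is invented for illustration.

import csv
import io

row = next(csv.reader(io.StringIO(
    '1,1,192.168.1.10,22,2,0,demo host,root,secret,0,,0,1')))

host_args = {
    'group_id': int(row[0]), 'host_sys_type': int(row[1]),
    'host_ip': row[2], 'host_port': row[3], 'protocol': row[4],
    'host_lock': row[5], 'host_desc': row[6],
}
user_args = {
    'user_name': row[7],                        # row[8] = password, row[9] = encrypted flag
    'user_param': row[10].replace('\\n', '\n'),
    'cert_id': int(row[11]), 'auth_mode': int(row[12]),
}
print(host_args['host_ip'], user_args['auth_mode'])    # 192.168.1.10 1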
- return self.write(json.dumps(ret).encode('utf8')) - - tmp_pswd = return_data['data'] - - else: - tmp_pswd = user_pswd - - user_args['user_pswd'] = tmp_pswd - - except: - log.e('can not encrypt user password.\n') - ret['code'] = -5 - ret['message'] = '发生异常' - return self.write(json.dumps(ret).encode('utf8')) - - elif auth_mode == 2: - pass - else: - ret['data']['msg'].append({'reason': '未知的认证模式', 'line': ', '.join(csv_recorder)}) - log.e('auth_mode unknown\n') - continue - - uid = host.sys_user_add(user_args) - if uid < 0: - if uid == -100: - ret['data']['msg'].append({'reason': '添加登录账号失败,账号已存在', 'line': ', '.join(csv_recorder)}) - else: - ret['data']['msg'].append({'reason': '添加登录账号失败,操作数据库失败', 'line': ', '.join(csv_recorder)}) - - ret['code'] = 0 - return self.write(json.dumps(ret).encode('utf8')) - except: - log.e('error\n') - ret['code'] = -6 - ret['message'] = '发生异常.' - return self.write(json.dumps(ret).encode('utf8')) - - finally: - if os.path.exists(csv_filename): - os.remove(csv_filename) - - -class GetListHandler(TPBaseUserAuthJsonHandler): - def post(self): - _user = self.get_current_user() - if _user is None: - return self.write_json(-1, '尚未登录') - - filter = dict() - order = dict() - order['name'] = 'host_id' - order['asc'] = True - limit = dict() - limit['page_index'] = 0 - limit['per_page'] = 25 - - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - - tmp = list() - _filter = args['filter'] - for i in _filter: - if i == 'host_sys_type' and _filter[i] == 0: - tmp.append(i) - continue - if i == 'host_group' and _filter[i] == 0: - tmp.append(i) - continue - if i == 'search': - _x = _filter[i].strip() - if len(_x) == 0: - tmp.append(i) - continue - - for i in tmp: - del _filter[i] - - filter.update(_filter) - - _limit = args['limit'] - if _limit['page_index'] < 0: - _limit['page_index'] = 0 - if _limit['per_page'] < 10: - _limit['per_page'] = 10 - if _limit['per_page'] > 100: - _limit['per_page'] = 100 - - limit.update(_limit) - - _order = args['order'] - if _order is not None: - order['name'] = _order['k'] - order['asc'] = _order['v'] - if _user['type'] == 100: - _total, _hosts = host.get_all_host_info_list(filter, order, limit) - else: - filter['account_name'] = _user['name'] - _total, _hosts = host.get_host_info_list_by_user(filter, order, limit) - - ret = dict() - ret['page_index'] = limit['page_index'] - ret['total'] = _total - ret['data'] = _hosts - self.write_json(0, data=ret) - - -class GetGrouplist(TPBaseUserAuthJsonHandler): - def post(self): - group_list = host.get_group_list() - self.write_json(0, data=group_list) - - -class UpdateHandler(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - if 'host_id' not in args or 'kv' not in args: - self.write_json(-2, '缺少必要参数') - - _ret = host.update(args['host_id'], args['kv']) - - if _ret: - self.write_json(0) - else: - self.write_json(-3, '数据库操作失败') - - -class AddHost(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - try: - ret = host.add_host(args) - if ret > 0: - return self.write_json(0) - else: - if ret == -100: - return self.write_json(-100, '') - else: - return self.write_json(-2, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('add host failed.\n') - return self.write_json(-3, '发生异常') - - -class 
LockHost(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - host_id = args['host_id'] - lock = args['lock'] - try: - ret = host.lock_host(host_id, lock) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('lock host failed.\n') - return self.write_json(-3, '发生异常') - - -class DeleteHost(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - host_list = args['host_list'] - try: - ret = host.delete_host(host_list) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('delete host failed.\n') - return self.write_json(-3, '发生异常') - - -class ExportHostHandler(TPBaseAdminAuthHandler): - def get(self): - self.set_header('Content-Type', 'application/octet-stream') - self.set_header('Content-Disposition', 'attachment; filename=teleport-host-export.csv') - - order = dict() - order['name'] = 'host_id' - order['asc'] = True - limit = dict() - limit['page_index'] = 0 - limit['per_page'] = 999999 - _total, _hosts = host.get_all_host_info_list(dict(), order, limit, True) - - self.write("分组ID, 操作系统, IP地址, 端口, 协议, 状态, 描述, 系统用户, 系统密码, 是否加密, 附加参数, 密钥ID, 认证类型\n".encode('gbk')) - - try: - - for h in _hosts: - auth_list = h['auth_list'] - # 分组ID, 操作系统, IP地址, 端口, 协议, 状态, 描述, 系统用户, 系统密码, 是否加密,附加参数, 密钥ID, 认证类型 - for j in auth_list: - row_string = '' - # row_string = str(h['host_id']) - # row_string += ',' - row_string += str(h['group_id']) - row_string += ',' - row_string += str(h['host_sys_type']) - row_string += ',' - row_string += h['host_ip'] - row_string += ',' - row_string += str(h['host_port']) - row_string += ',' - row_string += str(h['protocol']) - row_string += ',' - row_string += str(h['host_lock']) - row_string += ',' - row_string += h['host_desc'] - row_string += ',' - - # row_string += str(j['host_auth_id']) - # row_string += ',' - row_string += j['user_name'] - row_string += ',' - row_string += j['user_pswd'] - row_string += ',' - row_string += '1' - row_string += ',' - user_param = j['user_param'] - if len(user_param) > 0: - user_param = user_param.replace('\n', '\\n') - row_string += user_param - row_string += ',' - row_string += str(j['cert_id']) - row_string += ',' - row_string += str(j['auth_mode']) - - self.write(row_string.encode('gbk')) - self.write('\n') - - except IndexError: - self.write('**********************************************\n'.encode('gbk')) - self.write('!!错误!!\n'.encode('gbk')) - self.write('导出过程中发生了错误!!\n'.encode('gbk')) - self.write('**********************************************\n'.encode('gbk')) - log.e('') - - self.finish() - - -class GetCertList(TPBaseUserAuthJsonHandler): - def post(self): - _certs = host.get_cert_list() - if _certs is None or len(_certs) == 0: - return self.write_json(-1, '参数错误') - else: - return self.write_json(0, data=_certs) - - -class AddCert(TPBaseUserAuthJsonHandler): - @tornado.gen.coroutine - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - cert_pub = args['cert_pub'] - cert_pri = args['cert_pri'] - cert_name = args['cert_name'] - - if len(cert_pri) == 0: - return self.write_json(-2, '参数错误,数据不完整') - - _yr = async_enc(cert_pri) - 
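Every handler in this controller reads a single form field named 'args' whose value is a JSON document (self.get_argument('args') followed by json.loads). A standalone sketch (not part of the diff) of the request body a caller would build for the LockHost handler above; the host_id and lock values are invented.

import json
import urllib.parse

payload = urllib.parse.urlencode({'args': json.dumps({'host_id': 3, 'lock': 1})})
print(payload)    # args=%7B%22host_id%22%3A+3%2C+%22lock%22%3A+1%7D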
return_data = yield _yr - if return_data is None: - return self.write_json(-3, '调用核心服务加密失败') - - if 'code' not in return_data or return_data['code'] != 0: - return self.write_json(-4, '核心服务加密返回错误') - - cert_pri = return_data['data'] - - try: - ret = host.add_cert(cert_pub, cert_pri, cert_name) - if ret: - return self.write_json(0) - else: - return self.write_json(-5, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('add cert failed.\n') - return self.write_json(-6, '发生异常') - - -class DeleteCert(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - cert_id = args['cert_id'] - - try: - ret = host.delete_cert(cert_id) - if ret: - return self.write_json(0) - else: - if ret == -2: - return self.write_json(-2, '') - else: - return self.write_json(-3, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('add cert failed.\n') - return self.write_json(-4, '发生异常') - - -class UpdateCert(TPBaseUserAuthJsonHandler): - @tornado.gen.coroutine - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - cert_id = args['cert_id'] - cert_pub = args['cert_pub'] - cert_pri = args['cert_pri'] - cert_name = args['cert_name'] - - if len(cert_pri) > 0: - _yr = async_enc(cert_pri) - return_data = yield _yr - if return_data is None: - return self.write_json(-2, '调用核心服务加密失败') - - if 'code' not in return_data or return_data['code'] != 0: - return self.write_json(-3, '核心服务加密返回错误') - - cert_pri = return_data['data'] - - try: - ret = host.update_cert(cert_id, cert_pub, cert_pri, cert_name) - if ret: - return self.write_json(0) - else: - return self.write_json(-4, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('update cert failed.\n') - return self.write_json(-5, '发生异常') - - -class AddGroup(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - group_name = args['group_name'] - try: - ret = host.add_group(group_name) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('add group failed.\n') - return self.write_json(-3, '发生异常') - - -class UpdateGroup(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - group_id = args['group_id'] - group_name = args['group_name'] - try: - ret = host.update_group(group_id, group_name) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('update group failed.\n') - return self.write_json(-3, '发生异常') - - -class DeleteGroup(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - group_id = args['group_id'] - try: - ret = host.delete_group(group_id) - if ret == 0: - return self.write_json(0) - else: - if ret == -2: - return self.write_json(-2, '') - else: - return self.write_json(-3, '数据库操作失败,errcode:{}'.format(ret)) - except: - log.e('delete group failed.\n') - return self.write_json(-4, '发生异常') - - -class AddHostToGroup(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - 
args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - host_list = args['host_list'] - group_id = args['group_id'] - try: - ret = host.add_host_to_group(host_list, group_id) - if ret: - self.write_json(0) - else: - return self.write_json(-2, '数据库操作失败,errcode:{}'.format(ret)) - return - except: - log.e('add host to group failed.\n') - return self.write_json(-3, '发生异常') - - -class GetSessionId(TPBaseUserAuthJsonHandler): - @tornado.gen.coroutine - def post(self, *args, **kwargs): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - if 'auth_id' not in args: - return self.write_json(-1, '参数缺失') - - auth_id = args['auth_id'] - - req = {'method': 'request_session', 'param': {'authid': auth_id}} - _yr = async_post_http(req) - return_data = yield _yr - if return_data is None: - return self.write_json(-2, '调用核心服务获取会话ID失败') - - if 'code' not in return_data: - return self.write_json(-3, '核心服务获取会话ID时返回错误数据') - - _code = return_data['code'] - if _code != 0: - return self.write_json(-4, '核心服务获取会话ID时返回错误 {}'.format(_code)) - - try: - session_id = return_data['data']['sid'] - except IndexError: - return self.write_json(-5, '核心服务获取会话ID时返回错误数据') - - data = dict() - data['session_id'] = session_id - - return self.write_json(0, data=data) - - -class AdminGetSessionId(TPBaseUserAuthJsonHandler): - @tornado.gen.coroutine - def post(self, *args, **kwargs): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - if 'host_auth_id' not in args: - return self.write_json(-1, '参数缺失') - - _host_auth_id = int(args['host_auth_id']) - - user = self.get_current_user() - - # host_auth_id 对应的是 ts_auth_info 表中的某个条目,含有具体的认证数据,因为管理员无需授权即可访问所有远程主机,因此 - # 直接给出 host_auth_id,且account直接指明是当前登录用户(其必然是管理员) - - tmp_auth_info = host.get_host_auth_info(_host_auth_id) - if tmp_auth_info is None: - return self.write_json(-2, '指定数据不存在') - - tmp_auth_info['account_lock'] = 0 - tmp_auth_info['account_name'] = user['name'] - - with tmp_auth_id_lock: - global tmp_auth_id_base - tmp_auth_id_base -= 1 - auth_id = tmp_auth_id_base - - # 将这个临时认证信息放到session中备后续查找使用(10秒内有效) - web_session().set('tmp-auth-info-{}'.format(auth_id), tmp_auth_info, 10) - - req = {'method': 'request_session', 'param': {'authid': auth_id}} - _yr = async_post_http(req) - return_data = yield _yr - if return_data is None: - return self.write_json(-3, '调用核心服务获取会话ID失败') - - if 'code' not in return_data: - return self.write_json(-4, '核心服务获取会话ID时返回错误数据') - - _code = return_data['code'] - if _code != 0: - return self.write_json(-5, '核心服务获取会话ID时返回错误 {}'.format(_code)) - - try: - session_id = return_data['data']['sid'] - except IndexError: - return self.write_json(-5, '核心服务获取会话ID时返回错误数据') - - data = dict() - data['session_id'] = session_id - - return self.write_json(0, data=data) - - -class AdminFastGetSessionId(TPBaseAdminAuthJsonHandler): - @tornado.gen.coroutine - def post(self, *args, **kwargs): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - user = self.get_current_user() - - tmp_auth_info = dict() - - try: - _host_auth_id = int(args['host_auth_id']) - _user_pswd = args['user_pswd'] - _cert_id = int(args['cert_id']) - - tmp_auth_info['host_ip'] = args['host_ip'] - tmp_auth_info['host_port'] = int(args['host_port']) - tmp_auth_info['sys_type'] = int(args['sys_type']) - tmp_auth_info['protocol'] = 
int(args['protocol']) - tmp_auth_info['user_name'] = args['user_name'] - tmp_auth_info['auth_mode'] = int(args['auth_mode']) - tmp_auth_info['user_param'] = args['user_param'] - tmp_auth_info['encrypt'] = 1 - tmp_auth_info['account_lock'] = 0 - tmp_auth_info['account_name'] = user['name'] - except IndexError: - return self.write_json(-2, '参数缺失') - - if tmp_auth_info['auth_mode'] == 1: - if len(_user_pswd) == 0: # 修改登录用户信息时可能不会修改密码,因此页面上可能不会传来密码,需要从数据库中直接读取 - h = host.get_host_auth_info(_host_auth_id) - tmp_auth_info['user_auth'] = h['user_auth'] - else: # 如果页面上修改了密码或者新建账号时设定了密码,那么需要先交给core服务进行加密 - req = {'method': 'enc', 'param': {'p': _user_pswd}} - _yr = async_post_http(req) - return_data = yield _yr - if return_data is None: - return self.write_json(-3, '调用核心服务加密失败') - if 'code' not in return_data or return_data['code'] != 0: - return self.write_json(-3, '核心服务加密返回错误') - - tmp_auth_info['user_auth'] = return_data['data']['c'] - - elif tmp_auth_info['auth_mode'] == 2: - tmp_auth_info['user_auth'] = host.get_cert_info(_cert_id) - if tmp_auth_info['user_auth'] is None: - self.write_json(-100, '指定私钥不存在') - return - elif tmp_auth_info['auth_mode'] == 0: - tmp_auth_info['user_auth'] = '' - else: - self.write_json(-101, '认证类型未知') - return - - with tmp_auth_id_lock: - global tmp_auth_id_base - tmp_auth_id_base -= 1 - auth_id = tmp_auth_id_base - - web_session().set('tmp-auth-info-{}'.format(auth_id), tmp_auth_info, 10) - - req = {'method': 'request_session', 'param': {'authid': auth_id}} - _yr = async_post_http(req) - return_data = yield _yr - if return_data is None: - return self.write_json(-3, '调用核心服务获取会话ID失败') - - if 'code' not in return_data: - return self.write_json(-4, '核心服务获取会话ID时返回错误数据') - - _code = return_data['code'] - if _code != 0: - return self.write_json(-5, '核心服务获取会话ID时返回错误 {}'.format(_code)) - - try: - session_id = return_data['data']['sid'] - except IndexError: - return self.write_json(-5, '核心服务获取会话ID时返回错误数据') - - data = dict() - data['session_id'] = session_id - - return self.write_json(0, data=data) - - -class SysUserList(TPBaseUserAuthJsonHandler): - def post(self, *args, **kwargs): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - try: - host_id = args['host_id'] - except: - return self.write_json(-1, '参数缺失') - - data = host.sys_user_list(host_id) - return self.write_json(0, data=data) - - -class SysUserAdd(TPBaseUserAuthJsonHandler): - @tornado.gen.coroutine - def post(self, *args, **kwargs): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - try: - auth_mode = args['auth_mode'] - user_pswd = args['user_pswd'] - cert_id = args['cert_id'] - except: - return self.write_json(-1, '参数缺失') - - if auth_mode == 1: - if 0 == len(args['user_pswd']): - return self.write_json(-2, '参数缺失') - - _yr = async_enc(user_pswd) - return_data = yield _yr - if return_data is None: - return self.write_json(-3, '调用核心服务加密失败') - - if 'code' not in return_data or return_data['code'] != 0: - return self.write_json(-3, '核心服务加密返回错误') - - args['user_pswd'] = return_data['data'] - - user_id = host.sys_user_add(args) - if user_id < 0: - if user_id == -100: - return self.write_json(user_id, '同名账户已经存在!') - else: - return self.write_json(user_id, '数据库操作失败!') - - return self.write_json(0) - - -class SysUserUpdate(TPBaseUserAuthJsonHandler): - @tornado.gen.coroutine - def post(self, *args, **kwargs): - args = self.get_argument('args', 
None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - if 'host_auth_id' not in args or 'kv' not in args: - return self.write_json(-2, '参数缺失') - - kv = args['kv'] - if 'auth_mode' not in kv or 'user_pswd' not in kv or 'cert_id' not in kv: - return self.write_json(-3, '参数缺失') - - auth_mode = kv['auth_mode'] - if 'user_pswd' in kv: - user_pswd = kv['user_pswd'] - if 0 == len(user_pswd): - args['kv'].pop('user_pswd') - user_pswd = None - else: - user_pswd = None - - cert_id = kv['cert_id'] - if auth_mode == 1 and user_pswd is not None: - _yr = async_enc(user_pswd) - return_data = yield _yr - if return_data is None: - return self.write_json(-4, '调用核心服务加密失败') - - if 'code' not in return_data or return_data['code'] != 0: - return self.write_json(-5, '核心服务加密返回错误') - - args['kv']['user_pswd'] = return_data['data'] - - if host.sys_user_update(args['host_auth_id'], args['kv']): - return self.write_json(0) - - return self.write_json(-6, '数据库操作失败') - - -class SysUserDelete(TPBaseUserAuthJsonHandler): - def post(self, *args, **kwargs): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, '参数错误') - - try: - host_auth_id = args['host_auth_id'] - except IndexError: - return self.write_json(-2, '参数缺失') - - if host.sys_user_delete(host_auth_id): - return self.write_json(0) - - return self.write_json(-3, '数据库操作失败') diff --git a/server/www/teleport/app/eom_app/controller/index.py b/server/www/teleport/app/eom_app/controller/index.py deleted file mode 100644 index 1710131..0000000 --- a/server/www/teleport/app/eom_app/controller/index.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -import sys -import tornado.ioloop -from .base import TPBaseHandler, TPBaseUserAuthHandler - - -class IndexHandler(TPBaseUserAuthHandler): - def get(self): - self.redirect('/host') - - -class ExitHandler(TPBaseHandler): - def get(self): - self.write('exit ok') - tornado.ioloop.IOLoop.instance().stop() - - -class UIDesignHandler(TPBaseHandler): - def get(self): - self.render('uidesign/index.mako') - - -class UIDesignWithoutSidebarHandler(TPBaseHandler): - def get(self): - self.render('uidesign/without-sidebar.mako') - - -class UIDesignTableHandler(TPBaseHandler): - def get(self): - # from hashlib import sha1 - # import hmac - # my_sign = hmac.new('key', 'msg', sha1).digest() - # # my_sign = base64.b64encode(my_sign) - # # print my_sign - - self.render('uidesign/table.mako') - diff --git a/server/www/teleport/app/eom_app/controller/rpc.py b/server/www/teleport/app/eom_app/controller/rpc.py deleted file mode 100644 index 9f83000..0000000 --- a/server/www/teleport/app/eom_app/controller/rpc.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- - -import json -import urllib.parse - -import tornado.gen -from eom_app.app.configs import app_cfg -from eom_app.app.session import web_session -from eom_app.app.util import async_post_http -from eom_app.module import host, record -from eom_common.eomcore.logger import * -from .base import TPBaseJsonHandler - - -class RpcHandler(TPBaseJsonHandler): - @tornado.gen.coroutine - def get(self): - _uri = self.request.uri.split('?', 1) - if len(_uri) != 2: - return self.write_json(-1, message='need request param.') - - yield self._dispatch(urllib.parse.unquote(_uri[1])) - - @tornado.gen.coroutine - def post(self): - req = self.request.body.decode('utf-8') - if req == '': - return self.write_json(-1, message='need request param.') - - yield self._dispatch(req) - - 
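The removed RpcHandler above accepts the same JSON document two ways: URL-encoded after the '?' on a GET, or as the raw POST body. A standalone sketch (not part of the diff) of what a caller builds for the session_end method; the '/rpc' mount path and the sample values are invented.

import json
import urllib.parse

req = {'method': 'session_end', 'param': {'rid': 42, 'code': 0}}

get_url = '/rpc?' + urllib.parse.quote(json.dumps(req))   # GET: JSON after the '?'
post_body = json.dumps(req).encode('utf-8')               # POST: JSON as the body

print(get_url)
print(post_body)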
@tornado.gen.coroutine - def _dispatch(self, req): - try: - _req = json.loads(req) - - if 'method' not in _req or 'param' not in _req: - return self.write_json(-1, message='invalid request format.') - except: - return self.write_json(-1, message='invalid json format.') - - if 'get_auth_info' == _req['method']: - return self._get_auth_info(_req['param']) - elif 'session_begin' == _req['method']: - return self._session_begin(_req['param']) - elif 'session_end' == _req['method']: - return self._session_end(_req['param']) - elif 'register_core' == _req['method']: - return self._register_core(_req['param']) - elif 'exit' == _req['method']: - return self._exit() - else: - log.e('WEB-JSON-RPC got unknown method: `{}`.\n'.format(_req['method'])) - - return self.write_json(-1, message='invalid method.') - - def _get_auth_info(self, param): - # 如果是页面上进行连接测试(增加或修改主机和用户时),信息并不写入数据库,而是在内存中存在,传递给core服务的 - # 应该是负数形式的authid。本接口支持区分这两种认证ID。 - - if 'authid' not in param: - return self.write_json(-1, message='invalid request.') - - authid = param['authid'] - if authid > 0: - # 根据authid从数据库中查询对应的数据,然后返回给调用者 - x = host.get_auth_info(param['authid']) - return self.write_json(0, data=x) - elif authid < 0: - x = web_session().taken('tmp-auth-info-{}'.format(authid), None) - return self.write_json(0, data=x) - else: - return self.write_json(-1, message='invalid auth id.') - - def _session_begin(self, param): - if 'sid' not in param: - return self.write_json(-1, message='invalid request.') - - try: - _sid = param['sid'] - _acc_name = param['account_name'] - _host_ip = param['host_ip'] - _sys_type = param['sys_type'] - _host_port = param['host_port'] - _auth_mode = param['auth_mode'] - _user_name = param['user_name'] - _protocol = param['protocol'] - except IndexError: - return self.write_json(-1, message='invalid request.') - - record_id = record.session_begin(_sid, _acc_name, _host_ip, _sys_type, _host_port, _auth_mode, _user_name, _protocol) - if record_id <= 0: - return self.write_json(-1, message='can not write database.') - else: - return self.write_json(0, data={'rid': record_id}) - - def _session_end(self, param): - if 'rid' not in param or 'code' not in param: - return self.write_json(-1, message='invalid request.') - - if not record.session_end(param['rid'], param['code']): - return self.write_json(-1, 'can not write database.') - else: - return self.write_json(0) - - def _register_core(self, param): - # 因为core服务启动了(之前可能非正常终止了),做一下数据库中会话状态的修复操作 - record.session_fix() - - if 'rpc' not in param: - return self.write_json(-1, 'invalid param.') - - app_cfg().common.core_server_rpc = param['rpc'] - - # 获取core服务的配置信息 - req = {'method': 'get_config', 'param': []} - _yr = async_post_http(req) - return_data = yield _yr - if return_data is None: - return self.write_json(-1, 'get config from core service failed.') - if 'code' not in return_data: - return self.write_json(-2, 'get config from core service return invalid data.') - if return_data['code'] != 0: - return self.write_json(-3, 'get config from core service return code: {}'.format(return_data['code'])) - - log.d('update core server config info.\n') - app_cfg().update_core(return_data['data']) - - return self.write_json(0) - - def _exit(self): - # set exit flag. 
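_get_auth_info() above distinguishes positive auth ids (rows in the database) from negative ones (temporary "test connection" records parked in the web session for 10 seconds). A standalone sketch (not part of the diff) of how those negative ids are handed out, using the same initial value and lock discipline as the removed host controller.

import threading

tmp_auth_id_base = -1
tmp_auth_id_lock = threading.RLock()

def next_tmp_auth_id():
    global tmp_auth_id_base
    with tmp_auth_id_lock:
        tmp_auth_id_base -= 1
        return tmp_auth_id_base

print(next_tmp_auth_id())   # -2
print(next_tmp_auth_id())   # -3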
- return self.write_json(0) diff --git a/server/www/teleport/app/eom_app/controller/user.py b/server/www/teleport/app/eom_app/controller/user.py deleted file mode 100644 index 6b05fb1..0000000 --- a/server/www/teleport/app/eom_app/controller/user.py +++ /dev/null @@ -1,296 +0,0 @@ -# -*- coding: utf-8 -*- - -import json - -from eom_app.app.configs import app_cfg -from eom_app.module import host -from eom_app.module import user -from eom_common.eomcore.logger import * -from .base import TPBaseUserAuthHandler, TPBaseUserAuthJsonHandler, TPBaseAdminAuthHandler, TPBaseAdminAuthJsonHandler - -cfg = app_cfg() - - -class IndexHandler(TPBaseAdminAuthHandler): - def get(self): - self.render('user/index.mako') - - -class PersonalHandler(TPBaseUserAuthHandler): - def get(self): - user_info = self.get_current_user() - self.render('user/personal.mako', user=user_info) - - -class AuthHandler(TPBaseAdminAuthHandler): - def get(self, user_name): - group_list = host.get_group_list() - cert_list = host.get_cert_list() - self.render('user/auth.mako', - group_list=group_list, - cert_list=cert_list, user_name=user_name) - - -class GetListHandler(TPBaseAdminAuthJsonHandler): - def post(self): - user_list = user.get_user_list(with_admin=False) - ret = dict() - ret['page_index'] = 10 - ret['total'] = len(user_list) - ret['data'] = user_list - self.write_json(0, data=ret) - - -class DeleteUser(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, 'invalid param') - - user_id = args['user_id'] - try: - ret = user.delete_user(user_id) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, 'database op failed.') - except: - log.e('delete user failed.\n') - return self.write_json(-3, 'got exception.') - - -class ModifyUser(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, 'invalid param.') - - user_id = args['user_id'] - user_desc = args['user_desc'] - - try: - ret = user.modify_user(user_id, user_desc) - if ret: - self.write_json(0) - else: - self.write_json(-2, 'database op failed.') - return - except: - log.e('modify user failed.\n') - self.write_json(-3, 'got exception.') - - -class AddUser(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, 'invalid param.') - - user_name = args['user_name'] - user_pwd = '123456' - user_desc = args['user_desc'] - if user_desc is None: - user_desc = '' - try: - ret = user.add_user(user_name, user_pwd, user_desc) - if 0 == ret: - return self.write_json(0) - else: - return self.write_json(ret, 'database op failed. 
errcode={}'.format(ret)) - except: - log.e('add user failed.\n') - return self.write_json(-3, 'got exception.') - - -class LockUser(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, 'invalid param.') - - user_id = args['user_id'] - lock_status = args['lock_status'] - - try: - ret = user.lock_user(user_id, lock_status) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, 'database op failed.') - except: - log.e('lock user failed.\m') - return self.write_json(-3, 'got exception.') - - -class ResetUser(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, 'invalid param.') - - user_id = args['user_id'] - # lock_status = args['lock_status'] - - try: - ret = user.reset_user(user_id) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, 'database op failed.') - except: - log.e('reset user failed.\n') - return self.write_json(-3, 'got exception.') - - -class HostList(TPBaseUserAuthJsonHandler): - def post(self): - filter = dict() - order = dict() - order['name'] = 'host_id' - order['asc'] = True - limit = dict() - limit['page_index'] = 0 - limit['per_page'] = 25 - - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - - tmp = list() - _filter = args['filter'] - for i in _filter: - if i == 'host_sys_type' and _filter[i] == 0: - tmp.append(i) - continue - if i == 'host_group' and _filter[i] == 0: - tmp.append(i) - continue - if i == 'search': - _x = _filter[i].strip() - if len(_x) == 0: - tmp.append(i) - continue - - for i in tmp: - del _filter[i] - - filter.update(_filter) - - _limit = args['limit'] - if _limit['page_index'] < 0: - _limit['page_index'] = 0 - if _limit['per_page'] < 10: - _limit['per_page'] = 10 - if _limit['per_page'] > 100: - _limit['per_page'] = 100 - - limit.update(_limit) - - _order = args['order'] - if _order is not None: - order['name'] = _order['k'] - order['asc'] = _order['v'] - - _total, _hosts = host.get_host_info_list_by_user(filter, order, limit) - - ret = dict() - ret['page_index'] = limit['page_index'] - ret['total'] = _total - ret['data'] = _hosts - self.write_json(0, data=ret) - - -class AllocHost(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, 'invalid param.') - - user_name = args['user_name'] - host_list = args['host_list'] - try: - ret = user.alloc_host(user_name, host_list) - if ret: - return self.write_json(0) - else: - return self.write_json(-2, 'database op failed.') - except: - log.e('alloc host failed.') - self.write_json(-3, 'got exception.') - - -class AllocHostUser(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - return self.write_json(-1, 'invalid param.') - - user_name = args['user_name'] - host_auth_id_list = args['host_list'] - try: - ret = user.alloc_host_user(user_name, host_auth_id_list) - if ret: - self.write_json(0) - else: - self.write_json(-2, 'database op failed.') - except: - log.e('alloc host for user failed.\n') - self.write_json(-3, 'got exception.') - - -class DeleteHost(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = 
json.loads(args) - else: - return self.write_json(-1, 'invalid param.') - - user_name = args['user_name'] - host_list = args['host_list'] - - try: - ret = user.delete_host(user_name, host_list) - if ret: - self.write_json(0) - else: - self.write_json(-2, 'database op failed.') - except: - log.e('delete host failed.\n') - self.write_json(-3, 'got exception.') - - -class DeleteHostUser(TPBaseUserAuthJsonHandler): - def post(self): - args = self.get_argument('args', None) - if args is not None: - args = json.loads(args) - else: - self.write_json(-1, 'invalid param.') - - user_name = args['user_name'] - auth_id_list = args['auth_id_list'] - - try: - ret = user.delete_host_user(user_name, auth_id_list) - if ret: - self.write_json(0) - else: - self.write_json(-2, 'database op failed.') - except: - log.e('delete host for user failed.\n') - self.write_json(-3, 'got exception.') diff --git a/server/www/teleport/app/eom_app/module/host.py b/server/www/teleport/app/eom_app/module/host.py deleted file mode 100644 index d6ec01b..0000000 --- a/server/www/teleport/app/eom_app/module/host.py +++ /dev/null @@ -1,741 +0,0 @@ -# -*- coding: utf-8 -*- - -import json -import time - -from eom_app.app.db import get_db, DbItem - - -# 获取主机列表,包括主机的基本信息 -def get_all_host_info_list(_filter, order, limit, with_pwd=False): - db = get_db() - - _where = '' - - if len(_filter) > 0: - _where = 'WHERE ( ' - - need_and = False - for k in _filter: - if k == 'host_group': - if need_and: - _where += ' AND' - _where += ' `b`.`group_id`={}'.format(_filter[k]) - need_and = True - elif k == 'host_sys_type': - if need_and: - _where += ' AND' - _where += ' `a`.`host_sys_type`={}'.format(_filter[k]) - need_and = True - elif k == 'search': - # 查找,限于主机ID和IP地址,前者是数字,只能精确查找,后者可以模糊匹配 - # 因此,先判断搜索项能否转换为数字。 - - if need_and: - _where += ' AND ' - - _where += '(' - _where += '`a`.`host_ip` LIKE "%{}%" OR `a`.`host_desc` LIKE "%{}%" )'.format(_filter[k], _filter[k], _filter[k]) - need_and = True - _where += ')' - - # http://www.jb51.net/article/46015.htm - field_a = ['host_id', 'host_lock', 'host_ip', 'host_port', 'protocol', 'host_desc', 'group_id', 'host_sys_type'] - field_b = ['group_name'] - - sql = 'SELECT COUNT(*) ' \ - 'FROM `{}host_info` AS a ' \ - 'LEFT JOIN `{}group` AS b ON `a`.`group_id`=`b`.`group_id` ' \ - '{};'.format(db.table_prefix, db.table_prefix, _where) - - db_ret = db.query(sql) - if db_ret is None: - return 0, list() - total_count = db_ret[0][0] - - # 修正分页数据 - _limit = '' - if len(limit) > 0: - _page_index = limit['page_index'] - _per_page = limit['per_page'] - _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) - - if _page_index * _per_page >= total_count: - _page_index = int(total_count / _per_page) - _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) - - # 生成排序规则 - _order = '' - if order is not None: - _order = 'ORDER BY ' - if 'host_id' == order['name']: - _order += '`a`.`host_id`' - elif 'ip' == order['name']: - _order += '`a`.`host_ip`' - else: - _order = '' - - if not order['asc'] and len(_order) > 0: - _order += ' DESC' - - sql = 'SELECT {},{} ' \ - 'FROM `{}host_info` AS a ' \ - 'LEFT JOIN `{}group` AS b ON `a`.`group_id`=`b`.`group_id` ' \ - '{} {} {};'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), - ','.join(['`b`.`{}`'.format(i) for i in field_b]), - db.table_prefix, db.table_prefix, - _where, _order, _limit) - - db_ret = db.query(sql) - if db_ret is None: - return 0, list() - - ret = list() - for item in db_ret: - x = DbItem() - 
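The removed get_all_host_info_list() above first counts the matching rows, then fixes up the requested page before building the LIMIT clause. A standalone sketch (not part of the diff) of that paging arithmetic with invented numbers; it reproduces the original expression as written, including its second LIMIT term of (page_index + 1) * per_page.

def make_limit(page_index, per_page, total_count):
    # clamp an out-of-range page back to the last available page,
    # then emit the same LIMIT expression the removed code builds
    if page_index * per_page >= total_count:
        page_index = int(total_count / per_page)
    return 'LIMIT {},{}'.format(page_index * per_page, (page_index + 1) * per_page)

print(make_limit(0, 25, 60))    # LIMIT 0,25
print(make_limit(5, 25, 60))    # LIMIT 50,75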
x.load(item, ['a_{}'.format(i) for i in field_a] + - ['b_{}'.format(i) for i in field_b]) - - h = dict() - h['host_id'] = x.a_host_id - h['host_port'] = x.a_host_port - h['protocol'] = x.a_protocol - h['host_lock'] = x.a_host_lock - h['host_ip'] = x.a_host_ip - h['host_desc'] = x.a_host_desc - h['group_id'] = x.a_group_id - h['host_sys_type'] = x.a_host_sys_type - group_name = '默认分组' - if x.b_group_name is not None: - group_name = x.b_group_name - h['group_name'] = group_name - - # h['auth_list'] = list() - # auth_list = h['auth_list'] - h['auth_list'] = sys_user_list(x.a_host_id, with_pwd) - # auth = dict() - # auth['host_auth_id'] = x.c_id - # auth['auth_mode'] = x.c_auth_mode - # auth['user_name'] = x.c_user_name - # auth_list.append(auth) - - ret.append(h) - return total_count, ret - - -def get_host_info_list_by_user(_filter, order, limit): - db = get_db() - - _where = '' - if len(_filter) > 0: - _where = 'WHERE ( ' - - need_and = False - for k in _filter: - if k == 'host_group': - if need_and: - _where += ' AND' - _where += ' b.group_id={}'.format(_filter[k]) - need_and = True - elif k == 'host_sys_type': - if need_and: - _where += ' AND' - _where += ' b.host_sys_type={}'.format(_filter[k]) - need_and = True - - elif k == 'account_name': - if need_and: - _where += ' AND' - _where += ' a.account_name=\'{}\''.format(_filter[k]) - need_and = True - - elif k == 'search': - # 查找,限于主机ID和IP地址,前者是数字,只能精确查找,后者可以模糊匹配 - # 因此,先判断搜索项能否转换为数字。 - - if need_and: - _where += ' AND ' - - _where += '(' - _where += 'b.host_ip LIKE "%{}%" OR b.host_desc LIKE "%{}%" )'.format(_filter[k], _filter[k], _filter[k]) - need_and = True - - _where += ')' - - field_a = ['auth_id', 'host_id', 'account_name', 'host_auth_id'] - field_b = ['host_id', 'host_lock', 'host_ip', 'protocol', 'host_port', 'host_desc', 'group_id', 'host_sys_type'] - field_c = ['group_name'] - field_d = ['auth_mode', 'user_name'] - sql = 'SELECT COUNT(DISTINCT a.host_id) ' \ - 'FROM {}auth AS a ' \ - 'LEFT JOIN {}host_info AS b ON a.host_id = b.host_id ' \ - '{};'.format(db.table_prefix, db.table_prefix, _where) - - db_ret = db.query(sql) - total_count = db_ret[0][0] - - # 修正分页数据 - _limit = '' - if len(limit) > 0: - _page_index = limit['page_index'] - _per_page = limit['per_page'] - _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) - - if _page_index * _per_page >= total_count: - _page_index = int(total_count / _per_page) - _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) - - # 生成排序规则 - _order = '' - # log.d(order['name']) - if order is not None: - _order = 'ORDER BY ' - if 'host_id' == order['name']: - _order += 'b.host_id' - elif 'ip' == order['name']: - _order += 'b.host_ip' - else: - _order = '' - - if not order['asc'] and len(_order) > 0: - _order += ' DESC' - - sql = 'SELECT {}, {},{},{} ' \ - 'FROM {}auth AS a ' \ - 'LEFT JOIN {}host_info AS b ON a.host_id=b.host_id ' \ - 'LEFT JOIN {}group AS c ON b.group_id = c.group_id ' \ - 'LEFT JOIN {}auth_info AS d ON d.id = a.host_auth_id ' \ - '{} {} {};' \ - ''.format(','.join(['a.{}'.format(i) for i in field_a]), - ','.join(['b.{}'.format(i) for i in field_b]), - ','.join(['c.{}'.format(i) for i in field_c]), - ','.join(['d.{}'.format(i) for i in field_d]), - db.table_prefix, db.table_prefix, db.table_prefix, db.table_prefix, - _where, _order, _limit) - - db_ret = db.query(sql) - ret = list() - temp = dict() - for item in db_ret: - x = DbItem() - x.load(item, ['a_{}'.format(i) for i in field_a] + ['b_{}'.format(i) for i in 
field_b] + ['c_{}'.format(i) for i in field_c] + ['d_{}'.format(i) for i in field_d]) - - host_ip = x.b_host_ip - protocol = x.b_protocol - key = '{}-{}'.format(host_ip, protocol) - temp_auth = None - extend_auth_list = sys_user_list(x.b_host_id, False, x.a_host_auth_id) - if extend_auth_list is not None and len(extend_auth_list) > 0: - auth = extend_auth_list[0] - auth['auth_id'] = x.a_auth_id - temp_auth = auth - add = False - if key in temp: - h = temp[key] - auth_list = h['auth_list'] - auth_list.append(temp_auth) - h['auth_list'] = auth_list - else: - h = dict() - h['host_id'] = x.b_host_id - h['host_lock'] = x.b_host_lock - h['host_ip'] = host_ip - h['host_port'] = x.b_host_port - h['host_desc'] = x.b_host_desc - h['group_id'] = x.b_group_id - h['host_sys_type'] = x.b_host_sys_type - h['protocol'] = x.b_protocol - group_name = '默认分组' - if x.c_group_name is not None: - group_name = x.c_group_name - h['group_name'] = group_name - add = True - temp[key] = h - h['auth_list'] = list() - auth_list = h['auth_list'] - auth_list.append(temp_auth) - h['auth_list'] = auth_list - - if add: - ret.append(h) - - return total_count, ret - - -def get_group_list(): - db = get_db() - field_a = ['group_id', 'group_name'] - sql = 'SELECT {} FROM `{}group` AS a; '.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix) - db_ret = db.query(sql) - if db_ret is None: - return list() - - ret = list() - for item in db_ret: - x = DbItem() - x.load(item, ['a_{}'.format(i) for i in field_a]) - h = dict() - - h['id'] = x.a_group_id - h['group_name'] = x.a_group_name - ret.append(h) - return ret - - -def update(host_id, kv): - db = get_db() - - if len(kv) == 0: - return False - - _val = '' - for k in kv: - if len(_val) > 0: - _val += ',' - if k == 'desc': - _val += '`host_desc`="{}"'.format(kv[k]) - elif k == 'pro_port': - temp = json.dumps(kv[k]) - _val += '`{}`="{}"'.format(k, temp) - else: - _val += '`{}`="{}"'.format(k, kv[k]) - - sql = 'UPDATE `{}host_info` SET {} WHERE `host_id`={};'.format(db.table_prefix, _val, int(host_id)) - return db.exec(sql) - - -def get_cert_list(): - db = get_db() - field_a = ['cert_id', 'cert_name', 'cert_pub', 'cert_pri', 'cert_desc'] - sql = 'SELECT {} FROM `{}key` AS a;'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix) - db_ret = db.query(sql) - - ret = list() - - if db_ret is None: - return ret - - for item in db_ret: - x = DbItem() - - x.load(item, ['a_{}'.format(i) for i in field_a]) - h = dict() - - h['cert_id'] = x.a_cert_id - if x.a_cert_name is None: - x.a_cert_name = '' - - h['cert_name'] = x.a_cert_name - h['cert_pub'] = x.a_cert_pub - - h['cert_pri'] = x.a_cert_pri - if x.a_cert_desc is None: - x.a_cert_desc = '' - h['cert_desc'] = x.a_cert_desc - ret.append(h) - return ret - - -def add_host(args, must_not_exists=True): - db = get_db() - - protocol = args['protocol'] - host_port = args['host_port'] - host_ip = args['host_ip'] - - sql = 'SELECT `host_id` FROM `{}host_info` WHERE (`host_ip`="{}" AND `protocol`={} AND `host_port`={});'.format(db.table_prefix, host_ip, protocol, host_port) - db_ret = db.query(sql) - if db_ret is not None and len(db_ret) > 0: - if not must_not_exists: - return db_ret[0][0] - else: - return -100 - - group_id = args['group_id'] - host_sys_type = args['host_sys_type'] - # pro_port = args['pro_port'] - # pro_port = json.dumps(pro_port) - # host_user_name = args['user_name'] - # host_user_pwd = args['user_pwd'] - # host_pro_type = args['pro_type'] - # cert_id = args['cert_id'] - # host_encrypt = 1 - # 
host_auth_mode = args['host_auth_mode'] - host_desc = args['host_desc'] - if len(host_desc) == 0: - host_desc = '描述未填写' - host_lock = 0 - - # - sql = 'INSERT INTO `{}host_info` (group_id, host_sys_type, host_ip, ' \ - 'host_port, protocol, host_lock, host_desc) ' \ - 'VALUES ({},{},"{}",{},{},{},"{}")' \ - ''.format(db.table_prefix, - group_id, host_sys_type, host_ip, - host_port, protocol, host_lock, host_desc) - - ret = db.exec(sql) - if not ret: - return -101 - - host_id = db.last_insert_id() - if host_id == -1: - return -102 - else: - return host_id - - -def lock_host(host_id, lock): - db = get_db() - sql = 'UPDATE `{}host_info` SET `host_lock`={} WHERE `host_id`={}'.format(db.table_prefix, int(lock), int(host_id)) - return db.exec(sql) - - -def delete_host(host_list): - # TODO: 使用事务的方式防止删除操作中途失败 - db = get_db() - for item in host_list: - host_id = int(item) - sql = 'DELETE FROM `{}host_info` WHERE `host_id`={};'.format(db.table_prefix, host_id) - ret = db.exec(sql) - - sql = 'DELETE FROM `{}auth_info` WHERE `host_id`={};'.format(db.table_prefix, host_id) - ret = db.exec(sql) - - sql = 'DELETE FROM `{}auth` WHERE `host_id`={};'.format(db.table_prefix, host_id) - ret = db.exec(sql) - return True - - -def add_cert(cert_pub, cert_pri, cert_name): - db = get_db() - sql = 'INSERT INTO `{}key` (`cert_pub`, `cert_pri`, `cert_name`) VALUES ("{}","{}","{}")'.format(db.table_prefix, cert_pub, cert_pri, cert_name) - return db.exec(sql) - - -def delete_cert(cert_id): - db = get_db() - sql = 'DELETE FROM `{}key` WHERE `cert_id`={};'.format(db.table_prefix, int(cert_id)) - return db.exec(sql) - - -def update_cert(cert_id, cert_pub, cert_pri, cert_name): - db = get_db() - - if 0 == len(cert_pri): - sql = 'UPDATE `{}key` SET `cert_pub`="{}",`cert_name`="{}" ' \ - 'WHERE `cert_id`={};'.format(db.table_prefix, cert_pub, cert_name, int(cert_id)) - else: - sql = 'UPDATE `{}key` SET `cert_pub`="{}", `cert_pri`="{}", `cert_name`="{}" ' \ - 'WHERE `cert_id`={};'.format(db.table_prefix, cert_pub, cert_pri, cert_name, int(cert_id)) - - return db.exec(sql) - - -def add_group(group_name): - db = get_db() - sql = 'INSERT INTO `{}group` (`group_name`) VALUES ("{}");'.format(db.table_prefix, group_name) - return db.exec(sql) - - -def delete_group(group_id): - db = get_db() - sql = 'SELECT `host_id` FROM `{}host_info` WHERE `group_id`={};'.format(db.table_prefix, int(group_id)) - db_ret = db.query(sql) - if len(db_ret) != 0: - return -2 - - sql = 'DELETE FROM `{}group` WHERE `group_id`={};'.format(db.table_prefix, group_id) - ret = db.exec(sql) - if ret: - return 0 - return -3 - - -def update_group(group_id, group_name): - db = get_db() - sql = 'UPDATE `{}group` SET `group_name`="{}" ' \ - 'WHERE `group_id`={};'.format(db.table_prefix, group_name, int(group_id)) - return db.exec(sql) - - -def add_host_to_group(host_list, group_id): - db = get_db() - group_id = int(group_id) - for item in host_list: - host_id = int(item) - sql = 'UPDATE `{}host_info` SET group_id={} ' \ - 'WHERE `host_id`={};'.format(db.table_prefix, group_id, host_id) - ret = db.exec(sql) - return ret - - -def get_host_auth_info(host_auth_id): - db = get_db() - - field_a = ['id', 'auth_mode', 'user_name', 'user_pswd', 'user_param', 'cert_id', 'encrypt'] - field_b = ['host_id', 'host_lock', 'host_ip', 'host_port', 'host_desc', 'group_id', 'host_sys_type', 'protocol'] - - sql = 'SELECT {},{} ' \ - 'FROM `{}auth_info` AS a ' \ - 'LEFT JOIN `{}host_info` AS b ON `a`.`host_id`=`b`.`host_id` ' \ - 'WHERE `a`.`id`={};'.format(','.join(['`a`.`{}`'.format(i) 
for i in field_a]), - ','.join(['`b`.`{}`'.format(i) for i in field_b]), - db.table_prefix, db.table_prefix, - host_auth_id) - db_ret = db.query(sql) - - if db_ret is None or len(db_ret) != 1: - return None - x = DbItem() - x.load(db_ret[0], ['a_{}'.format(i) for i in field_a] + ['b_{}'.format(i) for i in field_b]) - - h = dict() - h['host_ip'] = x.b_host_ip - h['host_port'] = x.b_host_port - h['sys_type'] = x.b_host_sys_type - h['auth_mode'] = x.a_auth_mode - h['user_name'] = x.a_user_name - h['protocol'] = x.b_protocol - - if x.a_encrypt is None: - h['encrypt'] = 1 - else: - h['encrypt'] = x.a_encrypt - - if x.a_user_param is None: - h['user_param'] = '' - else: - h['user_param'] = x.a_user_param - - h['user_auth'] = x.a_user_pswd - - if x.a_auth_mode == 1: - h['user_auth'] = x.a_user_pswd - elif x.a_auth_mode == 2: - if x.a_cert_id is None: - cert_id = 0 - else: - cert_id = int(x.a_cert_id) # int(user_auth) - sql = 'SELECT `cert_pri` FROM `{}key` WHERE `cert_id`={};'.format(db.table_prefix, cert_id) - db_ret = db.query(sql) - if db_ret is not None and len(db_ret) == 1: - (cert_pri,) = db_ret[0] - h['user_auth'] = cert_pri - else: - return None - elif x.a_auth_mode == 0: - h['user_auth'] = '' - else: - return None - - return h - - -def get_cert_info(cert_id): - db = get_db() - sql = 'SELECT `cert_pri` FROM `{}key` WHERE `cert_id`={};'.format(db.table_prefix, cert_id) - db_ret = db.query(sql) - if db_ret is not None and len(db_ret) == 1: - (cert_pri,) = db_ret[0] - return cert_pri - else: - return None - - -def sys_user_list(host_id, with_pwd=True, host_auth_id=0): - db = get_db() - field_a = ['id', 'host_id', 'auth_mode', 'user_name', 'user_pswd', 'user_param', 'cert_id', 'log_time'] - if host_auth_id == 0: - sql = 'SELECT {} ' \ - 'FROM `{}auth_info` AS a ' \ - 'WHERE `a`.`host_id`={};'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix, int(host_id)) - else: - sql = 'SELECT {} ' \ - 'FROM `{}auth_info` AS a ' \ - 'WHERE `a`.`id`={} and `a`.`host_id`={};'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix, int(host_auth_id), int(host_id)) - - db_ret = db.query(sql) - - if db_ret is None: - return None - ret = list() - for item in db_ret: - x = DbItem() - x.load(item, ['a_{}'.format(i) for i in field_a]) - - h = dict() - # h['id'] = x.a_id - - h['host_auth_id'] = x.a_id - h['host_id'] = x.a_host_id - # h['pro_type'] = x.a_pro_type - h['auth_mode'] = x.a_auth_mode - h['user_name'] = x.a_user_name - if with_pwd: - h['user_pswd'] = x.a_user_pswd - - if x.a_user_param is None: - h['user_param'] = '' - else: - h['user_param'] = x.a_user_param - - h['cert_id'] = x.a_cert_id - h['log_time'] = x.a_log_time - # if x.a_auth_mode == 2: - # h['user_auth'] = x.a_user_auth - # else: - # h['user_auth'] = "******" - ret.append(h) - - return ret - - -def GetNowTime(): - return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())) - - -def sys_user_add(args): - host_id = int(args['host_id']) - auth_mode = int(args['auth_mode']) - user_name = args['user_name'] - user_pswd = args['user_pswd'] - cert_id = int(args['cert_id']) - - if 'user_param' in args: - user_param = args['user_param'] - else: - user_param = 'ogin:\nassword:' - - encrypt = 1 - - db = get_db() - - # 判断此登录账号是否已经存在,如果存在则报错 - sql = 'SELECT `id` FROM `{}auth_info` WHERE (`host_id`={} AND `auth_mode`={} AND `user_name`="{}");'.format(db.table_prefix, host_id, auth_mode, user_name) - db_ret = db.query(sql) - if db_ret is not None and len(db_ret) > 0: - return -100 - - log_time = GetNowTime() 
- - if auth_mode == 1: - sql = 'INSERT INTO `{}auth_info` (`host_id`,`auth_mode`,`user_name`,`user_pswd`,`user_param`,`encrypt`,`cert_id`,`log_time`) ' \ - 'VALUES ({},{},"{}","{}","{}",{}, {},"{}")' \ - ''.format(db.table_prefix, host_id, auth_mode, user_name, user_pswd, user_param, encrypt, 0, log_time) - elif auth_mode == 2: - sql = 'INSERT INTO `{}auth_info` (`host_id`,`auth_mode`,`user_name`,`user_pswd`,`user_param`,`encrypt`,`cert_id`,`log_time`) ' \ - 'VALUES ({},{},"{}","{}","{}",{},{},"{}")' \ - ''.format(db.table_prefix, host_id, auth_mode, user_name, '', user_param, encrypt, cert_id, log_time) - elif auth_mode == 0: - sql = 'INSERT INTO `{}auth_info` (`host_id`,`auth_mode`,`user_name`,`user_pswd`,`user_param`,`encrypt`,`cert_id`,`log_time`) ' \ - 'VALUES ({},{},"{}","{}","{}",{},{},"{}")' \ - ''.format(db.table_prefix, host_id, auth_mode, user_name, '', user_param, encrypt, 0, log_time) - ret = db.exec(sql) - if not ret: - return -101 - - user_id = db.last_insert_id() - if user_id == -1: - return -102 - else: - return user_id - - -def sys_user_update(_id, kv): - if len(kv) == 0: - return False - - _val = '' - for k in kv: - if len(_val) > 0: - _val += ',' - - _val += '`{}`="{}"'.format(k, kv[k]) - - db = get_db() - sql = 'UPDATE `{}auth_info` SET {} WHERE `id`={};'.format(db.table_prefix, _val, int(_id)) - return db.exec(sql) - - -def sys_user_delete(_id): - db = get_db() - try: - sql = 'DELETE FROM `{}auth_info` WHERE `id`={};'.format(db.table_prefix, int(_id)) - ret = db.exec(sql) - - sql = 'DELETE FROM `{}auth` WHERE `host_auth_id`={};'.format(db.table_prefix, int(_id)) - ret = db.exec(sql) - except: - return False - - return True - - -def get_auth_info(auth_id): - """ - 根据指定的auth_id查询相关的认证信息(远程主机IP、端口、登录用户名、登录密码或私钥,等等) - @param auth_id: integer - @return: - """ - db = get_db() - - field_a = ['auth_id', 'account_name', 'host_auth_id', 'host_id'] - field_b = ['host_sys_type', 'host_ip', 'host_port', 'protocol'] - field_c = ['user_pswd', 'cert_id', 'user_name', 'encrypt', 'auth_mode', 'user_param'] - field_d = ['account_lock'] - - sql = 'SELECT {},{},{},{} ' \ - 'FROM `{}auth` AS a ' \ - 'LEFT JOIN `{}host_info` AS b ON `a`.`host_id`=`b`.`host_id` ' \ - 'LEFT JOIN `{}auth_info` AS c ON `a`.`host_auth_id`=`c`.`id` ' \ - 'LEFT JOIN `{}account` AS d ON `a`.`account_name`=`d`.`account_name` ' \ - 'WHERE `a`.`auth_id`={};' \ - ''.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), - ','.join(['`b`.`{}`'.format(i) for i in field_b]), - ','.join(['`c`.`{}`'.format(i) for i in field_c]), - ','.join(['`d`.`{}`'.format(i) for i in field_d]), - db.table_prefix, db.table_prefix, db.table_prefix, db.table_prefix, - auth_id) - - db_ret = db.query(sql) - - if db_ret is None or len(db_ret) != 1: - return None - - db_item = DbItem() - - db_item.load(db_ret[0], - ['a_{}'.format(i) for i in field_a] + - ['b_{}'.format(i) for i in field_b] + - ['c_{}'.format(i) for i in field_c] + - ['d_{}'.format(i) for i in field_d] - ) - - ret = dict() - ret['host_ip'] = db_item.b_host_ip - ret['sys_type'] = db_item.b_host_sys_type - ret['account_name'] = db_item.a_account_name - ret['account_lock'] = db_item.d_account_lock - # h['host_lock'] = x.a_host_lock - ret['host_port'] = db_item.b_host_port - ret['protocol'] = db_item.b_protocol - ret['encrypt'] = db_item.c_encrypt - ret['auth_mode'] = db_item.c_auth_mode - ret['user_name'] = db_item.c_user_name - ret['user_param'] = db_item.c_user_param - - if db_item.c_auth_mode == 1: - ret['user_auth'] = db_item.c_user_pswd - elif db_item.c_auth_mode == 2: - 
cert_id = db_item.c_cert_id - - sql = 'SELECT `cert_pri` FROM `{}key` WHERE `cert_id`={}'.format(db.table_prefix, int(cert_id)) - db_ret = db.query(sql) - if db_ret is None or len(db_ret) > 1: - return None - ret['user_auth'] = db_ret[0][0] - else: - pass - - return ret diff --git a/server/www/teleport/app/eom_app/module/record.py b/server/www/teleport/app/eom_app/module/record.py deleted file mode 100644 index 3a51561..0000000 --- a/server/www/teleport/app/eom_app/module/record.py +++ /dev/null @@ -1,207 +0,0 @@ -# -*- coding: utf-8 -*- - -import os -import shutil -import struct -import base64 - -from eom_app.app.configs import app_cfg -from eom_app.app.db import get_db -from eom_common.eomcore.logger import log -from eom_common.eomcore.utils import timestamp_utc_now - - -def read_record_head(record_id): - record_path = os.path.join(app_cfg().core.replay_path, 'ssh', '{:06d}'.format(int(record_id))) - header_file_path = os.path.join(record_path, 'tp-ssh.tpr') - file = None - try: - file = open(header_file_path, 'rb') - data = file.read() - offset = 0 - - magic, = struct.unpack_from('I', data, offset) # magic must be 1381126228, 'TPPR' - offset += 4 - ver, = struct.unpack_from('H', data, offset) - offset += 2 - protocol, = struct.unpack_from('H', data, offset) - offset += 2 - time_start, = struct.unpack_from('Q', data, offset) - offset += 8 - pkg_count, = struct.unpack_from('I', data, offset) - offset += 4 - time_used, = struct.unpack_from('I', data, offset) - offset += 4 - width, = struct.unpack_from('H', data, offset) - offset += 2 - height, = struct.unpack_from('H', data, offset) - offset += 2 - file_count, = struct.unpack_from('H', data, offset) - offset += 2 - total_size, = struct.unpack_from('I', data, offset) - offset += 4 - - account, = struct.unpack_from('16s', data, offset) - account = account.decode() - offset += 16 - user_name, = struct.unpack_from('16s', data, offset) - user_name = user_name.decode() - offset += 16 - ip, = struct.unpack_from('18s', data, offset) - ip = ip.decode() - offset += 18 - port, = struct.unpack_from('H', data, offset) - offset += 2 - - except Exception as e: - log.e(e) - return None - finally: - if file is not None: - file.close() - - header = dict() - header['start'] = time_start - header['file_count'] = file_count - header['time_used'] = time_used - header['width'] = width - header['height'] = height - header['account'] = account - header['user_name'] = user_name - header['ip'] = ip - header['port'] = port - - return header - - -def read_record_info(record_id, file_id): - record_path = os.path.join(app_cfg().core.replay_path, 'ssh', '{:06d}'.format(int(record_id))) - file_info = os.path.join(record_path, 'tp-ssh.{:03d}'.format(int(file_id))) - file = None - try: - file = open(file_info, 'rb') - data = file.read() - total_size = len(data) - - offset = 0 - data_size, = struct.unpack_from('I', data, offset) - offset += 4 - - data_list = list() - while True: - action, = struct.unpack_from('B', data, offset) - offset += 1 - - _size, = struct.unpack_from('I', data, offset) - offset += 4 - - _time, = struct.unpack_from('I', data, offset) - offset += 4 - - # skip reserved 3 bytes. - offset += 3 - - _format = '{}s'.format(_size) - _data, = struct.unpack_from(_format, data, offset) - offset += _size - - temp = dict() - temp['a'] = action - temp['t'] = _time - if action == 1: - # this is window size changed. 
- w, h = struct.unpack_from('HH', _data) - temp['w'] = w - temp['h'] = h - elif action == 2: - try: - _d = _data.decode() - temp['d'] = _d - except: - _data = base64.b64encode(_data) - temp['a'] = 3 - temp['d'] = _data.decode() - else: - return None - - data_list.append(temp) - if offset == total_size: - break - - except Exception as e: - log.e('failed to read record file: {}\n'.format(file_info)) - return None - finally: - if file is not None: - file.close() - return data_list - - -def delete_log(log_list): - try: - where = list() - for item in log_list: - where.append(' `id`={}'.format(item)) - - db = get_db() - sql = 'DELETE FROM `{}log` WHERE{};'.format(db.table_prefix, ' OR'.join(where)) - ret = db.exec(sql) - if not ret: - return False - - # TODO: 此处应该通过json-rpc接口通知core服务来删除重放文件。 - for item in log_list: - log_id = int(item) - try: - record_path = os.path.join(app_cfg().core.replay_path, 'ssh', '{:06d}'.format(log_id)) - if os.path.exists(record_path): - shutil.rmtree(record_path) - record_path = os.path.join(app_cfg().core.replay_path, 'rdp', '{:06d}'.format(log_id)) - if os.path.exists(record_path): - shutil.rmtree(record_path) - except Exception: - pass - - return True - except: - return False - - -def session_fix(): - try: - db = get_db() - sql = 'UPDATE `{}log` SET `ret_code`=7 WHERE `ret_code`=0;'.format(db.table_prefix) - return db.exec(sql) - except: - return False - - -def session_begin(sid, acc_name, host_ip, sys_type, host_port, auth_mode, user_name, protocol): - try: - db = get_db() - sql = 'INSERT INTO `{}log` (`session_id`,`account_name`,`host_ip`,`sys_type`,`host_port`,`auth_type`,`user_name`,`ret_code`,`begin_time`,`end_time`,`log_time`,`protocol`) ' \ - 'VALUES ("{}","{}","{}",{},{},{},"{}",{},{},{},"{}",{});' \ - ''.format(db.table_prefix, - sid, acc_name, host_ip, sys_type, host_port, auth_mode, user_name, 0, timestamp_utc_now(), 0, '', protocol) - - ret = db.exec(sql) - if not ret: - return -101 - - user_id = db.last_insert_id() - if user_id == -1: - return -102 - else: - return user_id - - except: - return False - - -def session_end(record_id, ret_code): - try: - db = get_db() - sql = 'UPDATE `{}log` SET `ret_code`={}, `end_time`={} WHERE `id`={};'.format(db.table_prefix, int(ret_code), timestamp_utc_now(), int(record_id)) - return db.exec(sql) - except: - return False diff --git a/server/www/teleport/app/eom_app/module/user.py b/server/www/teleport/app/eom_app/module/user.py deleted file mode 100644 index 44f603c..0000000 --- a/server/www/teleport/app/eom_app/module/user.py +++ /dev/null @@ -1,361 +0,0 @@ -# -*- coding: utf-8 -*- - -import hashlib - -from eom_app.app.configs import app_cfg -from eom_app.app.const import * -from eom_app.app.db import get_db, DbItem -from eom_app.app.util import sec_generate_password, sec_verify_password -from eom_app.app.oath import verify_oath_code - - -def verify_user(name, password, oath_code): - cfg = app_cfg() - db = get_db() - - sql = 'SELECT `account_id`, `account_type`, `account_desc`, `account_pwd`, `account_lock` FROM `{}account` WHERE `account_name`="{}";'.format(db.table_prefix, name) - db_ret = db.query(sql) - if db_ret is None: - # 特别地,如果无法取得数据库连接,有可能是新安装的系统,尚未建立数据库,此时应该处于维护模式 - # 因此可以特别地处理用户验证:用户名admin,密码admin可以登录为管理员 - if cfg.app_mode == APP_MODE_MAINTENANCE: - if name == 'admin' and password == 'admin': - return 1, 100, '系统管理员', 0 - return 0, 0, '', 0 - - if len(db_ret) != 1: - return 0, 0, '', 0 - - user_id = db_ret[0][0] - account_type = db_ret[0][1] - desc = db_ret[0][2] - locked = db_ret[0][4] - if locked == 
1: - return 0, 0, '', locked - - if not sec_verify_password(password, db_ret[0][3]): - # 按新方法验证密码失败,可能是旧版本的密码散列格式,再尝试一下 - if db_ret[0][3] != hashlib.sha256(password.encode()).hexdigest(): - return 0, 0, '', locked - else: - # 发现此用户的密码散列格式还是旧的,更新成新的吧! - _new_sec_password = sec_generate_password(password) - sql = 'UPDATE `{}account` SET `account_pwd`="{}" WHERE `account_id`={}'.format(db.table_prefix, _new_sec_password, int(user_id)) - db.exec(sql) - - if oath_code is not None: - if not verify_oath(user_id, oath_code): - return 0, 0, '', 0 - - return user_id, account_type, desc, locked - - -def verify_oath(user_id, oath_code): - db = get_db() - - sql = 'SELECT `oath_secret` FROM `{}account` WHERE `account_id`={};'.format(db.table_prefix, user_id) - db_ret = db.query(sql) - if db_ret is None: - return False - - if len(db_ret) != 1: - return False - - oath_secret = str(db_ret[0][0]).strip() - if 0 == len(oath_secret): - return False - - return verify_oath_code(oath_secret, oath_code) - - -def modify_pwd(old_pwd, new_pwd, user_id): - db = get_db() - sql = 'SELECT `account_pwd` FROM `{}account` WHERE `account_id`={};'.format(db.table_prefix, int(user_id)) - db_ret = db.query(sql) - if db_ret is None or len(db_ret) != 1: - return -100 - - if not sec_verify_password(old_pwd, db_ret[0][0]): - # 按新方法验证密码失败,可能是旧版本的密码散列格式,再尝试一下 - if db_ret[0][0] != hashlib.sha256(old_pwd.encode()).hexdigest(): - return -101 - - _new_sec_password = sec_generate_password(new_pwd) - sql = 'UPDATE `{}account` SET `account_pwd`="{}" WHERE `account_id`={}'.format(db.table_prefix, _new_sec_password, int(user_id)) - db_ret = db.exec(sql) - if db_ret: - return 0 - else: - return -102 - - -def update_oath_secret(user_id, oath_secret): - db = get_db() - sql = 'UPDATE `{}account` SET `oath_secret`="{}" WHERE `account_id`={}'.format(db.table_prefix, oath_secret, int(user_id)) - db_ret = db.exec(sql) - if db_ret: - return 0 - else: - return -102 - - -def get_user_list(with_admin=False): - db = get_db() - ret = list() - - field_a = ['account_id', 'account_type', 'account_name', 'account_status', 'account_lock', 'account_desc'] - - if with_admin: - where = '' - else: - where = 'WHERE `a`.`account_type`<100' - - sql = 'SELECT {} FROM `{}account` as a {} ORDER BY `account_name`;'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix, where) - db_ret = db.query(sql) - if db_ret is None: - return ret - - for item in db_ret: - x = DbItem() - x.load(item, ['a_{}'.format(i) for i in field_a]) - h = dict() - h['user_id'] = x.a_account_id - h['user_type'] = x.a_account_type - h['user_name'] = x.a_account_name - h['user_status'] = x.a_account_status - h['user_lock'] = x.a_account_lock - h['user_desc'] = x.a_account_desc - ret.append(h) - return ret - - -def delete_user(user_id): - db = get_db() - sql = 'DELETE FROM `{}account` WHERE `account_id`={};'.format(db.table_prefix, int(user_id)) - return db.exec(sql) - - -def lock_user(user_id, lock_status): - db = get_db() - sql = 'UPDATE `{}account` SET `account_lock`={} WHERE `account_id`={};'.format(db.table_prefix, lock_status, int(user_id)) - return db.exec(sql) - - -def reset_user(user_id): - db = get_db() - _new_sec_password = sec_generate_password('123456') - sql = 'UPDATE `{}account` SET `account_pwd`="{}" WHERE `account_id`={};'.format(db.table_prefix, _new_sec_password, int(user_id)) - return db.exec(sql) - - -def modify_user(user_id, user_desc): - db = get_db() - sql = 'UPDATE `{}account` SET `account_desc`="{}" WHERE `account_id`={};'.format(db.table_prefix, 
user_desc, int(user_id)) - return db.exec(sql) - - -def add_user(user_name, user_pwd, user_desc): - db = get_db() - sql = 'SELECT `account_id` FROM `{}account` WHERE `account_name`="{}";'.format(db.table_prefix, user_name) - db_ret = db.query(sql) - if db_ret is None or len(db_ret) != 0: - return -100 - - sec_password = sec_generate_password(user_pwd) - sql = 'INSERT INTO `{}account` (`account_type`, `account_name`, `account_pwd`, `account_status`,' \ - '`account_lock`,`account_desc`) VALUES (1,"{}","{}",0,0,"{}")'.format(db.table_prefix, user_name, sec_password, user_desc) - ret = db.exec(sql) - if ret: - return 0 - return -101 - - -def alloc_host(user_name, host_list): - db = get_db() - field_a = ['host_id'] - sql = 'SELECT {} FROM `{}auth` AS a WHERE `account_name`="{}";'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix, user_name) - db_ret = db.query(sql) - ret = dict() - for item in db_ret: - x = DbItem() - x.load(item, ['a_{}'.format(i) for i in field_a]) - host_id = int(x.a_host_id) - ret[host_id] = host_id - - a_list = list() - for item in host_list: - if item in ret: - pass - else: - a_list.append(item) - try: - for item in a_list: - host_id = int(item) - sql = 'INSERT INTO `{}auth` (`account_name`, `host_id`) VALUES ("{}", {});'.format(db.table_prefix, user_name, host_id) - ret = db.exec(sql) - if not ret: - return False - return True - except: - return False - - -def alloc_host_user(user_name, host_auth_dict): - db = get_db() - field_a = ['host_id', 'host_auth_id'] - sql = 'SELECT {} FROM `{}auth` AS a WHERE `account_name`="{}";'.format(','.join(['`a`.`{}`'.format(i) for i in field_a]), db.table_prefix, user_name) - db_ret = db.query(sql) - ret = dict() - for item in db_ret: - x = DbItem() - x.load(item, ['a_{}'.format(i) for i in field_a]) - host_id = int(x.a_host_id) - host_auth_id = int(x.a_host_auth_id) - if host_id not in ret: - ret[host_id] = dict() - - temp = ret[host_id] - temp[host_auth_id] = host_id - ret[host_id] = temp - - add_dict = dict() - for k, v in host_auth_dict.items(): - host_id = int(k) - auth_id_list = v - for item in auth_id_list: - host_auth_id = int(item) - if host_id not in ret: - add_dict[host_auth_id] = host_id - continue - temp = ret[host_id] - if host_auth_id not in temp: - add_dict[host_auth_id] = host_id - continue - - try: - for k, v in add_dict.items(): - host_auth_id = int(k) - host_id = int(v) - sql = 'INSERT INTO `{}auth` (`account_name`, `host_id`, `host_auth_id`) VALUES ("{}", {}, {});'.format(db.table_prefix, user_name, host_id, host_auth_id) - ret = db.exec(sql) - if not ret: - return False - return True - except: - return False - - -def delete_host(user_name, host_list): - db = get_db() - try: - for item in host_list: - host_id = int(item) - sql = 'DELETE FROM `{}auth` WHERE `account_name`="{}" AND `host_id`={};'.format(db.table_prefix, user_name, host_id) - ret = db.exec(sql) - if not ret: - return False - return True - except: - return False - - -def delete_host_user(user_name, auth_id_list): - db = get_db() - try: - for item in auth_id_list: - auth_id = int(item) - sql = 'DELETE FROM `{}auth` WHERE `account_name`="{}" AND `auth_id`={};'.format(db.table_prefix, user_name, auth_id) - ret = db.exec(sql) - if not ret: - return False - return True - except: - return False - - -def get_log_list(_filter, limit): - db = get_db() - - _where = '' - - if len(_filter) > 0: - _where = 'WHERE ( ' - - need_and = False - for k in _filter: - if k == 'account_name': - if need_and: - _where += ' AND' - _where += ' 
`a`.`account_name`="{}"'.format(_filter[k]) - need_and = True - - if k == 'user_name': - if need_and: - _where += ' AND' - _where += ' `a`.`account_name`="{}"'.format(_filter[k]) - need_and = True - - elif k == 'search': - # 查找,限于主机ID和IP地址,前者是数字,只能精确查找,后者可以模糊匹配 - # 因此,先判断搜索项能否转换为数字。 - - if need_and: - _where += ' AND ' - - _where += '(' - _where += '`a`.`host_ip` LIKE "%{}%" )'.format(_filter[k]) - need_and = True - _where += ')' - - # http://www.jb51.net/article/46015.htm - field_a = ['id', 'session_id', 'account_name', 'host_ip', 'host_port', 'auth_type', 'sys_type', 'user_name', 'ret_code', - 'begin_time', 'end_time', 'log_time', 'protocol'] - - sql = 'SELECT COUNT(*) FROM `{}log` AS a {};'.format(db.table_prefix, _where) - - db_ret = db.query(sql) - total_count = db_ret[0][0] - # 修正分页数据 - _limit = '' - if len(limit) > 0: - _page_index = limit['page_index'] - _per_page = limit['per_page'] - _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) - - if _page_index * _per_page >= total_count: - _page_index = int(total_count / _per_page) - _limit = 'LIMIT {},{}'.format(_page_index * _per_page, (_page_index + 1) * _per_page) - - sql = 'SELECT {} FROM `{}log` AS a {} ORDER BY `begin_time` DESC {};'.format(','.join(['a.{}'.format(i) for i in field_a]), db.table_prefix, _where, _limit) - db_ret = db.query(sql) - - ret = list() - for item in db_ret: - x = DbItem() - x.load(item, ['a_{}'.format(i) for i in field_a]) - h = dict() - h['id'] = x.a_id - h['session_id'] = x.a_session_id - h['account_name'] = x.a_account_name - h['host_ip'] = x.a_host_ip - h['host_port'] = x.a_host_port - h['auth_type'] = x.a_auth_type - h['sys_type'] = x.a_sys_type - h['user_name'] = x.a_user_name - h['ret_code'] = x.a_ret_code - cost_time = (x.a_end_time - x.a_begin_time) - if cost_time < 0: - cost_time = 0 - h['cost_time'] = cost_time - h['begin_time'] = x.a_begin_time - if x.a_protocol is not None: - h['protocol'] = x.a_protocol - else: - if x.a_sys_type == 1: - h['protocol'] = 1 - else: - h['protocol'] = 2 - - ret.append(h) - - return total_count, ret diff --git a/server/www/teleport/app/eom_common/__init__.py b/server/www/teleport/app/eom_common/__init__.py deleted file mode 100644 index 633f866..0000000 --- a/server/www/teleport/app/eom_common/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- - diff --git a/server/www/teleport/app/eom_common/eomcore/__init__.py b/server/www/teleport/app/eom_common/eomcore/__init__.py deleted file mode 100644 index 2b99267..0000000 --- a/server/www/teleport/app/eom_common/eomcore/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- - -"""EOM Core Package.""" - -__version__ = '1.0.0.1' - -# import eomcore.logger as eom_log diff --git a/server/www/teleport/app/eom_common/eomcore/eom_mysql.py b/server/www/teleport/app/eom_common/eomcore/eom_mysql.py deleted file mode 100644 index aeb9908..0000000 --- a/server/www/teleport/app/eom_common/eomcore/eom_mysql.py +++ /dev/null @@ -1,243 +0,0 @@ -# -*- coding: utf-8 -*- - -import pymysql -import threading -from .logger import * - -mysql_pool = None - - -def get_mysql_pool(): - global mysql_pool - if mysql_pool is None: - mysql_pool = MySqlPool() - return mysql_pool - - -class MySQL: - - def __init__(self, host, user, pwd, db, port=3306): - self.host = host - self.port = port - self.user = user - self.pwd = pwd - self.db = db - self.login_timeout = 3 - self.conn = None - - def connect(self): - """ - 得到连接信息 - 返回: conn.cursor() - """ - if not self.db: - raise (NameError, 
"没有设置数据库信息") - # self.conn = pymysql.connect(host=self.host, port=self.port, user=self.user, password=self.pwd, - # login_timeout=self.login_timeout, database=self.db, charset="utf8") - try: - if self.conn is not None: - self.conn.ping() - else: - self.conn = pymysql.connect(host=self.host, - user=self.user, - passwd=self.pwd, - db=self.db, - port=self.port, - connect_timeout=self.login_timeout, - charset='utf8') - except pymysql.err.OperationalError: - log.e('pymsql 连接数据库失败[%s:%d]\n' % (self.host, self.port)) - return None - except Exception as e: - log.e('con 连接数据库失败[%s:%d]\n' % (self.host, self.port)) - return None - - cur = self.conn.cursor() - if not cur: - log.e('cur 连接数据库失败[%s:%d]\n' % (self.host, self.port)) - raise (NameError, "连接数据库失败") - else: - return cur - - # 调用实例 ms.ExecProcQuery('exec P_Agent_Cmd_Get @CmdGroupId=7') - def ExecProcQuery(self, sql): - try: - if self.connect() is None: - self.conn = None - return None - - cur = self.conn.cursor() - - cur.execute(sql) - - resList = cur.fetchall() - self.conn.commit() - except pymysql.OperationalError as e: - if self.conn is not None: - self.conn.close() - log.e('ExecProcQuery[%s,%s]\n' % (sql, str(e))) - return None - except Exception as e: - if self.conn is not None: - self.conn.close() - log.e('ExecProcQuery[%s,%s]\n' % (sql, str(e))) - return None - return resList - - def ExecProcNonQuery(self, sql): - try: - - if self.connect() is None: - self.conn = None - return False - - cur = self.conn.cursor() - cur.execute(sql) - self.conn.commit() - return True - except pymysql.OperationalError as e: - # self.conn.close() - if self.conn is not None: - self.conn.close() - log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) - return False - except Exception as e: - if self.conn is not None: - self.conn.close() - log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) - return False - - @staticmethod - def ExecNonQuery(mysql, sql): - try: - if mysql.connect() is None: - mysql.conn = None - return False - - cur = mysql.conn.cursor() - cur.execute(sql) - # self.conn.commit() - return True - except pymysql.OperationalError as e: - # self.conn.close() - if mysql.conn is not None: - mysql.conn.close() - log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) - return False - except Exception as e: - if mysql.conn is not None: - mysql.conn.close() - log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) - return False - - @staticmethod - def EndExecNonQuery(mysql): - try: - if mysql is None or mysql.conn is None: - return False - mysql.conn.commit() - return True - except pymysql.OperationalError as e: - # self.conn.close() - if mysql.conn is not None: - mysql.conn.close() - return False - except Exception as e: - if mysql.conn is not None: - mysql.conn.close() - return False - - def CallProc(self, proc_name, in_args, out_in_args=None): - sql = '' - ret_code = list() - try: - # print(in_args) - result = list() - - self.connect() - - cur = self.conn.cursor() - cur.callproc(proc_name, in_args) - # - - data_set = cur.fetchall() - result.append(data_set) - while True: - has_set = cur.nextset() - if not has_set: - break - data_set = cur.fetchall() - result.append(data_set) - - cur.execute('select 0;') - self.conn.commit() - - if out_in_args is not None: - sql = 'select ' - for item in out_in_args: - str_item = '@_{0}_{1},'.format(proc_name, item) - sql += str_item - - sql = sql[:-1] - code = cur.execute(sql) - # code = cur.execute('select @_p_test_1_2,@_p_test_1_3,@_p_test_1_4') - # ret_code = list() - if code == 1: - (data_set,) = cur.fetchall() - length = 
len(data_set) - for i in range(length): - ret_code.append(data_set[i]) - return result, ret_code - - except pymysql.OperationalError as e: - if self.conn is not None: - self.conn.close() - log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) - return None - except Exception as e: - if self.conn is not None: - self.conn.close() - log.e('ExecProcNonQuery[%s,%s]\n' % (sql, str(e))) - return None - - -class MySqlPool: - def __init__(self): - self._conn_log = dict() - self._conn_sys = dict() - self._conn_common = dict() - self._db_ip = '' - self._db_port = 0 - self._db_user = '' - self._db_pass = '' - self._locker_log = threading.RLock() - self._locker_sys1 = threading.RLock() - self._locker_sys2 = threading.RLock() - - def init(self, db_ip, db_port, db_user, db_pass): - self._db_ip = db_ip - self._db_port = db_port - self._db_user = db_user - self._db_pass = db_pass - - def get_websqlcon(self): - with self._locker_log: - thread_id = threading.get_ident() - if thread_id not in self._conn_log: - my_sql = MySQL(self._db_ip, self._db_user, self._db_pass, 'ts_web', self._db_port) - self._conn_log[thread_id] = my_sql - return my_sql - - my_sql = self._conn_log[thread_id] - return my_sql - - def get_tssqlcon(self): - with self._locker_sys1: - thread_id = threading.get_ident() - if thread_id not in self._conn_sys: - my_sql = MySQL(self._db_ip, self._db_user, self._db_pass, 'ts_db', self._db_port) - self._conn_sys[thread_id] = my_sql - return my_sql - - my_sql = self._conn_sys[thread_id] - return my_sql diff --git a/server/www/teleport/app/eom_common/eomcore/eom_sqlite.py b/server/www/teleport/app/eom_common/eomcore/eom_sqlite.py deleted file mode 100644 index a4c693a..0000000 --- a/server/www/teleport/app/eom_common/eomcore/eom_sqlite.py +++ /dev/null @@ -1,153 +0,0 @@ -# coding=utf-8 -# -# Created: 04/02/2012 -# ------------------------------------------------------------------------------- - -import os -import sqlite3 -import threading - -from .logger import log - -sqlite_pool = None - - -def get_sqlite_pool(): - global sqlite_pool - if sqlite_pool is None: - sqlite_pool = SqlitePool() - return sqlite_pool - - -class eom_sqlite: - """ - """ - - def __init__(self, path): - self._db_file = path - self._conn = None - - def connect(self): - # if not os.path.exists(self._db_file): - # return None - try: - self._conn = sqlite3.connect(self._db_file) - except: - self._conn = None - raise RuntimeError('can not open database.') - return self._conn - - def ExecProcQuery(self, sql): - if self._conn is None: - if self.connect() is None: - return None - cursor = self._conn.cursor() - try: - - cursor.execute(sql) - db_ret = cursor.fetchall() - return db_ret - except Exception: - return None - finally: - cursor.close() - - def ExecProcNonQuery(self, sql): - if self._conn is None: - if self.connect() is None: - return False - - cursor = self._conn.cursor() - try: - cursor.execute(sql) - self._conn.commit() - except Exception: - log.e('can not create/open database.\n') - return False - finally: - cursor.close() - - return True - - def ExecManyProcNonQuery(self, sql): - if self._conn is None: - if self.connect() is None: - return False - - cursor = self._conn.cursor() - try: - cursor.executescript(sql) - self._conn.commit() - cursor.close() - except Exception as e: - log.e('can not create/open database.\n') - return False - - return True - - def close(self): - self._conn.close() - self._conn = None - - -class SqlitePool: - def __init__(self): - self._conn_sys = dict() - self._path = '' - self._locker_sys = 
threading.RLock() - self._config_server_ip = '' - - def init(self, path): - self._conn_sys.clear() - self._path = os.path.join(path, 'ts_db.db') - log.w('use sqlite database, db file: {}\n'.format(self._path)) - if not os.path.exists(self._path): - return False - - try: - sql_con = self.get_tssqlcon() - str_sql = 'SELECT value FROM ts_config WHERE name=\"ts_server_ip\";' - db_ret = sql_con.ExecProcQuery(str_sql) - self._config_server_ip = db_ret[0][0] - except Exception: - self._config_server_ip = '127.0.0.1' - return True - - def init_full_path(self, full_path): - self._conn_sys.clear() - self._path = full_path - if not os.path.exists(self._path): - return False - - try: - sql_con = self.get_tssqlcon() - str_sql = 'SELECT value FROM ts_config WHERE name=\"ts_server_ip\";' - db_ret = sql_con.ExecProcQuery(str_sql) - self._config_server_ip = db_ret[0][0] - except Exception: - self._config_server_ip = '127.0.0.1' - return True - - def get_config_server_ip(self): - return self._config_server_ip - - def get_tssqlcon(self): - with self._locker_sys: - thread_id = threading.get_ident() - if thread_id not in self._conn_sys: - _eom_sqlite = eom_sqlite(self._path) - self._conn_sys[thread_id] = _eom_sqlite - else: - _eom_sqlite = self._conn_sys[thread_id] - - return _eom_sqlite - - def close(self): - with self._locker_sys: - thread_id = threading.get_ident() - if thread_id not in self._conn_sys: - return - else: - _eom_sqlite = self._conn_sys[thread_id] - self._conn_sys.pop(thread_id) - _eom_sqlite.close() diff --git a/server/www/teleport/app/eom_main.py b/server/www/teleport/app/eom_main.py deleted file mode 100644 index ace7453..0000000 --- a/server/www/teleport/app/eom_main.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- - -import os -import sys -from eom_env import * -import eom_app.app as app - - -def main(): - options = { - # app_path 网站程序根路径(应该是本文件所在目录的上一级目录) - 'app_path': PATH_APP_ROOT, - - # cfg_path 网站配置文件路径 - # 'cfg_path': PATH_CONF, - - # log_path 网站运行时日志文件路径 - # 'log_path': PATH_LOG, - - # static_path 网站静态文件路径 - 'static_path': os.path.join(PATH_APP_ROOT, 'static'), - - # data_path 网站数据文件路径 - 'data_path': PATH_DATA, - - # template_path 网站模板文件路径 - 'template_path': os.path.join(PATH_APP_ROOT, 'view'), - - # res_path 网站资源文件路径 - 'res_path': os.path.join(PATH_APP_ROOT, 'res') - } - - return app.run(options) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/server/www/teleport/app_bootstrap.py b/server/www/teleport/app_bootstrap.py new file mode 100644 index 0000000..324dca3 --- /dev/null +++ b/server/www/teleport/app_bootstrap.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- + +import os +import sys + +sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'webroot')) + + +def main(): + from app.app_env import PATH_APP_ROOT, PATH_DATA + from app.base.webapp import get_web_app + _web_app = get_web_app() + if not _web_app.init(PATH_APP_ROOT, PATH_DATA): + return 1 + + return _web_app.run() + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/server/www/teleport/static/css/auth.css b/server/www/teleport/static/css/auth.css deleted file mode 100644 index 7b7d0c6..0000000 --- a/server/www/teleport/static/css/auth.css +++ /dev/null @@ -1 +0,0 @@ -@charset "utf-8";body{padding-top:70px;padding-bottom:24px;background-color:#ececed}#head nav.navbar{height:70px;line-height:70px;background-color:#333;color:#fff}#head .logo .desc{display:block;float:right;color:#ccc;margin-top:10px;font-size:18px}#foot 
nav.navbar{min-height:24px;height:24px;line-height:24px;background-color:#ddd;color:#fff;font-size:12px;border-top:1px solid #ccc}#foot nav.navbar .container{height:24px}#foot nav.navbar p{margin:0 auto;text-align:center;color:#333}#content{margin:10px 0 50px 0}.auth-box{margin-top:30px;min-height:120px;border:1px solid #ccc;border-radius:8px;background-color:rgba(255,255,255,0.6)}.auth-box .header{min-height:50px;height:50px;border:none;box-shadow:none;border-bottom:1px solid #ccc}.auth-box .header .title{display:inline-block;float:left;margin-left:60px;height:24px;margin-top:25px;line-height:16px;font-size:20px;color:#999}.auth-box .header .selected{border-bottom:2px solid #4882cc;color:#555}.auth-box .header .title:hover{border-bottom:2px solid #5396eb}.auth-box .inputarea{margin:30px}.auth-box .inputarea .input-group-addon{padding:0 5px 0 5px}.auth-box .inputarea p.input-addon-desc{text-align:right;padding:0 5px 0 5px;color:#999}#leftside{width:560px;height:560px;padding-top:60px;background:url(../img/login/side-001.jpg) 0 0 no-repeat}@media screen and (max-width:990px){#leftside{display:none}}#leftside h1{font-size:24px;color:#888}#leftside p{font-size:18px;color:#888;padding-left:24px}.auth-box .inputbox{margin-bottom:10px}.auth-box-lg .inputbox{margin-bottom:20px}.auth-box .op_box{display:block;padding:5px;border-radius:3px;text-align:center;margin:5px 20px 10px 20px}.auth-box .op_error{background:#fbb}.auth-box .op_wait{background:#ccc}.auth-box .quick-area{padding:80px 0 80px 0}.auth-box .quick-area .quick-disc{text-align:center;margin-bottom:20px}.auth-box .quick-area .quick-no{padding-top:80px;padding-bottom:100px}.auth-box .quick-area .quick-yes{text-align:center}.auth-box .quick-area .quick-yes .quick-account{display:inline-block;margin:auto;margin-bottom:20px}.auth-box .quick-area .quick-yes .quick-account:hover .quick-image{box-shadow:0 0 8px #00c2f6}.auth-box .quick-area .quick-yes .quick-image{display:block;width:82px;height:82px;line-height:80px;font-size:64px;margin:auto;border:1px solid #a4cdf6;box-shadow:0 0 6px #a7d1fb}.auth-box .quick-area .quick-yes .quick-name{display:block;margin-top:5px} \ No newline at end of file diff --git a/server/www/teleport/static/css/dashboard.css b/server/www/teleport/static/css/dashboard.css index be5ea68..1d335e8 100644 --- a/server/www/teleport/static/css/dashboard.css +++ b/server/www/teleport/static/css/dashboard.css @@ -1 +1 @@ -@charset "utf-8";.page-content{padding:15px 30px 15px 30px}.stats{overflow:hidden;padding:15px;color:#686868;background-color:#fff;border-radius:3px;margin-bottom:10px}.stats span.sub-name{font-size:11px;font-weight:300}.stats.stats-warning{position:relative;background-color:#c86124}.stats.stats-warning .stats-content{text-align:center;font-size:24px}.stats .loading{color:#fff;font-size:14px}.stats.stats-id-host{border-top:5px solid #407deb}.stats.stats-id-user{border-top:5px solid #eba61e}.stats.stats-id-connect{border-top:5px solid #14c13c}.stats.stats-box{position:relative;height:116px}.stats.stats-box .stats-content{padding-left:100px}.stats.stats-box .stats-icon{font-size:130px;line-height:130px;left:20px;text-align:center;position:absolute;color:rgba(0,0,0,0.05)}.stats.stats-box .stats-name{font-size:18px;font-weight:500;color:rgba(0,0,0,0.6)}.stats.stats-box .stats-value{margin-top:5px;color:rgba(0,0,0,0.6);font-size:48px;line-height:52px;font-weight:300;white-space:nowrap;padding-left:20px}.stats.stats-bar{position:relative;height:320px}.stats.stats-bar 
.stats-name{font-size:16px;font-weight:500;color:rgba(0,0,0,0.6)}.stats.stats-bar .stats-value{margin-top:5px;color:rgba(0,0,0,0.6);font-size:48px;line-height:52px;font-weight:300;white-space:nowrap;padding-left:20px}.stats.stats-first{border-left:none}.stats a{color:#eee;color:rgba(255,255,255,0.7)}.stats a:hover{color:#fff}/*# sourceMappingURL=dashboard.css.map */ \ No newline at end of file +@charset "utf-8";html{font-size:13px}body{font-family:-apple-system,system-ui,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"PingFang SC","Hiragino Sans GB","Microsoft YaHei",sans-serif;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-state{text-align:center;white-space:nowrap}.remote-action-group ul li.remote-action-state.state-disabled{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.remote-action-group ul li.remote-action-state.state-disabled>i.fa{color:#b53a2f}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 
0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li.remote-action-btn:first-child{border:none}.remote-action-group ul li.remote-action-btn:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li.remote-action-btn:last-child{border:none}.remote-action-group ul li.remote-action-btn:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-info-group{margin-bottom:3px;height:28px}.remote-info-group ul{display:inline-block;height:28px;margin:0;padding:0}.remote-info-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-info-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-info-group ul li.remote-action-input{background:none;padding:4px 0}.remote-info-group ul li.remote-action-input select{border:none}.remote-info-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-info-group ul li select{margin-top:-3px}.remote-info-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-info-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" 
";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}.disable-bg{position:absolute;background:url(../img/css/disable-bg.png) repeat;opacity:.45;z-index:990}.disable-message{display:inline-block;font-size:160%;position:absolute;padding:20px 40px;border:1px solid #2b0002;background-color:#65181a;color:#fff;opacity:.85;z-index:991}.btn-group-sm>.btn{padding:3px 5px;font-size:12px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{padding:3px 6px;border-radius:10px;font-size:13px;font-weight:400;background-color:#999}.badge.badge-sm{font-size:12px;padding:3px 5px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-8px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.btn-success .badge{color:#fff}.label{display:inline-block;padding:5px 10px;margin:2px;font-size:13px;font-weight:400;background-color:#999}.label.label-sm{font-size:12px;padding:3px 8px 4px 8px;margin-top:0;border-radius:3px}.label.label-ignore{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.modal .modal-content{border-radius:0}.modal .modal-header .close{margin-top:-4px;margin-right:-6px}.modal .modal-header .close:hover{color:#9c3023;opacity:1}.modal .modal-header .close:active,.modal .modal-header .close:focus,.modal .modal-header .close:visited{-webkit-appearance:none}.modal .form-horizontal .form-group,.modal .row{margin-left:0;margin-right:0}.alert{border-radius:0;padding:5px;margin-bottom:10px}.dropdown-menu{min-width:0;font-size:13px}.form-control-sm{padding:3px 5px;font-size:13px;height:inherit}.form-group .control-label.require{color:#505050;font-weight:bold}.form-group .control-label.require:before{font-weight:normal;color:#ac4e43;position:absolute;margin-left:-1.2em;margin-top:1px;content:"\f069";font-size:8px;font-family:'FontAwesome'}.form-group .control-desc{padding-top:6px;color:#999}label.form-control-static input{display:inline-block;position:relative;margin-top:4px}.control-desc 
.popover{max-width:none;font-size:13px}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important}#gritter-notice-wrapper{width:320px;max-width:480px}.page-content-dashboard{padding:20px 25px}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:10px;padding-left:10px}.stats{overflow:hidden;color:#686868;background-color:#fff;margin-bottom:10px}.stats.stats-id-host{border-top:5px solid #348fe2;background-color:#e7f1fb}.stats.stats-id-user{border-top:5px solid #f57523;background-color:#feefe6}.stats.stats-id-account{border-top:5px solid #d34242;background-color:#fae7e7}.stats.stats-id-connect{border-top:5px solid #368142;background-color:#def1e1}.stats.stats-box{position:relative;height:98px;box-shadow:1px 1px 2px rgba(0,0,0,0.3)}.stats.stats-box .stats-content{padding-left:20px}.stats.stats-box .stats-icon{font-size:130px;line-height:130px;top:5px;right:-20px;position:absolute;color:rgba(0,0,0,0.07)}.stats.stats-box .stats-name{font-size:16px;font-weight:500;padding-top:10px;color:rgba(0,0,0,0.6)}.stats.stats-box .stats-value{color:rgba(0,0,0,0.6);font-size:42px;font-weight:300;white-space:nowrap;padding-left:20px}.stats.stats-bar{position:relative;height:280px;padding:8px;box-shadow:1px 1px 2px rgba(0,0,0,0.3)}.stats.stats-bar .stats-name{font-size:14px;font-weight:500;color:rgba(0,0,0,0.6)}.stats.stats-bar .stats-value{margin-top:5px}.stats.stats-first{border-left:none}.stats a{color:#eee;color:rgba(255,255,255,0.7)}.stats a:hover{color:#fff}/*# sourceMappingURL=dashboard.css.map */ \ No newline at end of file diff --git a/server/www/teleport/static/css/dashboard.css.map 
b/server/www/teleport/static/css/dashboard.css.map new file mode 100644 index 0000000..bc7f7f3 --- /dev/null +++ b/server/www/teleport/static/css/dashboard.css.map @@ -0,0 +1 @@ +{"version":3,"sources":["dashboard.less","_base.less","_overwrite_bootstrap.less","_overwrite_gritter.less"],"names":[],"mappings":"AAAA,SAAS,QCeT,KACE,eAGF,KACE,uDAViE,kBAAoB,uBAAyB,cAAe,mBAAoB,4BAUjJ,CAEA,wBAAA,CACA,WAGF,KAAM,KACJ,YAGF,gBACE,iBAGF,EACE,qBAGF,CAAC,MACC,qBAGF,CAAC,OACC,qBAGF,CAAC,QACC,qBAGF,CAAC,SACC,qBAGF,OACE,aAGF,MACE,mBAGF,aACE,WAGF,QACE,eAGF,aACE,cAAA,CACA,WAGF,MACE,mDAjE4D,wBAoE9D,EAAE,OACA,cAAA,CACA,kBAOF,iBACE,mBAEE,gBADF,KACG,aACC,0BAAA,CACA,8BAEF,gBALF,KAKG,YACC,2BAAA,CACA,+BAKN,qBACE,iBAAA,CACA,YAFF,oBAKE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UATJ,oBAKE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,oBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,oBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,oBAxBJ,GAME,GAkBG,oBAIC,QACE,YAIJ,oBAjCJ,GAME,GA2BG,4BACC,WAGF,oBArCJ,GAME,GA+BG,qBAEC,iBAAA,CACA,mBAEA,oBA1CN,GAME,GA+BG,oBAKE,gBACC,wBAAA,CACA,UAAA,CACA,+BAEA,oBA/CR,GAME,GA+BG,oBAKE,eAKG,EAAG,IACH,cAKN,oBArDJ,GAME,GA+CG,wBAAyB,oBArD9B,GAME,GA+C6B,oBAAqB,oBArDpD,GAME,GA+CmD,wBAC/C,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,eAAA,CACA,uBAEF,oBA5DJ,GAME,GAsDG,wBACC,aAAA,CACA,WAEF,oBAhEJ,GAME,GA0DG,oBAAqB,oBAhE1B,GAME,GA0DyB,wBAAyB,oBAhEpD,GAME,GA0DmD,4BAC/C,WAEF,oBAnEJ,GAME,GA6DG,oBAAqB,oBAnE1B,GAME,GA6DyB,4BACrB,iBAGF,oBAvEJ,GAME,GAiEG,wBAAyB,oBAvE9B,GAME,GAiE6B,sBAAuB,oBAvEtD,GAME,GAiEqD,sBACjD,iBAAA,CACA,eAAA,CACA,WAEF,oBA5EJ,GAME,GAsEG,wBACC,wBAAA,CACA,WAEF,oBAhFJ,GAME,GA0EG,sBACC,wBAAA,CACA,WAEF,oBApFJ,GAME,GA8EG,sBACC,wBAAA,CACA,WA3FR,oBAKE,GAME,GAmFE,MACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,gBAnGR,oBAKE,GAME,GA2FE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3GR,oBAKE,GAME,GAkGE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAhHR,oBAKE,GAME,GAuGE,QACE,gBAGF,oBAjHJ,GAME,GA2GG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,oBAvHJ,GAME,GAiHG,kBAAkB,aACjB,YADF,oBAvHJ,GAME,GAiHG,kBAAkB,YAEjB,MACE,0BAAA,CACA,8BAIJ,oBA/HJ,GAME,GAyHG,YACC,2BAAA,CACA,+BAGF,oBApIJ,GAME,GA8HG,kBAAkB,YACjB,YADF,oBApIJ,GAME,GA8HG,kBAAkB,WAEjB,MACE,2BAAA,CACA,+BAQV,mBACE,iBAAA,CACA,YAFF,kBAIE,IACE,oBAAA,CACA,WAAA,CACA,QAAA,CACA,UARJ,kBAIE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAGA,yBAAA,CACA,2BAAA,CACA,6BAEA,kBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,kBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,kBAxBJ,GAME,GAkBG,oBAIC,QACE,YAjCV,kBAIE,GAME,GA2BE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA1CR,kBAIE,GAME,GAkCE,QACE,gBAGF,kBA5CJ,GAME,GAsCG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,kBAlDJ,GAME,GA4CG,YACC,2BAAA,CACA,+BAMR,EAAE,OACA,aAGF,oBACE,kBADF,mBAGE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UAPJ,mBAGE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,mBAlBJ,GAME,GAYG,WACC,YASF,mBA5BJ,GAME,GAsBG,UACC,eAAA,CACA,SAAA,CACA,YAGF,mBAlCJ,GAME,GA4BG,YACC,eAAA,CACA,WAAA,CACA,UAxCR,mBAGE,GAME,GAkCE,eACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,WAAA,CACA,eAAA,CACA,gBAAA,CACA,YAnDR,mBAGE,GAME,GA6CE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3DR,mBAGE,GAME,GAoDE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAMF,mBAnEJ,GAME,GA6DG,aACC,0BAAA,CACA,0BAAA,CACA,8BAHF,mBAnEJ,GAME,GA6DG,YAKC,MALF,mBAnEJ,GAME,GA6DG,YAKO,eACJ,0BAAA,CACA,8BAIJ,mBA9EJ,GAME,GAwEG,YACC,2BAAA,CACA,+BAFF,mBA9EJ,GAME,GAwEG,WAIC,MAJF,mBA9EJ,GAME,GAwEG,WAIO,eACJ,2BAAA,CACA,+BAUV,gBAAgB,OACd,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,UAAA,CACA,QAAS,OAAT,CACA,cAAA,CAC
A,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAAA,CACA,iBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAClF,QAAS,GAAT,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,YACE,iBAAA,CACA,gDAAA,CACA,WAAA,CACA,YAGF,iBACE,oBAAA,CAGA,cAAA,CACA,iBAAA,CACA,iBAAA,CAMA,wBAAA,CAEA,wBAAA,CACA,UAAA,CACA,WAAA,CAEA,YC7eF,aAAc,MACZ,eAAA,CAEA,eAyBF,mBACE,cAAA,CACA,KAAA,CACA,OAAA,CACA,QAAA,CACA,MAAA,CACA,aAIF,YACE,kBAKF,OACE,eAAA,CACA,kBAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,eAAA,CACA,YAAA,CACA,iBAAA,CACA,iBAGF,MAAC,WACC,gBAAA,CACA,iBAGF,MAAC,cACC,wBAAA,CACA,WAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAIJ,YAAa,QACX,WAGF,OACE,oBAAA,CACA,gBAAA,CACA,UAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,uBAAA,CACA,YAAA,CACA,kBAGF,MAAC,cACC,wBAAA,CACA,UAAA,CACA,+BAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAKJ,SAAS,aACP,WAAA,CACA,iBAAA,CACA,sBACA,SAJO,YAIN,QACC,eALJ,SAAS,YAQP,eACE,aAAA,CACA,cAAA,CACA,WA8BJ,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,WAAY,WAAY,WACzH,iBAAA,CACA,iBAOF,YAEE,mBACE,cAHJ,YAME,oBACE,cAPJ,YAUE,MAAK,uBAVP,YAWE,SAAQ,uBACN,cAZJ,YAeE,MAAK,4BAfP,YAeoC,SAAQ,4BACxC,cAIJ,MAEE,gBACE,gBAHJ,MAME,cAAc,QACZ,eAAA,CACA,kBACA,MAHF,cAAc,OAGX,OACC,aAAA,CACA,UAEF,MAPF,cAAc,OAOX,QAAS,MAPZ,cAAc,OAOD,OAAQ,MAPrB,cAAc,OAOQ,SAClB,wBAdN,MAkBE,iBAAiB,aAlBnB,MAkBgC,MAC5B,aAAA,CACA,eAOJ,OACE,eAAA,CACA,WAAA,CACA,mBAGF,eACE,WAAA,CACA,eAMF,iBACE,eAAA,CACA,cAAA,CACA,eAGF,WACE,eAAc,SACZ,aAAA,CACA,iBACA,WAHF,eAAc,QAGX,QACC,kBAAA,CACA,aAAA,CACA,iBAAA,CACA,kBAAA,CACA,cAAA,CACA,QAAS,OAAT,CACA,aAAA,CACA,YAAa,cAZnB,WAeE,eACE,eAAA,CACA,WAIJ,KAAK,oBACH,OACE,oBAAA,CACA,iBAAA,CAEA,eAIJ,aACE,UACE,cAAA,CACA,eC7QJ,wBAKE,aAGF,gBAAiB,cAAe,aAE9B,0BAAA,YAGF,aACE,0BAAA,CACA,4BAGF,gBACE,6BAAA,CACA,+BAGF,eAAgB,cAAe,gBAU7B,SAAA,YACA,SAAA,YACA,OAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,aAAA,YACA,kBAGF,cAAc,QAAS,cAAe,eAAc,QAClD,QAAS,OAAT,YACA,uBAAA,YACA,aAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,UAAA,YACA,aAAA,YACA,iBAAA,YACA,iBAAA,YACA,OAAA,YACA,KAAA,YAcF,eACE,cAAA,YACA,gBAAA,YACA,kBAAA,YACA,eAAA,YACA,UAAA,YACA,gBAAA,YAQF,cAEE,UAAA,YACA,cAAA,YACA,oBAAA,YAGF,cACE,iBADF,cACmB,eADnB,cACkC,cAC9B,8BAAA,YAFJ,cAKE,gBACE,UAAA,YANJ,cASE,eACE,UAAA,YAVJ,cAaE,gBACE,SAAA,YACA,SAAA,YACA,OAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,aAAA,YACA,iBAAA,CACA,kBAAA,YAIJ,gBACE,iBADF,gBACmB,eADnB,gBACkC,cAE9B,4BAAA,YAHJ,gBAME,gBAEE,UAAA,YARJ,gBAWE,eAEE,UAAA,YAbJ,gBAgBE,gBACE,kBAAA,YAKJ,wBACE,WAAA,CAEA,gBHnJF,wBACE,kBAGF,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,WAAY,WAAY,WACzH,kBAAA,CACA,kBA2IF,OACE,eAAA,CAEA,aAAA,CACA,qBAAA,CAEA,mBA6BA,MAAC,eACC,4BAAA,CACA,yBAEF,MAAC,eACC,4BAAA,CACA,yBAEF,MAAC,kBACC,4BAAA,CACA,yBAEF,MAAC,kBACC,4BAAA,CAEA,yBAGF,MAAC,WACC,iBAAA,CACA,WAAA,CACA,uCAHF,MAAC,UAKC,gBACE,kBANJ,MAAC,UASC,aACE,eAAA,CACA,iBAAA,CACA,OAAA,CACA,WAAA,CACA,iBAAA,CACA,uBAfJ,MAAC,UAiBC,aACE,cAAA,CACA,eAAA,CACA,gBAAA,CACA,sBArBJ,MAAC,UAuBC,cACE,qBAAA,CACA,cAAA,CACA,eAAA,CACA,kBAAA,CACA,kBAoCJ,MAAC,WACC,iBAAA,CACA,YAAA,CACA,WAAA,CACA,uCAJF,MAAC,UAUC,aACE,cAAA,CACA,eAAA,CAEA,sBAdJ,MAAC,UAgBC,cACE,eAoMJ,MAAC,aACC,iBA3UJ,MA8UE,GACE,UAAA,CACA,4BACA,MAHF,EAGG,OACC","file":"dashboard.css","sourceRoot":"..\\less"} \ No newline at end of file diff --git a/server/www/teleport/static/css/doc.css b/server/www/teleport/static/css/doc.css deleted file mode 100644 index eca947a..0000000 --- a/server/www/teleport/static/css/doc.css +++ 
/dev/null @@ -1 +0,0 @@ -@charset "utf-8";body{font-family:"Open Sans","Helvetica Neue","Microsoft YaHei","微软雅黑",Helvetica,Arial,sans-serif;font-size:13px;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-group-sm>.btn,.btn-sm{padding:2px 5px}.btn.btn-sm{padding:3px 8px}.btn.btn-icon{padding:3px 6px}.btn.btn-icon.btn-sm{padding:0;font-size:14px;height:24px;width:24px;line-height:24px;border-radius:0}.form-group-sm .input-group .input-group-btn>.btn{height:30px;padding:0 8px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{display:inline-block;min-width:8px;padding:5px 10px;border-radius:10px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.badge.badge-plain{text-shadow:none}.badge.badge-sm{font-size:11px;padding:3px 6px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-6px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.label{display:inline-block;min-width:8px;padding:5px 10px;border-radius:5px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.label.label-plain{text-shadow:none}.label.label-sm{font-size:11px;padding:3px 8px;margin-top:0;border-radius:5px;text-shadow:none}.label.label-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.alert-sm{padding:5px;margin-bottom:10px}.modal-dialog-sm .modal-header{padding:10px}.modal-dialog-sm .modal-body{padding:10px}.modal-dialog-sm .modal-footer{padding:10px}.modal-dialog-sm .form-horizontal .form-group{margin-right:-5px;margin-left:-5px}.modal-dialog-sm .col-sm-1,.modal-dialog-sm .col-sm-2,.modal-dialog-sm .col-sm-3,.modal-dialog-sm .col-sm-4,.modal-dialog-sm .col-sm-5,.modal-dialog-sm .col-sm-6,.modal-dialog-sm .col-sm-7,.modal-dialog-sm .col-sm-8,.modal-dialog-sm .col-sm-9,.modal-dialog-sm .col-sm-10,.modal-dialog-sm .col-sm-11{padding-right:5px;padding-left:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px;min-width:390px}.remote-action-group 
ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul 
li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.form-group-sm .form-control-static{padding:6px 0}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.page-header-fixed{padding-top:48px}.header{border:none;min-height:48px;height:48px;top:0;width:100%;position:fixed;z-index:999}.header .top-navbar{min-height:48px;height:48px;line-height:48px;background-color:#3a3a3a;color:#ccc}.header .top-navbar a{color:#d5d5d5}.header .top-navbar a:hover{color:#5a8fee}.header .top-navbar .brand{float:left;display:inline-block;padding:12px 0;margin:0}.header .top-navbar .brand .site-logo{display:block;width:86px;height:24px;background:url(../img/site-logo-small.png) no-repeat}.header .top-navbar .title-container{float:left;display:inline-block;margin:0;padding:0}.header .top-navbar .title-container .title{font-size:18px}.header .top-navbar .status-container{float:right}.footer{width:100%;height:36px;line-height:36px;background-color:#d5d5d5;border-top:1px solid #a2a2a2;border-bottom:1px solid #efefef;z-index:998;text-align:center}.footer.footer-fixed-bottom{bottom:0;position:fixed}.row-sm .col-sm-1,.row-sm .col-sm-2,.row-sm .col-sm-3,.row-sm .col-sm-4,.row-sm .col-sm-5,.row-sm .col-sm-6,.row-sm .col-sm-7,.row-sm .col-sm-8,.row-sm .col-sm-9,.row-sm .col-sm-10,.row-sm .col-sm-11{padding-right:5px;padding-left:5px}.sidebar{background-color:#fff;width:285px;position:fixed}.search-box{padding:10px;border-bottom:1px solid #eee;margin-bottom:10px}.tree-view{overflow-x:auto;overflow-y:auto;padding:0 10px}.content{margin-top:15px;margin-bottom:56px;min-height:360px;background-color:#fff;border-radius:5px;padding:10px;margin-left:300px}.jstree-ocl{cursor:default}.jstree-default .fa-folder:before{color:#f59c1a}.jstree-default .jstree-wholerow{cursor:default}.jstree-default>ul>li{padding:4px 0}.jstree-default .jstree-node{margin-left:12px}.jstree-default .jstree-open>.jstree-anchor>.fa-folder:before{content:'\f07c';color:#a26307}.jstree-default>.jstree-no-dots .jstree-closed>.jstree-ocl,.jstree-default>.jstree-no-dots .jstree-open>.jstree-ocl{background:none}.jstree-default>.jstree-no-dots .jstree-closed>.jstree-ocl:before,.jstree-default>.jstree-no-dots .jstree-open>.jstree-ocl:before{font-style:normal;font-family:"FontAwesome"}.jstree-default>.jstree-no-dots .jstree-closed>.jstree-ocl:before{content:'\f054';color:#ccc}.jstree-default>.jstree-no-dots .jstree-open>.jstree-ocl:before{content:'\f078';color:#666}.jstree-default>.jstree-no-dots .jstree-loading>.jstree-ocl{background:url("img/loading_01.gif") center center no-repeat}.jstree-default>.jstree-no-dots .jstree-loading>.jstree-ocl:before{content:''}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px 
!important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important} \ No newline at end of file diff --git a/server/www/teleport/static/css/error.css b/server/www/teleport/static/css/error.css new file mode 100644 index 0000000..51a9402 --- /dev/null +++ b/server/www/teleport/static/css/error.css @@ -0,0 +1 @@ +@charset "utf-8";html{font-size:13px}body{font-family:-apple-system,system-ui,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"PingFang SC","Hiragino Sans GB","Microsoft YaHei",sans-serif;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-state{text-align:center;white-space:nowrap}.remote-action-group ul li.remote-action-state.state-disabled{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.remote-action-group ul 
li.remote-action-state.state-disabled>i.fa{color:#b53a2f}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li.remote-action-btn:first-child{border:none}.remote-action-group ul li.remote-action-btn:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li.remote-action-btn:last-child{border:none}.remote-action-group ul li.remote-action-btn:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-info-group{margin-bottom:3px;height:28px}.remote-info-group ul{display:inline-block;height:28px;margin:0;padding:0}.remote-info-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-info-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-info-group ul li.remote-action-input{background:none;padding:4px 0}.remote-info-group ul li.remote-action-input select{border:none}.remote-info-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-info-group ul li select{margin-top:-3px}.remote-info-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-info-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 
5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}.disable-bg{position:absolute;background:url(../img/css/disable-bg.png) repeat;opacity:.45;z-index:990}.disable-message{display:inline-block;font-size:160%;position:absolute;padding:20px 40px;border:1px solid #2b0002;background-color:#65181a;color:#fff;opacity:.85;z-index:991}body{padding:0;margin:0}.container{min-width:460px}#page-header{height:36px;margin-bottom:10px;border:none;background-color:#2a2a2a;color:#fff}#page-header .title{line-height:36px;font-size:18px}#page-header .logo{margin-top:5px;display:inline-block;width:93px;height:30px;background:url(../img/site-logo-small.png) no-repeat}#page-content{margin-bottom:54px}#page-footer nav.navbar{min-height:24px;height:24px;line-height:24px;background-color:#cdcdcd;font-size:12px;color:#6d6d6d}#page-footer nav.navbar .container{height:24px}#page-footer nav.navbar p{margin:0 auto;text-align:center}.error-box{width:100%;padding-left:20px;margin:30px auto}.error-box .error-icon-box{z-index:-1;position:absolute;min-height:194px;overflow:hidden}.error-box .error-icon-box .fa{margin-top:20px;font-size:164px;color:#ff6500}.error-box .error-message-box{min-height:328px;min-width:300px;border:1px solid #fff;background-color:rgba(255,255,255,0.76);box-shadow:2px 2px 3px rgba(0,0,0,0.3);padding:15px 15px 15px 30px;margin-left:123px}.error-box .error-message-box .title{font-size:180%;margin:15px 0}.error-box .error-message-box hr{border-top:1px solid #d3d3d3;border-bottom:1px solid #fff}.btn-group-sm>.btn{padding:3px 5px;font-size:12px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{padding:3px 6px;border-radius:10px;font-size:13px;font-weight:400;background-color:#999}.badge.badge-sm{font-size:12px;padding:3px 
5px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-8px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.btn-success .badge{color:#fff}.label{display:inline-block;padding:5px 10px;margin:2px;font-size:13px;font-weight:400;background-color:#999}.label.label-sm{font-size:12px;padding:3px 8px 4px 8px;margin-top:0;border-radius:3px}.label.label-ignore{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.modal .modal-content{border-radius:0}.modal .modal-header .close{margin-top:-4px;margin-right:-6px}.modal .modal-header .close:hover{color:#9c3023;opacity:1}.modal .modal-header .close:active,.modal .modal-header .close:focus,.modal .modal-header .close:visited{-webkit-appearance:none}.modal .form-horizontal .form-group,.modal .row{margin-left:0;margin-right:0}.alert{border-radius:0;padding:5px;margin-bottom:10px}.dropdown-menu{min-width:0;font-size:13px}.form-control-sm{padding:3px 5px;font-size:13px;height:inherit}.form-group .control-label.require{color:#505050;font-weight:bold}.form-group .control-label.require:before{font-weight:normal;color:#ac4e43;position:absolute;margin-left:-1.2em;margin-top:1px;content:"\f069";font-size:8px;font-family:'FontAwesome'}.form-group .control-desc{padding-top:6px;color:#999}label.form-control-static input{display:inline-block;position:relative;margin-top:4px}.control-desc .popover{max-width:none;font-size:13px}/*# sourceMappingURL=error.css.map */ \ No newline at end of file diff --git a/server/www/teleport/static/css/error.css.map b/server/www/teleport/static/css/error.css.map new file mode 100644 index 0000000..749d1aa --- /dev/null +++ b/server/www/teleport/static/css/error.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["error.less","_base.less","_overwrite_bootstrap.less"],"names":[],"mappings":"AAAA,SAAS,QCeT,KACE,eAGF,KACE,uDAViE,kBAAoB,uBAAyB,cAAe,mBAAoB,4BAUjJ,CAEA,wBAAA,CACA,WAGF,KAAM,KACJ,YAGF,gBACE,iBAGF,EACE,qBAGF,CAAC,MACC,qBAGF,CAAC,OACC,qBAGF,CAAC,QACC,qBAGF,CAAC,SACC,qBAGF,OACE,aAGF,MACE,mBAGF,aACE,WAGF,QACE,eAGF,aACE,cAAA,CACA,WAGF,MACE,mDAjE4D,wBAoE9D,EAAE,OACA,cAAA,CACA,kBAOF,iBACE,mBAEE,gBADF,KACG,aACC,0BAAA,CACA,8BAEF,gBALF,KAKG,YACC,2BAAA,CACA,+BAKN,qBACE,iBAAA,CACA,YAFF,oBAKE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UATJ,oBAKE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,oBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,oBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,oBAxBJ,GAME,GAkBG,oBAIC,QACE,YAIJ,oBAjCJ,GAME,GA2BG,4BACC,WAGF,oBArCJ,GAME,GA+BG,qBAEC,iBAAA,CACA,mBAEA,oBA1CN,GAME,GA+BG,oBAKE,gBACC,wBAAA,CACA,UAAA,CACA,+BAEA,oBA/CR,GAME,GA+BG,oBAKE,eAKG,EAAG,IACH,cAKN,oBArDJ,GAME,GA+CG,wBAAyB,oBArD9B,GAME,GA+C6B,oBAAqB,oBArDpD,GAME,GA+CmD,wBAC/C,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,eAAA,CACA,uBAEF,oBA5DJ,GAME,GAsDG,wBACC,aAAA,CACA,WAEF,oBAhEJ,GAME,GA0DG,oBAAqB,oBAhE1B,GAME,GA0DyB,wBAAyB,oBAhEpD,GAME,GA0DmD,4BAC/C,WAEF,oBAnEJ,GAME,GA6DG,oBAAqB,oBAnE1B,GAME,GA6DyB,4BACrB,iBAGF,oBAvEJ,GAME,GAiEG,wBAAyB,oBAvE9B,GAME,GAiE6B,sBAAuB,oBAvEtD,GAME,GAiEqD,sBACjD,iBAAA,CACA,eAAA,CACA,WAEF,oBA5EJ,GAME,GAsEG,wBACC,wBAAA,CACA,WAEF,oBAhFJ,GAME,GA0EG,sBACC,wBAAA,CACA,WAEF,oBApFJ,GAME,GA8EG,sBACC,wBAAA,CACA,WA3FR,oBAKE,GAME,GAmFE,MACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,gBAnGR,oBAKE,GAME,GA2FE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3GR,oBAKE,GAME,GAkGE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAhHR,oBAKE,GAME,GAuGE,QACE,gBAGF,oBAjHJ,GAME,GA2GG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,oBAvHJ,GAME,GAiHG,kBAAkB,aACjB,YADF,oBAvHJ,GAME,GAiHG,kBAAkB,YAEjB,MACE,0BAAA,CACA,8BAIJ,oBA/HJ,GAME,GAyHG,YACC,2BAAA,CACA,+BAGF,oBApIJ,GAME,GA8HG,kBAAkB,YACjB,YADF,oBApIJ,GAME,GA8HG,kBAAkB,WAEjB,MACE,2BAAA,CACA,+BAQV,mBACE,iBAAA,CACA,YAFF,kBAIE,IACE,oBAAA,CACA,WAAA,CACA,QAAA,CACA,UARJ,kBAIE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAGA,yBAAA,CACA,2BAAA,CACA,6BAEA,kBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,kBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,kBAxBJ,GAME,GAkBG,oBAIC,QACE,YAjCV,kBAIE,GAME,GA2BE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA1CR,kBAIE,GAME,GAkCE,QACE,gBAGF,kBA5CJ,GAME,GAsCG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,kBAlDJ,GAME,GA4CG,YACC,2BAAA,CACA,+BAMR,EAAE,OACA,aAGF,oBACE,kBADF,mBAGE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UAPJ,mBAGE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,mBAlBJ,GAME,GAYG,WACC,YASF,mBA5BJ,GAME,GAsBG,UACC,eAAA,CACA,SAAA,CACA,YAGF,mBAlCJ,GAME,GA4BG,YACC,eAAA,CACA,WAAA,CACA,UAxCR,mBAGE,GAME,GAkCE,eACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,WAAA,CACA,eAAA,CACA,gBAAA,CACA,YAnDR,mBAGE,GAME,GA6CE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3DR,mBAGE,GAME,GAoDE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAMF,mBAnEJ,GAME,GA6DG,aACC,0BAAA,CACA,0BAAA,CACA,8BAHF,mBAnEJ,GAME,GA6DG,YAKC,MALF,mBAnEJ,GAME,GA6DG,YAKO,eACJ,0BAAA,CACA,8BAIJ,mBA9EJ,GAME,GAwEG,YACC,2BAAA,CACA,+BAFF,mBA9EJ,GAME,GAwEG,WAIC,MAJF,mBA9EJ,GAME,GAwEG,WAIO,eACJ,2BAAA,CACA,+BAUV,gBAAgB,OACd,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,UAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAAA,CACA,iBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAAQ,eAAe,
OAClF,QAAS,GAAT,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,YACE,iBAAA,CACA,gDAAA,CACA,WAAA,CACA,YAGF,iBACE,oBAAA,CAGA,cAAA,CACA,iBAAA,CACA,iBAAA,CAMA,wBAAA,CAEA,wBAAA,CACA,UAAA,CACA,WAAA,CAEA,YD1eF,KACE,SAAA,CACA,SAGF,WACE,gBAGF,aACE,WAAA,CACA,kBAAA,CACA,WAAA,CACA,wBAAA,CACA,WALF,YAME,QACE,gBAAA,CACA,eARJ,YAUE,OACE,cAAA,CACA,oBAAA,CACA,UAAA,CACA,WAAA,CACA,qDAIJ,cACE,mBAGF,YACE,IAAG,QACD,eAAA,CACA,WAAA,CACA,gBAAA,CAEA,wBAAA,CACA,cAAA,CACA,cARJ,YACE,IAAG,OAQD,YACE,YAVN,YACE,IAAG,OAWD,GACE,aAAA,CACA,kBAKN,WAEE,UAAA,CACA,iBAAA,CACA,iBAJF,UAME,iBACE,UAAA,CACA,iBAAA,CACA,gBAAA,CACA,gBAVJ,UAME,gBAKE,KACE,eAAA,CACA,eAAA,CACA,cAdN,UAkBE,oBACE,gBAAA,CACA,eAAA,CACA,qBAAA,CACA,uCAAA,CACA,sCAAA,CACA,2BAAA,CACA,kBAzBJ,UAkBE,mBASE,QACE,cAAA,CAEA,cA9BN,UAkBE,mBAeE,IACE,4BAAA,CACA,6BEzFN,aAAc,MACZ,eAAA,CAEA,eAyBF,mBACE,cAAA,CACA,KAAA,CACA,OAAA,CACA,QAAA,CACA,MAAA,CACA,aAIF,YACE,kBAKF,OACE,eAAA,CACA,kBAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,eAAA,CACA,YAAA,CACA,iBAAA,CACA,iBAGF,MAAC,WACC,gBAAA,CACA,iBAGF,MAAC,cACC,wBAAA,CACA,WAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAIJ,YAAa,QACX,WAGF,OACE,oBAAA,CACA,gBAAA,CACA,UAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,uBAAA,CACA,YAAA,CACA,kBAGF,MAAC,cACC,wBAAA,CACA,UAAA,CACA,+BAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAKJ,SAAS,aACP,WAAA,CACA,iBAAA,CACA,sBACA,SAJO,YAIN,QACC,eALJ,SAAS,YAQP,eACE,aAAA,CACA,cAAA,CACA,WA8BJ,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,WAAY,WAAY,WACzH,iBAAA,CACA,iBAOF,YAEE,mBACE,cAHJ,YAME,oBACE,cAPJ,YAUE,MAAK,uBAVP,YAWE,SAAQ,uBACN,cAZJ,YAeE,MAAK,4BAfP,YAeoC,SAAQ,4BACxC,cAIJ,MAEE,gBACE,gBAHJ,MAME,cAAc,QACZ,eAAA,CACA,kBACA,MAHF,cAAc,OAGX,OACC,aAAA,CACA,UAEF,MAPF,cAAc,OAOX,QAAS,MAPZ,cAAc,OAOD,OAAQ,MAPrB,cAAc,OAOQ,SAClB,wBAdN,MAkBE,iBAAiB,aAlBnB,MAkBgC,MAC5B,aAAA,CACA,eAOJ,OACE,eAAA,CACA,WAAA,CACA,mBAGF,eACE,WAAA,CACA,eAMF,iBACE,eAAA,CACA,cAAA,CACA,eAGF,WACE,eAAc,SACZ,aAAA,CACA,iBACA,WAHF,eAAc,QAGX,QACC,kBAAA,CACA,aAAA,CACA,iBAAA,CACA,kBAAA,CACA,cAAA,CACA,QAAS,OAAT,CACA,aAAA,CACA,YAAa,cAZnB,WAeE,eACE,eAAA,CACA,WAIJ,KAAK,oBACH,OACE,oBAAA,CACA,iBAAA,CAEA,eAIJ,aACE,UACE,cAAA,CACA","file":"error.css","sourceRoot":"..\\less"} \ No newline at end of file diff --git a/server/www/teleport/static/css/img/desktop/icons-tree-24x24.png b/server/www/teleport/static/css/img/desktop/icons-tree-24x24.png deleted file mode 100644 index 25cea67..0000000 Binary files a/server/www/teleport/static/css/img/desktop/icons-tree-24x24.png and /dev/null differ diff --git a/server/www/teleport/static/css/img/desktop/mimetype-16.png b/server/www/teleport/static/css/img/desktop/mimetype-16.png deleted file mode 100644 index 74e4469..0000000 Binary files a/server/www/teleport/static/css/img/desktop/mimetype-16.png and /dev/null differ diff --git a/server/www/teleport/static/css/img/loading_01.gif b/server/www/teleport/static/css/img/loading_01.gif deleted file mode 100644 index e8c2892..0000000 Binary files a/server/www/teleport/static/css/img/loading_01.gif and /dev/null differ diff --git a/server/www/teleport/static/css/login.css b/server/www/teleport/static/css/login.css new file mode 100644 index 0000000..06f5c3b --- /dev/null +++ b/server/www/teleport/static/css/login.css @@ -0,0 +1 @@ +@charset "utf-8";html{font-size:13px}body{font-family:-apple-system,system-ui,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"PingFang SC","Hiragino Sans GB","Microsoft 
YaHei",sans-serif;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-state{text-align:center;white-space:nowrap}.remote-action-group ul li.remote-action-state.state-disabled{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.remote-action-group ul li.remote-action-state.state-disabled>i.fa{color:#b53a2f}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li.remote-action-btn:first-child{border:none}.remote-action-group ul li.remote-action-btn:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li.remote-action-btn:last-child{border:none}.remote-action-group ul li.remote-action-btn:last-child 
.btn{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-info-group{margin-bottom:3px;height:28px}.remote-info-group ul{display:inline-block;height:28px;margin:0;padding:0}.remote-info-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-info-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-info-group ul li.remote-action-input{background:none;padding:4px 0}.remote-info-group ul li.remote-action-input select{border:none}.remote-info-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-info-group ul li select{margin-top:-3px}.remote-info-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-info-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}.disable-bg{position:absolute;background:url(../img/css/disable-bg.png) repeat;opacity:.45;z-index:990}.disable-message{display:inline-block;font-size:160%;position:absolute;padding:20px 40px;border:1px solid 
#2b0002;background-color:#65181a;color:#fff;opacity:.85;z-index:991}body{padding:0;margin:0;overflow:hidden}.bg-blur{position:fixed;top:-20px;left:-20px}.bg-blur-overlay{position:absolute;width:100%;height:100%;background-image:url('data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4gPHN2ZyB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJncmFkIiBncmFkaWVudFVuaXRzPSJvYmplY3RCb3VuZGluZ0JveCIgeDE9IjAuNSIgeTE9IjAuMCIgeDI9IjAuNSIgeTI9IjEuMCI+PHN0b3Agb2Zmc2V0PSI0NiUiIHN0b3AtY29sb3I9IiMwMDAwMDAiIHN0b3Atb3BhY2l0eT0iMC4wOCIvPjxzdG9wIG9mZnNldD0iNTklIiBzdG9wLWNvbG9yPSIjMDAwMDAwIiBzdG9wLW9wYWNpdHk9IjAuMDgiLz48c3RvcCBvZmZzZXQ9IjEwMCUiIHN0b3AtY29sb3I9IiMwMDAwMDAiIHN0b3Atb3BhY2l0eT0iMC45Ii8+PC9saW5lYXJHcmFkaWVudD48L2RlZnM+PHJlY3QgeD0iMCIgeT0iMCIgd2lkdGg9IjEwMCUiIGhlaWdodD0iMTAwJSIgZmlsbD0idXJsKCNncmFkKSIgLz48L3N2Zz4g');background-size:100%;background-image:-webkit-gradient(linear, 50% 0, 50% 100%, color-stop(46%, rgba(0,0,0,0.08)), color-stop(59%, rgba(0,0,0,0.08)), color-stop(100%, rgba(0,0,0,0.9)));background-image:-moz-linear-gradient(top, rgba(0,0,0,0.08) 46%, rgba(0,0,0,0.08) 59%, rgba(0,0,0,0.9) 100%);background-image:-webkit-linear-gradient(top, rgba(0,0,0,0.08) 46%, rgba(0,0,0,0.08) 59%, rgba(0,0,0,0.9) 100%);background-image:linear-gradient(to bottom, rgba(0,0,0,0.08) 46%, rgba(0,0,0,0.08) 59%, rgba(0,0,0,0.9) 100%)}.container{min-width:460px}#page-header nav.navbar{height:70px;margin-bottom:0;border:none;background-color:rgba(0,0,0,0.6);color:#fff}#page-header nav li{display:inline-block;float:left}#page-header .logo{position:relative;margin-top:10px;display:inline-block}#page-header .desc{position:relative;display:inline-block;color:#ccc;top:32px;font-size:24px;vertical-align:baseline;font-family:"Kai","华文楷体","楷体","Microsoft YaHei","微软雅黑",Helvetica,Arial,sans-serif}#page-footer nav.navbar{min-height:36px;height:36px;line-height:36px;background-color:rgba(0,0,0,0.6);font-size:12px;color:#6d6d6d}#page-footer nav.navbar .container{height:24px}#page-footer nav.navbar p{margin:0 auto;text-align:center}.auth-box-container{position:fixed;left:0;top:70px;width:100%}.auth-box-container .auth-box{margin:80px auto 0 auto;width:450px;border:1px solid #fff;border-radius:8px;background-color:rgba(255,255,255,0.6);box-shadow:0 5px 50px rgba(0,0,0,0.9)}.auth-box-container .auth-box .header{min-height:50px;height:50px;border:none;box-shadow:none;border-bottom:1px solid rgba(255,255,255,0.3)}.auth-box-container .auth-box .header .title{display:inline-block;float:left;margin-left:60px;height:24px;margin-top:25px;line-height:16px;font-size:20px;color:#484848}.auth-box-container .auth-box .header .title:hover{border-bottom:2px solid #5396eb}.auth-box-container .auth-box .header .selected{border-bottom:2px solid #4882cc;color:#262b40}.auth-box-container .auth-box .inputarea{margin:30px}.auth-box-container .auth-box .inputarea .input-group-addon{padding:0 20px 0 5px}.auth-box-container .auth-box .inputarea p.input-addon-desc{text-align:right;padding:0 5px 0 5px;color:#636363}.auth-box-container .auth-box .inputbox{margin-bottom:20px}.auth-box-container .auth-box .op_box{display:block;padding:5px;border-radius:3px;text-align:center;margin:5px 20px 10px 20px}.auth-box-container .auth-box .op_error{background:rgba(255,5,0,0.5);color:#fff}.auth-box-container .auth-box .op_wait{background:rgba(255,255,255,0.3)}#slogan-box{padding-top:80px}#msg-slogan{text-align:right;font-size:24px;line-height:48px;font-family:"Kai","华文楷体","楷体","Microsoft 
YaHei","微软雅黑",Helvetica,Arial,sans-serif;color:rgba(255,255,255,0.5)}@media screen and (max-width:990px){#slogan-box{display:none}.auth-box{margin:30px auto 0 auto}}.btn-group-sm>.btn{padding:3px 5px;font-size:12px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{padding:3px 6px;border-radius:10px;font-size:13px;font-weight:400;background-color:#999}.badge.badge-sm{font-size:12px;padding:3px 5px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-8px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.btn-success .badge{color:#fff}.label{display:inline-block;padding:5px 10px;margin:2px;font-size:13px;font-weight:400;background-color:#999}.label.label-sm{font-size:12px;padding:3px 8px 4px 8px;margin-top:0;border-radius:3px}.label.label-ignore{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.modal .modal-content{border-radius:0}.modal .modal-header .close{margin-top:-4px;margin-right:-6px}.modal .modal-header .close:hover{color:#9c3023;opacity:1}.modal .modal-header .close:active,.modal .modal-header .close:focus,.modal .modal-header .close:visited{-webkit-appearance:none}.modal .form-horizontal .form-group,.modal .row{margin-left:0;margin-right:0}.alert{border-radius:0;padding:5px;margin-bottom:10px}.dropdown-menu{min-width:0;font-size:13px}.form-control-sm{padding:3px 5px;font-size:13px;height:inherit}.form-group .control-label.require{color:#505050;font-weight:bold}.form-group .control-label.require:before{font-weight:normal;color:#ac4e43;position:absolute;margin-left:-1.2em;margin-top:1px;content:"\f069";font-size:8px;font-family:'FontAwesome'}.form-group .control-desc{padding-top:6px;color:#999}label.form-control-static input{display:inline-block;position:relative;margin-top:4px}.control-desc .popover{max-width:none;font-size:13px}/*# sourceMappingURL=login.css.map */ \ No newline at end of file diff --git a/server/www/teleport/static/css/login.css.map b/server/www/teleport/static/css/login.css.map new file mode 100644 index 0000000..c00d336 --- /dev/null +++ b/server/www/teleport/static/css/login.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["login.less","_base.less","_overwrite_bootstrap.less"],"names":[],"mappings":"AAAA,SAAS,QCeT,KACE,eAGF,KACE,uDAViE,kBAAoB,uBAAyB,cAAe,mBAAoB,4BAUjJ,CAEA,wBAAA,CACA,WAGF,KAAM,KACJ,YAGF,gBACE,iBAGF,EACE,qBAGF,CAAC,MACC,qBAGF,CAAC,OACC,qBAGF,CAAC,QACC,qBAGF,CAAC,SACC,qBAGF,OACE,aAGF,MACE,mBAGF,aACE,WAGF,QACE,eAGF,aACE,cAAA,CACA,WAGF,MACE,mDAjE4D,wBAoE9D,EAAE,OACA,cAAA,CACA,kBAOF,iBACE,mBAEE,gBADF,KACG,aACC,0BAAA,CACA,8BAEF,gBALF,KAKG,YACC,2BAAA,CACA,+BAKN,qBACE,iBAAA,CACA,YAFF,oBAKE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UATJ,oBAKE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,oBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,oBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,oBAxBJ,GAME,GAkBG,oBAIC,QACE,YAIJ,oBAjCJ,GAME,GA2BG,4BACC,WAGF,oBArCJ,GAME,GA+BG,qBAEC,iBAAA,CACA,mBAEA,oBA1CN,GAME,GA+BG,oBAKE,gBACC,wBAAA,CACA,UAAA,CACA,+BAEA,oBA/CR,GAME,GA+BG,oBAKE,eAKG,EAAG,IACH,cAKN,oBArDJ,GAME,GA+CG,wBAAyB,oBArD9B,GAME,GA+C6B,oBAAqB,oBArDpD,GAME,GA+CmD,wBAC/C,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,eAAA,CACA,uBAEF,oBA5DJ,GAME,GAsDG,wBACC,aAAA,CACA,WAEF,oBAhEJ,GAME,GA0DG,oBAAqB,oBAhE1B,GAME,GA0DyB,wBAAyB,oBAhEpD,GAME,GA0DmD,4BAC/C,WAEF,oBAnEJ,GAME,GA6DG,oBAAqB,oBAnE1B,GAME,GA6DyB,4BACrB,iBAGF,oBAvEJ,GAME,GAiEG,wBAAyB,oBAvE9B,GAME,GAiE6B,sBAAuB,oBAvEtD,GAME,GAiEqD,sBACjD,iBAAA,CACA,eAAA,CACA,WAEF,oBA5EJ,GAME,GAsEG,wBACC,wBAAA,CACA,WAEF,oBAhFJ,GAME,GA0EG,sBACC,wBAAA,CACA,WAEF,oBApFJ,GAME,GA8EG,sBACC,wBAAA,CACA,WA3FR,oBAKE,GAME,GAmFE,MACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,gBAnGR,oBAKE,GAME,GA2FE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3GR,oBAKE,GAME,GAkGE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAhHR,oBAKE,GAME,GAuGE,QACE,gBAGF,oBAjHJ,GAME,GA2GG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,oBAvHJ,GAME,GAiHG,kBAAkB,aACjB,YADF,oBAvHJ,GAME,GAiHG,kBAAkB,YAEjB,MACE,0BAAA,CACA,8BAIJ,oBA/HJ,GAME,GAyHG,YACC,2BAAA,CACA,+BAGF,oBApIJ,GAME,GA8HG,kBAAkB,YACjB,YADF,oBApIJ,GAME,GA8HG,kBAAkB,WAEjB,MACE,2BAAA,CACA,+BAQV,mBACE,iBAAA,CACA,YAFF,kBAIE,IACE,oBAAA,CACA,WAAA,CACA,QAAA,CACA,UARJ,kBAIE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAGA,yBAAA,CACA,2BAAA,CACA,6BAEA,kBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,kBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,kBAxBJ,GAME,GAkBG,oBAIC,QACE,YAjCV,kBAIE,GAME,GA2BE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA1CR,kBAIE,GAME,GAkCE,QACE,gBAGF,kBA5CJ,GAME,GAsCG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,kBAlDJ,GAME,GA4CG,YACC,2BAAA,CACA,+BAMR,EAAE,OACA,aAGF,oBACE,kBADF,mBAGE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UAPJ,mBAGE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,mBAlBJ,GAME,GAYG,WACC,YASF,mBA5BJ,GAME,GAsBG,UACC,eAAA,CACA,SAAA,CACA,YAGF,mBAlCJ,GAME,GA4BG,YACC,eAAA,CACA,WAAA,CACA,UAxCR,mBAGE,GAME,GAkCE,eACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,WAAA,CACA,eAAA,CACA,gBAAA,CACA,YAnDR,mBAGE,GAME,GA6CE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3DR,mBAGE,GAME,GAoDE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAMF,mBAnEJ,GAME,GA6DG,aACC,0BAAA,CACA,0BAAA,CACA,8BAHF,mBAnEJ,GAME,GA6DG,YAKC,MALF,mBAnEJ,GAME,GA6DG,YAKO,eACJ,0BAAA,CACA,8BAIJ,mBA9EJ,GAME,GAwEG,YACC,2BAAA,CACA,+BAFF,mBA9EJ,GAME,GAwEG,WAIC,MAJF,mBA9EJ,GAME,GAwEG,WAIO,eACJ,2BAAA,CACA,+BAUV,gBAAgB,OACd,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,UAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAAA,CACA,iBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAAQ,eAAe,
OAClF,QAAS,GAAT,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,YACE,iBAAA,CACA,gDAAA,CACA,WAAA,CACA,YAGF,iBACE,oBAAA,CAGA,cAAA,CACA,iBAAA,CACA,iBAAA,CAMA,wBAAA,CAEA,wBAAA,CACA,UAAA,CACA,WAAA,CAEA,YD7eF,KACE,SAAA,CACA,QAAA,CACA,gBAGF,SACE,cAAA,CACA,SAAA,CACA,WAGF,iBACE,iBAAA,CACA,UAAA,CACA,WAAA,CACA,qBAAsB,qpBAAtB,CACA,oBAAA,CACA,iBAAkB,0CAA2C,mCAAsC,mCAAsC,kCAAzI,CACA,iBAAkB,2FAAlB,CACA,iBAAkB,8FAAlB,CACA,iBAAkB,6FAGpB,WACE,gBAGF,YACE,IAAG,QACD,WAAA,CACA,eAAA,CACA,WAAA,CACA,gCAAA,CACA,WAEF,YAAE,IAAI,IACJ,oBAAA,CACA,WAVJ,YAaE,OACE,iBAAA,CACA,eAAA,CACA,qBAhBJ,YAkBE,OACE,iBAAA,CACA,oBAAA,CACA,UAAA,CACA,QAAA,CACA,cAAA,CACA,uBAAA,CACA,YAAa,MAAO,OAAQ,KAAM,kBAAmB,kCAIzD,YACE,IAAG,QACD,eAAA,CACA,WAAA,CACA,gBAAA,CACA,gCAAA,CACA,cAAA,CACA,cAPJ,YACE,IAAG,OAOD,YACE,YATN,YACE,IAAG,OAUD,GACE,aAAA,CACA,kBAKN,oBACE,cAAA,CACA,MAAA,CACA,QAAA,CACA,WAJF,mBAME,WACE,uBAAA,CACA,WAAA,CACA,qBAAA,CACA,iBAAA,CACA,sCAAA,CACA,sCAZJ,mBAME,UAQE,SACE,eAAA,CACA,WAAA,CACA,WAAA,CACA,eAAA,CACA,8CAnBN,mBAME,UAQE,QAOE,QACE,oBAAA,CACA,UAAA,CACA,gBAAA,CACA,WAAA,CACA,eAAA,CACA,gBAAA,CACA,cAAA,CACA,cAEA,mBAzBN,UAQE,QAOE,OAUG,OACC,gCAhCV,mBAME,UAQE,QAsBE,WACE,+BAAA,CACA,cAtCR,mBAME,UAmCE,YACE,YA1CN,mBAME,UAmCE,WAEE,oBACE,qBA5CR,mBAME,UAmCE,WAKE,EAAC,kBACC,gBAAA,CACA,mBAAA,CACA,cAjDR,mBAME,UA8CE,WACE,mBArDN,mBAME,UAiDE,SACE,aAAA,CACA,WAAA,CACA,iBAAA,CACA,iBAAA,CACA,0BA5DN,mBAME,UAwDE,WACE,4BAAA,CACA,WAhEN,mBAME,UA4DE,UACE,iCAKN,YACE,iBAGF,YACE,gBAAA,CACA,cAAA,CACA,gBAAA,CACA,YAAa,MAAO,OAAQ,KAAM,kBAAmB,iCAArD,CACA,4BAaF,mBAVqC,iBACnC,YACE,aAGF,UACE,yBErKJ,aAAc,MACZ,eAAA,CAEA,eAyBF,mBACE,cAAA,CACA,KAAA,CACA,OAAA,CACA,QAAA,CACA,MAAA,CACA,aAIF,YACE,kBAKF,OACE,eAAA,CACA,kBAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,eAAA,CACA,YAAA,CACA,iBAAA,CACA,iBAGF,MAAC,WACC,gBAAA,CACA,iBAGF,MAAC,cACC,wBAAA,CACA,WAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAIJ,YAAa,QACX,WAGF,OACE,oBAAA,CACA,gBAAA,CACA,UAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,uBAAA,CACA,YAAA,CACA,kBAGF,MAAC,cACC,wBAAA,CACA,UAAA,CACA,+BAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAKJ,SAAS,aACP,WAAA,CACA,iBAAA,CACA,sBACA,SAJO,YAIN,QACC,eALJ,SAAS,YAQP,eACE,aAAA,CACA,cAAA,CACA,WA8BJ,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,WAAY,WAAY,WACzH,iBAAA,CACA,iBAOF,YAEE,mBACE,cAHJ,YAME,oBACE,cAPJ,YAUE,MAAK,uBAVP,YAWE,SAAQ,uBACN,cAZJ,YAeE,MAAK,4BAfP,YAeoC,SAAQ,4BACxC,cAIJ,MAEE,gBACE,gBAHJ,MAME,cAAc,QACZ,eAAA,CACA,kBACA,MAHF,cAAc,OAGX,OACC,aAAA,CACA,UAEF,MAPF,cAAc,OAOX,QAAS,MAPZ,cAAc,OAOD,OAAQ,MAPrB,cAAc,OAOQ,SAClB,wBAdN,MAkBE,iBAAiB,aAlBnB,MAkBgC,MAC5B,aAAA,CACA,eAOJ,OACE,eAAA,CACA,WAAA,CACA,mBAGF,eACE,WAAA,CACA,eAMF,iBACE,eAAA,CACA,cAAA,CACA,eAGF,WACE,eAAc,SACZ,aAAA,CACA,iBACA,WAHF,eAAc,QAGX,QACC,kBAAA,CACA,aAAA,CACA,iBAAA,CACA,kBAAA,CACA,cAAA,CACA,QAAS,OAAT,CACA,aAAA,CACA,YAAa,cAZnB,WAeE,eACE,eAAA,CACA,WAIJ,KAAK,oBACH,OACE,oBAAA,CACA,iBAAA,CAEA,eAIJ,aACE,UACE,cAAA,CACA","file":"login.css","sourceRoot":"..\\less"} \ No newline at end of file diff --git a/server/www/teleport/static/css/main.css b/server/www/teleport/static/css/main.css deleted file mode 100644 index 270a3eb..0000000 --- a/server/www/teleport/static/css/main.css +++ /dev/null @@ -1 +0,0 @@ -@charset "utf-8";body{font-family:"Open Sans","Helvetica Neue","Microsoft 
YaHei","微软雅黑",Helvetica,Arial,sans-serif;font-size:13px;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-group-sm>.btn,.btn-sm{padding:2px 5px}.btn.btn-sm{padding:3px 8px}.btn.btn-icon{padding:3px 6px}.btn.btn-icon.btn-sm{padding:0;font-size:14px;height:24px;width:24px;line-height:24px;border-radius:0}.form-group-sm .input-group .input-group-btn>.btn{height:30px;padding:0 8px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{display:inline-block;min-width:8px;padding:5px 10px;border-radius:10px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.badge.badge-plain{text-shadow:none}.badge.badge-sm{font-size:11px;padding:3px 6px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-6px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.label{display:inline-block;min-width:8px;padding:5px 10px;border-radius:5px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.label.label-plain{text-shadow:none}.label.label-sm{font-size:11px;padding:3px 8px;margin-top:0;border-radius:5px;text-shadow:none}.label.label-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.alert-sm{padding:5px;margin-bottom:10px}.modal-dialog-sm .modal-header{padding:10px}.modal-dialog-sm .modal-body{padding:10px}.modal-dialog-sm .modal-footer{padding:10px}.modal-dialog-sm .form-horizontal .form-group{margin-right:-5px;margin-left:-5px}.modal-dialog-sm .col-sm-1,.modal-dialog-sm .col-sm-2,.modal-dialog-sm .col-sm-3,.modal-dialog-sm .col-sm-4,.modal-dialog-sm .col-sm-5,.modal-dialog-sm .col-sm-6,.modal-dialog-sm .col-sm-7,.modal-dialog-sm .col-sm-8,.modal-dialog-sm .col-sm-9,.modal-dialog-sm .col-sm-10,.modal-dialog-sm .col-sm-11{padding-right:5px;padding-left:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px;min-width:390px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul 
li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child 
.btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.form-group-sm .form-control-static{padding:6px 0}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.table{margin-bottom:10px}.table>thead>tr>th{padding:5px 5px;outline:none;white-space:nowrap;font-weight:normal;text-align:center;background-color:#ededed}.table>tbody>tr>td{padding:5px;text-align:center;vertical-align:middle}.table>tbody>tr>td .nowrap{white-space:nowrap}.table.table-data thead .sorting,.table.table-data thead .sorting_asc,.table.table-data thead .sorting_desc{cursor:pointer;position:relative}.table.table-data thead .sorting>span:after,.table.table-data thead .sorting_asc>span:after,.table.table-data thead .sorting_desc>span:after{bottom:4px;padding-left:5px;display:inline-block;font-family:'FontAwesome';opacity:.8}.table.table-data thead .sorting>span:after{opacity:.2;content:"\f0dc"}.table.table-data thead .sorting_asc>span:after{content:"\f0de"}.table.table-data thead .sorting_desc>span:after{content:"\f0dd"}.host-id{display:block;font-size:16px;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;color:#333}.host-id.not-active{font-size:14px;font-weight:400;color:#999}.host-desc{font-size:12px;color:#999;display:inline-block;white-space:nowrap;width:160px;overflow:hidden;text-overflow:ellipsis}a.host-desc:hover:before{display:inline-block;padding-right:3px;line-height:12px;content:"\f040";font-family:'FontAwesome'}.td-ip-list{padding-right:20px;padding-left:5px}.td-ip-show-more{font-size:14px;width:12px;float:right;display:block}.td-ip-item{min-width:12em;width:12em;height:18px;padding:2px 4px;margin:1px 0;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.td-ip-item span{display:inline-block;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.td-ip-item a{display:inline-block;width:14px;float:right;font-size:14px}.admin{background-color:#930;color:#fff;padding:5px 15px;border-radius:5px}.page-header-fixed{padding-top:48px}.header{border:none;box-shadow:0 0 3px rgba(0,0,0,0.5)}.header .container-fluid{padding-left:0}.header .breadcrumb-container{display:inline-block;padding-top:6px}.header .breadcrumb{background-color:transparent;padding-left:20px;font-size:16px}.header.navbar{min-height:48px;height:48px;margin:0}.header.navbar .brand{display:inline-block;float:left;width:180px;height:48px;padding:12px 0 0;text-align:center;margin:0 auto;background-color:#3a3a3a}.header.navbar .brand .navbar-logo{display:inline-block;width:93px;height:30px;background:url(../img/site-logo-small.png) no-repeat}.header.navbar .breadcrumb>li+li:before{font-size:18px;padding:0 5px;color:#ccc;content:"\f105";font-family:'FontAwesome'}.page-sidebar-fixed .sidebar{position:fixed}.sidebar{top:0;bottom:0;left:0;width:180px;padding-top:48px;z-index:1010;background-color:#3a3a3a}.sidebar .nav-menu>li>a{padding:8px 0 8px 20px;line-height:24px;font-size:13px;color:#c2c2c2;border-left:5px solid #3a3a3a}.sidebar 
.nav-menu>li>a:focus{background-color:#3a3a3a;border-left:5px solid #3a3a3a}.sidebar .nav-menu>li>a:hover{background-color:#2d2d2d;border-left:5px solid #005c74}.sidebar .nav-menu>li>a.active{color:#fff;background-color:#0084a7;border-left:5px solid #0084a7}.sidebar .nav-menu>li>a.active:hover{border-left:5px solid #00acda}.sidebar .nav-menu>li>a.active:after{content:"\e251";font-family:'Glyphicons Halflings';position:relative;top:1px;display:inline-block;font-style:normal;font-weight:400;float:right;color:#e9e9e9;font-size:20px;line-height:24px;margin-right:-6px}.sidebar .nav-menu li .menu-caret:after{display:inline-block;width:12px;height:12px;margin-left:5px;top:1px;position:relative;border:none;font-family:'FontAwesome';font-style:normal}.sidebar .nav-menu li .menu-caret:after{content:'\f0da'}.sidebar .nav-menu li.expand .menu-caret:after{content:'\f0d7'}.sidebar .nav-menu>li.super-admin>a:hover{background-color:#620;border-left:5px solid #4d1a00}.sidebar .nav-menu>li.super-admin>a.active{background-color:#930;border-left:5px solid #930}.sidebar .nav-menu>li.super-admin>a.active:hover{border-left:5px solid #c40}.sidebar .nav-menu>li>a>i.icon{float:left;margin-top:1px;margin-right:15px;text-align:center;line-height:24px;font-size:14px}.sidebar .sub-menu{padding:0;margin:0;background-color:#292929;position:relative;list-style-type:none;border-top:1px solid #202020;border-bottom:1px solid #464646}.sidebar .sub-menu>li>a{padding:8px 0 8px 40px;line-height:20px;font-size:13px;display:block;position:relative;color:#889097;border-left:5px solid #292929}.sidebar .sub-menu>li>a:before{display:inline-block;padding-right:8px;line-height:20px;content:"\f105";font-family:'FontAwesome'}.sidebar .sub-menu>li>a:hover{color:#fff;border-left:5px solid #005c74}.sidebar .sub-menu>li>a.active{color:#fff;background-color:#0084a7;border-left:5px solid #0084a7}.sidebar .sub-menu>li>a.active:hover{border-left:5px solid #00acda}.sidebar .sub-menu>li>a.active:after{content:"\e251";font-family:'Glyphicons Halflings';position:relative;top:-2px;display:inline-block;font-style:normal;font-weight:400;float:right;color:#e9e9e9;font-size:20px;line-height:24px;margin-right:-6px}.sidebar .nav-profile{padding:15px 10px;color:#ccc;background-color:#333;border-bottom:1px solid #464646}.sidebar .nav-profile a.title{color:#ccc}.sidebar .nav-profile a.title:hover{color:#fff;background-color:transparent}.sidebar .nav-profile a.title:focus{background-color:transparent}.sidebar .nav-profile .image{float:left;margin-top:3px;font-size:24px;color:#69f;width:36px;height:36px;border-radius:5px;background-color:#eee;text-align:center;margin-right:10px;overflow:hidden}.sidebar .nav-profile .image img{margin-top:-3px}.sidebar .nav-profile .name{display:block;font-size:16px}.sidebar .nav-profile .role{display:block;font-size:12px;color:#999}.sidebar .nav-profile .dropdown-menu{font-size:13px}.sidebar .nav-profile .dropdown-menu>li>a{padding:5px 20px}.sidebar .nav-profile .dropdown-menu>li>a:hover{background-color:#ccc}.sidebar .nav-profile .dropdown-menu .divider{margin:5px 0}.sidebar .badge{margin-top:-10px;margin-left:5px}.content{margin-left:180px}.page-content{padding:15px}.page-content-dashboard{padding:20px 25px}.widget{overflow:hidden;border-radius:3px;padding:15px;margin-bottom:20px;color:#fff}.widget.widget-stats{position:relative}.widget .stats-icon{font-size:52px;top:12px;right:21px;width:56px;height:56px;text-align:center;line-height:56px;margin-left:15px;color:#fff;position:absolute;opacity:.2}.widget 
.stats-title{color:#fff;color:rgba(255,255,255,0.6)}.widget .stats-split{height:2px;margin:0 -15px 10px;background:rgba(0,0,0,0.2)}.widget .stats-content{font-size:24px;font-weight:300;margin-bottom:10px}.widget .stats-desc{display:inline-block;color:#fff;color:rgba(255,255,255,0.6)}.widget .stats-action{display:inline-block;float:right}.widget a{color:#eee;color:rgba(255,255,255,0.7)}.widget a:hover{color:#fff}.widget.widget-info{background-color:#33b7d0}.widget.widget-primary{background-color:#348fe2}.widget.widget-success{background-color:#368142}.widget.widget-warning{background-color:#f57523}.widget.widget-danger{background-color:#d34242}.panel{border:none;box-shadow:none;border-radius:3px}.panel .panel-heading{padding:6px 15px;color:#fff}.panel .panel-heading .panel-title{font-size:14px}.panel .panel-heading .panel-heading-btn{float:right}.panel .panel-heading .panel-heading-btn .btn{display:inline-block;padding:0;border:none;text-align:center}.panel .panel-heading .panel-heading-btn .btn.btn-xs{width:18px;height:18px;line-height:18px;font-size:12px}.panel .panel-heading .panel-heading-btn .btn.btn-circle{border-radius:50%}.place-holder-h200{width:100%;height:300px;background-color:#eee;border:1px solid #ccc;line-height:200px;text-align:center}.dashboard-panel2-holder{width:100%;height:1150px;background-color:#eee;border:1px solid #ccc;line-height:200px;text-align:center}.dashboard-panel-time{color:#cecece}.box{border:none;box-shadow:none;border-radius:3px;background-color:#fff;padding:15px;margin-bottom:15px}.box-fluid{border:none;box-shadow:none;border-radius:3px;margin-bottom:15px}.box.box-sm,.box-fluid.box-sm{padding:5px 15px}.box .box-title,.box-fluid .box-title{margin-bottom:10px}.box .box-title .title,.box-fluid .box-title .title{display:inline-block;font-size:18px;color:#333;height:30px;line-height:30px}.box .box-title .btn-sm,.box-fluid .box-title .btn-sm{padding:3px 8px;margin-top:-5px}.box .nav-tabs,.box-fluid .nav-tabs{font-size:14px;font-weight:bold}.box .nav-tabs>li:first-child,.box-fluid .nav-tabs>li:first-child{margin-left:50px}.box .tab-content>.tab-pane,.box-fluid .tab-content>.tab-pane{background-color:#fff;padding:20px;border:1px solid #ddd;border-top:none;border-bottom-left-radius:3px;border-bottom-right-radius:3px}.box-license{line-height:30px}.box-license .breadcrumb{padding:0;margin:0;border-radius:0;background-color:transparent}.box-btn-bar{line-height:30px}.box-btn-bar a.btn{margin-right:20px}.page-nav{height:30px;line-height:30px}.page-nav .breadcrumb{padding:0;margin:0;border-radius:0;background-color:transparent}.page-nav .pagination{margin:0 0}.page-nav .btn{margin-top:-3px}.page-filter{height:36px;line-height:36px;margin-bottom:10px}.page-filter .form-control{margin-top:5px;margin-right:4px}.btn.btn-sm .dropdown-menu li a{font-size:11px}.invite{text-align:center;padding-bottom:20px}.invite .code{color:#2f3991;font-size:36px;font-weight:700;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}.invite .link{padding:5px;color:#2f3991;font-size:13px;font-weight:700;background-color:#eee;border-radius:5px}.invite-send-box{width:300px;margin:0 auto}.form-group .input-group{margin-bottom:5px}.op_box{display:block;padding:5px;border-radius:3px;text-align:center;margin-top:5px}.op_error{background:#fbb}.op_wait{background:#ccc}.table-data td.loading{text-align:left;padding:20px}.table-data .btn-group.open .dropdown-toggle{-webkit-box-shadow:none;box-shadow:none}.more-action{position:absolute !important}.more-action 
.dropdown-menu{background-color:rgba(60,60,60,0.9);color:#fff;font-size:13px}.more-action .dropdown-menu.dropdown-menu-left{margin-left:-120px}.more-action .dropdown-menu>li>a{padding:5px 20px;color:#fff}.more-action .dropdown-menu>li>a:hover,.more-action .dropdown-menu>li>a:active,.more-action .dropdown-menu>li>a:visited{background-color:#0084a7}.more-action .dropdown-menu .divider{margin:5px 0;background-color:#666}.popover-inline-edit input,.popover-inline-edit .btn{height:30px}.popover-inline-edit .popover-title{background-color:#ddd}.popover-inline-edit .popover-content{padding:20px 10px}.popover-inline-edit .popover{padding:0;max-width:500px}.popover-inline-edit .popover .popover-content{padding:10px 10px 20px 10px}.popover-inline-edit .popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#ddd}.user-info-table{font-size:14px}.user-info-table tbody>tr>td{padding:8px}.user-info-table .user-field{min-width:100px;width:100px;color:#999;text-align:right}.user-info-table .user-value{color:#333;font-weight:bold}.user-info-table .user-value a{font-weight:normal}.breadcrumb.breadcrumb-trans{background-color:transparent}.biz-box{display:inline-block;width:20%;max-width:20%}.biz-box .bb-inner{background-color:#368142;margin:3px;border-radius:4px}.biz-box .bb-name{color:#fff;padding:9px;margin-bottom:3px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;text-align:center;padding-top:1px;padding-bottom:1px}.biz-box .bb-ver{font-size:11px;height:16px;text-align:center;padding:0 5px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;border-bottom-left-radius:4px;border-bottom-right-radius:4px;color:rgba(255,255,255,0.85);background-color:rgba(0,0,0,0.3)}.biz-box.mp-disabled .mp-inner{background-color:#e5e5e5}.biz-box.mp-disabled .mp-name{color:#999}.biz-box.mp-success .mp-inner{background-color:#368142}.biz-box.mp-success .mp-name{color:#fff}.biz-box.mp-danger .mp-inner{background-color:#d34242}.biz-box.mp-danger .mp-name{color:#fff}.biz-box.mp-warning .mp-inner{background-color:#f57523}.biz-box.mp-warning .mp-name{color:#fff}textarea.textarea-resize-y{resize:vertical}textarea.textarea-resize-none{resize:none}textarea.textarea-code{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}textarea.cert_pub{width:100%;height:64px;border:1px solid #e2e2e2;background-color:#e4ffe5}.icon{display:inline-block}.icon16{width:16px;height:16px;line-height:16px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/mimetype-16.png") !important}.icon16.icon-disk{background-position:0 0 !important}.icon16.icon-folder{background-position:-16px 0 !important}.icon16.icon-file{background-position:0 -16px !important}.icon16.icon-txt{background-position:-16px -16px !important}.icon16.icon-help{background-position:-32px -16px !important}.icon16.icon-sys{background-position:-48px -16px !important}.icon16.icon-exe{background-position:-64px -16px !important}.icon16.icon-office{background-position:0 -32px !important}.icon16.icon-word{background-position:-16px -32px !important}.icon16.icon-excel{background-position:-32px -32px !important}.icon16.icon-ppt{background-position:-48px -32px !important}.icon16.icon-access{background-position:-64px -32px !important}.icon16.icon-visio{background-position:-80px -32px !important}.icon16.icon-audio{background-position:0 -48px !important}.icon16.icon-video{background-position:-16px -48px 
!important}.icon16.icon-pic{background-position:-32px -48px !important}.icon16.icon-pdf{background-position:-48px -48px !important}.icon16.icon-font{background-position:-64px -48px !important}.icon16.icon-script{background-position:0 -64px !important}.icon16.icon-html{background-position:-16px -64px !important}.icon16.icon-py{background-position:-32px -64px !important}.icon16.icon-h{background-position:-48px -64px !important}.icon16.icon-c{background-position:-64px -64px !important}.icon16.icon-cpp{background-position:-80px -64px !important}.icon16.icon-cs{background-position:-96px -64px !important}.icon16.icon-php{background-position:-112px -64px !important}.icon16.icon-ruby{background-position:-128px -64px !important}.icon16.icon-java{background-position:-144px -64px !important}.icon16.icon-vs{background-position:-160px -64px !important}.icon16.icon-js{background-position:-176px -64px !important}.icon16.icon-archive{background-position:0 -80px !important}.icon16.icon-rar{background-position:-16px -80px !important}.icon16.icon-zip{background-position:-32px -80px !important}.icon16.icon-7z{background-position:-48px -80px !important}.icon16.icon-tar{background-position:-64px -80px !important}.icon16.icon-gz{background-position:-80px -80px !important}.icon16.icon-jar{background-position:-96px -80px !important}.icon16.icon-bz2{background-position:-112px -80px !important}.icon24{width:24px;height:24px;line-height:24px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/icons-tree-24x24.png") !important}.icon24.icon-disk{background-position:0 0 !important}.icon24.icon-folder{background-position:-24px 0 !important}.icon24.icon-folder-open{background-position:-48px 0 !important}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center 
!important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important} \ No newline at end of file diff --git a/server/www/teleport/static/css/maintenance.css b/server/www/teleport/static/css/maintenance.css new file mode 100644 index 0000000..9ac57b7 --- /dev/null +++ b/server/www/teleport/static/css/maintenance.css @@ -0,0 +1 @@ +@charset "utf-8";html{font-size:13px}body{font-family:-apple-system,system-ui,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"PingFang SC","Hiragino Sans GB","Microsoft YaHei",sans-serif;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-state{text-align:center;white-space:nowrap}.remote-action-group ul li.remote-action-state.state-disabled{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.remote-action-group ul li.remote-action-state.state-disabled>i.fa{color:#b53a2f}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul 
li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li.remote-action-btn:first-child{border:none}.remote-action-group ul li.remote-action-btn:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li.remote-action-btn:last-child{border:none}.remote-action-group ul li.remote-action-btn:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-info-group{margin-bottom:3px;height:28px}.remote-info-group ul{display:inline-block;height:28px;margin:0;padding:0}.remote-info-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-info-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-info-group ul li.remote-action-input{background:none;padding:4px 0}.remote-info-group ul li.remote-action-input select{border:none}.remote-info-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-info-group ul li select{margin-top:-3px}.remote-info-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-info-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child 
.form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}.disable-bg{position:absolute;background:url(../img/css/disable-bg.png) repeat;opacity:.45;z-index:990}.disable-message{display:inline-block;font-size:160%;position:absolute;padding:20px 40px;border:1px solid #2b0002;background-color:#65181a;color:#fff;opacity:.85;z-index:991}.btn-group-sm>.btn{padding:3px 5px;font-size:12px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{padding:3px 6px;border-radius:10px;font-size:13px;font-weight:400;background-color:#999}.badge.badge-sm{font-size:12px;padding:3px 5px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-8px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.btn-success .badge{color:#fff}.label{display:inline-block;padding:5px 10px;margin:2px;font-size:13px;font-weight:400;background-color:#999}.label.label-sm{font-size:12px;padding:3px 8px 4px 8px;margin-top:0;border-radius:3px}.label.label-ignore{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.modal .modal-content{border-radius:0}.modal .modal-header .close{margin-top:-4px;margin-right:-6px}.modal 
.modal-header .close:hover{color:#9c3023;opacity:1}.modal .modal-header .close:active,.modal .modal-header .close:focus,.modal .modal-header .close:visited{-webkit-appearance:none}.modal .form-horizontal .form-group,.modal .row{margin-left:0;margin-right:0}.alert{border-radius:0;padding:5px;margin-bottom:10px}.dropdown-menu{min-width:0;font-size:13px}.form-control-sm{padding:3px 5px;font-size:13px;height:inherit}.form-group .control-label.require{color:#505050;font-weight:bold}.form-group .control-label.require:before{font-weight:normal;color:#ac4e43;position:absolute;margin-left:-1.2em;margin-top:1px;content:"\f069";font-size:8px;font-family:'FontAwesome'}.form-group .control-desc{padding-top:6px;color:#999}label.form-control-static input{display:inline-block;position:relative;margin-top:4px}.control-desc .popover{max-width:none;font-size:13px}body{padding:0;margin:0}#page-header nav.navbar{height:70px;margin-bottom:0;border:none;background-color:#192e4b;color:#fff}#page-header nav li{display:inline-block;float:left}#page-content{padding-top:70px;padding-bottom:50px}#page-footer nav.navbar{min-height:24px;height:24px;line-height:24px;background-color:#c1c1c1;border-top:1px solid #7b7b7b;font-size:12px;color:#4e4e4e}#page-footer nav.navbar .container{height:24px}#page-footer nav.navbar p{margin:0 auto;text-align:center}.content-box{margin-top:20px;background-color:#fff;padding:20px;box-shadow:1px 1px 1px rgba(0,0,0,0.3)}.content-box .welcome-message .fa{display:block;color:#ff272a;font-size:18px;float:left}.content-box .welcome-message span{display:block;line-height:18px;padding-left:23px}.content-box .table{width:auto;margin-left:20px}.content-box .table td{border-top:1px dotted #ddd;padding:5px}.content-box .table td.key{text-align:right}.content-box .table td.value{text-align:left;font-weight:bold}.content-box .table tr:first-child td{border-top:none}.content-box table.form{width:auto;margin-left:20px}.content-box table.form td{padding:5px;line-height:1.42857143;vertical-align:middle}.content-box table.form td.key{text-align:right}.content-box table.form td label{margin-bottom:0}.content-box table.form td .form-control{height:30px;padding:5px 10px;font-size:inherit;line-height:inherit}h1 .fa-spin{color:#aaa}h1{font-size:180%}h2{font-size:140%}.op_box{display:block;padding:5px;border-radius:3px;margin:5px 0}.op_error{background-color:#cc3632;border:1px solid #9c2a26;color:#fff}.op_wait{background:rgba(255,255,255,0.3)}.steps-detail{display:none;margin:10px;padding:10px;border:1px solid #b4b4b4;background-color:#dcdcdc}.steps-detail p{padding-left:5px;margin:2px 0 2px 1px}.steps-detail p.error{color:#fff;margin:2px 0 2px 0;background-color:#cc3632;border:1px solid #9c2a26}.alert.alert-warning{color:#6a542d;border:1px solid #e2cab4;background-color:#ffe4cb}.error{color:#cc3632}/*# sourceMappingURL=maintenance.css.map */ \ No newline at end of file diff --git a/server/www/teleport/static/css/maintenance.css.map b/server/www/teleport/static/css/maintenance.css.map new file mode 100644 index 0000000..0bee72d --- /dev/null +++ b/server/www/teleport/static/css/maintenance.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["maintenance.less","_base.less","_overwrite_bootstrap.less"],"names":[],"mappings":"AAAA,SAAS,QCeT,KACE,eAGF,KACE,uDAViE,kBAAoB,uBAAyB,cAAe,mBAAoB,4BAUjJ,CAEA,wBAAA,CACA,WAGF,KAAM,KACJ,YAGF,gBACE,iBAGF,EACE,qBAGF,CAAC,MACC,qBAGF,CAAC,OACC,qBAGF,CAAC,QACC,qBAGF,CAAC,SACC,qBAGF,OACE,aAGF,MACE,mBAGF,aACE,WAGF,QACE,eAGF,aACE,cAAA,CACA,WAGF,MACE,mDAjE4D,wBAoE9D,EAAE,OACA,cAAA,CACA,kBAOF,iBACE,mBAEE,gBADF,KACG,aACC,0BAAA,CACA,8BAEF,gBALF,KAKG,YACC,2BAAA,CACA,+BAKN,qBACE,iBAAA,CACA,YAFF,oBAKE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UATJ,oBAKE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,oBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,oBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,oBAxBJ,GAME,GAkBG,oBAIC,QACE,YAIJ,oBAjCJ,GAME,GA2BG,4BACC,WAGF,oBArCJ,GAME,GA+BG,qBAEC,iBAAA,CACA,mBAEA,oBA1CN,GAME,GA+BG,oBAKE,gBACC,wBAAA,CACA,UAAA,CACA,+BAEA,oBA/CR,GAME,GA+BG,oBAKE,eAKG,EAAG,IACH,cAKN,oBArDJ,GAME,GA+CG,wBAAyB,oBArD9B,GAME,GA+C6B,oBAAqB,oBArDpD,GAME,GA+CmD,wBAC/C,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,eAAA,CACA,uBAEF,oBA5DJ,GAME,GAsDG,wBACC,aAAA,CACA,WAEF,oBAhEJ,GAME,GA0DG,oBAAqB,oBAhE1B,GAME,GA0DyB,wBAAyB,oBAhEpD,GAME,GA0DmD,4BAC/C,WAEF,oBAnEJ,GAME,GA6DG,oBAAqB,oBAnE1B,GAME,GA6DyB,4BACrB,iBAGF,oBAvEJ,GAME,GAiEG,wBAAyB,oBAvE9B,GAME,GAiE6B,sBAAuB,oBAvEtD,GAME,GAiEqD,sBACjD,iBAAA,CACA,eAAA,CACA,WAEF,oBA5EJ,GAME,GAsEG,wBACC,wBAAA,CACA,WAEF,oBAhFJ,GAME,GA0EG,sBACC,wBAAA,CACA,WAEF,oBApFJ,GAME,GA8EG,sBACC,wBAAA,CACA,WA3FR,oBAKE,GAME,GAmFE,MACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,gBAnGR,oBAKE,GAME,GA2FE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3GR,oBAKE,GAME,GAkGE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAhHR,oBAKE,GAME,GAuGE,QACE,gBAGF,oBAjHJ,GAME,GA2GG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,oBAvHJ,GAME,GAiHG,kBAAkB,aACjB,YADF,oBAvHJ,GAME,GAiHG,kBAAkB,YAEjB,MACE,0BAAA,CACA,8BAIJ,oBA/HJ,GAME,GAyHG,YACC,2BAAA,CACA,+BAGF,oBApIJ,GAME,GA8HG,kBAAkB,YACjB,YADF,oBApIJ,GAME,GA8HG,kBAAkB,WAEjB,MACE,2BAAA,CACA,+BAQV,mBACE,iBAAA,CACA,YAFF,kBAIE,IACE,oBAAA,CACA,WAAA,CACA,QAAA,CACA,UARJ,kBAIE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAGA,yBAAA,CACA,2BAAA,CACA,6BAEA,kBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,kBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,kBAxBJ,GAME,GAkBG,oBAIC,QACE,YAjCV,kBAIE,GAME,GA2BE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA1CR,kBAIE,GAME,GAkCE,QACE,gBAGF,kBA5CJ,GAME,GAsCG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,kBAlDJ,GAME,GA4CG,YACC,2BAAA,CACA,+BAMR,EAAE,OACA,aAGF,oBACE,kBADF,mBAGE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UAPJ,mBAGE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,mBAlBJ,GAME,GAYG,WACC,YASF,mBA5BJ,GAME,GAsBG,UACC,eAAA,CACA,SAAA,CACA,YAGF,mBAlCJ,GAME,GA4BG,YACC,eAAA,CACA,WAAA,CACA,UAxCR,mBAGE,GAME,GAkCE,eACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,WAAA,CACA,eAAA,CACA,gBAAA,CACA,YAnDR,mBAGE,GAME,GA6CE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3DR,mBAGE,GAME,GAoDE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAMF,mBAnEJ,GAME,GA6DG,aACC,0BAAA,CACA,0BAAA,CACA,8BAHF,mBAnEJ,GAME,GA6DG,YAKC,MALF,mBAnEJ,GAME,GA6DG,YAKO,eACJ,0BAAA,CACA,8BAIJ,mBA9EJ,GAME,GAwEG,YACC,2BAAA,CACA,+BAFF,mBA9EJ,GAME,GAwEG,WAIC,MAJF,mBA9EJ,GAME,GAwEG,WAIO,eACJ,2BAAA,CACA,+BAUV,gBAAgB,OACd,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,UAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAAA,CACA,iBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAAQ
,eAAe,OAClF,QAAS,GAAT,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,YACE,iBAAA,CACA,gDAAA,CACA,WAAA,CACA,YAGF,iBACE,oBAAA,CAGA,cAAA,CACA,iBAAA,CACA,iBAAA,CAMA,wBAAA,CAEA,wBAAA,CACA,UAAA,CACA,WAAA,CAEA,YC7eF,aAAc,MACZ,eAAA,CAEA,eAyBF,mBACE,cAAA,CACA,KAAA,CACA,OAAA,CACA,QAAA,CACA,MAAA,CACA,aAIF,YACE,kBAKF,OACE,eAAA,CACA,kBAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,eAAA,CACA,YAAA,CACA,iBAAA,CACA,iBAGF,MAAC,WACC,gBAAA,CACA,iBAGF,MAAC,cACC,wBAAA,CACA,WAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAIJ,YAAa,QACX,WAGF,OACE,oBAAA,CACA,gBAAA,CACA,UAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,uBAAA,CACA,YAAA,CACA,kBAGF,MAAC,cACC,wBAAA,CACA,UAAA,CACA,+BAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAKJ,SAAS,aACP,WAAA,CACA,iBAAA,CACA,sBACA,SAJO,YAIN,QACC,eALJ,SAAS,YAQP,eACE,aAAA,CACA,cAAA,CACA,WA8BJ,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,WAAY,WAAY,WACzH,iBAAA,CACA,iBAOF,YAEE,mBACE,cAHJ,YAME,oBACE,cAPJ,YAUE,MAAK,uBAVP,YAWE,SAAQ,uBACN,cAZJ,YAeE,MAAK,4BAfP,YAeoC,SAAQ,4BACxC,cAIJ,MAEE,gBACE,gBAHJ,MAME,cAAc,QACZ,eAAA,CACA,kBACA,MAHF,cAAc,OAGX,OACC,aAAA,CACA,UAEF,MAPF,cAAc,OAOX,QAAS,MAPZ,cAAc,OAOD,OAAQ,MAPrB,cAAc,OAOQ,SAClB,wBAdN,MAkBE,iBAAiB,aAlBnB,MAkBgC,MAC5B,aAAA,CACA,eAOJ,OACE,eAAA,CACA,WAAA,CACA,mBAGF,eACE,WAAA,CACA,eAMF,iBACE,eAAA,CACA,cAAA,CACA,eAGF,WACE,eAAc,SACZ,aAAA,CACA,iBACA,WAHF,eAAc,QAGX,QACC,kBAAA,CACA,aAAA,CACA,iBAAA,CACA,kBAAA,CACA,cAAA,CACA,QAAS,OAAT,CACA,aAAA,CACA,YAAa,cAZnB,WAeE,eACE,eAAA,CACA,WAIJ,KAAK,oBACH,OACE,oBAAA,CACA,iBAAA,CAEA,eAIJ,aACE,UACE,cAAA,CACA,eFpRJ,KACE,SAAA,CACA,SAIF,YACE,IAAG,QACD,WAAA,CACA,eAAA,CACA,WAAA,CAEA,wBAAA,CACA,WAEF,YAAE,IAAI,IACJ,oBAAA,CACA,WAIJ,cACE,gBAAA,CACA,oBAGF,YACE,IAAG,QACD,eAAA,CACA,WAAA,CACA,gBAAA,CACA,wBAAA,CACA,4BAAA,CACA,cAAA,CACA,cARJ,YACE,IAAG,OAQD,YACE,YAVN,YACE,IAAG,OAWD,GACE,aAAA,CACA,kBAKN,aACE,eAAA,CACA,qBAAA,CACA,YAAA,CACA,uCAJF,YAME,iBAGE,KACE,aAAA,CACA,aAAA,CACA,cAAA,CACA,WAbN,YAME,iBASE,MACE,aAAA,CACA,gBAAA,CACA,kBAlBN,YAsBE,QACE,UAAA,CACA,iBAxBJ,YAsBE,OAIE,IACE,0BAAA,CACA,YACA,YAPJ,OAIE,GAGG,KACC,iBAEF,YAVJ,OAIE,GAMG,OACC,eAAA,CACA,iBAlCR,YAsBE,OAgBE,GAAE,YACA,IACE,gBAxCR,YA6CE,MAAK,MACH,UAAA,CACA,iBAEA,YAJF,MAAK,KAID,IACA,WAAA,CACA,sBAAA,CACA,sBACA,YARJ,MAAK,KAID,GAIC,KACC,iBALJ,YAJF,MAAK,KAID,GAQA,OACE,gBATJ,YAJF,MAAK,KAID,GAWA,eACE,WAAA,CACA,gBAAA,CACA,iBAAA,CACA,oBAMR,EAAG,UACD,WAGF,GACE,eAGF,GACE,eAGF,QACE,aAAA,CACA,WAAA,CACA,iBAAA,CAGA,aAGF,UAEM,wBAAA,CACA,wBAAA,CACJ,WAGF,SACE,iCAGF,cACE,YAAA,CACA,WAAA,CACA,YAAA,CACA,wBAAA,CACA,yBALF,aAME,GACE,gBAAA,CACA,qBACA,aAHF,EAGG,OACC,UAAA,CACA,kBAAA,CACA,wBAAA,CACA,yBAMJ,MAAC,eACC,aAAA,CACA,wBAAA,CACA,yBAOJ,OACE","file":"maintenance.css","sourceRoot":"..\\less"} \ No newline at end of file diff --git a/server/www/teleport/static/css/single.css b/server/www/teleport/static/css/single.css new file mode 100644 index 0000000..12c4a5d --- /dev/null +++ b/server/www/teleport/static/css/single.css @@ -0,0 +1 @@ +@charset "utf-8";html{font-size:13px}body{font-family:-apple-system,system-ui,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"PingFang SC","Hiragino Sans GB","Microsoft 
YaHei",sans-serif;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-state{text-align:center;white-space:nowrap}.remote-action-group ul li.remote-action-state.state-disabled{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.remote-action-group ul li.remote-action-state.state-disabled>i.fa{color:#b53a2f}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li.remote-action-btn:first-child{border:none}.remote-action-group ul li.remote-action-btn:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li.remote-action-btn:last-child{border:none}.remote-action-group ul li.remote-action-btn:last-child 
.btn{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-info-group{margin-bottom:3px;height:28px}.remote-info-group ul{display:inline-block;height:28px;margin:0;padding:0}.remote-info-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-info-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-info-group ul li.remote-action-input{background:none;padding:4px 0}.remote-info-group ul li.remote-action-input select{border:none}.remote-info-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-info-group ul li select{margin-top:-3px}.remote-info-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-info-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}.disable-bg{position:absolute;background:url(../img/css/disable-bg.png) repeat;opacity:.45;z-index:990}.disable-message{display:inline-block;font-size:160%;position:absolute;padding:20px 40px;border:1px solid 
#2b0002;background-color:#65181a;color:#fff;opacity:.85;z-index:991}.table{margin-bottom:10px}.table>thead>tr>th{vertical-align:middle;border-bottom:2px solid #ddd}.table.table-info-list{width:auto}.table.table-info-list td{border-top:1px dotted #ddd;padding:5px 10px;vertical-align:top}.table.table-info-list td.key{text-align:right;width:1px;white-space:nowrap}.table.table-info-list td.value{text-align:left;color:#767676}.table.table-info-list td .error{color:#cc3632}.table.table-info-list.table-info-list-lite{width:100%}.table.table-info-list.table-info-list-lite td{padding:5px 5px}.table.table-info-list.table-info-list-lite td.value{font-weight:normal}.table.table-info-list tr:first-child td{border-top:none}.table.table-config-list{width:100%}.table.table-config-list td{border:none;padding:5px}.table.table-config-list td.title{text-align:left;font-size:110%;font-weight:bolder}.table.table-config-list td.key{width:1px;white-space:nowrap;text-align:right;padding-right:15px}.table.table-config-list td.value{text-align:left}.table.table-config-list td.value input{width:4em;padding:0 5px;text-align:right}.table.table-config-list td.value .unit{margin-left:5px}.table.table-config-list td.value .desc{color:#999;margin-left:15px;display:inline-block}.table>thead>tr>th{padding:5px 5px;outline:none;white-space:nowrap;font-weight:normal;text-align:center;background-color:#ededed}.table>tbody>tr>td{padding:5px;text-align:center;vertical-align:middle}.table>tbody>tr>td .nowrap{white-space:nowrap}.table-data td.loading{text-align:left;padding:20px}.table-data .btn-group.open .dropdown-toggle{-webkit-box-shadow:none;box-shadow:none}.table.table-data thead .sorting,.table.table-data thead .sorting_asc,.table.table-data thead .sorting_desc{cursor:pointer}.table.table-data thead .sorting:after,.table.table-data thead .sorting_asc:after,.table.table-data thead .sorting_desc:after{bottom:4px;padding-left:5px;display:inline-block;font-family:'FontAwesome';opacity:.8}.table.table-data thead .sorting:after{opacity:.2;content:"\f0dc"}.table.table-data thead .sorting_asc:after{content:"\f0de"}.table.table-data thead .sorting_desc:after{content:"\f0dd"}.host-name{font-size:16px}.host-name-desc{cursor:pointer}.host-ip{font-size:12px;color:#999;display:inline-block;white-space:nowrap;font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace;overflow:hidden;text-overflow:ellipsis}.td-ip-list{padding-right:20px;padding-left:5px}.td-ip-show-more{font-size:14px;width:12px;float:right;display:block}.td-ip-item{min-width:12em;width:12em;height:18px;padding:2px 4px;margin:1px 0;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.td-ip-item span{display:inline-block;font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.td-ip-item a{display:inline-block;width:14px;float:right;font-size:14px}.btn-group-sm>.btn{padding:3px 5px;font-size:12px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{padding:3px 6px;border-radius:10px;font-size:13px;font-weight:400;background-color:#999}.badge.badge-sm{font-size:12px;padding:3px 
5px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-8px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.btn-success .badge{color:#fff}.label{display:inline-block;padding:5px 10px;margin:2px;font-size:13px;font-weight:400;background-color:#999}.label.label-sm{font-size:12px;padding:3px 8px 4px 8px;margin-top:0;border-radius:3px}.label.label-ignore{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.modal .modal-content{border-radius:0}.modal .modal-header .close{margin-top:-4px;margin-right:-6px}.modal .modal-header .close:hover{color:#9c3023;opacity:1}.modal .modal-header .close:active,.modal .modal-header .close:focus,.modal .modal-header .close:visited{-webkit-appearance:none}.modal .form-horizontal .form-group,.modal .row{margin-left:0;margin-right:0}.alert{border-radius:0;padding:5px;margin-bottom:10px}.dropdown-menu{min-width:0;font-size:13px}.form-control-sm{padding:3px 5px;font-size:13px;height:inherit}.form-group .control-label.require{color:#505050;font-weight:bold}.form-group .control-label.require:before{font-weight:normal;color:#ac4e43;position:absolute;margin-left:-1.2em;margin-top:1px;content:"\f069";font-size:8px;font-family:'FontAwesome'}.form-group .control-desc{padding-top:6px;color:#999}label.form-control-static input{display:inline-block;position:relative;margin-top:4px}.control-desc .popover{max-width:none;font-size:13px}.page-header-fixed{padding-top:48px}.header{border:none;box-shadow:0 0 3px rgba(0,0,0,0.5);min-height:48px;height:48px;top:0;width:100%;position:fixed;z-index:999}.header .top-navbar{min-height:48px;height:48px;line-height:48px;background-color:#3a3a3a;color:#ccc}.header .top-navbar a{color:#d5d5d5}.header .top-navbar a:hover{color:#5a8fee}.header .top-navbar .brand{float:left;display:inline-block;padding:12px 0;margin:0}.header .top-navbar .brand .site-logo{display:block;width:86px;height:24px;background:url(../img/site-logo-small.png) no-repeat}.header .top-navbar .title-container{float:left;display:inline-block;margin:0;padding:0;margin-left:20px}.header .top-navbar .title-container .title{font-size:16px}.header .top-navbar .breadcrumb-container{float:left;display:inline-block;margin:0;padding:0}.header .top-navbar .breadcrumb-container .breadcrumb{background-color:#3a3a3a;height:48px;margin:0;border-radius:0;border:none;padding:0 0 0 20px;font-size:16px;color:#ccc}.header .top-navbar 
.breadcrumb-container .breadcrumb>li+li:before{font-size:18px;padding:0 5px;color:#555;content:'|'}.header .top-navbar .breadcrumb-container .breadcrumb .title{font-size:18px}.header .top-navbar .breadcrumb-container .breadcrumb .sub-title{font-size:14px;color:#b3b3b3}.header .top-navbar .status-container{float:right}.page-content{margin-top:10px;margin-bottom:44px}.footer{width:100%;height:24px;line-height:24px;background-color:#d5d5d5;border-top:1px solid #a2a2a2;border-bottom:1px solid #efefef;z-index:998;text-align:center;font-size:12px}.footer.footer-fixed-bottom{bottom:0;position:fixed}.row-sm .col-sm-1,.row-sm .col-sm-2,.row-sm .col-sm-3,.row-sm .col-sm-4,.row-sm .col-sm-5,.row-sm .col-sm-6,.row-sm .col-sm-7,.row-sm .col-sm-8,.row-sm .col-sm-9,.row-sm .col-sm-10,.row-sm .col-sm-11{padding-right:5px;padding-left:5px}.content{margin-top:15px;margin-bottom:20px;background-color:#fff;border-radius:5px;padding:10px}.content:last-child{margin-bottom:54px}.table-host{width:100%;border-top:10px solid #b3cfe7;border-bottom:1px solid #b3cfe7}.table-host .cell-host-id{border-left:1px solid #e7e7e7;padding:5px;text-align:center;width:168px;vertical-align:middle}.table-host .cell-host-id .host-id{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace;font-size:13px;color:#999;display:inline-block}.table-host .cell-host-id .host-name{display:block;width:168px;text-align:center;overflow:hidden;white-space:nowrap;text-overflow:ellipsis;font-size:16px;margin:auto;margin-bottom:10px}.table-host .cell-host-id .td-ip-item{width:10em;height:18px;padding:2px 4px;margin:1px auto;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.table-host .cell-host-id .td-ip-item span{display:inline-block;font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.table-host .cell-host-id .actions{margin-top:20px}.table-host .cell-host-id .actions a{margin-left:5px;margin-right:5px}.table-host .cell-host-id .actions a:first-child{margin-left:0}.table-host .cell-host-id .actions a:last-child{margin-right:0}.table-host .cell-detail{border-left:1px solid #e7e7e7;border-right:1px solid #e7e7e7;vertical-align:top}.table-host .cell-detail tr{border-top:1px solid #e7e7e7}.table-host .cell-detail tr:last-child{border-bottom:1px solid #e7e7e7}.table-host .cell-detail .row-host-info{background-color:#ececed}.table-host .cell-detail.host-offline{background-color:#ffcecc;text-align:center;vertical-align:middle}.table-host .cell-detail.host-offline .host-offline-msg{color:#802506;font-size:24px}.table-host .cell-log td{border:1px solid #e7e7e7}.table-host .cell-log td .host-log{font-size:12px;outline:none;width:100%;height:120px;overflow-y:auto;resize:none;border:none;padding:5px}.table-host .cell-log td .host-log div{margin-bottom:3px}.table-host .cell-log td .host-log div .datetime{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}.log-box{margin-top:15px}.log-box .log-list{margin-top:5px;border:1px solid #e7e7e7;font-size:12px;outline:none;width:100%;max-height:480px;overflow-y:auto;resize:none;padding:5px}.log-box .log-list div{margin-bottom:3px}.log-box .log-list div:hover{background-color:#f3f3f3}.log-box .log-list div .log-dt{padding:0 3px;padding-top:2px;padding-bottom:1px;margin-right:3px;background-color:#f57523;color:#fff;font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}.log-box .log-list div .log-hid{padding:0 
3px;margin-right:3px;background-color:#348fe2;color:#fff}.log-box .log-list div .log-hname{padding:0 3px;margin-right:3px;background-color:#348fe2;color:#fff}.page-nav{height:30px;line-height:30px}.page-nav .breadcrumb{padding:0;margin:0;border-radius:0;background-color:transparent}.page-nav .pagination{margin:0 0}.page-nav .btn{margin-top:-3px}.mp{display:inline-block;width:20%;max-width:20%}.mp .mp-inner{background-color:#e5e5e5;margin:3px;border-radius:4px}.mp .mp-name{color:#999;padding:9px;margin-bottom:3px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;text-align:center}.mp .mp-name.with-target{padding-top:17px;padding-bottom:1px}.mp .mp-target{display:inline-block;float:left;position:absolute;font-size:11px;padding:0 5px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;border-top-left-radius:4px;border-bottom-right-radius:4px;color:rgba(255,255,255,0.85);background-color:rgba(0,0,0,0.1)}.mp.mp-disabled .mp-inner{background-color:#e5e5e5}.mp.mp-disabled .mp-name{color:#999}.mp.mp-success .mp-inner{background-color:#368142}.mp.mp-success .mp-name{color:#fff}.mp.mp-danger .mp-inner{background-color:#d34242}.mp.mp-danger .mp-name{color:#fff}.mp.mp-warning .mp-inner{background-color:#f57523}.mp.mp-warning .mp-name{color:#fff}.host-offline{background-color:#ffcecc;height:36px;line-height:36px;padding:0 10px;color:#802506;font-size:20px;cursor:pointer}.host-offline .tips{display:none;font-size:12px}.host-offline:hover .tips{display:inline-block}.host-no-strategy{color:#999;font-size:16px}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important}#gritter-notice-wrapper{width:320px;max-width:480px}/*# sourceMappingURL=single.css.map */ \ No newline at end of file diff --git 
a/server/www/teleport/static/css/single.css.map b/server/www/teleport/static/css/single.css.map new file mode 100644 index 0000000..a948f77 --- /dev/null +++ b/server/www/teleport/static/css/single.css.map @@ -0,0 +1 @@ +{"version":3,"sources":["single.less","_base.less","_table.less","_overwrite_bootstrap.less","_overwrite_gritter.less"],"names":[],"mappings":"AAAA,SAAS,QCeT,KACE,eAGF,KACE,uDAViE,kBAAoB,uBAAyB,cAAe,mBAAoB,4BAUjJ,CAEA,wBAAA,CACA,WAGF,KAAM,KACJ,YAGF,gBACE,iBAGF,EACE,qBAGF,CAAC,MACC,qBAGF,CAAC,OACC,qBAGF,CAAC,QACC,qBAGF,CAAC,SACC,qBAGF,OACE,aAGF,MACE,mBAGF,aACE,WAGF,QACE,eAGF,aACE,cAAA,CACA,WAGF,MACE,mDAjE4D,wBAoE9D,EAAE,OACA,cAAA,CACA,kBAOF,iBACE,mBAEE,gBADF,KACG,aACC,0BAAA,CACA,8BAEF,gBALF,KAKG,YACC,2BAAA,CACA,+BAKN,qBACE,iBAAA,CACA,YAFF,oBAKE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UATJ,oBAKE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,oBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,oBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,oBAxBJ,GAME,GAkBG,oBAIC,QACE,YAIJ,oBAjCJ,GAME,GA2BG,4BACC,WAGF,oBArCJ,GAME,GA+BG,qBAEC,iBAAA,CACA,mBAEA,oBA1CN,GAME,GA+BG,oBAKE,gBACC,wBAAA,CACA,UAAA,CACA,+BAEA,oBA/CR,GAME,GA+BG,oBAKE,eAKG,EAAG,IACH,cAKN,oBArDJ,GAME,GA+CG,wBAAyB,oBArD9B,GAME,GA+C6B,oBAAqB,oBArDpD,GAME,GA+CmD,wBAC/C,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,eAAA,CACA,uBAEF,oBA5DJ,GAME,GAsDG,wBACC,aAAA,CACA,WAEF,oBAhEJ,GAME,GA0DG,oBAAqB,oBAhE1B,GAME,GA0DyB,wBAAyB,oBAhEpD,GAME,GA0DmD,4BAC/C,WAEF,oBAnEJ,GAME,GA6DG,oBAAqB,oBAnE1B,GAME,GA6DyB,4BACrB,iBAGF,oBAvEJ,GAME,GAiEG,wBAAyB,oBAvE9B,GAME,GAiE6B,sBAAuB,oBAvEtD,GAME,GAiEqD,sBACjD,iBAAA,CACA,eAAA,CACA,WAEF,oBA5EJ,GAME,GAsEG,wBACC,wBAAA,CACA,WAEF,oBAhFJ,GAME,GA0EG,sBACC,wBAAA,CACA,WAEF,oBApFJ,GAME,GA8EG,sBACC,wBAAA,CACA,WA3FR,oBAKE,GAME,GAmFE,MACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,gBAnGR,oBAKE,GAME,GA2FE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3GR,oBAKE,GAME,GAkGE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAhHR,oBAKE,GAME,GAuGE,QACE,gBAGF,oBAjHJ,GAME,GA2GG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,oBAvHJ,GAME,GAiHG,kBAAkB,aACjB,YADF,oBAvHJ,GAME,GAiHG,kBAAkB,YAEjB,MACE,0BAAA,CACA,8BAIJ,oBA/HJ,GAME,GAyHG,YACC,2BAAA,CACA,+BAGF,oBApIJ,GAME,GA8HG,kBAAkB,YACjB,YADF,oBApIJ,GAME,GA8HG,kBAAkB,WAEjB,MACE,2BAAA,CACA,+BAQV,mBACE,iBAAA,CACA,YAFF,kBAIE,IACE,oBAAA,CACA,WAAA,CACA,QAAA,CACA,UARJ,kBAIE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAGA,yBAAA,CACA,2BAAA,CACA,6BAEA,kBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,kBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,kBAxBJ,GAME,GAkBG,oBAIC,QACE,YAjCV,kBAIE,GAME,GA2BE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA1CR,kBAIE,GAME,GAkCE,QACE,gBAGF,kBA5CJ,GAME,GAsCG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,kBAlDJ,GAME,GA4CG,YACC,2BAAA,CACA,+BAMR,EAAE,OACA,aAGF,oBACE,kBADF,mBAGE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UAPJ,mBAGE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,mBAlBJ,GAME,GAYG,WACC,YASF,mBA5BJ,GAME,GAsBG,UACC,eAAA,CACA,SAAA,CACA,YAGF,mBAlCJ,GAME,GA4BG,YACC,eAAA,CACA,WAAA,CACA,UAxCR,mBAGE,GAME,GAkCE,eACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,WAAA,CACA,eAAA,CACA,gBAAA,CACA,YAnDR,mBAGE,GAME,GA6CE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3DR,mBAGE,GAME,GAoDE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAMF,mBAnEJ,GAME,GA6DG,aACC,0BAAA,CACA,0BAAA,CACA,8BAHF,mBAnEJ,GAME,GA6DG,YAKC,MALF,mBAnEJ,GAME,GA6DG,YAKO,eACJ,0BAAA,CACA,8BAIJ,mBA9EJ,GAME,GAwEG,YACC,2BAAA,CACA,+BAFF,mBA9EJ,GAME,GAwEG,WAIC,MAJF,mBA9EJ,GAME,GAwEG,WAIO,eACJ,2BAAA,CACA,+BAUV,gBAAgB,OACd,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,
YAAa,cAGf,cAAc,OACZ,UAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAAA,CACA,iBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAClF,QAAS,GAAT,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,YACE,iBAAA,CACA,gDAAA,CACA,WAAA,CACA,YAGF,iBACE,oBAAA,CAGA,cAAA,CACA,iBAAA,CACA,iBAAA,CAMA,wBAAA,CAEA,wBAAA,CACA,UAAA,CACA,WAAA,CAEA,YC/eF,OACE,mBAGA,MAAE,MAAQ,GAAK,IACb,qBAAA,CACA,6BAGF,MAAC,iBACC,WADF,MAAC,gBAIC,IACE,0BAAA,CACA,gBAAA,CACA,mBACA,MARH,gBAIC,GAIG,KACC,gBAAA,CACA,SAAA,CACA,mBAEF,MAbH,gBAIC,GASG,OACC,eAAA,CAGA,cAjBN,MAAC,gBAIC,GAeE,QACE,cAIJ,MAxBD,gBAwBE,sBACC,WADF,MAxBD,gBAwBE,qBAEC,IACE,gBACA,MA5BL,gBAwBE,qBAEC,GAEG,OACC,mBA7BR,MAAC,gBAkCC,GAAE,YACA,IACE,gBAKN,MAAC,mBACC,WADF,MAAC,kBAGC,IACE,WAAA,CACA,YAGA,MARH,kBAGC,GAKG,OACC,eAAA,CACA,cAAA,CACA,mBAGF,MAdH,kBAGC,GAWG,KACC,SAAA,CACA,kBAAA,CACA,gBAAA,CACA,mBAEF,MApBH,kBAGC,GAiBG,OACC,gBADF,MApBH,kBAGC,GAiBG,MAGC,OACE,SAAA,CACA,aAAA,CACA,iBANJ,MApBH,kBAGC,GAiBG,MASC,OAEE,gBAXJ,MApBH,kBAGC,GAiBG,MAcC,OACE,UAAA,CACA,gBAAA,CACA,qBAOV,MAAO,MAAQ,GAAK,IAClB,eAAA,CACA,YAAA,CACA,kBAAA,CACA,kBAAA,CACA,iBAAA,CACA,yBAGF,MAAO,MAAQ,GAAK,IAClB,WAAA,CACA,iBAAA,CACA,sBAGF,MAAO,MAAQ,GAAK,GAAK,SACvB,mBAGF,WACE,GAAE,SACA,eAAA,CACA,aAHJ,WAME,WAAU,KAAM,kBACd,uBAAA,CACA,gBAIJ,MAAM,WAAY,MAAM,UAAU,MAAM,WAAY,MAAM,cAAc,MAAM,WAAY,MAAM,eAC9F,eAWF,MAAM,WAAY,MAAM,SAAQ,OAAQ,MAAM,WAAY,MAAM,aAAY,OAAQ,MAAM,WAAY,MAAM,cAAa,OACvH,UAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,aAAb,CACA,WAeF,MAAM,WAAY,MAAM,SAAQ,OAC9B,UAAA,CACA,QAAS,QAGX,MAAM,WAAY,MAAM,aAAY,OAClC,QAAS,QAGX,MAAM,WAAY,MAAM,cAAa,OACnC,QAAS,QAGX,WAGE,eAaF,gBAGE,eAGF,SACE,cAAA,CACA,UAAA,CACA,oBAAA,CACA,kBAAA,CAIA,mDDhM4D,uBCgM5D,CACA,eAAA,CACA,uBAWF,YACE,kBAAA,CACA,iBAGF,iBACE,cAAA,CACA,UAAA,CACA,WAAA,CACA,cAGF,YACE,cAAA,CACA,UAAA,CACA,WAAA,CACA,eAAA,CACA,YAAA,CAEA,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,iBAAA,CACA,gBAAA,CACA,cAAA,CACA,kBAAA,YAGF,WAAY,MACV,oBAAA,CACA,mDD3O4D,uBC2O5D,CACA,cAAA,CACA,gBAIF,WAAY,GACV,oBAAA,CACA,UAAA,CACA,WAAA,CACA,eC3PF,aAAc,MACZ,eAAA,CAEA,eAyBF,mBACE,cAAA,CACA,KAAA,CACA,OAAA,CACA,QAAA,CACA,MAAA,CACA,aAIF,YACE,kBAKF,OACE,eAAA,CACA,kBAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,eAAA,CACA,YAAA,CACA,iBAAA,CACA,iBAGF,MAAC,WACC,gBAAA,CACA,iBAGF,MAAC,cACC,wBAAA,CACA,WAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAIJ,YAAa,QACX,WAGF,OACE,oBAAA,CACA,gBAAA,CACA,UAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,uBAAA,CACA,YAAA,CACA,kBAGF,MAAC,cACC,wBAAA,CACA,UAAA,CACA,+BAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAKJ,SAAS,aACP,WAAA,CACA,iBAAA,CACA,sBACA,SAJO,YAIN,QACC,eALJ,SAAS,YAQP,eACE,aAAA,CACA,cAAA,CACA,WA8BJ,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,WAAY,WAAY,WACzH,iBAAA,CACA,iBAOF,YAEE,mBACE,cAHJ,YAME,oBACE,cAPJ,YAUE,MAAK,uBAVP,YAWE,SAAQ,uBACN,cAZJ,YAeE,MAAK,4BAfP,YAeoC,SAAQ,4BACxC,cAIJ,MAEE,gBACE,gBAHJ,MAME,cAAc,QACZ,eAAA,CACA,kBACA,MAHF,cAAc,OAGX,OACC,aAAA,CACA,UAEF,MAPF,cAAc,OAOX,QAAS,MAPZ,cAAc,OAOD,OAAQ,MAPrB,cAAc,OAOQ,SAClB,wBAdN,MAkBE,iBAAiB,aAlBnB,MAkBgC,MAC5B,aAAA,CACA,eAOJ,OACE,eAAA,CACA,WAAA,CACA,mBAGF,eACE,WAAA,CACA,eAMF,iBACE,eAAA,CACA,cAAA,CACA,eAGF,WACE,eAAc,SACZ,aAAA,CACA,iBACA,WAHF,eAAc,QAGX,QACC,kBAAA,CACA,aAAA,CACA,iBAAA,CACA,kBAAA,CACA,cAAA,CACA,QAAS,OAAT,CACA,aAAA,CACA,YAAa,cAZnB,WAeE,eACE,eAAA,CACA,WAIJ,KAAK,oBACH,OACE,oBAAA,CACA,iBAAA,CAEA,eAIJ,aACE,UACE,cAAA,CACA,eH3QJ,mBACC,iBAGD,QACC,WAAA,CACA,kCAAA,CACA,eAAA
,CACA,WAAA,CACA,KAAA,CACA,UAAA,CACA,cAAA,CACA,YARD,OAaC,aACC,eAAA,CACA,WAAA,CACA,gBAAA,CAGA,wBAAA,CACA,WApBF,OAaC,YAUC,GACC,cAEA,OAbF,YAUC,EAGE,OACA,cA3BJ,OAaC,YAkBC,QACC,UAAA,CACA,oBAAA,CACA,cAAA,CACA,SAnCH,OAaC,YAkBC,OAMC,YACC,aAAA,CACA,UAAA,CACA,WAAA,CACA,qDAzCJ,OAaC,YAgCC,kBACC,UAAA,CACA,oBAAA,CACA,QAAA,CACA,SAAA,CACA,iBAlDH,OAaC,YAgCC,iBAOC,QACC,eArDJ,OAaC,YA2CC,uBACC,UAAA,CACA,oBAAA,CAEA,QAAA,CACA,UA7DH,OAaC,YA2CC,sBAOC,aACC,wBAAA,CACA,WAAA,CACA,QAAA,CACA,eAAA,CACA,WAAA,CACA,kBAAA,CACA,cAAA,CACA,WAEA,OA5DH,YA2CC,sBAOC,YAUG,GAAK,GAAI,QACV,cAAA,CACA,aAAA,CACA,UAAA,CAEA,QAAS,IA9Ed,OAaC,YA2CC,sBAOC,YAmBC,QACC,eAnFL,OAaC,YA2CC,sBAOC,YAuBC,YACC,cAAA,CACA,cAxFL,OAaC,YAgFC,mBACC,YAMH,cACC,eAAA,CACA,mBAGD,QACC,UAAA,CACA,WAAA,CACA,gBAAA,CACA,wBAAA,CACA,4BAAA,CACA,+BAAA,CACA,WAAA,CAOA,iBAAA,CACA,eANA,OAAC,qBACA,QAAA,CACA,eAOF,OACC,WADD,OACY,WADZ,OACuB,WADvB,OACkC,WADlC,OAC6C,WAD7C,OACwD,WADxD,OACmE,WADnE,OAC8E,WAD9E,OACyF,WADzF,OACoG,YADpG,OACgH,YAC9G,iBAAA,CACA,iBAKF,SAEC,eAAA,CACA,kBAAA,CAOA,qBAAA,CAMA,iBAAA,CACA,aAGA,QAAC,YACA,mBAIF,YAEC,UAAA,CAGA,6BAAA,CACA,gCAND,WAQC,eACC,6BAAA,CACA,WAAA,CACA,iBAAA,CACA,WAAA,CACA,sBAbF,WAQC,cAOC,UACC,mDCpL2D,uBDoL3D,CACA,cAAA,CACA,UAAA,CACA,qBAnBH,WAQC,cAcC,YACC,aAAA,CAEA,WAAA,CACA,iBAAA,CACA,eAAA,CACA,kBAAA,CACA,sBAAA,CACA,cAAA,CACA,WAAA,CACA,mBAhCH,WAQC,cA2BC,aAEC,UAAA,CACA,WAAA,CACA,eAAA,CACA,eAAA,CAEA,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,iBAAA,CACA,gBAAA,CACA,cAAA,CACA,kBAAA,YAhDH,WAQC,cA2BC,YAeC,MACC,oBAAA,CACA,mDCxN0D,uBDwN1D,CACA,cAAA,CACA,gBAtDJ,WAQC,cAkDC,UACC,gBA3DH,WAQC,cAkDC,SAEC,GACC,eAAA,CACA,iBAEA,WAxDH,cAkDC,SAEC,EAIE,aACA,cAED,WA3DH,cAkDC,SAEC,EAOE,YACA,eApEL,WA0EC,cACC,6BAAA,CACA,8BAAA,CAEA,mBA9EF,WA0EC,aAMC,IACC,6BAEA,WATF,aAMC,GAGE,YACA,gCApFJ,WA0EC,aAcC,gBACC,yBAGD,WAlBD,aAkBE,cACA,wBAAA,CACA,iBAAA,CACA,sBAHD,WAlBD,aAkBE,aAKA,mBACC,aAAA,CACA,eAnGJ,WAwGC,UAEC,IACC,yBA3GH,WAwGC,UAEC,GAGC,WACC,cAAA,CACA,YAAA,CACA,UAAA,CACA,YAAA,CACA,eAAA,CACA,WAAA,CACA,WAAA,CACA,YArHJ,WAwGC,UAEC,GAGC,UAUC,KACC,kBAxHL,WAwGC,UAEC,GAGC,UAUC,IAEC,WACC,mDC9RwD,wBDuS9D,SACC,gBADD,QAGC,WACC,cAAA,CACA,wBAAA,CACA,cAAA,CACA,YAAA,CACA,UAAA,CAEA,gBAAA,CACA,eAAA,CACA,WAAA,CAEA,YAdF,QAGC,UAaC,KACC,kBAEA,QAhBF,UAaC,IAGE,OACA,yBApBJ,QAGC,UAaC,IAOC,SAEC,aAAA,CACA,eAAA,CACA,kBAAA,CACA,gBAAA,CACA,wBAAA,CACA,UAAA,CACA,mDCtU0D,wBDuS9D,QAGC,UAaC,IAkBC,UACC,aAAA,CACA,gBAAA,CACA,wBAAA,CACA,WAtCJ,QAGC,UAaC,IAwBC,YACC,aAAA,CACA,gBAAA,CACA,wBAAA,CACA,WAOJ,UACC,WAAA,CACA,iBAFD,SAIC,aACC,SAAA,CACA,QAAA,CACA,eAAA,CACA,6BARF,SAWC,aACC,WAZF,SAeC,MACC,gBAyNF,IACC,oBAAA,CACA,SAAA,CACA,cAHD,GAKC,WACC,wBAAA,CAEA,UAAA,CACA,kBATF,GAaC,UACC,UAAA,CACA,WAAA,CACA,iBAAA,CAEA,eAAA,CACA,sBAAA,CACA,kBAAA,CACA,kBAIA,GAZD,SAYE,aAEA,gBAAA,CACA,mBA5BH,GA+BC,YACC,oBAAA,CACA,UAAA,CACA,iBAAA,CACA,cAAA,CAEA,aAAA,CACA,eAAA,CACA,sBAAA,CACA,kBAAA,CACA,0BAAA,CACA,8BAAA,CAEA,4BAAA,CACA,iCAGD,GAAC,YACA,WACC,yBAFF,GAAC,YAIA,UACC,WAGF,GAAC,WACA,WACC,yBAFF,GAAC,WAIA,UACC,WAGF,GAAC,UACA,WACC,yBAFF,GAAC,UAIA,UACC,WAGF,GAAC,WACA,WACC,yBAFF,GAAC,WAIA,UACC,WAKH,cACC,wBAAA,CACA,WAAA,CACA,gBAAA,CAEA,cAAA,CAEA,aAAA,CACA,cAAA,CAEA,eAVD,aAYC,OACC,YAAA,CACA,eAGD,aAAC,MACA,OACC,qBAKH,kBACC,UAAA,CACA,eI7qBD,wBAKE,aAGF,gBAAiB,cAAe,aAE9B,0BAAA,YAGF,aACE,0BAAA,CACA,4BAGF,gBACE,6BAAA,CACA,+BAGF,eAAgB,cAAe,gBAU7B,SAAA,YACA,SAAA,YACA,OAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,aAAA,YACA,kBAGF,cAAc,QAAS,cAAe,eAAc,QAClD,QAAS,OAAT,YACA,uBAAA,YACA,aAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,UAAA,YACA,aAAA,YACA,iBAAA,YACA,iBAAA,YACA,OAAA,YACA,KAAA,YAcF,eACE,cAAA,YACA,gBAAA,YACA,kBAAA,YACA,eAAA,YACA,UAAA,YACA,gBAAA,YAQF,cAEE,UAAA,YACA,cAAA,YACA,oBAAA,YA
GF,cACE,iBADF,cACmB,eADnB,cACkC,cAC9B,8BAAA,YAFJ,cAKE,gBACE,UAAA,YANJ,cASE,eACE,UAAA,YAVJ,cAaE,gBACE,SAAA,YACA,SAAA,YACA,OAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,aAAA,YACA,iBAAA,CACA,kBAAA,YAIJ,gBACE,iBADF,gBACmB,eADnB,gBACkC,cAE9B,4BAAA,YAHJ,gBAME,gBAEE,UAAA,YARJ,gBAWE,eAEE,UAAA,YAbJ,gBAgBE,gBACE,kBAAA,YAKJ,wBACE,WAAA,CAEA","file":"single.css","sourceRoot":"..\\less"} \ No newline at end of file diff --git a/server/www/teleport/static/css/style.css b/server/www/teleport/static/css/style.css new file mode 100644 index 0000000..dfa03c4 --- /dev/null +++ b/server/www/teleport/static/css/style.css @@ -0,0 +1 @@ +@charset "utf-8";html{font-size:13px}body{font-family:-apple-system,system-ui,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"PingFang SC","Hiragino Sans GB","Microsoft YaHei",sans-serif;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-state{text-align:center;white-space:nowrap}.remote-action-group ul li.remote-action-state.state-disabled{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px #fff}.remote-action-group ul li.remote-action-state.state-disabled>i.fa{color:#b53a2f}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 
8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li.remote-action-btn:first-child{border:none}.remote-action-group ul li.remote-action-btn:first-child .btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li.remote-action-btn:last-child{border:none}.remote-action-group ul li.remote-action-btn:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-info-group{margin-bottom:3px;height:28px}.remote-info-group ul{display:inline-block;height:28px;margin:0;padding:0}.remote-info-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-info-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-info-group ul li.remote-action-input{background:none;padding:4px 0}.remote-info-group ul li.remote-action-input select{border:none}.remote-info-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-info-group ul li select{margin-top:-3px}.remote-info-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-info-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child 
.form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat}.disable-bg{position:absolute;background:url(../img/css/disable-bg.png) repeat;opacity:.45;z-index:990}.disable-message{display:inline-block;font-size:160%;position:absolute;padding:20px 40px;border:1px solid #2b0002;background-color:#65181a;color:#fff;opacity:.85;z-index:991}.table.table-role .header{font-size:120%;margin-top:8px}.table.table-role td{text-align:left;vertical-align:top;padding:0}.table.table-role td.role-name{min-width:160px}.table.table-role td.role-name ul{list-style:none;margin:0;padding:0}.table.table-role td.role-name ul li{padding:8px 30px 8px 6px;white-space:nowrap}.table.table-role td.role-name ul li:first-child{margin-top:10px}.table.table-role td.role-name ul li.active{color:#fff;background-color:#4091dd}.table.table-role td.role-name ul li.active:hover{background-color:#4091dd;cursor:auto}.table.table-role td.role-name ul li:hover{background-color:#81b6e9;cursor:pointer}.table.table-role td.role-privilege{padding:0 0 10px 10px;border-left:3px solid #4091dd}.table.table-role td.role-privilege hr{margin:8px 0;border-bottom:1px solid rgba(255,255,255,0.3)}.table.table-role td.role-privilege .title{font-size:110%;margin:5px 0}.table.table-role td.role-privilege ul{list-style:none;margin:0;padding:0}.table.table-role td.role-privilege ul li{display:inline-block;width:180px;margin-bottom:5px;margin-left:10px}.table.table-role td.role-privilege ul li span{color:#8e8e8e}.table.table-role td.role-privilege ul li span:before{display:inline-block;width:16px;content:"\f096";font-family:'FontAwesome'}.table.table-role td.role-privilege ul li span.enabled{color:#3374b0}.table.table-role td.role-privilege ul li span.enabled:before{content:"\f046";font-family:'FontAwesome'}.table.table-role td.role-privilege.editable li span{cursor:pointer}.table.table-role tr:first-child td{border-top:none}#btn-create-role{margin-top:15px}.table{margin-bottom:10px}.table>thead>tr>th{vertical-align:middle;border-bottom:2px solid #ddd}.table.table-info-list{width:auto}.table.table-info-list td{border-top:1px dotted #ddd;padding:5px 10px;vertical-align:top}.table.table-info-list td.key{text-align:right;width:1px;white-space:nowrap}.table.table-info-list td.value{text-align:left;color:#767676}.table.table-info-list td .error{color:#cc3632}.table.table-info-list.table-info-list-lite{width:100%}.table.table-info-list.table-info-list-lite td{padding:5px 5px}.table.table-info-list.table-info-list-lite td.value{font-weight:normal}.table.table-info-list tr:first-child 
td{border-top:none}.table.table-config-list{width:100%}.table.table-config-list td{border:none;padding:5px}.table.table-config-list td.title{text-align:left;font-size:110%;font-weight:bolder}.table.table-config-list td.key{width:1px;white-space:nowrap;text-align:right;padding-right:15px}.table.table-config-list td.value{text-align:left}.table.table-config-list td.value input{width:4em;padding:0 5px;text-align:right}.table.table-config-list td.value .unit{margin-left:5px}.table.table-config-list td.value .desc{color:#999;margin-left:15px;display:inline-block}.table>thead>tr>th{padding:5px 5px;outline:none;white-space:nowrap;font-weight:normal;text-align:center;background-color:#ededed}.table>tbody>tr>td{padding:5px;text-align:center;vertical-align:middle}.table>tbody>tr>td .nowrap{white-space:nowrap}.table-data td.loading{text-align:left;padding:20px}.table-data .btn-group.open .dropdown-toggle{-webkit-box-shadow:none;box-shadow:none}.table.table-data thead .sorting,.table.table-data thead .sorting_asc,.table.table-data thead .sorting_desc{cursor:pointer}.table.table-data thead .sorting:after,.table.table-data thead .sorting_asc:after,.table.table-data thead .sorting_desc:after{bottom:4px;padding-left:5px;display:inline-block;font-family:'FontAwesome';opacity:.8}.table.table-data thead .sorting:after{opacity:.2;content:"\f0dc"}.table.table-data thead .sorting_asc:after{content:"\f0de"}.table.table-data thead .sorting_desc:after{content:"\f0dd"}.host-name{font-size:16px}.host-name-desc{cursor:pointer}.host-ip{font-size:12px;color:#999;display:inline-block;white-space:nowrap;font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace;overflow:hidden;text-overflow:ellipsis}.td-ip-list{padding-right:20px;padding-left:5px}.td-ip-show-more{font-size:14px;width:12px;float:right;display:block}.td-ip-item{min-width:12em;width:12em;height:18px;padding:2px 4px;margin:1px 0;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.td-ip-item span{display:inline-block;font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.td-ip-item a{display:inline-block;width:14px;float:right;font-size:14px}.btn-group-sm>.btn{padding:3px 5px;font-size:12px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{padding:3px 6px;border-radius:10px;font-size:13px;font-weight:400;background-color:#999}.badge.badge-sm{font-size:12px;padding:3px 5px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-8px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.btn-success .badge{color:#fff}.label{display:inline-block;padding:5px 10px;margin:2px;font-size:13px;font-weight:400;background-color:#999}.label.label-sm{font-size:12px;padding:3px 8px 4px 8px;margin-top:0;border-radius:3px}.label.label-ignore{background-color:#e5e5e5;color:#aaa;text-shadow:-1px -1px 1px 
#fff}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.modal .modal-content{border-radius:0}.modal .modal-header .close{margin-top:-4px;margin-right:-6px}.modal .modal-header .close:hover{color:#9c3023;opacity:1}.modal .modal-header .close:active,.modal .modal-header .close:focus,.modal .modal-header .close:visited{-webkit-appearance:none}.modal .form-horizontal .form-group,.modal .row{margin-left:0;margin-right:0}.alert{border-radius:0;padding:5px;margin-bottom:10px}.dropdown-menu{min-width:0;font-size:13px}.form-control-sm{padding:3px 5px;font-size:13px;height:inherit}.form-group .control-label.require{color:#505050;font-weight:bold}.form-group .control-label.require:before{font-weight:normal;color:#ac4e43;position:absolute;margin-left:-1.2em;margin-top:1px;content:"\f069";font-size:8px;font-family:'FontAwesome'}.form-group .control-desc{padding-top:6px;color:#999}label.form-control-static input{display:inline-block;position:relative;margin-top:4px}.control-desc .popover{max-width:none;font-size:13px}body.page-header-fixed{padding-top:48px}body.page-sidebar-fixed .page-sidebar{position:fixed}body.page-sidebar-fixed .page-content{margin-left:180px}.page-header{border:none;box-shadow:0 0 3px rgba(0,0,0,0.5)}.page-header.navbar{overflow:hidden;min-height:48px;height:48px;margin:0}.page-header.navbar .brand{display:inline-block;float:left;width:180px;height:48px;padding:12px 0 0;text-align:center;margin:0 auto;background-color:#3a3a3a}.page-header.navbar .brand .logo{display:inline-block;width:93px;height:30px;background:url(../img/site-logo-small.png) no-repeat}.page-header.navbar .breadcrumb-container{display:inline-block;padding-top:6px}.page-header.navbar .breadcrumb-container .breadcrumb{background-color:transparent;padding-left:20px;font-size:16px}.page-header.navbar .breadcrumb-container .breadcrumb>li+li:before{content:"\f105";font-family:'FontAwesome'}.page-header .container-fluid{padding-left:0}.page-sidebar{top:48px;bottom:0;left:0;width:180px;padding-top:0;z-index:1010;background-color:#3a3a3a}.page-sidebar .nav-menu>li>a{padding:8px 0 8px 20px;line-height:24px;font-size:13px;color:#c2c2c2;border-left:5px solid #3a3a3a}.page-sidebar .nav-menu>li>a:focus{background-color:#3a3a3a;border-left:5px solid #3a3a3a}.page-sidebar .nav-menu>li>a:hover{background-color:#2d2d2d;border-left:5px solid #005c74}.page-sidebar .nav-menu>li>a.selected{border-left:5px solid #00485b}.page-sidebar .nav-menu>li>a.selected:focus{border-left:5px solid #00485b}.page-sidebar .nav-menu>li>a.active{color:#fff;background-color:#0084a7;border-left:5px solid #0084a7}.page-sidebar .nav-menu>li>a.active:hover{border-left:5px solid #00acda}.page-sidebar 
.nav-menu>li>a.active:after{content:"\e251";font-family:'Glyphicons Halflings';position:relative;top:1px;display:inline-block;font-style:normal;font-weight:400;float:right;color:#e9e9e9;font-size:20px;line-height:24px;margin-right:-6px}.page-sidebar .nav-menu li .menu-caret:after{display:inline-block;width:12px;height:12px;margin-left:5px;top:1px;position:relative;border:none;font-family:'FontAwesome';font-style:normal}.page-sidebar .nav-menu li .menu-caret:after{content:'\f0da'}.page-sidebar .nav-menu li.expand .menu-caret:after{content:'\f0d7'}.page-sidebar .nav-menu>li>a>i.icon{float:left;margin-top:1px;margin-right:15px;text-align:center;line-height:24px;font-size:14px}.page-sidebar .sub-menu{padding:0;margin:0;background-color:#292929;position:relative;list-style-type:none;border-top:1px solid #202020;border-bottom:1px solid #464646}.page-sidebar .sub-menu>li>a{padding:8px 0 8px 40px;line-height:20px;font-size:13px;display:block;position:relative;color:#889097;border-left:5px solid #292929}.page-sidebar .sub-menu>li>a:before{display:inline-block;padding-right:8px;line-height:20px;content:"\f105";font-family:'FontAwesome'}.page-sidebar .sub-menu>li>a:hover{color:#fff;border-left:5px solid #005c74}.page-sidebar .sub-menu>li>a.active{color:#fff;background-color:#0084a7;border-left:5px solid #0084a7}.page-sidebar .sub-menu>li>a.active:hover{border-left:5px solid #00acda}.page-sidebar .sub-menu>li>a.active:after{content:"\e251";font-family:'Glyphicons Halflings';position:relative;top:-1px;display:inline-block;font-style:normal;font-weight:400;float:right;color:#e9e9e9;font-size:18px;line-height:24px;margin-right:-6px}.page-sidebar .nav-menu>li.profile{padding:10px 10px;color:#ccc;background-color:#333;border-bottom:1px solid #464646}.page-sidebar .nav-menu>li.profile a.title{color:#ccc}.page-sidebar .nav-menu>li.profile a.title:hover{color:#fff;background-color:transparent}.page-sidebar .nav-menu>li.profile a.title:focus{background-color:transparent}.page-sidebar .nav-menu>li.profile .image{float:left;margin-top:3px;font-size:24px;color:#69f;width:36px;height:36px;border-radius:6px;background-color:#eee;text-align:center;margin-right:10px;overflow:hidden}.page-sidebar .nav-menu>li.profile .image img{margin-top:-3px}.page-sidebar .nav-menu>li.profile .name{display:block;padding-top:3px;font-size:14px}.page-sidebar .nav-menu>li.profile .role{display:block;font-size:12px;color:#999}.page-sidebar .nav-menu>li.profile.active{background-color:#0084a7;color:#fff}.page-sidebar .nav-menu>li.profile.active .name,.page-sidebar .nav-menu>li.profile.active .role{color:#fff}.page-sidebar .nav-menu>li.profile.active:after{content:"\e251";font-family:'Glyphicons Halflings';position:absolute;top:0;right:-1px;display:inline-block;font-style:normal;font-weight:400;color:#e9e9e9;font-size:24px;line-height:61px;margin-right:-6px}.page-sidebar .nav-menu>li.profile .dropdown-menu{min-width:0;font-size:13px}.page-sidebar .nav-menu>li.profile .dropdown-menu>li>a{padding-right:30px}.page-sidebar .nav-menu>li.profile .dropdown-menu>li>a:hover{background-color:#ccc}.page-sidebar .nav-menu>li.profile .dropdown-menu .divider{margin:5px 0}.page-sidebar .badge{margin-top:-10px;margin-left:5px}.page-content-inner{padding:15px}.box{border:none;box-shadow:1px 1px 2px rgba(0,0,0,0.2);background-color:#fff;padding:15px;margin-bottom:15px}.box .nav-tabs{font-size:14px;font-weight:bold}.box .nav-tabs>li:first-child{margin-left:30px}.box .tab-content>.tab-pane{background-color:#fff;padding:20px;border:1px solid 
#ddd;border-top:none;border-bottom-left-radius:3px;border-bottom-right-radius:3px}.box.box-nav-tabs{padding:0;background-color:#f5f5f5}.box.box-nav-tabs .nav-tabs{font-size:inherit;font-weight:inherit}.box.box-nav-tabs .nav-tabs>li{padding-top:8px}.box.box-nav-tabs .nav-tabs>li>a{margin-right:5px;border-top-left-radius:3px;border-top-right-radius:3px;padding:3px 15px;background-color:#e6e6e6;border:1px solid #ddd;border-bottom-color:transparent}.box.box-nav-tabs .nav-tabs>li.active>a{font-weight:bold;border:1px solid #ddd;border-bottom-color:transparent;background-color:#fff}.box.box-nav-tabs .tab-content>.tab-pane{padding:20px;border:none}.box-btn-bar{line-height:30px}.box-btn-bar a.btn{margin-right:20px}.page-filter{height:36px;line-height:36px;margin-bottom:10px}.page-filter .form-control{margin-top:5px;margin-right:4px}.form-group .input-group{margin-bottom:5px}.op_box{display:block;padding:5px;border-radius:3px;text-align:center;margin-top:5px}.op_error{background:#fbb}.op_wait{background:#ccc}.more-action{position:absolute !important}.more-action .dropdown-menu{background-color:rgba(60,60,60,0.9);color:#fff;font-size:13px}.more-action .dropdown-menu.dropdown-menu-left{margin-left:-120px}.more-action .dropdown-menu>li>a{padding:5px 20px;color:#fff}.more-action .dropdown-menu>li>a:hover,.more-action .dropdown-menu>li>a:active,.more-action .dropdown-menu>li>a:visited{background-color:#0084a7}.more-action .dropdown-menu .divider{margin:5px 0;background-color:#666}.popover-inline-edit input,.popover-inline-edit .btn{height:30px}.popover-inline-edit .popover-title{background-color:#ddd}.popover-inline-edit .popover-content{padding:20px 10px}.popover-inline-edit .popover{padding:0;max-width:500px}.popover-inline-edit .popover .popover-content{padding:10px 10px 20px 10px}.popover-inline-edit .popover.bottom>.arrow:after{top:1px;margin-left:-10px;content:" ";border-top-width:0;border-bottom-color:#ddd}.tp-table-filter{width:100%;height:25px;margin:0;padding:0}.tp-table-filter .tp-table-filter-inner{display:table;margin:0;padding:0}.tp-table-filter.tp-table-filter-input .tp-table-filter-inner{width:100%}.tp-table-filter.tp-table-filter-left .tp-table-filter-inner,.tp-table-filter.tp-table-filter-right .tp-table-filter-inner{width:auto}.tp-table-filter.tp-table-filter-center .tp-table-filter-inner{margin:0 auto}.tp-table-filter .search-title{display:table-cell;vertical-align:middle;height:25px;line-height:25px;color:#555;white-space:nowrap;width:1px;text-align:left}.tp-table-filter .search-input,.tp-table-filter .search-select{display:table-cell;text-align:left;height:25px}.tp-table-filter .search-input>.btn,.tp-table-filter .search-select>.btn{font-size:13px;background-color:transparent;border:none;padding:0 0 0 8px;border-radius:0}.tp-table-filter .search-input>.btn:active,.tp-table-filter .search-select>.btn:active{box-shadow:none}.tp-table-filter .search-input .dropdown-menu,.tp-table-filter .search-select .dropdown-menu{min-width:0;background-color:rgba(0,0,0,0.8);color:#fff;font-size:13px;box-shadow:0 3px 6px rgba(0,0,0,0.3);border:none}.tp-table-filter .search-input .dropdown-menu>li>a,.tp-table-filter .search-select .dropdown-menu>li>a{padding-right:30px;min-width:100px;color:#fff}.tp-table-filter .search-input .dropdown-menu>li>a:hover,.tp-table-filter .search-select .dropdown-menu>li>a:hover,.tp-table-filter .search-input .dropdown-menu>li>a:active,.tp-table-filter .search-select .dropdown-menu>li>a:active,.tp-table-filter .search-input .dropdown-menu>li>a:visited,.tp-table-filter 
.search-select .dropdown-menu>li>a:visited{background-color:rgba(255,255,255,0.2)}.tp-table-filter .search-input .dropdown-menu .divider,.tp-table-filter .search-select .dropdown-menu .divider{margin:5px 0;background-color:#666}.tp-table-filter .search-input{width:100%;padding-left:10px}.tp-table-filter .search-input>.input-group .input-group-addon{font-size:13px;padding:0 5px}.tp-table-filter .search-input>.input-group input.form-control{font-size:13px;height:25px;width:100%;padding:0 5px}.btn.btn-no-border{font-size:13px;background-color:transparent;border:none;padding:0 0 0 8px;border-radius:0}.btn.btn-no-border:active{box-shadow:none}.table>tbody>tr.table-footer-action{background-color:transparent}.table>tbody>tr.table-footer-action>td{border-color:transparent;padding-top:15px;vertical-align:middle}.table-extend-area,.table-prefix-area{display:table;width:100%}.table-extend-area .table-extend-cell,.table-prefix-area .table-extend-cell{display:table-cell;height:24px;line-height:24px;padding:3px}.table-extend-area .table-extend-cell.table-extend-cell-right,.table-prefix-area .table-extend-cell.table-extend-cell-right{text-align:right}.table-extend-area .table-extend-cell .table-name,.table-prefix-area .table-extend-cell .table-name{font-size:120%;position:relative;top:3px;padding-right:10px}.table-extend-area .table-extend-cell .pagination,.table-prefix-area .table-extend-cell .pagination{margin:0 0}.table-extend-area .table-extend-cell .pagination>li>span,.table-prefix-area .table-extend-cell .pagination>li>span,.table-extend-area .table-extend-cell .pagination a,.table-prefix-area .table-extend-cell .pagination a{padding:3px 8px;font-size:12px;line-height:1.5}.table-extend-area .table-extend-cell.checkbox-select-all,.table-prefix-area .table-extend-cell.checkbox-select-all{width:36px;text-align:center}.table-extend-area .table-extend-cell.checkbox-select-all input,.table-prefix-area .table-extend-cell.checkbox-select-all input{position:relative;top:3px}.table-extend-area .table-extend-cell.group-actions>.btn-group .btn,.table-prefix-area .table-extend-cell.group-actions>.btn-group .btn{padding:3px 5px;font-size:12px}.table-extend-area .table-extend-cell.table-item-counter,.table-prefix-area .table-extend-cell.table-item-counter{text-align:right}.table-extend-area .table-extend-cell.table-item-counter ol,.table-prefix-area .table-extend-cell.table-item-counter ol{list-style:none;padding:0;margin:0;border:none;background-color:transparent;color:#666}.table-extend-area .table-extend-cell.table-item-counter ol>li,.table-prefix-area .table-extend-cell.table-item-counter ol>li{display:inline-block;padding-left:10px}.table-extend-area .table-extend-cell.table-item-counter .btn,.table-prefix-area .table-extend-cell.table-item-counter .btn{margin-top:-3px}.table-extend-area .table-extend-cell.table-item-counter .btn .caret,.table-prefix-area .table-extend-cell.table-item-counter .btn .caret{margin-top:-3px}.table-prefix-area{margin-bottom:10px}.table-prefix-area .table-extend-cell{padding:0}.btn-group.pagination .btn.dropdown-toggle{background-color:transparent;border:1px solid #999}textarea.textarea-resize-y{resize:vertical}textarea.textarea-resize-none{resize:none}textarea.textarea-code{font-family:Monaco,Lucida Console,Consolas,Courier,'Courier New',monospace}textarea.cert_pub{width:100%;height:64px;border:1px solid #e2e2e2;background-color:#e4ffe5}.box ul.help-list{list-style:none;margin:0 0 10px 0;padding:0}.box ul.help-list li{padding:3px 3px 3px 20px;margin-left:10px}.box ul.help-list 
li:before{color:#999;position:absolute;margin-left:-1.2em;margin-top:1px;content:"\f05a";font-family:'FontAwesome'}.box ul.help-list li em{color:#3374b0;font-style:normal}.tp-checkbox{display:inline-block;color:#6487ad}.tp-checkbox.tp-disabled{cursor:not-allowed;color:#c2c2c2}.tp-checkbox:before{display:inline-block;position:relative;top:1px;width:16px;content:"\f096";font-family:'FontAwesome'}.tp-checkbox.tp-editable{cursor:pointer}.tp-checkbox.tp-editable:hover{color:#459dee}.tp-checkbox.tp-selected,.tp-checkbox.tp-checked{color:#3374b0}.tp-checkbox.tp-selected:before,.tp-checkbox.tp-checked:before{content:"\f046";font-family:'FontAwesome'}i.upload-button{font-size:128px;color:#b1b1b1}i.upload-button:hover{cursor:pointer;color:#709cff}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important}#gritter-notice-wrapper{width:320px;max-width:480px}/*# sourceMappingURL=style.css.map */ \ No newline at end of file diff --git a/server/www/teleport/static/css/style.css.map b/server/www/teleport/static/css/style.css.map new file mode 100644 index 0000000..58f4652 --- /dev/null +++ b/server/www/teleport/static/css/style.css.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["style.less","_base.less","_role.less","_table.less","_overwrite_bootstrap.less","_overwrite_gritter.less"],"names":[],"mappings":"AAAA,SAAS,QCeT,KACE,eAGF,KACE,uDAViE,kBAAoB,uBAAyB,cAAe,mBAAoB,4BAUjJ,CAEA,wBAAA,CACA,WAGF,KAAM,KACJ,YAGF,gBACE,iBAGF,EACE,qBAGF,CAAC,MACC,qBAGF,CAAC,OACC,qBAGF,CAAC,QACC,qBAGF,CAAC,SACC,qBAGF,OACE,aAGF,MACE,mBAGF,aACE,WAGF,QACE,eAGF,aACE,cAAA,CACA,WAGF,MACE,mDAjE4D,wBAoE9D,EAAE,OACA,cAAA,CACA,kBAOF,iBACE,mBAEE,gBADF,KACG,aACC,0BAAA,CACA,8BAEF,gBALF,KAKG,YACC,2BAAA,CACA,+BAKN,qBACE,iBAAA,CACA,YAFF,oBAKE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UATJ,oBAKE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,oBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,oBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,oBAxBJ,GAME,GAkBG,oBAIC,QACE,YAIJ,oBAjCJ,GAME,GA2BG,4BACC,WAGF,oBArCJ,GAME,GA+BG,qBAEC,iBAAA,CACA,mBAEA,oBA1CN,GAME,GA+BG,oBAKE,gBACC,wBAAA,CACA,UAAA,CACA,+BAEA,oBA/CR,GAME,GA+BG,oBAKE,eAKG,EAAG,IACH,cAKN,oBArDJ,GAME,GA+CG,wBAAyB,oBArD9B,GAME,GA+C6B,oBAAqB,oBArDpD,GAME,GA+CmD,wBAC/C,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,eAAA,CACA,uBAEF,oBA5DJ,GAME,GAsDG,wBACC,aAAA,CACA,WAEF,oBAhEJ,GAME,GA0DG,oBAAqB,oBAhE1B,GAME,GA0DyB,wBAAyB,oBAhEpD,GAME,GA0DmD,4BAC/C,WAEF,oBAnEJ,GAME,GA6DG,oBAAqB,oBAnE1B,GAME,GA6DyB,4BACrB,iBAGF,oBAvEJ,GAME,GAiEG,wBAAyB,oBAvE9B,GAME,GAiE6B,sBAAuB,oBAvEtD,GAME,GAiEqD,sBACjD,iBAAA,CACA,eAAA,CACA,WAEF,oBA5EJ,GAME,GAsEG,wBACC,wBAAA,CACA,WAEF,oBAhFJ,GAME,GA0EG,sBACC,wBAAA,CACA,WAEF,oBApFJ,GAME,GA8EG,sBACC,wBAAA,CACA,WA3FR,oBAKE,GAME,GAmFE,MACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,gBAnGR,oBAKE,GAME,GA2FE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3GR,oBAKE,GAME,GAkGE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAhHR,oBAKE,GAME,GAuGE,QACE,gBAGF,oBAjHJ,GAME,GA2GG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,oBAvHJ,GAME,GAiHG,kBAAkB,aACjB,YADF,oBAvHJ,GAME,GAiHG,kBAAkB,YAEjB,MACE,0BAAA,CACA,8BAIJ,oBA/HJ,GAME,GAyHG,YACC,2BAAA,CACA,+BAGF,oBApIJ,GAME,GA8HG,kBAAkB,YACjB,YADF,oBApIJ,GAME,GA8HG,kBAAkB,WAEjB,MACE,2BAAA,CACA,+BAQV,mBACE,iBAAA,CACA,YAFF,kBAIE,IACE,oBAAA,CACA,WAAA,CACA,QAAA,CACA,UARJ,kBAIE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAGA,yBAAA,CACA,2BAAA,CACA,6BAEA,kBAlBJ,GAME,GAYG,mBACC,eAAA,CACA,SAAA,CACA,YAGF,kBAxBJ,GAME,GAkBG,qBACC,eAAA,CACA,cAFF,kBAxBJ,GAME,GAkBG,oBAIC,QACE,YAjCV,kBAIE,GAME,GA2BE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA1CR,kBAIE,GAME,GAkCE,QACE,gBAGF,kBA5CJ,GAME,GAsCG,aACC,0BAAA,CACA,0BAAA,CACA,8BAGF,kBAlDJ,GAME,GA4CG,YACC,2BAAA,CACA,+BAMR,EAAE,OACA,aAGF,oBACE,kBADF,mBAGE,IACE,aAAA,CACA,WAAA,CACA,QAAA,CACA,UAPJ,mBAGE,GAME,IACE,UAAA,CACA,iBAAA,CACA,aAAA,CACA,WAAA,CACA,eAAA,CAEA,qBAAA,CACA,yBAAA,CACA,2BAAA,CACA,6BAEA,mBAlBJ,GAME,GAYG,WACC,YASF,mBA5BJ,GAME,GAsBG,UACC,eAAA,CACA,SAAA,CACA,YAGF,mBAlCJ,GAME,GA4BG,YACC,eAAA,CACA,WAAA,CACA,UAxCR,mBAGE,GAME,GAkCE,eACE,eAAA,CACA,QAAA,CACA,eAAA,CACA,cAAA,CACA,WAAA,CACA,eAAA,CACA,gBAAA,CACA,YAnDR,mBAGE,GAME,GA6CE,OACE,SAAA,CACA,aAAA,CACA,UAAA,CACA,cAAA,CACA,eA3DR,mBAGE,GAME,GAoDE,MAAK,gBACH,aAAA,CACA,UAAA,CACA,mBAMF,mBAnEJ,GAME,GA6DG,aACC,0BAAA,CACA,0BAAA,CACA,8BAHF,mBAnEJ,GAME,GA6DG,YAKC,MALF,mBAnEJ,GAME,GA6DG,YAKO,eACJ,0BAAA,CACA,8BAIJ,mBA9EJ,GAME,GAwEG,YACC,2BAAA,CACA,+BAFF,mBA9EJ,GAME,GAwEG,WAIC,MAJF,mBA9EJ,GAME,GAwEG,WAIO,eACJ,2BAAA,CACA,+BAUV,gBAAgB,OACd,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,UAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAAA,CACA,iBAAA,CACA,oBAAA,CACA,YAAa,cAGf,cAAc,OACZ,aAAA,CACA,QAAS,OAAT,CACA,cAAA,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,oBA
AA,CACA,YAAa,cAGf,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAAQ,eAAe,OAClF,QAAS,GAAT,CACA,UAAA,CACA,WAAA,CACA,gBAAA,CACA,qBAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,eAAe,OACb,0DAGF,YACE,iBAAA,CACA,gDAAA,CACA,WAAA,CACA,YAGF,iBACE,oBAAA,CAGA,cAAA,CACA,iBAAA,CACA,iBAAA,CAMA,wBAAA,CAEA,wBAAA,CACA,UAAA,CACA,WAAA,CAEA,YC7eF,MAAM,WACJ,SACE,cAAA,CACA,eAHJ,MAAM,WAKJ,IACE,eAAA,CACA,kBAAA,CACA,UACA,MATE,WAKJ,GAIG,WACC,gBADF,MATE,WAKJ,GAIG,UAIC,IACE,eAAA,CACA,QAAA,CACA,UAPJ,MATE,WAKJ,GAIG,UAIC,GAIE,IACE,wBAAA,CACA,mBAKA,MAxBJ,WAKJ,GAIG,UAIC,GAIE,GAOG,aACC,gBAEF,MA3BJ,WAKJ,GAIG,UAIC,GAIE,GAUG,QAGC,UAAA,CAGA,yBAEA,MAnCN,WAKJ,GAIG,UAIC,GAIE,GAUG,OAQE,OACC,wBAAA,CACA,YAGJ,MAxCJ,WAKJ,GAIG,UAIC,GAIE,GAuBG,OACC,wBAAA,CAEA,eAKR,MAhDE,WAKJ,GA2CG,gBACC,qBAAA,CAGA,8BAJF,MAhDE,WAKJ,GA2CG,eAKC,IACE,YAAA,CAGA,8CATJ,MAhDE,WAKJ,GA2CG,eAYC,QACE,cAAA,CACA,aAdJ,MAhDE,WAKJ,GA2CG,eAiBC,IACE,eAAA,CACA,QAAA,CACA,UApBJ,MAhDE,WAKJ,GA2CG,eAiBC,GAIE,IACE,oBAAA,CACA,WAAA,CACA,iBAAA,CACA,iBAzBN,MAhDE,WAKJ,GA2CG,eAiBC,GAIE,GAME,MAEE,cACA,MA9EN,WAKJ,GA2CG,eAiBC,GAIE,GAME,KAGG,QACC,oBAAA,CACA,UAAA,CACA,QAAS,OAAT,CACA,YAAa,cAEf,MApFN,WAKJ,GA2CG,eAiBC,GAIE,GAME,KASG,SACC,cACA,MAtFR,WAKJ,GA2CG,eAiBC,GAIE,GAME,KASG,QAEE,QACC,QAAS,OAAT,CACA,YAAa,cAOvB,MA/FA,WAKJ,GA2CG,eA+CE,SACC,GAAG,MACD,eAjGV,MAAM,WAuGJ,GAAE,YAAa,IACb,gBAOJ,iBACE,gBClHF,OACE,mBAGA,MAAE,MAAQ,GAAK,IACb,qBAAA,CACA,6BAGF,MAAC,iBACC,WADF,MAAC,gBAIC,IACE,0BAAA,CACA,gBAAA,CACA,mBACA,MARH,gBAIC,GAIG,KACC,gBAAA,CACA,SAAA,CACA,mBAEF,MAbH,gBAIC,GASG,OACC,eAAA,CAGA,cAjBN,MAAC,gBAIC,GAeE,QACE,cAIJ,MAxBD,gBAwBE,sBACC,WADF,MAxBD,gBAwBE,qBAEC,IACE,gBACA,MA5BL,gBAwBE,qBAEC,GAEG,OACC,mBA7BR,MAAC,gBAkCC,GAAE,YACA,IACE,gBAKN,MAAC,mBACC,WADF,MAAC,kBAGC,IACE,WAAA,CACA,YAGA,MARH,kBAGC,GAKG,OACC,eAAA,CACA,cAAA,CACA,mBAGF,MAdH,kBAGC,GAWG,KACC,SAAA,CACA,kBAAA,CACA,gBAAA,CACA,mBAEF,MApBH,kBAGC,GAiBG,OACC,gBADF,MApBH,kBAGC,GAiBG,MAGC,OACE,SAAA,CACA,aAAA,CACA,iBANJ,MApBH,kBAGC,GAiBG,MASC,OAEE,gBAXJ,MApBH,kBAGC,GAiBG,MAcC,OACE,UAAA,CACA,gBAAA,CACA,qBAOV,MAAO,MAAQ,GAAK,IAClB,eAAA,CACA,YAAA,CACA,kBAAA,CACA,kBAAA,CACA,iBAAA,CACA,yBAGF,MAAO,MAAQ,GAAK,IAClB,WAAA,CACA,iBAAA,CACA,sBAGF,MAAO,MAAQ,GAAK,GAAK,SACvB,mBAGF,WACE,GAAE,SACA,eAAA,CACA,aAHJ,WAME,WAAU,KAAM,kBACd,uBAAA,CACA,gBAIJ,MAAM,WAAY,MAAM,UAAU,MAAM,WAAY,MAAM,cAAc,MAAM,WAAY,MAAM,eAC9F,eAWF,MAAM,WAAY,MAAM,SAAQ,OAAQ,MAAM,WAAY,MAAM,aAAY,OAAQ,MAAM,WAAY,MAAM,cAAa,OACvH,UAAA,CACA,gBAAA,CACA,oBAAA,CACA,YAAa,aAAb,CACA,WAeF,MAAM,WAAY,MAAM,SAAQ,OAC9B,UAAA,CACA,QAAS,QAGX,MAAM,WAAY,MAAM,aAAY,OAClC,QAAS,QAGX,MAAM,WAAY,MAAM,cAAa,OACnC,QAAS,QAGX,WAGE,eAaF,gBAGE,eAGF,SACE,cAAA,CACA,UAAA,CACA,oBAAA,CACA,kBAAA,CAIA,mDFhM4D,uBEgM5D,CACA,eAAA,CACA,uBAWF,YACE,kBAAA,CACA,iBAGF,iBACE,cAAA,CACA,UAAA,CACA,WAAA,CACA,cAGF,YACE,cAAA,CACA,UAAA,CACA,WAAA,CACA,eAAA,CACA,YAAA,CAEA,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,iBAAA,CACA,gBAAA,CACA,cAAA,CACA,kBAAA,YAGF,WAAY,MACV,oBAAA,CACA,mDF3O4D,uBE2O5D,CACA,cAAA,CACA,gBAIF,WAAY,GACV,oBAAA,CACA,UAAA,CACA,WAAA,CACA,eC3PF,aAAc,MACZ,eAAA,CAEA,eAyBF,mBACE,cAAA,CACA,KAAA,CACA,OAAA,CACA,QAAA,CACA,MAAA,CACA,aAIF,YACE,kBAKF,OACE,eAAA,CACA,kBAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,eAAA,CACA,YAAA,CACA,iBAAA,CACA,iBAGF,MAAC,WACC,gBAAA,CACA,iBAGF,MAAC,cACC,wBAAA,CACA,WAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAIJ,YAAa,QACX,WAGF,OACE,oBAAA,CACA,gBAAA,CACA,UAAA,CACA,cAAA,CACA,eAAA,CACA,sBAEA,MAAC,UACC,cAAA,CACA,uBAAA,CACA,YAAA,CACA,kBAGF,MAAC,cACC,wBAAA,CACA,UAAA,CACA,+BAEF,MAAC,YACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,eACC,yBAEF,MAAC,cACC,yBAKJ,SAAS,aACP,WAAA,CACA,iBAAA,CACA,sBACA,SAJO,YAIN,QACC,eALJ,
SAAS,YAQP,eACE,aAAA,CACA,cAAA,CACA,WA8BJ,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,UAAW,WAAY,WAAY,WACzH,iBAAA,CACA,iBAOF,YAEE,mBACE,cAHJ,YAME,oBACE,cAPJ,YAUE,MAAK,uBAVP,YAWE,SAAQ,uBACN,cAZJ,YAeE,MAAK,4BAfP,YAeoC,SAAQ,4BACxC,cAIJ,MAEE,gBACE,gBAHJ,MAME,cAAc,QACZ,eAAA,CACA,kBACA,MAHF,cAAc,OAGX,OACC,aAAA,CACA,UAEF,MAPF,cAAc,OAOX,QAAS,MAPZ,cAAc,OAOD,OAAQ,MAPrB,cAAc,OAOQ,SAClB,wBAdN,MAkBE,iBAAiB,aAlBnB,MAkBgC,MAC5B,aAAA,CACA,eAOJ,OACE,eAAA,CACA,WAAA,CACA,mBAGF,eACE,WAAA,CACA,eAMF,iBACE,eAAA,CACA,cAAA,CACA,eAGF,WACE,eAAc,SACZ,aAAA,CACA,iBACA,WAHF,eAAc,QAGX,QACC,kBAAA,CACA,aAAA,CACA,iBAAA,CACA,kBAAA,CACA,cAAA,CACA,QAAS,OAAT,CACA,aAAA,CACA,YAAa,cAZnB,WAeE,eACE,eAAA,CACA,WAIJ,KAAK,oBACH,OACE,oBAAA,CACA,iBAAA,CAEA,eAIJ,aACE,UACE,cAAA,CACA,eJnQF,IAAC,mBACC,iBAGF,IAAC,mBACC,eACE,eAFJ,IAAC,mBAIC,eACE,kBAON,aACE,WAAA,CACA,mCAEA,YAAC,QACC,eAAA,CACA,eAAA,CACA,WAAA,CACA,SAJF,YAAC,OAMC,QACE,oBAAA,CACA,UAAA,CACA,WAAA,CACA,WAAA,CACA,gBAAA,CACA,iBAAA,CACA,aAAA,CACA,yBAdJ,YAAC,OAMC,OAUE,OACE,oBAAA,CACA,UAAA,CACA,WAAA,CACA,qDApBN,YAAC,OAyBC,uBACE,oBAAA,CACA,gBA3BJ,YAAC,OAyBC,sBAIE,aACE,4BAAA,CACA,iBAAA,CACA,eAGA,YAnCL,OAyBC,sBAIE,YAMI,GAAK,GAAI,QAKT,QAAS,OAAT,CACA,YAAa,cA7CvB,YAoDE,kBACE,eAKJ,cAEE,QAAA,CACA,QAAA,CACA,MAAA,CACA,WAAA,CAEA,aAAA,CACA,YAAA,CACA,yBATF,aAcE,UAAU,GAAK,GACb,sBAAA,CACA,gBAAA,CACA,cAAA,CACA,aAAA,CACA,8BAEA,aAPF,UAAU,GAAK,EAOZ,OACC,wBAAA,CACA,8BAGF,aAZF,UAAU,GAAK,EAYZ,OACC,wBAAA,CACA,8BAGF,aAjBF,UAAU,GAAK,EAiBZ,UACC,8BAEA,aApBJ,UAAU,GAAK,EAiBZ,SAGE,OAEC,8BAIJ,aA1BF,UAAU,GAAK,EA0BZ,QACC,UAAA,CACA,wBAAA,CACA,8BAEA,aA/BJ,UAAU,GAAK,EA0BZ,OAKE,OACC,8BAGF,aAnCJ,UAAU,GAAK,EA0BZ,OASE,OACC,QAAS,OAAT,CACA,YAAa,sBAAb,CACA,iBAAA,CACA,OAAA,CACA,oBAAA,CACA,iBAAA,CACA,eAAA,CACA,WAAA,CACA,aAAA,CACA,cAAA,CACA,gBAAA,CACA,kBA7DR,aAkEE,UAAU,GAAG,YAAW,OACtB,oBAAA,CACA,UAAA,CACA,WAAA,CACA,eAAA,CACA,OAAA,CACA,iBAAA,CACA,WAAA,CACA,YAAa,aAAb,CACA,kBA3EJ,aA8EE,UAAU,GAAG,YAAW,OACtB,QAAS,QA/Eb,aAiFE,UAAU,GAAE,OAAQ,YAAW,OAC7B,QAAS,QAlFb,aAqFE,UAAU,GAAK,EAAI,EAAG,MACpB,UAAA,CACA,cAAA,CACA,iBAAA,CACA,iBAAA,CACA,gBAAA,CACA,eA3FJ,aA8FE,WAEE,SAAA,CACA,QAAA,CACA,wBAAA,CACA,iBAAA,CACA,oBAAA,CACA,4BAAA,CACA,gCAEA,aAVF,UAUI,GAAK,GACL,sBAAA,CACA,gBAAA,CACA,cAAA,CACA,aAAA,CACA,iBAAA,CACA,aAAA,CACA,8BAEA,aAnBJ,UAUI,GAAK,EASJ,QACC,oBAAA,CACA,iBAAA,CACA,gBAAA,CACA,QAAS,OAAT,CACA,YAAa,cAGf,aA3BJ,UAUI,GAAK,EAiBJ,OACC,UAAA,CACA,8BAGF,aAhCJ,UAUI,GAAK,EAsBJ,QACC,UAAA,CACA,wBAAA,CACA,8BAEA,aArCN,UAUI,GAAK,EAsBJ,OAKE,OACC,8BAGF,aAzCN,UAUI,GAAK,EAsBJ,OASE,OACC,QAAS,OAAT,CACA,YAAa,sBAAb,CAEA,iBAAA,CACA,QAAA,CACA,oBAAA,CACA,iBAAA,CACA,eAAA,CACA,WAAA,CACA,aAAA,CACA,cAAA,CACA,gBAAA,CACA,kBApJV,aA2JE,UAAU,GAAI,SAEZ,iBAAA,CACA,UAAA,CACA,qBAAA,CACA,gCAhKJ,aA2JE,UAAU,GAAI,QAOZ,EAAC,OACC,WAEA,aAVJ,UAAU,GAAI,QAOZ,EAAC,MAGE,OACC,UAAA,CACA,6BAEF,aAdJ,UAAU,GAAI,QAOZ,EAAC,MAOE,OACC,6BA1KR,aA2JE,UAAU,GAAI,QAmBZ,QACE,UAAA,CACA,cAAA,CACA,cAAA,CACA,UAAA,CACA,UAAA,CACA,WAAA,CAEA,iBAAA,CACA,qBAAA,CACA,iBAAA,CACA,iBAAA,CACA,gBA1LN,aA2JE,UAAU,GAAI,QAmBZ,OAcE,KACE,gBA7LR,aA2JE,UAAU,GAAI,QAsCZ,OACE,aAAA,CACA,eAAA,CACA,eApMN,aA2JE,UAAU,GAAI,QA2CZ,OACE,aAAA,CACA,cAAA,CACA,WAGF,aAjDF,UAAU,GAAI,QAiDX,QACC,wBAAA,CACA,WAFF,aAjDF,UAAU,GAAI,QAiDX,OAGC,OAHF,aAjDF,UAAU,GAAI,QAiDX,OAGQ,OACL,WAEF,aAvDJ,UAAU,GAAI,QAiDX,OAME,OACC,QAAS,OAAT,CACA,YAAa,sBAAb,CAEA,iBAAA,CACA,KAAA,CACA,UAAA,CACA,oBAAA,CACA,iBAAA,CACA,eAAA,CAEA,aAAA,CACA,cAAA,CACA,gBAAA,CACA,kBAhOR,aA2JE,UAAU,GAAI,QAyEZ,gBAGE,WAAA,CACA,eAMA,aAnFJ,UAAU,GAAI,QAyEZ,eAUI,GAAK,GAEL,mBAGA,aAxFN,UAAU,GAAI,QAyEZ,eAUI,GAAK,EAKJ,OACC,sBApPV,aA2JE,UAAU,GAAI,QAyEZ,eAoBE,UACE,aAzPR,aAqQE,QACE,gBAAA,CACA,gBAUJ,oBACE,aAeF,KACE,WAAA,CAC
A,sCAAA,CACA,qBAAA,CACA,YAAA,CACA,mBALF,IA2BE,WACE,cAAA,CACA,iBAEA,IAJF,UAII,GAAI,aACJ,iBAhCN,IAwCE,aAAa,WACX,qBAAA,CACA,YAAA,CACA,qBAAA,CACA,eAAA,CACA,6BAAA,CACA,+BAGF,IAAC,cACC,SAAA,CACA,yBAFF,IAAC,aAIC,WACE,iBAAA,CACA,oBAOA,IAbH,aAIC,UASI,IACA,gBAEF,IAhBH,aAIC,UAYI,GAAK,GACL,gBAAA,CACA,0BAAA,CACA,2BAAA,CACA,gBAAA,CACA,wBAAA,CAGA,qBAAA,CAEA,gCAEF,IA5BH,aAIC,UAwBI,GAAI,OAAQ,GACZ,gBAAA,CAGA,qBAAA,CAEA,+BAAA,CACA,sBAnCN,IAAC,aAuCC,aAAa,WACX,YAAA,CACA,YAiBN,aAEE,iBAFF,YAGE,EAAC,KACC,kBAyBJ,aACE,WAAA,CACA,gBAAA,CACA,mBAHF,YAKE,eACE,cAAA,CACA,iBAkCJ,WAAY,cACV,kBAGF,QACE,aAAA,CACA,WAAA,CACA,iBAAA,CACA,iBAAA,CACA,eAGF,UACE,gBAGF,SACE,gBAGF,aACE,iBAAA,YADF,YAGE,gBACE,mCAAA,CACA,UAAA,CACA,eAEA,YALF,eAKG,oBACC,mBAGF,YATF,eASI,GAAK,GACL,gBAAA,CACA,WAEA,YAbJ,eASI,GAAK,EAIJ,OAAQ,YAbb,eASI,GAAK,EAIK,QAAS,YAbvB,eASI,GAAK,EAIe,SAClB,yBAjBR,YAGE,eAkBE,UACE,YAAA,CACA,sBAMN,oBACE,OADF,oBACS,MACL,YAFJ,oBAME,gBACE,sBAPJ,oBAUE,kBACE,kBAXJ,oBAcE,UACE,SAAA,CACA,gBAhBJ,oBAcE,SAIE,kBACE,4BAGF,oBARF,SAQG,OAAQ,OAAQ,OACf,OAAA,CACA,iBAAA,CACA,QAAS,GAAT,CACA,kBAAA,CACA,yBAYN,iBACE,UAAA,CACA,WAAA,CACA,QAAA,CACA,UAJF,gBAME,wBACE,aAAA,CACA,QAAA,CACA,UAGF,gBAAC,sBACC,wBACE,WAIJ,gBAAC,qBACC,wBADsB,gBAAC,sBACvB,wBACE,WAGJ,gBAAC,uBACC,wBACE,cAzBN,gBA6BE,eACE,kBAAA,CACA,qBAAA,CACA,WAAA,CACA,gBAAA,CAGA,UAAA,CACA,kBAAA,CACA,SAAA,CACA,gBAvCJ,gBAyCE,eAzCF,gBAyCiB,gBACb,kBAAA,CACA,eAAA,CACA,YAEA,gBALF,cAKI,MAAF,gBALa,eAKX,MACA,cAAA,CACA,4BAAA,CAEA,WAAA,CACA,iBAAA,CACA,gBACA,gBAZJ,cAKI,KAOC,QAAD,gBAZW,eAKX,KAOC,QACC,gBAtDR,gBAyCE,cAkBE,gBA3DJ,gBAyCiB,eAkBb,gBACE,WAAA,CACA,gCAAA,CACA,UAAA,CACA,cAAA,CACA,oCAAA,CACA,YAMA,gBA9BJ,cAkBE,eAYI,GAAK,GAAP,gBA9BW,eAkBb,eAYI,GAAK,GAEL,kBAAA,CACA,eAAA,CACA,WAEA,gBApCN,cAkBE,eAYI,GAAK,EAMJ,OAAD,gBApCS,eAkBb,eAYI,GAAK,EAMJ,OAAQ,gBApCf,cAkBE,eAYI,GAAK,EAMK,QAAD,gBApCA,eAkBb,eAYI,GAAK,EAMK,QAAS,gBApCzB,cAkBE,eAYI,GAAK,EAMe,SAAD,gBApCV,eAkBb,eAYI,GAAK,EAMe,SAElB,uCA/EV,gBAyCE,cAkBE,eA2BE,UAtFN,gBAyCiB,eAkBb,eA2BE,UACE,YAAA,CACA,sBAxFR,gBA6FE,eACE,UAAA,CACA,kBAYA,gBAdF,cAcI,aACA,oBACE,cAAA,CACA,cAHJ,gBAdF,cAcI,aAKA,MAAK,cAEH,cAAA,CACA,WAAA,CACA,UAAA,CACA,cAOR,IAAI,eACF,cAAA,CACA,4BAAA,CAEA,WAAA,CACA,iBAAA,CACA,gBACA,IAPE,cAOD,QACC,gBAIJ,MAAO,MAAQ,GAAI,qBACjB,6BACA,MAFK,MAAQ,GAAI,oBAEf,IACA,wBAAA,CACA,gBAAA,CACA,sBAOJ,mBAAoB,mBAClB,aAAA,CACA,WAFF,kBAIE,oBAJkB,kBAIlB,oBACE,kBAAA,CACA,WAAA,CAEA,gBAAA,CACA,YAEA,kBAPF,mBAOG,yBAAD,kBAPF,mBAOG,yBACC,iBAZN,kBAIE,mBAWE,aAfgB,kBAIlB,mBAWE,aACE,cAAA,CAEA,iBAAA,CACA,OAAA,CACA,mBApBN,kBAIE,mBAoBE,aAxBgB,kBAIlB,mBAoBE,aACE,WACA,kBAtBJ,mBAoBE,YAEI,GAAK,MAAP,kBAtBJ,mBAoBE,YAEI,GAAK,MA1Bb,kBAIE,mBAoBE,YAEiB,GA1BD,kBAIlB,mBAoBE,YAEiB,GACb,eAAA,CACA,cAAA,CACA,gBAIJ,kBA7BF,mBA6BG,qBAAD,kBA7BF,mBA6BG,qBACC,UAAA,CACA,kBAFF,kBA7BF,mBA6BG,oBAGC,OAHF,kBA7BF,mBA6BG,oBAGC,OACE,iBAAA,CACA,QAIF,kBAtCJ,mBAqCG,cACG,WACA,MADF,kBAtCJ,mBAqCG,cACG,WACA,MACE,eAAA,CACA,eAIN,kBA7CF,mBA6CG,oBAAD,kBA7CF,mBA6CG,oBACC,iBADF,kBA7CF,mBA6CG,mBAGC,IAHF,kBA7CF,mBA6CG,mBAGC,IACE,eAAA,CACA,SAAA,CACA,QAAA,CACA,WAAA,CACA,4BAAA,CACA,WACA,kBAvDN,mBA6CG,mBAGC,GAOI,IAAF,kBAvDN,mBA6CG,mBAGC,GAOI,IACA,oBAAA,CACA,kBAZN,kBA7CF,mBA6CG,mBAgBC,MAhBF,kBA7CF,mBA6CG,mBAgBC,MACE,gBAjBJ,kBA7CF,mBA6CG,mBAgBC,KAEE,QAlBJ,kBA7CF,mBA6CG,mBAgBC,KAEE,QACE,gBAQV,mBACE,mBADF,kBAEE,oBACE,UAOJ,UAAU,WACR,KAAI,iBACF,4BAAA,CACA,sBAkHJ,QAAQ,mBACN,gBAGF,QAAQ,sBACN,YAGF,QAAQ,eACN,mDCx+B4D,wBD2+B9D,QAAQ,UACN,UAAA,CACA,WAAA,CACA,wBAAA,CACA,yBAMF,IAAK,GAAE,WACL,eAAA,CACA,iBAAA,CACA,UAHF,IAAK,GAAE,UAIL,IACE,wBAAA,CACA,iBACA,IAPC,GAAE,UAIL,GAGG,QACC,UAAA,CACA,iBAAA,CACA,kBAAA,CACA,cAAA,CACA,QAAS,OAAT,CACA,YAAa,cAbnB,IAAK,GAAE,UAIL,GAWE,IAC
E,aAAA,CACA,kBAMN,aACE,qBAEA,YAAC,aACC,kBAAA,CACA,cAGF,YAAC,QACC,oBAAA,CACA,iBAAA,CACA,OAAA,CACA,UAAA,CACA,QAAS,OAAT,CACA,YAAa,cAGf,YAAC,aACC,eAGF,YAAC,aACC,cACA,YAFD,YAEE,QACC,QAAS,OAAT,CACA,YAAa,cAKnB,CAAC,eACC,eAAA,CACA,cACA,CAHD,cAGE,OACC,cAAA,CACA,cK7iCJ,wBAKE,aAGF,gBAAiB,cAAe,aAE9B,0BAAA,YAGF,aACE,0BAAA,CACA,4BAGF,gBACE,6BAAA,CACA,+BAGF,eAAgB,cAAe,gBAU7B,SAAA,YACA,SAAA,YACA,OAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,aAAA,YACA,kBAGF,cAAc,QAAS,cAAe,eAAc,QAClD,QAAS,OAAT,YACA,uBAAA,YACA,aAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,UAAA,YACA,aAAA,YACA,iBAAA,YACA,iBAAA,YACA,OAAA,YACA,KAAA,YAcF,eACE,cAAA,YACA,gBAAA,YACA,kBAAA,YACA,eAAA,YACA,UAAA,YACA,gBAAA,YAQF,cAEE,UAAA,YACA,cAAA,YACA,oBAAA,YAGF,cACE,iBADF,cACmB,eADnB,cACkC,cAC9B,8BAAA,YAFJ,cAKE,gBACE,UAAA,YANJ,cASE,eACE,UAAA,YAVJ,cAaE,gBACE,SAAA,YACA,SAAA,YACA,OAAA,YACA,UAAA,YACA,WAAA,YACA,gBAAA,YACA,aAAA,YACA,iBAAA,CACA,kBAAA,YAIJ,gBACE,iBADF,gBACmB,eADnB,gBACkC,cAE9B,4BAAA,YAHJ,gBAME,gBAEE,UAAA,YARJ,gBAWE,eAEE,UAAA,YAbJ,gBAgBE,gBACE,kBAAA,YAKJ,wBACE,WAAA,CAEA","file":"style.css","sourceRoot":"..\\less"} \ No newline at end of file diff --git a/server/www/teleport/static/css/sub.css b/server/www/teleport/static/css/sub.css deleted file mode 100644 index 73393db..0000000 --- a/server/www/teleport/static/css/sub.css +++ /dev/null @@ -1 +0,0 @@ -@charset "utf-8";body{font-family:"Open Sans","Helvetica Neue","Microsoft YaHei","微软雅黑",Helvetica,Arial,sans-serif;font-size:13px;background-color:#e9e9e9;color:#333}html,body{height:100%}#page-container{min-width:1260px}a{text-decoration:none}a:link{text-decoration:none}a:hover{text-decoration:none}a:active{text-decoration:none}a:visited{text-decoration:none}select{outline:none}label{font-weight:normal}.clear-float{clear:both}.bigger{font-size:120%}.normal-text{font-size:13px;color:#333}.mono{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}hr.hr-sm{margin-top:5px;margin-bottom:5px}.btn-group-sm>.btn,.btn-sm{padding:2px 5px}.btn.btn-sm{padding:3px 8px}.btn.btn-icon{padding:3px 6px}.btn.btn-icon.btn-sm{padding:0;font-size:14px;height:24px;width:24px;line-height:24px;border-radius:0}.form-group-sm .input-group .input-group-btn>.btn{height:30px;padding:0 8px}.pop-menu-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040}.form-group{margin-bottom:5px}.badge{display:inline-block;min-width:8px;padding:5px 10px;border-radius:10px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.badge.badge-plain{text-shadow:none}.badge.badge-sm{font-size:11px;padding:3px 6px;margin-top:0;border-radius:8px;text-shadow:none}.badge.badge-sup{margin-left:-6px;margin-top:-16px}.badge.badge-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.badge.badge-info{background-color:#33b7d0}.badge.badge-primary{background-color:#348fe2}.badge.badge-success{background-color:#368142}.badge.badge-warning{background-color:#f57523}.badge.badge-danger{background-color:#d34242}.label{display:inline-block;min-width:8px;padding:5px 10px;border-radius:5px;text-align:center;white-space:nowrap;vertical-align:middle;font-size:13px;font-weight:400;line-height:1em;background-color:#888;color:#fff;text-shadow:1px 1px 0 #525252}.label.label-plain{text-shadow:none}.label.label-sm{font-size:11px;padding:3px 
8px;margin-top:0;border-radius:5px;text-shadow:none}.label.label-ignore{background-color:#e5e5e5;color:#999;text-shadow:none}.label.label-info{background-color:#33b7d0}.label.label-primary{background-color:#348fe2}.label.label-success{background-color:#368142}.label.label-warning{background-color:#f57523}.label.label-danger{background-color:#d34242}.progress.progress-sm{height:18px;margin-bottom:2px;background-color:#aaa}.progress.progress-sm.button{cursor:pointer}.progress.progress-sm .progress-bar{display:block;font-size:11px;float:none}.alert-sm{padding:5px;margin-bottom:10px}.modal-dialog-sm .modal-header{padding:10px}.modal-dialog-sm .modal-body{padding:10px}.modal-dialog-sm .modal-footer{padding:10px}.modal-dialog-sm .form-horizontal .form-group{margin-right:-5px;margin-left:-5px}.modal-dialog-sm .col-sm-1,.modal-dialog-sm .col-sm-2,.modal-dialog-sm .col-sm-3,.modal-dialog-sm .col-sm-4,.modal-dialog-sm .col-sm-5,.modal-dialog-sm .col-sm-6,.modal-dialog-sm .col-sm-7,.modal-dialog-sm .col-sm-8,.modal-dialog-sm .col-sm-9,.modal-dialog-sm .col-sm-10,.modal-dialog-sm .col-sm-11{padding-right:5px;padding-left:5px}.btn-single-line{white-space:nowrap}.btn-single-line .btn:first-child{border-top-left-radius:3px;border-bottom-left-radius:3px}.btn-single-line .btn:last-child{border-top-right-radius:3px;border-bottom-right-radius:3px}.remote-action-group{margin-bottom:3px;height:28px;min-width:390px}.remote-action-group ul{display:block;height:28px;margin:0;padding:0}.remote-action-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.remote-action-group ul li.remote-action-btn{background:none;padding:0;border:none}.remote-action-group ul li.remote-action-input{background:none;padding:4px 0}.remote-action-group ul li.remote-action-input select{border:none}.remote-action-group ul li.remote-action-chk-protocol{width:86px}.remote-action-group ul li.remote-action-username,.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol{width:96px;text-align:center;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}.remote-action-group ul li.remote-action-username{font-size:90%;color:#999}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-protocol,.remote-action-group ul li.remote-action-chk-protocol{color:#000}.remote-action-group ul li.remote-action-name,.remote-action-group ul li.remote-action-chk-protocol{font-weight:bold}.remote-action-group ul li.remote-action-password,.remote-action-group ul li.remote-action-sshkey,.remote-action-group ul li.remote-action-noauth{text-align:center;padding:4px 8px;width:45px}.remote-action-group ul li.remote-action-password{background-color:#e3ffe3;color:#999}.remote-action-group ul li.remote-action-sshkey{background-color:#fbe9c8;color:#666}.remote-action-group ul li.remote-action-noauth{background-color:#e0e0e0;color:#666}.remote-action-group ul li .btn{line-height:1.5;margin:0;padding:4px 8px;font-size:12px;border-radius:0}.remote-action-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.remote-action-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.remote-action-group ul li select{margin-top:-3px}.remote-action-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:first-child 
.btn{border-top-left-radius:4px;border-bottom-left-radius:4px}.remote-action-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.remote-action-group ul li:last-child .btn{border-top-right-radius:4px;border-bottom-right-radius:4px}hr.small{margin:5px 0}.dlg-protocol-group{margin-bottom:3px}.dlg-protocol-group ul{display:block;height:28px;margin:0;padding:0}.dlg-protocol-group ul li{float:left;position:relative;display:block;height:28px;padding:4px 5px;background-color:#eee;border-top:1px solid #ccc;border-right:1px solid #ccc;border-bottom:1px solid #ccc}.dlg-protocol-group ul li.item-name{width:120px}.dlg-protocol-group ul li.item-btn{background:none;padding:0;border:none}.dlg-protocol-group ul li.item-input{background:none;border:none;padding:0}.dlg-protocol-group ul li .form-control{line-height:1.5;margin:0;padding:4px 5px;font-size:12px;height:28px;border-radius:0;border-left:none;width:100px}.dlg-protocol-group ul li label{padding:0;display:block;float:left;margin-top:1px;cursor:pointer}.dlg-protocol-group ul li input[type=checkbox]{display:block;float:left;margin:3px 5px 0 0}.dlg-protocol-group ul li:first-child{border-left:1px solid #ccc;border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:first-child .btn,.dlg-protocol-group ul li:first-child .form-control{border-top-left-radius:4px;border-bottom-left-radius:4px}.dlg-protocol-group ul li:last-child{border-top-right-radius:4px;border-bottom-right-radius:4px}.dlg-protocol-group ul li:last-child .btn,.dlg-protocol-group ul li:last-child .form-control{border-top-right-radius:4px;border-bottom-right-radius:4px}.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{padding-right:5px;padding-left:5px}.form-group-sm .form-control-static{padding:6px 0}.input-group :-moz-placeholder{color:#d2d2d2}.input-group ::-moz-placeholder{color:#d2d2d2}.input-group input:-ms-input-placeholder,.input-group textarea:-ms-input-placeholder{color:#d2d2d2}.input-group input::-webkit-input-placeholder,.input-group textarea::-webkit-input-placeholder{color:#d2d2d2}.table{margin-bottom:10px}.table>thead>tr>th{padding:5px 5px;outline:none;white-space:nowrap;font-weight:normal;text-align:center;background-color:#ededed}.table>tbody>tr>td{padding:5px;text-align:center;vertical-align:middle}.table>tbody>tr>td .nowrap{white-space:nowrap}.table.table-data thead .sorting,.table.table-data thead .sorting_asc,.table.table-data thead .sorting_desc{cursor:pointer;position:relative}.table.table-data thead .sorting>span:after,.table.table-data thead .sorting_asc>span:after,.table.table-data thead .sorting_desc>span:after{bottom:4px;padding-left:5px;display:inline-block;font-family:'FontAwesome';opacity:.8}.table.table-data thead .sorting>span:after{opacity:.2;content:"\f0dc"}.table.table-data thead .sorting_asc>span:after{content:"\f0de"}.table.table-data thead .sorting_desc>span:after{content:"\f0dd"}.host-id{display:block;font-size:16px;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier 
New',monospace;color:#333}.host-id.not-active{font-size:14px;font-weight:400;color:#999}.host-desc{font-size:12px;color:#999;display:inline-block;white-space:nowrap;width:160px;overflow:hidden;text-overflow:ellipsis}a.host-desc:hover:before{display:inline-block;padding-right:3px;line-height:12px;content:"\f040";font-family:'FontAwesome'}.td-ip-list{padding-right:20px;padding-left:5px}.td-ip-show-more{font-size:14px;width:12px;float:right;display:block}.td-ip-item{min-width:12em;width:12em;height:18px;padding:2px 4px;margin:1px 0;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.td-ip-item span{display:inline-block;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.td-ip-item a{display:inline-block;width:14px;float:right;font-size:14px}.page-header-fixed{padding-top:48px}.header{border:none;box-shadow:0 0 3px rgba(0,0,0,0.5);min-height:48px;height:48px;top:0;width:100%;position:fixed;z-index:999}.header .top-navbar{min-height:48px;height:48px;line-height:48px;background-color:#3a3a3a;color:#ccc}.header .top-navbar a{color:#d5d5d5}.header .top-navbar a:hover{color:#5a8fee}.header .top-navbar .brand{float:left;display:inline-block;padding:12px 0;margin:0}.header .top-navbar .brand .site-logo{display:block;width:86px;height:24px;background:url(../img/site-logo-small.png) no-repeat}.header .top-navbar .title-container{float:left;display:inline-block;margin:0;padding:0;margin-left:20px}.header .top-navbar .title-container .title{font-size:16px}.header .top-navbar .breadcrumb-container{float:left;display:inline-block;margin:0;padding:0}.header .top-navbar .breadcrumb-container .breadcrumb{background-color:#3a3a3a;height:48px;margin:0;border-radius:0;border:none;padding:0 0 0 20px;font-size:16px;color:#ccc}.header .top-navbar .breadcrumb-container .breadcrumb>li+li:before{font-size:18px;padding:0 5px;color:#555;content:'|'}.header .top-navbar .breadcrumb-container .breadcrumb .title{font-size:18px}.header .top-navbar .breadcrumb-container .breadcrumb .sub-title{font-size:14px;color:#b3b3b3}.header .top-navbar .status-container{float:right}.page-content{margin-top:10px;margin-bottom:44px}.footer{width:100%;height:24px;line-height:24px;background-color:#d5d5d5;border-top:1px solid #a2a2a2;border-bottom:1px solid #efefef;z-index:998;text-align:center;font-size:12px}.footer.footer-fixed-bottom{bottom:0;position:fixed}.row-sm .col-sm-1,.row-sm .col-sm-2,.row-sm .col-sm-3,.row-sm .col-sm-4,.row-sm .col-sm-5,.row-sm .col-sm-6,.row-sm .col-sm-7,.row-sm .col-sm-8,.row-sm .col-sm-9,.row-sm .col-sm-10,.row-sm .col-sm-11{padding-right:5px;padding-left:5px}.content{margin-top:15px;margin-bottom:20px;background-color:#fff;border-radius:5px;padding:10px}.content:last-child{margin-bottom:54px}.table-host{width:100%;border-top:10px solid #b3cfe7;border-bottom:1px solid #b3cfe7}.table-host .cell-host-id{border-left:1px solid #e7e7e7;padding:5px;text-align:center;width:168px;vertical-align:middle}.table-host .cell-host-id .host-id{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:13px;color:#999;display:inline-block}.table-host .cell-host-id .host-name{display:block;width:168px;text-align:center;overflow:hidden;white-space:nowrap;text-overflow:ellipsis;font-size:16px;margin:auto;margin-bottom:10px}.table-host .cell-host-id .td-ip-item{width:10em;height:18px;padding:2px 4px;margin:1px 
auto;color:#333;text-align:center;white-space:nowrap;border-radius:9px;line-height:11px;font-size:11px;background:#dfdfdf !important}.table-host .cell-host-id .td-ip-item span{display:inline-block;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace;font-size:11px;font-weight:400}.table-host .cell-host-id .actions{margin-top:20px}.table-host .cell-host-id .actions a{margin-left:5px;margin-right:5px}.table-host .cell-host-id .actions a:first-child{margin-left:0}.table-host .cell-host-id .actions a:last-child{margin-right:0}.table-host .cell-detail{border-left:1px solid #e7e7e7;border-right:1px solid #e7e7e7;vertical-align:top}.table-host .cell-detail tr{border-top:1px solid #e7e7e7}.table-host .cell-detail tr:last-child{border-bottom:1px solid #e7e7e7}.table-host .cell-detail .row-host-info{background-color:#ececed}.table-host .cell-detail.host-offline{background-color:#ffcecc;text-align:center;vertical-align:middle}.table-host .cell-detail.host-offline .host-offline-msg{color:#802506;font-size:24px}.table-host .cell-log td{border:1px solid #e7e7e7}.table-host .cell-log td .host-log{font-size:12px;outline:none;width:100%;height:120px;overflow-y:auto;resize:none;border:none;padding:5px}.table-host .cell-log td .host-log div{margin-bottom:3px}.table-host .cell-log td .host-log div .datetime{font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}.log-box{margin-top:15px}.log-box .log-list{margin-top:5px;border:1px solid #e7e7e7;font-size:12px;outline:none;width:100%;max-height:480px;overflow-y:auto;resize:none;padding:5px}.log-box .log-list div{margin-bottom:3px}.log-box .log-list div:hover{background-color:#f3f3f3}.log-box .log-list div .log-dt{padding:0 3px;padding-top:2px;padding-bottom:1px;margin-right:3px;background-color:#f57523;color:#fff;font-family:Consolas,Lucida Console,Monaco,Courier,'Courier New',monospace}.log-box .log-list div .log-hid{padding:0 3px;margin-right:3px;background-color:#348fe2;color:#fff}.log-box .log-list div .log-hname{padding:0 3px;margin-right:3px;background-color:#348fe2;color:#fff}.page-nav{height:30px;line-height:30px}.page-nav .breadcrumb{padding:0;margin:0;border-radius:0;background-color:transparent}.page-nav .pagination{margin:0 0}.page-nav .btn{margin-top:-3px}.mp{display:inline-block;width:20%;max-width:20%}.mp .mp-inner{background-color:#e5e5e5;margin:3px;border-radius:4px}.mp .mp-name{color:#999;padding:9px;margin-bottom:3px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;text-align:center}.mp .mp-name.with-target{padding-top:17px;padding-bottom:1px}.mp .mp-target{display:inline-block;float:left;position:absolute;font-size:11px;padding:0 5px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;border-top-left-radius:4px;border-bottom-right-radius:4px;color:rgba(255,255,255,0.85);background-color:rgba(0,0,0,0.1)}.mp.mp-disabled .mp-inner{background-color:#e5e5e5}.mp.mp-disabled .mp-name{color:#999}.mp.mp-success .mp-inner{background-color:#368142}.mp.mp-success .mp-name{color:#fff}.mp.mp-danger .mp-inner{background-color:#d34242}.mp.mp-danger .mp-name{color:#fff}.mp.mp-warning .mp-inner{background-color:#f57523}.mp.mp-warning .mp-name{color:#fff}.host-offline{background-color:#ffcecc;height:36px;line-height:36px;padding:0 10px;color:#802506;font-size:20px;cursor:pointer}.host-offline .tips{display:none;font-size:12px}.host-offline:hover 
.tips{display:inline-block}.host-no-strategy{color:#999;font-size:16px}#gritter-notice-wrapper{z-index:9999}.gritter-bottom,.gritter-item,.gritter-top{background:rgba(0,0,0,0.8) !important}.gritter-top{border-top-left-radius:3px;border-top-right-radius:3px}.gritter-bottom{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.gritter-close,.gritter-light .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%}.gritter-close:before,.gritter-light .gritter-close:before{content:'\f00d' !important;font-family:FontAwesome !important;font-size:9px !important;width:16px !important;height:16px !important;line-height:16px !important;color:#fff !important;text-indent:0 !important;position:absolute !important;text-align:center !important;right:0 !important;top:0 !important}.gritter-title{font-size:13px !important;line-height:16px !important;padding-bottom:5px !important;font-weight:400 !important;color:#fff !important;text-shadow:none !important}.gritter-item{color:#aaa !important;font-size:13px !important;padding:2px 15px 5px !important}.gritter-error .gritter-bottom,.gritter-error .gritter-item,.gritter-error .gritter-top{background:rgba(123,32,32,0.9) !important}.gritter-error .gritter-title{color:#fff !important}.gritter-error .gritter-item{color:#ddd !important}.gritter-error .gritter-close{left:auto !important;right:5px !important;top:5px !important;width:16px !important;height:16px !important;line-height:16px !important;display:block !important;border-radius:50%;background:#e33b3b !important}.gritter-success .gritter-bottom,.gritter-success .gritter-item,.gritter-success .gritter-top{background:rgba(1,65,16,0.9) !important}.gritter-success .gritter-title{color:#ddd !important}.gritter-success .gritter-item{color:#ccc !important}.gritter-success .gritter-close{background:#0eb320 !important}.icon{display:inline-block}.icon16{width:16px;height:16px;line-height:16px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/mimetype-16.png") !important}.icon16.icon-disk{background-position:0 0 !important}.icon16.icon-folder{background-position:-16px 0 !important}.icon16.icon-file{background-position:0 -16px !important}.icon16.icon-txt{background-position:-16px -16px !important}.icon16.icon-help{background-position:-32px -16px !important}.icon16.icon-sys{background-position:-48px -16px !important}.icon16.icon-exe{background-position:-64px -16px !important}.icon16.icon-office{background-position:0 -32px !important}.icon16.icon-word{background-position:-16px -32px !important}.icon16.icon-excel{background-position:-32px -32px !important}.icon16.icon-ppt{background-position:-48px -32px !important}.icon16.icon-access{background-position:-64px -32px !important}.icon16.icon-visio{background-position:-80px -32px !important}.icon16.icon-audio{background-position:0 -48px !important}.icon16.icon-video{background-position:-16px -48px !important}.icon16.icon-pic{background-position:-32px -48px !important}.icon16.icon-pdf{background-position:-48px -48px !important}.icon16.icon-font{background-position:-64px -48px !important}.icon16.icon-script{background-position:0 -64px !important}.icon16.icon-html{background-position:-16px -64px !important}.icon16.icon-py{background-position:-32px -64px !important}.icon16.icon-h{background-position:-48px -64px !important}.icon16.icon-c{background-position:-64px -64px 
!important}.icon16.icon-cpp{background-position:-80px -64px !important}.icon16.icon-cs{background-position:-96px -64px !important}.icon16.icon-php{background-position:-112px -64px !important}.icon16.icon-ruby{background-position:-128px -64px !important}.icon16.icon-java{background-position:-144px -64px !important}.icon16.icon-vs{background-position:-160px -64px !important}.icon16.icon-js{background-position:-176px -64px !important}.icon16.icon-archive{background-position:0 -80px !important}.icon16.icon-rar{background-position:-16px -80px !important}.icon16.icon-zip{background-position:-32px -80px !important}.icon16.icon-7z{background-position:-48px -80px !important}.icon16.icon-tar{background-position:-64px -80px !important}.icon16.icon-gz{background-position:-80px -80px !important}.icon16.icon-jar{background-position:-96px -80px !important}.icon16.icon-bz2{background-position:-112px -80px !important}.icon24{width:24px;height:24px;line-height:24px;margin-right:3px;margin-top:0 !important;margin-bottom:-2px !important;vertical-align:top;background-image:url("img/desktop/icons-tree-24x24.png") !important}.icon24.icon-disk{background-position:0 0 !important}.icon24.icon-folder{background-position:-24px 0 !important}.icon24.icon-folder-open{background-position:-48px 0 !important}.os-icon-windows:after{color:#00bcf6;content:"\f17a";font-size:18px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-linux:after{color:#fff;content:"\f17c";font-size:18px;width:24px;height:24px;line-height:24px;background-color:#333;border-radius:50%;display:inline-block;font-family:'FontAwesome'}.os-icon-macos:after{color:#a7a7a7;content:"\f179";font-size:20px;width:24px;height:24px;line-height:24px;display:inline-block;font-family:'FontAwesome'}.os-icon-ubuntu:after,.os-icon-debian:after,.os-icon-centos:after,.os-icon-redhat:after{content:" ";width:24px;height:24px;line-height:24px;display:inline-block}.os-icon-ubuntu:after{background:url(../img/os-icon/ubuntu-24x24.png) no-repeat}.os-icon-debian:after{background:url(../img/os-icon/debian-24x24.png) no-repeat}.os-icon-centos:after{background:url(../img/os-icon/centos-24x24.png) no-repeat}.os-icon-redhat:after{background:url(../img/os-icon/redhat-24x24.png) no-repeat} \ No newline at end of file diff --git a/server/www/teleport/static/download/example.csv b/server/www/teleport/static/download/example.csv deleted file mode 100644 index 927099f..0000000 --- a/server/www/teleport/static/download/example.csv +++ /dev/null @@ -1,4 +0,0 @@ -ID, ϵͳ, IPַ, ˿, Э, ״̬, , ϵͳû, ϵͳ, Ƿ,Ӳ, ԿID, ֤ -0,1,115.28.12.207,3389,1,0,115.28.12.207,administrator,123456,0,,0,1 -0,2,120.26.109.25,22,2,0,120.26.109.25,root,123456,0,,0,1 -0,2,120.26.109.25,22,2,0,120.26.109.25,root,,0,,1,2 diff --git a/server/www/teleport/static/download/teleport-example-asset.csv b/server/www/teleport/static/download/teleport-example-asset.csv new file mode 100644 index 0000000..42c0bb1 --- /dev/null +++ b/server/www/teleport/static/download/teleport-example-asset.csv @@ -0,0 +1,5 @@ +#用户账号,用户姓名,登录认证方式,EMail,Mobile,QQ,微信,所属组,描述 +zhangsan,张三,1,zhangsan@domain.tld,,12345678,112233,运维人员|审计员, +lisi,李四,1,lisi@domain.tld,,,,审计员, +wangwu,王五,1,wangwu@domain.tld,,,,, +bai.lee,李白,1,bai.lee@domain.tld,,10086,bai.lee,审计管理员,著名诗人、剑客 diff --git a/server/www/teleport/static/download/teleport-example-user.csv b/server/www/teleport/static/download/teleport-example-user.csv new file mode 100644 index 0000000..76f8c31 --- /dev/null +++ 
b/server/www/teleport/static/download/teleport-example-user.csv @@ -0,0 +1,6 @@ +#用户账号示例文件,使用CSV格式,每行一个用户,用英文逗号分隔,共9个字段,需要8个英文逗号,,,,,,,, +#用户账号,用户姓名,登录认证方式,EMail,Mobile,QQ,微信,所属组,描述 +zhangsan,张三,1,zhangsan@domain.tld,,12345678,112233,运维人员|审计员, +lisi,李四,1,lisi@domain.tld,,,,审计员, +wangwu,王五,1,wangwu@domain.tld,,,,, +bai.lee,李白,1,bai.lee@domain.tld,,10086,bai.lee,审计管理员,著名诗人、剑客 diff --git a/server/www/teleport/static/img/css/disable-bg.png b/server/www/teleport/static/img/css/disable-bg.png new file mode 100644 index 0000000..c6b7a98 Binary files /dev/null and b/server/www/teleport/static/img/css/disable-bg.png differ diff --git a/server/www/teleport/static/img/login/input_right_clean.png b/server/www/teleport/static/img/login/input_right_clean.png deleted file mode 100644 index 2d48a32..0000000 Binary files a/server/www/teleport/static/img/login/input_right_clean.png and /dev/null differ diff --git a/server/www/teleport/static/img/login/login-bg-0.png b/server/www/teleport/static/img/login/login-bg-0.png new file mode 100644 index 0000000..e406241 Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-0.png differ diff --git a/server/www/teleport/static/img/login/login-bg-1.png b/server/www/teleport/static/img/login/login-bg-1.png new file mode 100644 index 0000000..ae86f5e Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-1.png differ diff --git a/server/www/teleport/static/img/login/login-bg-2.png b/server/www/teleport/static/img/login/login-bg-2.png new file mode 100644 index 0000000..2a0ce7a Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-2.png differ diff --git a/server/www/teleport/static/img/login/login-bg-3.png b/server/www/teleport/static/img/login/login-bg-3.png new file mode 100644 index 0000000..51558b1 Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-3.png differ diff --git a/server/www/teleport/static/img/login/login-bg-4.png b/server/www/teleport/static/img/login/login-bg-4.png new file mode 100644 index 0000000..4c9f133 Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-4.png differ diff --git a/server/www/teleport/static/img/login/login-bg-5.png b/server/www/teleport/static/img/login/login-bg-5.png new file mode 100644 index 0000000..8ab412a Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-5.png differ diff --git a/server/www/teleport/static/img/login/login-bg-6.png b/server/www/teleport/static/img/login/login-bg-6.png new file mode 100644 index 0000000..a1faea5 Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-6.png differ diff --git a/server/www/teleport/static/img/login/login-bg-7.png b/server/www/teleport/static/img/login/login-bg-7.png new file mode 100644 index 0000000..47dd588 Binary files /dev/null and b/server/www/teleport/static/img/login/login-bg-7.png differ diff --git a/server/www/teleport/static/img/login/side-001.jpg b/server/www/teleport/static/img/login/side-001.jpg deleted file mode 100644 index 91a438e..0000000 Binary files a/server/www/teleport/static/img/login/side-001.jpg and /dev/null differ diff --git a/server/www/teleport/static/js/asset/account-group-info.js b/server/www/teleport/static/js/asset/account-group-info.js new file mode 100644 index 0000000..ca98cb1 --- /dev/null +++ b/server/www/teleport/static/js/asset/account-group-info.js @@ -0,0 +1,553 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_members: $('#btn-refresh-members'), + 
btn_add_members: $('#btn-add-members'), + chkbox_members_select_all: $('#table-members-select-all'), + btn_remove_members: $('#btn-remove-members'), + + chkbox_acc_select_all: $('#table-acc-select-all') + }; + + if ($app.options.group_id !== 0) { + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + } + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 成员列表表格 + //------------------------------- + var table_members_options = { + dom_id: 'table-members', + data_source: { + type: 'ajax-post', + url: '/asset/get-accounts', + restrict: {'group_id': $app.options.group_id} // 限定仅包含指定的成员 + // exclude: {'user_id':[6]} // 排除指定成员 + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "账号", + key: "username", + sort: true, + // width: 240, + header_render: 'filter_search', + render: 'acc_info', + fields: {id: 'id', username: 'username', host_ip: 'host_ip', router_ip: 'router_ip', router_port: 'router_port'} + }, + { + title: "远程连接协议", + key: "protocol_type", + width: 120, + align: 'center', + sort: true, + render: 'protocol', + fields: {protocol_type: 'protocol_type'} + }, + { + title: "认证方式", + key: "auth_type", + width: 80, + align: 'center', + render: 'auth_type', + fields: {auth_type: 'auth_type'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 120, + align: 'center', + render: 'acc_state', + fields: {state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_members_header_created, + on_render_created: $app.on_table_members_render_created, + on_cell_created: $app.on_table_members_cell_created + }; + + $app.table_members = $tp.create_table(table_members_options); + cb_stack + .add($app.table_members.load_data) + .add($app.table_members.init); + + //------------------------------- + // 成员列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_members, { + name: 'search', + place_holder: '搜索:账号/主机IP/等等...' 
+ }); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_members, 'table-members-paging', + { + per_page: Cookies.get($app.page_id('acc_group_info') + '_member_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('acc_group_info') + '_member_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_members, 'table-members-pagination'); + + //------------------------------- + // 选择成员表格 + //------------------------------- + var table_acc_options = { + dom_id: 'table-acc', + data_source: { + type: 'ajax-post', + url: '/asset/get-accounts', + exclude: {'group_id': $app.options.group_id} // 排除指定成员 + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "账号", + key: "username", + sort: true, + header_render: 'filter_search', + render: 'acc_info', + fields: {id: 'id', username: 'username', host_ip: 'host_ip', router_ip: 'router_ip', router_port: 'router_port'} + }, + { + title: "远程连接协议", + key: "protocol_type", + sort: true, + width: 120, + align: 'center', + render: 'protocol', + fields: {protocol_type: 'protocol_type'} + }, + { + title: "认证方式", + key: "auth_type", + width: 80, + align: 'center', + render: 'auth_type', + fields: {auth_type: 'auth_type'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 80, + align: 'center', + render: 'acc_state', + fields: {state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_acc_header_created, + on_render_created: $app.on_table_acc_render_created, + on_cell_created: $app.on_table_acc_cell_created + }; + + $app.table_acc = $tp.create_table(table_acc_options); + cb_stack + .add($app.table_acc.load_data) + .add($app.table_acc.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_acc, { + name: 'search', + place_holder: '搜索:账号/主机IP/等等...' 
+ }); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_acc, 'table-acc-paging', + { + per_page: Cookies.get($app.page_id('acc_group_info') + '_sel_member_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('acc_group_info') + '_sel_member_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_acc, 'table-acc-pagination'); + + //------------------------------- + // 对话框 + //------------------------------- + $app.dlg_select_members = $app.create_dlg_select_members(); + cb_stack.add($app.dlg_select_members.init); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_add_members.click(function () { + $app.dlg_select_members.show(); + }); + $app.dom.btn_refresh_members.click(function () { + $app.table_members.load_data(); + }); + $app.dom.chkbox_members_select_all.click(function () { + var _objects = $('#' + $app.table_members.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + + $app.dom.btn_remove_members.click(function () { + $app.on_btn_remove_members_click(); + }); + + $app.dom.chkbox_acc_select_all.click(function () { + var _objects = $('#' + $app.table_acc.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + + cb_stack.exec(); +}; + +$app.on_table_members_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_members_all_selected(); + }); + } +}; + +$app.check_members_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_members.dom_id + ' tbody').find('[data-check-box]'); + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + + if (_all_checked) { + $app.dom.chkbox_members_select_all.prop('checked', true); + } else { + $app.dom.chkbox_members_select_all.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app._add_common_render = function (render) { + render.filter_search = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.acc_info = function (row_id, fields) { + var ret = []; + + ret.push('' + fields.username + '@' + fields.host_ip + ''); + if (fields.router_ip.length > 0) + ret.push(''); + + return ret.join(''); + }; + + render.protocol = function (row_id, fields) { + switch (fields.protocol_type) { + case TP_PROTOCOL_TYPE_RDP: + return ' RDP'; + case TP_PROTOCOL_TYPE_SSH: + return ' SSH'; + case TP_PROTOCOL_TYPE_TELNET: + return ' TELNET'; + default: + return ' 未设置'; + } + }; + + render.auth_type = function (row_id, fields) { + switch (fields.auth_type) { + case TP_AUTH_TYPE_NONE: + return ''; + case TP_AUTH_TYPE_PASSWORD: + return '密码'; + case TP_AUTH_TYPE_PRIVATE_KEY: + return '私钥'; + default: + return '未设置'; + } + }; + + render.acc_state = function (row_id, fields) { + var _style, _state; + + for (var i = 0; i < $app.obj_states.length; ++i) { + if ($app.obj_states[i].id === fields.state) { + _style = $app.obj_states[i].style; + _state = $app.obj_states[i].name; + break; + } + } + if (i === $app.obj_states.length) { + _style = 'info'; + _state = ' 未知'; + } + + return '' + _state + '' + }; +}; + +$app.on_table_members_render_created = function (render) { + $app._add_common_render(render); +}; + +$app.on_table_members_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.on_table_acc_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_users_all_selected(); + }); + } +}; + +$app.check_users_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_acc.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + $app.dom.chkbox_acc_select_all.prop('checked', true); + } else { + $app.dom.chkbox_acc_select_all.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app.on_table_acc_render_created = function (render) { + // + // render.filter_search_account = function (header, title, col) { + // var _ret = ['
']; + // _ret.push('
'); + // _ret.push('
' + title + '
'); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('search_account'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
'); + // + // return _ret.join(''); + // }; + // + // render.make_check_box = function (row_id, fields) { + // return ''; + // }; + + $app._add_common_render(render); +}; + +$app.on_table_acc_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); + // header._table_ctrl.get_filter_ctrl('role').on_created(); + // header._table_ctrl.get_filter_ctrl('user_state').on_created(); +}; + +$app.get_selected_members = function (tbl) { + var members = []; + var _objs = $('#' + $app.table_members.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + members.push(_row_data); + } + }); + + return members; +}; + +$app.on_btn_remove_members_click = function () { + var members = $app.get_selected_members($app.table_members); + if (members.length === 0) { + $tp.notify_error('请选择要移除的成员账号!'); + return; + } + + var member_list = []; + $.each(members, function (i, m) { + member_list.push(m.id); + }); + + var _fn_sure = function (cb_stack, cb_args) { + $tp.ajax_post_json('/group/remove-members', {gtype: TP_GROUP_ACCOUNT, gid: $app.options.group_id, members: member_list}, + function (ret) { + if (ret.code === TPE_OK) { + cb_stack + .add($app.check_members_all_selected) + .add($app.table_members.load_data); + $tp.notify_success('移除成员账号操作成功!'); + } else { + $tp.notify_error('移除成员账号操作失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,移除成员账号操作失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + $tp.dlg_confirm(cb_stack, { + msg: '
移除用户组内成员不会删除用户账号!

您确定要移除所有选定的 ' + member_list.length + '个 成员用户吗?

', + fn_yes: _fn_sure + }); + +}; + +$app.create_dlg_select_members = function () { + var dlg = {}; + dlg.dom_id = 'dlg-select-members'; + dlg.field_id = -1; // 用户id + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_add: $('#btn-add-to-group') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + cb_stack.exec(); + }; + + dlg.show = function () { + // dlg.init_fields(); + // $app.table_acc.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_acc.get_row(_obj); + items.push(_row_data.id); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + console.log('items:', items); + + // 如果id为-1表示创建,否则表示更新 + $tp.ajax_post_json('/group/add-members', { + gtype: TP_GROUP_ACCOUNT, + gid: $app.options.group_id, + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('账户成员添加成功!'); + $app.table_members.load_data(); + $app.table_acc.load_data(); + } else { + $tp.notify_error('账户成员添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,账户成员添加失败!'); + } + ); + + }; + + return dlg; +}; diff --git a/server/www/teleport/static/js/asset/account-group-list.js b/server/www/teleport/static/js/asset/account-group-list.js new file mode 100644 index 0000000..37b201b --- /dev/null +++ b/server/www/teleport/static/js/asset/account-group-list.js @@ -0,0 +1,442 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_groups: $('#btn-refresh-groups'), + btn_create_group: $('#btn-create-group'), + chkbox_groups_select_all: $('#table-groups-select-all'), + + // btn_edit_user: $('#btn-edit-user'), + // btn_lock_user: $('#btn-lock-user'), + // btn_unlock_user: $('#btn-unlock-user'), + btn_remove_group: $('#btn-remove-group'), + + chkbox_user_list_select_all: $('#table-user-list-select-all') + }; + + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 用户组列表表格 + //------------------------------- + var table_groups_options = { + dom_id: 'table-groups', + data_source: { + type: 'ajax-post', + url: '/asset/get-account-groups-with-member' + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "账号组", + key: "name", + sort: true, + width: 240, + header_render: 'filter_group_search', + render: 'group_info', + fields: {id: 'id', name: 'name', desc: 'desc'} + }, + { + title: "成员数", + key: "member_count", + width: 20, + align: 'center', + // sort: true, + // header_render: 'filter_role', + render: 'member_count', + fields: {member_count: 'member_count'} + }, + { + title: "成员账号", + key: "members", + // width: 200, + // sort: true, + // header_render: 'filter_role', + render: 'members', + fields: {id: 'id', member_count: 'member_count', members: 'members'} + }, + { + title: '操作', + key: 'actions', + width: 120, + align: 'center', + render: 'make_action_btn', + fields: {id: 'id'} + } + // { + // title: 
"状态", + // key: "state", + // sort: true, + // width: 120, + // align: 'center', + // header_render: 'filter_user_state', + // render: 'user_state', + // fields: {state: 'state'} + // } + ], + + // 重载回调函数 + on_header_created: $app.on_table_groups_header_created, + on_render_created: $app.on_table_groups_render_created, + on_cell_created: $app.on_table_groups_cell_created + }; + + $app.table_groups = $tp.create_table(table_groups_options); + cb_stack + .add($app.table_groups.load_data) + .add($app.table_groups.init); + + //------------------------------- + // 用户组列表相关过滤器 + //------------------------------- + $app.table_groups_filter_search_user = $tp.create_table_header_filter_search($app.table_groups, { + name: 'search_group', + place_holder: '搜索:账号组名称/描述' + }); + // $app.table_groups_role_filter = $tp.create_table_filter_role($app.table_groups, $app.role_list); + // $app.table_groups_user_state_filter = $tp.create_table_filter_user_state($app.table_groups, $app.user_states); + // 从cookie中读取用户分页限制的选择 + var _per_page = Cookies.get($app.page_id('acc_group_list') + '_per_page'); + $app.table_groups_paging = $tp.create_table_paging($app.table_groups, 'table-groups-paging', + { + per_page: _per_page, + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('acc_group_list') + '_per_page', per_page, {expires: 365}); + } + }); + $app.table_groups_pagination = $tp.create_table_pagination($app.table_groups, 'table-groups-pagination'); + + + //------------------------------- + // 对话框 + //------------------------------- + $app.dlg_edit_group = $app.create_dlg_edit_group(); + cb_stack.add($app.dlg_edit_group.init); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_create_group.click(function () { + // $app.dom.dlg_edit_user.modal(); + $app.dlg_edit_group.show_create(); + }); + $app.dom.btn_refresh_groups.click(function () { + $app.table_groups.load_data(); + }); + $app.dom.chkbox_groups_select_all.click(function () { + var _objects = $('#' + $app.table_groups.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + // $app.dom.btn_edit_user.click($app.on_btn_edit_user_click); + // $app.dom.btn_lock_user.click($app.on_btn_lock_user_click); + // $app.dom.btn_unlock_user.click($app.on_btn_unlock_user_click); + $app.dom.btn_remove_group.click(function(){ + $app.on_btn_remove_group_click(); + }); + + cb_stack.exec(); +}; + +$app.on_table_groups_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_groups_all_selected(); + }); + } else if (col_key === 'actions') { + var _row_id = row_id; + cell_obj.find('[data-btn-edit]').click(function () { + $app.dlg_edit_group.show_edit(_row_id); + }); + cell_obj.find('[data-btn-remove]').click(function () { + console.log(_row_id); + $app.on_btn_remove_group_click(_row_id); + }); + } +}; + +$app.check_groups_all_selected = function () { + var _all_checked = true; + var _objs = $('#' + $app.table_groups.dom_id + ' tbody').find('[data-check-box]'); + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + + if (_all_checked) { + $app.dom.chkbox_groups_select_all.prop('checked', true); + } else { + 
$app.dom.chkbox_groups_select_all.prop('checked', false); + } +}; + +$app.on_table_groups_render_created = function (render) { + render.filter_group_search = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search_group'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.group_info = function (row_id, fields) { + return '' + fields.name + '
' + fields.desc + '
' + // +'
编辑
' + ; + }; + + render.members = function (row_id, fields) { + if (_.isUndefined(fields.members)) + return ''; + + console.log(fields.members); + + var ret = []; + for (var i = 0; i < fields.members.length; ++i) { + ret.push('
'); + ret.push(fields.members[i].username+'@'+fields.members[i].host_ip); + ret.push('
'); + } + + if (fields.member_count > 5) { + ret.push('
'); + ret.push('...更多 '); + ret.push('
'); + } + + return ret.join(''); + }; + + render.member_count = function (row_id, fields) { + return '' + fields.member_count; + }; + + render.make_action_btn = function (row_id, fields) { + var ret = []; + ret.push('
'); + ret.push(' 编辑'); + ret.push(' 删除'); + ret.push('
'); + return ret.join(''); + }; +}; + +$app.on_table_groups_header_created = function (header) { + $app.dom.btn_table_groups_reset_filter = $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]'); + $app.dom.btn_table_groups_reset_filter.click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search_group').on_created(); +}; + +$app.get_selected_group = function (tbl) { + var groups = []; + var _objs = $('#' + $app.table_groups.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + groups.push(_row_data); + } + }); + return groups; +}; + +$app.on_btn_remove_group_click = function (_row_id) { + var group_list = []; + + if (_.isUndefined(_row_id)) { + var groups = $app.get_selected_group($app.table_groups); + if (groups.length === 0) { + $tp.notify_error('请选择要删除的用户组!'); + return; + } + + $.each(groups, function (i, g) { + group_list.push(g.id); + }); + } else { + var _row_data = $app.table_groups.get_row(_row_id); + group_list.push(_row_data.id); + } + + var _fn_sure = function (cb_stack, cb_args) { + $tp.ajax_post_json('/group/remove', {gtype: TP_GROUP_ACCOUNT, glist: group_list}, + function (ret) { + if (ret.code === TPE_OK) { + $app.table_groups.load_data(); + $tp.notify_success('删除分组操作成功!'); + } else { + $tp.notify_error('删除分组操作失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,删除分组操作失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + var _msg_remove = '您确定要移除此分组吗?'; + if(group_list.length > 1) + _msg_remove = '您确定要移除选定的 ' + group_list.length + '个 分组吗?'; + $tp.dlg_confirm(cb_stack, { + msg: '

注意:删除操作不可恢复!!

删除分组将同时删除所有分配给此分组成员的授权!

删除分组不会删除组内的成员账号!

' + _msg_remove + '

', + fn_yes: _fn_sure + }); + +}; + +$app.create_dlg_edit_group = function () { + var dlg = {}; + dlg.dom_id = 'dlg-edit-group'; + dlg.field_id = -1; // 用户id(仅编辑模式) + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + dlg_title: $('#' + dlg.dom_id + ' [data-field="dlg-title"]'), + edit_name: $('#edit-group-name'), + edit_desc: $('#edit-group-desc'), + msg: $('#edit-group-message'), + btn_save: $('#btn-edit-group-save') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_save.click(dlg.on_save); + + cb_stack.exec(); + }; + + dlg.init_fields = function (g) { + if (_.isUndefined(g)) { + dlg.field_id = -1; + dlg.dom.dlg_title.html('创建账号分组'); + + dlg.dom.edit_name.val(''); + dlg.dom.edit_desc.val(''); + } else { + dlg.field_id = g.id; + dlg.dom.dlg_title.html('编辑:' + g.name); + + dlg.dom.edit_name.val(g.name); + dlg.dom.edit_desc.val(g.desc); + } + }; + + dlg.show_create = function () { + dlg.init_fields(); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.show_edit = function (row_id) { + var g = $app.table_groups.get_row(row_id); + dlg.init_fields(g); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.show_error = function (error) { + dlg.dom.msg.removeClass().addClass('alert alert-danger').html(error).show(); + }; + dlg.hide_error = function () { + dlg.dom.msg.hide(); + }; + + dlg.check_input = function () { + dlg.field_name = dlg.dom.edit_name.val(); + dlg.field_desc = dlg.dom.edit_desc.val(); + + if (dlg.field_name.length === 0) { + dlg.dom.edit_name.focus(); + dlg.show_error('请指定用户组名称!'); + return false; + } + + return true; + }; + + dlg.on_save = function () { + console.log('---save.'); + dlg.hide_error(); + if (!dlg.check_input()) + return; + + var action = (dlg.field_id === -1) ? '创建' : '更新'; + + // 如果id为-1表示创建,否则表示更新 + $tp.ajax_post_json('/group/update', { + gtype: TP_GROUP_ACCOUNT, + gid: dlg.field_id, + name: dlg.field_name, + desc: dlg.field_desc + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('账号分组' + action + '成功!'); + $app.table_groups.load_data(); + dlg.dom.dialog.modal('hide'); + } else { + $tp.notify_error('账号分组' + action + '失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,账号分组' + action + '失败!'); + } + ); + + }; + + return dlg; +}; diff --git a/server/www/teleport/static/js/asset/host-group-info.js b/server/www/teleport/static/js/asset/host-group-info.js new file mode 100644 index 0000000..9dd744f --- /dev/null +++ b/server/www/teleport/static/js/asset/host-group-info.js @@ -0,0 +1,585 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_members: $('#btn-refresh-members'), + btn_add_members: $('#btn-add-members'), + chkbox_members_select_all: $('#table-members-select-all'), + btn_remove_members: $('#btn-remove-members'), + + chkbox_host_select_all: $('#table-host-select-all') + }; + + if ($app.options.group_id !== 0) { + cb_stack.add($app.create_controls); + } + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 成员列表表格 + //------------------------------- + var table_members_options = { + dom_id: 'table-members', + data_source: { + type: 'ajax-post', + url: '/asset/get-hosts', + restrict: {'group_id': $app.options.group_id} // 限定仅包含指定的成员 + // exclude: {'user_id':[6]} // 排除指定成员 + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: 
'', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "主机", + key: "ip", + sort: true, + // width: 240, + header_render: 'filter_search', + render: 'host_info', + fields: {id: 'id', ip: 'ip', name: 'name', router_ip: 'router_ip', router_port: 'router_port'} + }, + { + title: "系统", + key: "os_type", + width: 36, + align: 'center', + sort: true, + render: 'os_type', + fields: {os_type: 'os_type'} + }, + { + title: "资产编号", + key: "cid", + sort: true, + // width: 80, + // align: 'center', + //render: 'auth_type', + fields: {cid: 'cid'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 90, + align: 'center', + render: 'host_state', + fields: {state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_members_header_created, + on_render_created: $app.on_table_members_render_created, + on_cell_created: $app.on_table_members_cell_created + }; + + $app.table_members = $tp.create_table(table_members_options); + cb_stack + .add($app.table_members.load_data) + .add($app.table_members.init); + + //------------------------------- + // 成员列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_members, { + name: 'search', + place_holder: '搜索:主机名称/IP/等等...' + }); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_members, 'table-members-paging', + { + per_page: Cookies.get($app.page_id('host_group_info') + '_member_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('host_group_info') + '_member_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_members, 'table-members-pagination'); + + //------------------------------- + // 选择成员表格 + //------------------------------- + var table_host_options = { + dom_id: 'table-host', + data_source: { + type: 'ajax-post', + url: '/asset/get-hosts', + exclude: {'group_id': $app.options.group_id} // 排除指定成员 + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "主机", + key: "ip", + sort: true, + // width: 240, + header_render: 'filter_search', + render: 'host_info', + fields: {id: 'id', ip: 'ip', name: 'name', router_ip: 'router_ip', router_port: 'router_port'} + }, + { + title: "系统", + key: "os_type", + width: 36, + align: 'center', + sort: true, + render: 'os_type', + fields: {os_type: 'os_type'} + }, + { + title: "资产编号", + key: "cid", + sort: true, + // width: 80, + // align: 'center', + //render: 'auth_type', + fields: {cid: 'cid'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 90, + align: 'center', + render: 'host_state', + fields: {state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_acc_header_created, + on_render_created: $app.on_table_acc_render_created, + on_cell_created: $app.on_table_acc_cell_created + }; + + $app.table_host = $tp.create_table(table_host_options); + cb_stack + .add($app.table_host.load_data) + .add($app.table_host.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_host, { + name: 'search', + place_holder: '搜索:主机名称/IP/等等...' 
+ }); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_host, 'table-host-paging', + { + per_page: Cookies.get($app.page_id('host_group_info') + '_sel_member_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('host_group_info') + '_sel_member_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_host, 'table-host-pagination'); + + //------------------------------- + // 对话框 + //------------------------------- + $app.dlg_select_members = $app.create_dlg_select_members(); + cb_stack.add($app.dlg_select_members.init); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_add_members.click(function () { + $app.dlg_select_members.show(); + }); + $app.dom.btn_refresh_members.click(function () { + $app.table_members.load_data(); + }); + $app.dom.chkbox_members_select_all.click(function () { + var _objects = $('#' + $app.table_members.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + + $app.dom.btn_remove_members.click(function () { + $app.on_btn_remove_members_click(); + }); + + $app.dom.chkbox_host_select_all.click(function () { + var _objects = $('#' + $app.table_host.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + + cb_stack.exec(); +}; + +$app.on_table_members_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_members_all_selected(); + }); + } +}; + +$app.check_members_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_members.dom_id + ' tbody').find('[data-check-box]'); + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + + if (_all_checked) { + $app.dom.chkbox_members_select_all.prop('checked', true); + } else { + $app.dom.chkbox_members_select_all.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app._add_common_render = function (render) { + render.filter_search = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.host_info = function (row_id, fields) { + var ret = []; + + var name = fields.name; + if (name.length === 0) + name = fields.ip; + var ip = fields.ip; + ret.push('' + name + ' [' + ip + ']'); + if (fields.router_ip.length > 0) + ret.push(' '); + + return ret.join(''); + }; + + render.protocol = function (row_id, fields) { + switch (fields.protocol_type) { + case TP_PROTOCOL_TYPE_RDP: + return ' RDP'; + case TP_PROTOCOL_TYPE_SSH: + return ' SSH'; + case TP_PROTOCOL_TYPE_TELNET: + return ' TELNET'; + default: + return ' 未设置'; + } + }; + + render.auth_type = function (row_id, fields) { + switch (fields.auth_type) { + case TP_AUTH_TYPE_NONE: + return ''; + case TP_AUTH_TYPE_PASSWORD: + return '密码'; + case TP_AUTH_TYPE_PRIVATE_KEY: + return '私钥'; + default: + return '未设置'; + } + }; + + render.host_state = function (row_id, fields) { + var _style, _state; + + for (var i = 0; i < $app.obj_states.length; ++i) { + if ($app.obj_states[i].id === fields.state) { + _style = $app.obj_states[i].style; + _state = $app.obj_states[i].name; + break; + } + } + if (i === $app.obj_states.length) { + _style = 'info'; + _state = ' 未知'; + } + + return '' + _state + '' + }; +}; + +$app.on_table_members_render_created = function (render) { + $app._add_common_render(render); +}; + +$app.on_table_members_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); + // header._table_ctrl.get_filter_ctrl('role').on_created(); + // header._table_ctrl.get_filter_ctrl('account_state').on_created(); +}; + +$app.on_table_acc_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_users_all_selected(); + }); + } +}; + +$app.check_users_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_host.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + $app.dom.chkbox_host_select_all.prop('checked', true); + } else { + $app.dom.chkbox_host_select_all.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app.on_table_acc_render_created = function (render) { + // render.filter_role = function (header, title, col) { + // var _ret = ['
']; + // _ret.push('
'); + // _ret.push('
' + title + '
'); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('role'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
'); + // + // return _ret.join(''); + // }; + + // render.filter_user_state = function (header, title, col) { + // var _ret = ['
']; + // _ret.push('
'); + // _ret.push('
' + title + '
'); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('user_state'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
'); + // + // return _ret.join(''); + // }; + + // render.filter_search = function (header, title, col) { + // var _ret = ['
']; + // _ret.push('
'); + // _ret.push('
' + title + '
'); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
'); + // + // return _ret.join(''); + // }; + // + // render.make_check_box = function (row_id, fields) { + // return ''; + // }; + + $app._add_common_render(render); +}; + +$app.on_table_acc_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.get_selected_members = function (tbl) { + var members = []; + var _objs = $('#' + $app.table_members.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + members.push(_row_data); + } + }); + + return members; +}; + +$app.on_btn_remove_members_click = function () { + var members = $app.get_selected_members($app.table_members); + if (members.length === 0) { + $tp.notify_error('请选择要移除的成员主机!'); + return; + } + + var member_list = []; + $.each(members, function (i, m) { + member_list.push(m.id); + }); + + var _fn_sure = function (cb_stack, cb_args) { + $tp.ajax_post_json('/group/remove-members', {gtype: TP_GROUP_HOST, gid: $app.options.group_id, members: member_list}, + function (ret) { + if (ret.code === TPE_OK) { + cb_stack + .add($app.check_members_all_selected) + .add($app.table_members.load_data); + $tp.notify_success('移除成员主机操作成功!'); + } else { + $tp.notify_error('移除成员主机操作失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,移除成员主机操作失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + $tp.dlg_confirm(cb_stack, { + msg: '
移除组内成员不会删除主机!

您确定要移除所有选定的 ' + member_list.length + '个 成员主机吗?

', + fn_yes: _fn_sure + }); + +}; + +$app.create_dlg_select_members = function () { + var dlg = {}; + dlg.dom_id = 'dlg-select-members'; + dlg.field_id = -1; + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_add: $('#btn-add-to-group') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + cb_stack.exec(); + }; + + dlg.show = function () { + // dlg.init_fields(); + // $app.table_host.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_host.get_row(_obj); + items.push(_row_data.id); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + console.log('items:', items); + + // 如果id为-1表示创建,否则表示更新 + $tp.ajax_post_json('/group/add-members', { + gtype: TP_GROUP_HOST, + gid: $app.options.group_id, + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('主机成员添加成功!'); + $app.table_members.load_data(); + $app.table_host.load_data(); + } else { + $tp.notify_error('主机成员添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,主机成员添加失败!'); + } + ); + + }; + + return dlg; +}; diff --git a/server/www/teleport/static/js/asset/host-group-list.js b/server/www/teleport/static/js/asset/host-group-list.js new file mode 100644 index 0000000..232aa06 --- /dev/null +++ b/server/www/teleport/static/js/asset/host-group-list.js @@ -0,0 +1,543 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_groups: $('#btn-refresh-groups'), + btn_create_group: $('#btn-create-group'), + chkbox_groups_select_all: $('#table-groups-select-all'), + + btn_lock_group: $('#btn-lock-group'), + btn_unlock_group: $('#btn-unlock-group'), + btn_remove_group: $('#btn-remove-group') + }; + + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 用户组列表表格 + //------------------------------- + var table_groups_options = { + dom_id: 'table-groups', + data_source: { + type: 'ajax-post', + url: '/asset/get-host-groups-with-member' + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "主机组", + key: "name", + sort: true, + width: 240, + header_render: 'filter_search', + render: 'group_info', + fields: {id: 'id', name: 'name', desc: 'desc'} + }, + { + title: "成员数", + key: "member_count", + width: 20, + align: 'center', + // sort: true, + // header_render: 'filter_role', + //render: 'member_count', + fields: {member_count: 'member_count'} + }, + { + title: "成员主机", + key: "members", + // width: 200, + // sort: true, + // header_render: 'filter_role', + render: 'members', + fields: {id: 'id', member_count: 'member_count', members: 'members'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 90, + align: 'center', + header_render: 'filter_state', + render: 'group_state', + fields: {state: 'state'} + }, + { + title: '操作', + key: 'actions', + width: 120, + align: 'center', + render: 'make_action_btn', + fields: 
{id: 'id'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_groups_header_created, + on_render_created: $app.on_table_groups_render_created, + on_cell_created: $app.on_table_groups_cell_created + }; + + $app.table_groups = $tp.create_table(table_groups_options); + cb_stack + .add($app.table_groups.load_data) + .add($app.table_groups.init); + + //------------------------------- + // 用户组列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_groups, { + name: 'search', + place_holder: '搜索:账号组名称/描述' + }); + $tp.create_table_header_filter_state($app.table_groups, 'state', $app.obj_states, [TP_STATE_LOCKED]); + $tp.create_table_paging($app.table_groups, 'table-groups-paging', + { + per_page: Cookies.get($app.page_id('acc_group_list') + '_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('acc_group_list') + '_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_groups, 'table-groups-pagination'); + + + //------------------------------- + // 对话框 + //------------------------------- + $app.dlg_edit_group = $app.create_dlg_edit_group(); + cb_stack.add($app.dlg_edit_group.init); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_create_group.click(function () { + // $app.dom.dlg_edit_user.modal(); + $app.dlg_edit_group.show_create(); + }); + $app.dom.btn_refresh_groups.click(function () { + $app.table_groups.load_data(); + }); + $app.dom.chkbox_groups_select_all.click(function () { + var _objects = $('#' + $app.table_groups.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + // $app.dom.btn_edit_user.click($app.on_btn_edit_user_click); + $app.dom.btn_lock_group.click(function () { + $app.on_btn_lock_group_click(); + }); + $app.dom.btn_unlock_group.click(function () { + $app.on_btn_unlock_group_click(); + }); + $app.dom.btn_remove_group.click(function () { + $app.on_btn_remove_group_click(); + }); + + cb_stack.exec(); +}; + +$app.on_table_groups_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_groups_all_selected(); + }); + } else if (col_key === 'actions') { + var _row_id = row_id; + cell_obj.find('[data-btn-edit]').click(function () { + $app.dlg_edit_group.show_edit(_row_id); + }); + cell_obj.find('[data-btn-remove]').click(function () { + console.log(_row_id); + $app.on_btn_remove_group_click(_row_id); + }); + } +}; + +$app.check_groups_all_selected = function () { + var _all_checked = true; + var _objs = $('#' + $app.table_groups.dom_id + ' tbody').find('[data-check-box]'); + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + + if (_all_checked) { + $app.dom.chkbox_groups_select_all.prop('checked', true); + } else { + $app.dom.chkbox_groups_select_all.prop('checked', false); + } +}; + +$app.on_table_groups_render_created = function (render) { + render.filter_search = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.filter_state = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('state'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.group_state = function (row_id, fields) { + var _style, _state; + + for (var i = 0; i < $app.obj_states.length; ++i) { + if ($app.obj_states[i].id === fields.state) { + _style = $app.obj_states[i].style; + _state = $app.obj_states[i].name; + break; + } + } + if (i === $app.obj_states.length) { + _style = 'info'; + _state = ' 未知'; + } + + return '' + _state + '' + }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.group_info = function (row_id, fields) { + return '' + fields.name + '
' + fields.desc + '
' + // +'
编辑
' + ; + }; + + render.members = function (row_id, fields) { + if (_.isUndefined(fields.members)) + return ''; + + console.log('xxx', fields.members); + + var ret = []; + for (var i = 0; i < fields.members.length; ++i) { + ret.push('
'); + if (fields.members[i].name.length === 0) + ret.push(fields.members[i].ip); + else + ret.push(fields.members[i].name + ' [' + fields.members[i].ip + ']'); + ret.push('
'); + } + + if (fields.member_count > 5) { + ret.push('
'); + ret.push('...更多 '); + ret.push('
'); + } + + return ret.join(''); + }; + + render.member_count = function (row_id, fields) { + return '' + fields.member_count; + }; + + render.make_action_btn = function (row_id, fields) { + var ret = []; + ret.push('
'); + ret.push(' 编辑'); + ret.push(' 删除'); + ret.push('
'); + return ret.join(''); + }; +}; + +$app.on_table_groups_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); + header._table_ctrl.get_filter_ctrl('state').on_created(); +}; + +$app.get_selected_group = function (tbl) { + var groups = []; + var _objs = $('#' + $app.table_groups.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + groups.push(_row_data); + } + }); + return groups; +}; + +$app.on_btn_lock_group_click = function (_row_id) { + var group_list = []; + + if (_.isUndefined(_row_id)) { + var groups = $app.get_selected_group($app.table_groups); + if (groups.length === 0) { + $tp.notify_error('请选择要禁用的分组!'); + return; + } + + $.each(groups, function (i, g) { + group_list.push(g.id); + }); + } else { + var _row_data = $app.table_groups.get_row(_row_id); + group_list.push(_row_data.id); + } + + $tp.ajax_post_json('/group/lock', {gtype: TP_GROUP_HOST, glist: group_list}, + function (ret) { + if (ret.code === TPE_OK) { + $app.table_groups.load_data(); + $tp.notify_success('禁用分组操作成功!'); + } else { + $tp.notify_error('禁用分组操作失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,禁用分组操作失败!'); + } + ); + +}; + +$app.on_btn_unlock_group_click = function (_row_id) { + var group_list = []; + + if (_.isUndefined(_row_id)) { + var groups = $app.get_selected_group($app.table_groups); + if (groups.length === 0) { + $tp.notify_error('请选择要解禁的分组!'); + return; + } + + $.each(groups, function (i, g) { + group_list.push(g.id); + }); + } else { + var _row_data = $app.table_groups.get_row(_row_id); + group_list.push(_row_data.id); + } + + $tp.ajax_post_json('/group/unlock', {gtype: TP_GROUP_HOST, glist: group_list}, + function (ret) { + if (ret.code === TPE_OK) { + $app.table_groups.load_data(); + $tp.notify_success('分组解禁操作成功!'); + } else { + $tp.notify_error('分组解禁操作失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,分组解禁操作失败!'); + } + ); + +}; + +$app.on_btn_remove_group_click = function (_row_id) { + var group_list = []; + + if (_.isUndefined(_row_id)) { + var groups = $app.get_selected_group($app.table_groups); + if (groups.length === 0) { + $tp.notify_error('请选择要删除的分组!'); + return; + } + + $.each(groups, function (i, g) { + group_list.push(g.id); + }); + } else { + var _row_data = $app.table_groups.get_row(_row_id); + group_list.push(_row_data.id); + } + + var _fn_sure = function (cb_stack, cb_args) { + $tp.ajax_post_json('/group/remove', {gtype: TP_GROUP_HOST, glist: group_list}, + function (ret) { + if (ret.code === TPE_OK) { + $app.table_groups.load_data(); + $tp.notify_success('删除分组操作成功!'); + } else { + $tp.notify_error('删除分组操作失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,删除分组操作失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + var _msg_remove = '您确定要移除此分组吗?'; + if (group_list.length > 1) + _msg_remove = '您确定要移除选定的 ' + group_list.length + '个 分组吗?'; + $tp.dlg_confirm(cb_stack, { + msg: '

注意:删除操作不可恢复!!

删除分组将同时删除所有分配给此分组成员的授权!

删除分组不会删除组内的成员账号!

' + _msg_remove + '

', + fn_yes: _fn_sure + }); + +}; + +$app.create_dlg_edit_group = function () { + var dlg = {}; + dlg.dom_id = 'dlg-edit-group'; + dlg.field_id = -1; // 用户id(仅编辑模式) + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + dlg_title: $('#' + dlg.dom_id + ' [data-field="dlg-title"]'), + edit_name: $('#edit-group-name'), + edit_desc: $('#edit-group-desc'), + msg: $('#edit-group-message'), + btn_save: $('#btn-edit-group-save') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_save.click(dlg.on_save); + + cb_stack.exec(); + }; + + dlg.init_fields = function (g) { + if (_.isUndefined(g)) { + dlg.field_id = -1; + dlg.dom.dlg_title.html('创建主机分组'); + + dlg.dom.edit_name.val(''); + dlg.dom.edit_desc.val(''); + } else { + dlg.field_id = g.id; + dlg.dom.dlg_title.html('编辑:' + g.name); + + dlg.dom.edit_name.val(g.name); + dlg.dom.edit_desc.val(g.desc); + } + }; + + dlg.show_create = function () { + dlg.init_fields(); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.show_edit = function (row_id) { + var g = $app.table_groups.get_row(row_id); + dlg.init_fields(g); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.show_error = function (error) { + dlg.dom.msg.removeClass().addClass('alert alert-danger').html(error).show(); + }; + dlg.hide_error = function () { + dlg.dom.msg.hide(); + }; + + dlg.check_input = function () { + dlg.field_name = dlg.dom.edit_name.val(); + dlg.field_desc = dlg.dom.edit_desc.val(); + + if (dlg.field_name.length === 0) { + dlg.dom.edit_name.focus(); + dlg.show_error('请指定用户组名称!'); + return false; + } + + return true; + }; + + dlg.on_save = function () { + console.log('---save.'); + dlg.hide_error(); + if (!dlg.check_input()) + return; + + var action = (dlg.field_id === -1) ? '创建' : '更新'; + + // 如果id为-1表示创建,否则表示更新 + $tp.ajax_post_json('/group/update', { + gtype: TP_GROUP_HOST, + gid: dlg.field_id, + name: dlg.field_name, + desc: dlg.field_desc + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('主机分组' + action + '成功!'); + $app.table_groups.load_data(); + dlg.dom.dialog.modal('hide'); + } else { + $tp.notify_error('主机分组' + action + '失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,主机分组' + action + '失败!'); + } + ); + + }; + + return dlg; +}; diff --git a/server/www/teleport/static/js/asset/host-list.js b/server/www/teleport/static/js/asset/host-list.js new file mode 100644 index 0000000..4ada571 --- /dev/null +++ b/server/www/teleport/static/js/asset/host-list.js @@ -0,0 +1,1476 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_host: $('#btn-refresh-host'), + btn_add_user: $('#btn-add-host'), + chkbox_host_select_all: $('#table-host-select-all'), + + btn_lock_host: $('#btn-lock-host'), + btn_unlock_host: $('#btn-unlock-host'), + btn_remove_host: $('#btn-remove-host'), + + dlg_import_asset: $('#dlg-import-asset'), + btn_import_asset: $('#btn-import-asset'), + btn_select_file: $('#btn-select-file'), + btn_do_upload: $('#btn-do-upload-file'), + upload_file_info: $('#upload-file-info'), + upload_file_message: $('#upload-file-message') + }; + + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 资产列表表格 + //------------------------------- + var table_host_options = { + dom_id: 'table-host', + data_source: { 
+ type: 'ajax-post', + url: '/asset/get-hosts' + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: '主机', + key: 'ip', + sort: true, + header_render: 'filter_search', + render: 'host_info', + fields: {id: 'id', ip: 'ip', router_ip: 'router_ip', router_port: 'router_port', name: 'name', desc: 'desc'} + }, + { + title: '系统', + key: 'os_type', + align: 'center', + width: 36, + sort: true, + // header_render: 'filter_os', + render: 'os_type', + fields: {os_type: 'os_type'} + }, + { + title: '资产编号', + key: 'cid', + // align: 'center', + // width: 36, + sort: true + // header_render: 'filter_os', + // render: 'sys_type', + // fields: {: 'os'} + }, + { + title: '账号数', + key: 'acc_count', + render: 'account', + fields: {count: 'acc_count'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 90, + align: 'center', + header_render: 'filter_state', + render: 'host_state', + fields: {state: 'state'} + }, + { + title: '', + key: 'action', + sort: false, + align: 'center', + width: 70, + render: 'make_host_action_btn', + fields: {id: 'id', state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_host_header_created, + on_render_created: $app.on_table_host_render_created, + on_cell_created: $app.on_table_host_cell_created + }; + + $app.table_host = $tp.create_table(table_host_options); + cb_stack + .add($app.table_host.load_data) + .add($app.table_host.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_host, { + name: 'search', + place_holder: '搜索:主机IP/名称/描述/资产编号/等等...' 
+ }); + // $app.table_host_role_filter = $tp.create_table_filter_role($app.table_host, $app.role_list); + // 主机没有“临时锁定”状态,因此要排除掉 + $tp.create_table_header_filter_state($app.table_host, 'state', $app.obj_states, [TP_STATE_LOCKED]); + + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_host, 'table-host-paging', + { + per_page: Cookies.get($app.page_id('asset_host') + '_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('asset_host') + '_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_host, 'table-host-pagination'); + + //------------------------------- + // 对话框 + //------------------------------- + $app.dlg_edit_host = $app.create_dlg_edit_host(); + cb_stack.add($app.dlg_edit_host.init); + // $app.dlg_host_info = $app.create_dlg_host_info(); + // cb_stack.add($app.dlg_host_info.init); + $app.dlg_accounts = $app.create_dlg_accounts(); + cb_stack.add($app.dlg_accounts.init); + $app.dlg_edit_account = $app.create_dlg_edit_account(); + cb_stack.add($app.dlg_edit_account.init); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_add_user.click(function () { + // $app.dom.dlg_edit_user.modal(); + $app.dlg_edit_host.show_add(); + }); + $app.dom.btn_refresh_host.click(function () { + $app.table_host.load_data(); + }); + $app.dom.btn_select_file.click($app.on_btn_select_file_click); + $app.dom.btn_do_upload.click($app.on_btn_do_upload_click); + $app.dom.btn_import_asset.click(function () { + $app.dom.upload_file_info.html('- 尚未选择文件 -'); + $app.dom.btn_do_upload.hide(); + $app.dom.upload_file_message.html('').hide(); + $app.dom.dlg_import_asset.modal({backdrop: 'static'}); + }); + $app.dom.chkbox_host_select_all.click(function () { + console.log('----'); + var _objects = $('#' + $app.table_host.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + $app.dom.btn_lock_host.click($app.on_btn_lock_host_click); + $app.dom.btn_unlock_host.click($app.on_btn_unlock_host_click); + $app.dom.btn_remove_host.click($app.on_btn_remove_host_click); + + cb_stack.exec(); +}; + +$app.on_table_host_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_host_all_selected(); + }); + } else if (col_key === 'action') { + // 绑定系统选择框事件 + cell_obj.find('[data-action]').click(function () { + var host = $app.table_host.get_row(row_id); + var action = $(this).attr('data-action'); + if (action === 'edit') { + $app.dlg_edit_host.show_edit(row_id); + } else if (action === 'account') { + $app.dlg_accounts.show(row_id); + } else if (action === 'lock') { + $app._lock_hosts([host.id]); + } else if (action === 'unlock') { + $app._unlock_hosts([host.id]); + } else if (action === 'remove') { + $app._remove_hosts([host.id]); + } else if (action === 'duplicate') { + $app._duplicate_host(host.id); + } + }); + } else if (col_key === 'ip') { + cell_obj.find('[data-toggle="popover"]').popover({trigger: 'hover'}); + // } else if (col_key === 'account') { + // cell_obj.find('[data-action="add-account"]').click(function () { + // $app.dlg_accounts.show(row_id); + // }); + } else if (col_key === 'acc_count') { + cell_obj.find('[data-action="edit-account"]').click(function () { + $app.dlg_accounts.show(row_id); + 
}); + } +}; + +$app.check_host_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_host.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + $app.dom.chkbox_host_select_all.prop('checked', true); + } else { + $app.dom.chkbox_host_select_all.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app.on_table_host_render_created = function (render) { + + render.filter_state = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('state'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.filter_search = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.host_info = function (row_id, fields) { + var title, sub_title; + + title = fields.name; + sub_title = fields.ip; + + if (title.length === 0) { + title = fields.ip; + } + + var desc = []; + if (fields.desc.length > 0) { + desc.push(fields.desc.replace(/\r/ig, "").replace(/\n/ig, "
")); + } + if (fields.router_ip.length > 0) { + sub_title += ',由 ' + fields.router_ip + ':' + fields.router_port + ' 路由'; + } + + var ret = []; + // ret.push('
' + title + ''); + // if (desc.length > 0) { + // ret.push(''); + // } + + if (desc.length > 0) { + ret.push('
' + title + ''); + } else { + ret.push('
' + title + ''); + } + + ret.push('
' + sub_title + '
'); + return ret.join(''); + }; + + // render.os = function (row_id, fields) { + // return fields.os; + // }; + // + + render.account = function (row_id, fields) { + return ' ' + fields.count; + }; + + render.host_state = function (row_id, fields) { + var _style, _state; + + for (var i = 0; i < $app.obj_states.length; ++i) { + if ($app.obj_states[i].id === fields.state) { + _style = $app.obj_states[i].style; + _state = $app.obj_states[i].name; + break; + } + } + if (i === $app.obj_states.length) { + _style = 'info'; + _state = ' 未知'; + } + + return '' + _state + '' + }; + + render.make_host_action_btn = function (row_id, fields) { + var h = []; + h.push('
'); + h.push(''); + h.push(''); + h.push('
'); + + return h.join(''); + }; +}; + +$app.on_table_host_header_created = function (header) { + $app.dom.btn_table_host_reset_filter = $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]'); + $app.dom.btn_table_host_reset_filter.click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // TODO: 当过滤器不是默认值时,让“重置过滤器按钮”有呼吸效果,避免用户混淆 - 实验性质 + // var t1 = function(){ + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 1.0, function(){ + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 0.2, t1); + // }); + // }; + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 0.2, t1); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); + // header._table_ctrl.get_filter_ctrl('role').on_created(); + header._table_ctrl.get_filter_ctrl('state').on_created(); +}; + +$app.on_btn_select_file_click = function () { + + var html = ''; + $('body').after($(html)); + var btn_file_selector = $("#file-selector"); + + btn_file_selector.change(function () { + $app.dom.upload_file_message.hide(); + // var dom_file_name = $('#upload-file-name'); + + var file = null; + if (btn_file_selector[0].files && btn_file_selector[0].files[0]) { + file = btn_file_selector[0].files[0]; + } else if (btn_file_selector[0].files && btn_file_selector[0].files.item(0)) { + file = btn_file_selector[0].files.item(0); + } + + if (file === null) { + $app.dom.upload_file_info.html('请点击图标,选择要上传的文件!'); + return; + } + + var _ext = file.name.substring(file.name.lastIndexOf('.')).toLocaleLowerCase(); + if (_ext !== '.csv') { + $app.dom.upload_file_info.html('抱歉,仅支持导入 csv 格式的文件!'); + return; + } + + if (file.size >= MB * 2) { + $app.dom.upload_file_info.html('文件太大,超过2MB,无法导入!'); + return; + } + + var fileInfo = ''; + fileInfo += file.name; + fileInfo += '
'; + fileInfo += tp_size2str(file.size, 2); + $app.dom.upload_file_info.html(fileInfo); + + $app.dom.btn_do_upload.show(); + }); + + btn_file_selector.click(); + +}; + +$app.on_btn_do_upload_click = function () { + $app.dom.btn_do_upload.hide(); + + $app.dom.upload_file_message + .removeClass('alert-danger alert-info') + .addClass('alert-info') + .html(' 正在导入,请稍候...') + .show(); + + + var param = {}; + $.ajaxFileUpload({ + url: "/asset/upload-import",// 需要链接到服务器地址 + fileElementId: "file-selector", // 文件选择框的id属性 + timeout: 60000, + secureuri: false, + dataType: 'text', + data: param, + success: function (data) { + $('#file-selector').remove(); + + var ret = JSON.parse(data); + + if (ret.code === TPE_OK) { + $app.dom.upload_file_message + .removeClass('alert-info') + .addClass('alert-success') + .html(' 资产导入成功:' + ret.message); + + $app.table_host.load_data(); + } else { + var err_msg = [' 资产导入失败:' + ret.message]; + if (!_.isUndefined(ret.data)) { + err_msg.push('
'); + var err_lines = []; + $.each(ret.data, function (i, item) { + err_lines.push('第' + item.line + '行:' + item.error); + }); + err_msg.push(err_lines.join('
')); + err_msg.push('
'); + + $app.table_host.load_data(); + } + + $app.dom.upload_file_message + .removeClass('alert-info') + .addClass('alert-danger') + .html(err_msg.join('')); + } + }, + error: function () { + $('#file-selector').remove(); + $tp.notify_error('网络故障,批量导入资产失败!'); + } + }); +}; + +$app.show_user_info = function (row_id) { + $app.dlg_user_info.show(row_id); +}; + +$app.get_selected_host = function (tbl) { + var users = []; + var _objs = $('#' + $app.table_host.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + // _all_checked = false; + users.push(_row_data.id); + } + }); + return users; +}; + +$app._lock_hosts = function (host_ids) { + $tp.ajax_post_json('/asset/update-hosts', {action: 'lock', hosts: host_ids}, + function (ret) { + if (ret.code === TPE_OK) { + $app.table_host.load_data(); + $tp.notify_success('禁用主机操作成功!'); + } else { + $tp.notify_error('禁用主机操作失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,禁用主机操作失败!'); + } + ); +}; + +$app.on_btn_lock_host_click = function () { + var items = $app.get_selected_host($app.table_host); + if (items.length === 0) { + $tp.notify_error('请选择要禁用的主机!'); + return; + } + + $app._lock_hosts(items); +}; + +$app._unlock_hosts = function (host_ids) { + $tp.ajax_post_json('/asset/update-hosts', {action: 'unlock', hosts: host_ids}, + function (ret) { + if (ret.code === TPE_OK) { + $app.table_host.load_data(); + $tp.notify_success('解禁主机操作成功!'); + } else { + $tp.notify_error('解禁主机操作失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,解禁主机操作失败!'); + } + ); +}; + +$app.on_btn_unlock_host_click = function () { + var items = $app.get_selected_host($app.table_host); + if (items.length === 0) { + $tp.notify_error('请选择要解禁的主机!'); + return; + } + + $app._unlock_hosts(items); +}; + +$app._remove_hosts = function (host_ids) { + var _fn_sure = function (cb_stack) { + $tp.ajax_post_json('/asset/update-hosts', {action: 'remove', hosts: host_ids}, + function (ret) { + if (ret.code === TPE_OK) { + cb_stack.add($app.check_host_all_selected); + cb_stack.add($app.table_host.load_data); + $tp.notify_success('删除主机操作成功!'); + } else { + $tp.notify_error('删除主机操作失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,删除主机操作失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + $tp.dlg_confirm(cb_stack, { + msg: '

注意:删除操作不可恢复!!

删除主机将同时删除与之相关的账号,并将主机和账号从所在分组中移除,同时删除所有相关授权!

如果您希望临时禁止登录指定主机,可将其“禁用”!

您确定要移除选定的' + host_ids.length + '个主机吗?

', + fn_yes: _fn_sure + }); +}; + +$app.on_btn_remove_host_click = function () { + var items = $app.get_selected_host($app.table_host); + if (items.length === 0) { + $tp.notify_error('请选择要删除的主机!'); + return; + } + + $app._remove_hosts(items); +}; + +$app.create_dlg_edit_host = function () { + var dlg = {}; + dlg.dom_id = 'dlg-edit-host'; + dlg.field_id = -1; // 主机id(仅编辑模式) + // dlg.field_type = -1; + dlg.field_os_type = -1; + dlg.field_ip = ''; + dlg.field_conn_mode = -1; + dlg.field_router_ip = ''; + dlg.field_router_port = 0; + dlg.field_name = ''; + dlg.field_cid = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + dlg_title: $('#' + dlg.dom_id + ' [data-field="dlg-title"]'), + hlp_conn_mode: $('#help-host-conn-mode'), + hlp_cid: $('#help-host-cid'), + // select_type: $('#edit-host-type'), + edit_os_type: $('#edit-host-os-type'), + edit_ip: $('#edit-host-ip'), + edit_conn_mode: $('#edit-host-conn-mode'), + block_router_mode: $('#block-router-mode'), + edit_router_ip: $('#edit-host-router-ip'), + edit_router_port: $('#edit-host-router-port'), + edit_name: $('#edit-host-name'), + edit_cid: $('#edit-host-cid'), + edit_desc: $('#edit-host-desc'), + msg: $('#edit-host-message'), + btn_save: $('#btn-edit-host-save'), + }; + + dlg.init = function (cb_stack) { + var html = []; + // // 创建类型选择框 + // html.push(''); + // html.push(''); + // dlg.dom.select_type.after($(html.join(''))); + // dlg.dom.selected_type = $('#' + dlg.dom_id + ' span[data-selected-type]'); + // + // // 绑定类型选择框事件 + // $('#' + dlg.dom_id + ' li a[data-type-selector]').click(function () { + // var select = parseInt($(this).attr('data-type-selector')); + // if (dlg.field_type === select) + // return; + // var name = $app.id2name($app.host_types, select); + // if (_.isUndefined(name)) { + // name = '选择主机类型角色'; + // dlg.field_type = -1; + // } else { + // dlg.field_type = select; + // } + // + // dlg.dom.selected_type.text(name); + // }); + + // 创建系统选择框 + // html.push(''); + // html.push(''); + + // html.push(''); + $.each($app.host_os_type, function (i, t) { + html.push(''); + }); + + dlg.dom.edit_os_type.append(html.join('')); + // dlg.dom.selected_os = $('#' + dlg.dom_id + ' span[data-selected-os]'); + + dlg.dom.edit_conn_mode.change(dlg.on_conn_mode_change); + + dlg.dom.btn_save.click(dlg.on_save); + + dlg.dom.hlp_conn_mode.popover({trigger: 'hover'}); + dlg.dom.hlp_cid.popover({trigger: 'hover'}); + + cb_stack.exec(); + }; + + dlg.init_fields = function (host) { + // var type_name = '选择主机类型'; + // dlg.field_type = -1; + // var os_name = '选择操作系统'; + dlg.field_id = -1; + dlg.field_os_type = -1; + + if (_.isUndefined(host)) { + dlg.dom.dlg_title.html('添加主机'); + + dlg.dom.edit_ip.val(''); + dlg.dom.edit_conn_mode.val('0'); + dlg.dom.edit_router_ip.val(''); + dlg.dom.edit_router_port.val(''); + dlg.dom.edit_name.val(''); + dlg.dom.edit_cid.val(''); + dlg.dom.edit_desc.val(''); + } else { + dlg.field_id = host.id; + dlg.dom.dlg_title.html('编辑主机:'); + + var _name = $app.id2name($app.host_os_type, host.os_type); + if (!_.isUndefined(_name)) { + // os_name = _name; + } + dlg.field_os_type = host.os_type; + + if (host.router_ip.length > 0) { + dlg.dom.edit_router_ip.val(host.router_ip); + dlg.dom.edit_router_port.val(host.router_port); + dlg.dom.edit_conn_mode.val('1'); + } else { + dlg.dom.edit_conn_mode.val('0'); + } + + dlg.dom.edit_ip.val(host.ip); + dlg.dom.edit_name.val(host.name); + dlg.dom.edit_cid.val(host.cid); + dlg.dom.edit_desc.val(host.desc); + } + // dlg.dom.selected_type.text(type_name); + // 
dlg.dom.selected_os.text(os_name); + dlg.dom.edit_os_type.val('' + dlg.field_os_type); + dlg.on_conn_mode_change(); + }; + + dlg.on_conn_mode_change = function () { + if (dlg.dom.edit_conn_mode.val() === '0') { + dlg.dom.block_router_mode.hide(); + } else { + dlg.dom.block_router_mode.show(); + } + }; + + dlg.show_add = function () { + dlg.init_fields(); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.show_edit = function (row_id) { + var host = $app.table_host.get_row(row_id); + dlg.init_fields(host); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + // dlg.show_error = function (error) { + // dlg.dom.msg.removeClass().addClass('alert alert-danger').html(error).show(); + // }; + // dlg.hide_error = function () { + // dlg.dom.msg.hide(); + // }; + + dlg.check_input = function () { + dlg.field_os_type = parseInt(dlg.dom.edit_os_type.val()); + dlg.field_ip = dlg.dom.edit_ip.val(); + dlg.field_conn_mode = parseInt(dlg.dom.edit_conn_mode.val()); + dlg.field_router_ip = dlg.dom.edit_router_ip.val(); + dlg.field_router_port = parseInt(dlg.dom.edit_router_port.val()); + dlg.field_name = dlg.dom.edit_name.val(); + dlg.field_cid = dlg.dom.edit_cid.val(); + dlg.field_desc = dlg.dom.edit_desc.val(); + + if (_.isNaN(dlg.field_os_type) || dlg.field_os_type === -1) { + $tp.notify_error('请指定远程主机的操作系统!'); + return false; + } + + if (dlg.field_ip.length === 0) { + dlg.dom.edit_ip.focus(); + $tp.notify_error('请指定远程主机IP地址!'); + return false; + } + + if (!tp_check_ip(dlg.field_ip)) { + dlg.dom.edit_ip.focus(); + $tp.notify_error('远程主机IP地址格式有误!'); + return false; + } + + if (dlg.field_conn_mode === 1) { + // 端口映射 + if (dlg.field_router_ip.length === 0) { + dlg.dom.edit_router_ip.focus(); + $tp.notify_error('请指定路由主机IP地址!'); + return false; + } + + if (!tp_check_ip(dlg.field_router_ip)) { + dlg.dom.edit_router_ip.focus(); + $tp.notify_error('路由主机IP地址格式有误!'); + return false; + } + + if (dlg.dom.edit_router_port.val().length === 0) { + dlg.dom.edit_router_port.focus(); + $tp.notify_error('请指定路由主机映射端口!'); + return false; + } + + if (_.isNaN(dlg.field_router_port) || dlg.field_router_port <= 0 || dlg.field_router_port > 65535) { + dlg.dom.edit_router_port.focus(); + $tp.notify_error('路由主机映射端口有误!'); + return false; + } else { + dlg.dom.edit_router_port.val('' + dlg.field_router_port); + } + } + + return true; + }; + + dlg.on_save = function () { + if (!dlg.check_input()) + return; + + var action = (dlg.field_id === -1) ? 
'添加' : '更新'; + + // var router_addr = ''; + // if (dlg.field_conn_mode === 1) { + // router_addr = dlg.field_router_ip + ':' + dlg.field_router_port; + // } + + // 如果id为-1表示创建,否则表示更新 + $tp.ajax_post_json('/asset/update-host', { + id: dlg.field_id, + // role: dlg.field_role, + os_type: dlg.field_os_type, + ip: dlg.field_ip, + router_ip: dlg.field_router_ip, + router_port: dlg.field_router_port, + name: dlg.field_name, + cid: dlg.field_cid, + desc: dlg.field_desc + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('远程主机' + action + '成功!'); + $app.table_host.load_data(); + dlg.dom.dialog.modal('hide'); + } else { + $tp.notify_error('远程主机' + action + '失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,远程主机' + action + '失败!'); + } + ); + }; + + return dlg; +}; + +$app.create_dlg_host_info = function () { + var dlg = {}; + dlg.dom_id = 'dlg-user-info'; + dlg.row_id = -1; + dlg.need_edit = false; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + dlg_title: $('#' + dlg.dom_id + ' [data-field="dlg-title"]'), + info: $('#' + dlg.dom_id + ' [data-field="user-info"]'), + btn_edit: $('#' + dlg.dom_id + ' [data-field="btn-edit"]') + }; + + dlg.init = function (cb_stack) { + dlg.dom.dialog.on('hidden.bs.modal', function () { + if (!dlg.need_edit) + return; + $app.dlg_edit_user.show_edit(dlg.row_id); + }); + + dlg.dom.btn_edit.click(function () { + dlg.need_edit = true; + dlg.dom.dialog.modal('hide'); + }); + + cb_stack.exec(); + }; + + dlg.show = function (row_id) { + dlg.row_id = row_id; + dlg.need_edit = false; + + var _row_data = $app.table_host.get_row(dlg.row_id); + + // 表格加载时,是不会读取用户的 desc 字段的,因此可以判断此用户是否已经读取过详细信息了 + if (_.isUndefined(_row_data.desc)) { + // 尚未读取,则向服务器要求获取此用户账号的完整信息 + $tp.ajax_post_json('/user/get-user/' + _row_data.id, {}, + function (ret) { + if (ret.code === TPE_OK) { + $app.table_host.update_row(dlg.row_id, ret.data); + dlg.show_info(ret.data); + } else { + $tp.notify_error('无法获取用户详细信息:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,无法获取用户详细信息!'); + } + ); + } else { + dlg.show_info(_row_data); + } + }; + + dlg.show_info = function (user) { + // 更新对话框中显示的信息 + dlg.dom.dlg_title.html(' ' + user.surname); + + var info = []; + + var not_set = '未设置'; + var mobile = (user.mobile.length === 0) ? not_set : user.mobile; + var qq = (user.qq.length === 0) ? not_set : user.qq; + var wechat = (user.wechat.length === 0) ? not_set : user.wechat; + var desc = (user.desc.length === 0) ? not_set : user.desc; + info.push('账号:' + user.username + ''); + info.push('姓名:' + user.surname + ''); + info.push('邮箱:' + user.email + ''); + info.push('电话:' + mobile + ''); + info.push('QQ:' + qq + ''); + info.push('微信:' + wechat + ''); + info.push('描述:
' + desc + '
'); + + dlg.dom.info.html($(info.join(''))); + + dlg.dom.dialog.modal(); + }; + + return dlg; +}; + +$app.create_dlg_accounts = function () { + var dlg = {}; + dlg.dom_id = 'dlg-accounts'; + dlg.host_row_id = -1; + dlg.host = null; + dlg.accounts = []; + // dlg.row_id = -1; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + dlg_title: $('#' + dlg.dom_id + ' [data-field="dlg-title"]'), + // info: $('#' + dlg.dom_id + ' [data-field="user-info"]'), + btn_add: $('#' + dlg.dom_id + ' [data-btn="btn-add-account"]'), + acc_list: $('#' + dlg.dom_id + ' [data-field="account-list"]') + }; + + dlg.init = function (cb_stack) { + // dlg.dom.dialog.on('hidden.bs.modal', function () { + // if (!dlg.show_edit_account) + // return; + // $app.dlg_edit_account.show_edit(dlg.row_id); + // }); + + dlg.dom.btn_add.click(function () { + // dlg.show_edit_account = true; + $app.dlg_edit_account.show_add(dlg.host_row_id); + }); + + cb_stack.exec(); + }; + + dlg.show = function (host_row_id) { + dlg.dom.acc_list.empty().html(' 正在加载...'); + dlg.host_row_id = host_row_id; + dlg.host = $app.table_host.get_row(host_row_id); + dlg.dom.dialog.modal(); + dlg.load_accounts(); + }; + + dlg.load_accounts = function () { + $tp.ajax_post_json('/asset/get-accounts', { + host_id: dlg.host.id + }, + function (ret) { + if (ret.code === TPE_OK) { + console.log('account:', ret.data); + dlg.accounts = ret.data; + } else { + // $tp.notify_error('远程账号' + action + '失败:' + tp_error_msg(ret.code, ret.message)); + console.error('failed.', tp_error_msg(ret.code, ret.message)); + dlg.accounts = []; + } + dlg.show_account_list(); + }, + function () { + $tp.notify_error('网络故障,获取账号信息失败!'); + } + ); + }; + + dlg.show_account_list = function () { + var html = []; + if (dlg.accounts.length === 0) { + dlg.dom.acc_list.empty(); + return; + } + + for (var i = 0; i < dlg.accounts.length; ++i) { + var acc = dlg.accounts[i]; + var pro_name = '未知'; + if (acc.protocol_type === TP_PROTOCOL_TYPE_RDP) { + pro_name = 'RDP'; + } else if (acc.protocol_type === TP_PROTOCOL_TYPE_SSH) { + pro_name = 'SSH'; + } else if (acc.protocol_type === TP_PROTOCOL_TYPE_TELNET) { + pro_name = 'TELNET'; + } + var auth_name = "未知"; + if (acc.auth_type === TP_AUTH_TYPE_NONE) { + auth_name = '无'; + } else if (acc.auth_type === TP_AUTH_TYPE_PASSWORD) { + auth_name = '密码'; + } else if (acc.auth_type === TP_AUTH_TYPE_PRIVATE_KEY) { + auth_name = '私钥'; + } + + html.push('
    '); + html.push('
  • ' + acc.username + '
  • '); + html.push('
  • ' + pro_name + '
  • '); + html.push('
  • ' + auth_name + '
  • '); + html.push('
  • '); + html.push(''); + html.push('
  • '); + + if (acc.state === TP_STATE_NORMAL) { + html.push('
  • '); + html.push(''); + html.push('
  • '); + } else { + html.push('
  • '); + html.push(''); + html.push('
  • '); + } + + html.push('
  • '); + html.push(''); + html.push('
  • '); + html.push('
'); + } + dlg.dom.acc_list.empty().append($(html.join(''))); + + // 绑定账号操作按钮点击事件 + $('#' + dlg.dom_id + ' [data-action="modify-account"]').click(function () { + var acc_id = parseInt($(this).attr('data-id')); + for (var i = 0; i < dlg.accounts.length; ++i) { + if (dlg.accounts[i].id === acc_id) { + $app.dlg_edit_account.show_edit(dlg.host_row_id, dlg.accounts[i]); + return; + } + } + }); + + // 删除账号 + $('#' + dlg.dom_id + ' [data-action="delete-account"]').click(function () { + var acc_id = parseInt($(this).attr('data-id')); + + var _fn_sure = function (cb_stack, cb_args) { + // $tp.ajax_post_json('/asset/remove-account', {host_id: dlg.host.id, acc_id: acc_id}, + $tp.ajax_post_json('/asset/update-account', {action: 'remove', host_id: dlg.host.id, acc_id: acc_id}, + function (ret) { + if (ret.code === TPE_OK) { + // cb_stack.add($app.check_user_list_all_selected); + // cb_stack.add($app.table_user_list.load_data); + $tp.notify_success('删除账号操作成功!'); + + var update_args = { + acc_count: dlg.host.acc_count - 1 + }; + $app.table_host.update_row(dlg.host_row_id, update_args); + + dlg.load_accounts(); + } else { + $tp.notify_error('删除账号操作失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,删除用户账号操作失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + $tp.dlg_confirm(cb_stack, { + msg: '

注意:删除操作不可恢复!!

如果您只是想临时禁止以此账号登录远程主机,可以禁用此账号。

您确定要删除此远程账号吗?

', + fn_yes: _fn_sure + }); + + }); + + // 禁用账号 + $('#' + dlg.dom_id + ' [data-action="lock-account"]').click(function () { + var acc_id = parseInt($(this).attr('data-id')); + + $tp.ajax_post_json('/asset/update-account', {action: 'lock', host_id: dlg.host.id, acc_id: acc_id}, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('远程账号已禁用!'); + dlg.load_accounts(); + } else { + $tp.notify_error('禁用远程账号操作失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,禁用远程账号操作失败!'); + } + ); + }); + + // 解禁账号 + $('#' + dlg.dom_id + ' [data-action="unlock-account"]').click(function () { + var acc_id = parseInt($(this).attr('data-id')); + + $tp.ajax_post_json('/asset/update-account', {action: 'unlock', host_id: dlg.host.id, acc_id: acc_id}, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('远程账号解禁成功!'); + dlg.load_accounts(); + } else { + $tp.notify_error('远程账号解禁失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,远程账号解禁失败!'); + } + ); + }); + }; + + return dlg; +}; + +$app.create_dlg_edit_account = function () { + var dlg = {}; + dlg.dom_id = 'dlg-edit-account'; + dlg.host_row_id = -1; + dlg.host = null; + dlg.account = null; + dlg.field_id = -1; // 账户id(仅编辑模式) + dlg.field_protocol = -1; + dlg.field_auth = -1; + dlg.field_username = ''; + dlg.field_password = ''; + dlg.field_pri_key = ''; + dlg.protocol_sub_type = 0; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + dlg_title: $('#' + dlg.dom_id + ' [data-field="dlg-title"]'), + protocol_type: $('#account-protocol-type'), + protocol_port: $('#account-protocol-port'), + auth_type: $('#account-auth-type'), + username: $('#account-username'), + password: $('#account-password'), + ssh_prikey: $('#account-ssh-pri'), + block_ssh_param: $('#block-ssh-param'), + block_rdp_param: $('#block-rdp-param'), + block_prompt: $('#block-prompt'), + block_username: $('#block-username'), + block_password: $('#block-password'), + block_sshkey: $('#block-sshkey'), + // btn_allow_ssh: $('#btn-allow-ssh'), + // btn_allow_sftp: $('#btn-allow-sftp'), + // btn_allow_rdp_desktop: $('#btn-allow-rdp-desktop'), + // btn_allow_rdp_clipboard: $('#btn-allow-rdp-clipboard'), + // btn_allow_rdp_driver: $('#btn-allow-rdp-driver-map'), + // btn_allow_rdp_console: $('#btn-allow-rdp-console'), + prompt_username: $('#account-username-prompt'), + prompt_password: $('#account-password-prompt'), + btn_show_password: $('#btn-show-account-password'), + btn_show_password_icon: $('#btn-show-account-password i'), + btn_test: $('#btn-edit-account-test'), + btn_save: $('#btn-edit-account-save') + }; + + dlg.init = function (cb_stack) { + dlg.dom.protocol_type.change(dlg.on_protocol_change); + dlg.dom.auth_type.change(dlg.on_auth_change); + + dlg.dom.btn_save.click(dlg.on_save); + dlg.dom.btn_test.click(dlg.on_test); + + dlg.dom.btn_show_password.click(function () { + if ('password' === dlg.dom.password.attr('type')) { + dlg.dom.password.attr('type', 'text'); + dlg.dom.btn_show_password_icon.removeClass('fa-eye').addClass('fa-eye-slash') + } else { + dlg.dom.password.attr('type', 'password'); + dlg.dom.btn_show_password_icon.removeClass('fa-eye-slash').addClass('fa-eye') + } + }); + + cb_stack.exec(); + }; + + dlg.init_fields = function (account) { + dlg.dom.password.val(''); + dlg.dom.ssh_prikey.val(''); + + if (_.isUndefined(account)) { + dlg.account = null; + dlg.field_id = -1; + dlg.dom.dlg_title.html('添加远程账号'); + + if (dlg.host.os_type === TP_OS_TYPE_LINUX) { + 
dlg.dom.protocol_type.val(TP_PROTOCOL_TYPE_SSH); + } else if (dlg.host.os_type === TP_OS_TYPE_WINDOWS) { + dlg.dom.protocol_type.val(TP_PROTOCOL_TYPE_RDP); + } else { + + } + + dlg.dom.username.val(''); + + } else { + dlg.account = account; + dlg.field_id = account.id; + dlg.dom.dlg_title.html('编辑:' + account.username); + + dlg.dom.username.val(account.username); + + dlg.dom.protocol_type.val(account.protocol_type); + dlg.dom.protocol_port.val(account.protocol_port); + } + + if (dlg.host.router_ip.length === 0) { + dlg.dom.protocol_port.removeAttr('disabled'); + } else { + dlg.dom.protocol_port.val('端口映射:' + dlg.host.router_ip + ':' + dlg.host.router_port).attr('disabled', 'disabled'); + } + + dlg.on_protocol_change(); + }; + + dlg.on_protocol_change = function () { + dlg.field_protocol = parseInt(dlg.dom.protocol_type.val()); + + var html = []; + if (dlg.field_protocol === TP_PROTOCOL_TYPE_RDP) { + // $('#dlg-edit-host-protocol-port').val('3389'); + dlg.dom.block_rdp_param.show(); + dlg.dom.block_ssh_param.hide(); + dlg.dom.block_prompt.hide(); + // dlg.dom.block_sshkey.hide(); + // dlg.dom.block_password.show(); + // dlg.dom.block_username.show(); + + html.push(''); + + if (dlg.host.router_ip.length === 0) + dlg.dom.protocol_port.val(3389); + + dlg.protocol_sub_type = TP_PROTOCOL_TYPE_RDP_DESKTOP; + } else if (dlg.field_protocol === TP_PROTOCOL_TYPE_SSH) { + // $('#dlg-edit-host-protocol-port').val('22'); + dlg.dom.block_rdp_param.hide(); + dlg.dom.block_ssh_param.show(); + dlg.dom.block_prompt.hide(); + // dlg.dom.block_sshkey.hide(); + // dlg.dom.block_password.show(); + // dlg.dom.block_username.show(); + + html.push(''); + html.push(''); + + if (dlg.host.router_ip.length === 0) + dlg.dom.protocol_port.val(22); + + dlg.protocol_sub_type = TP_PROTOCOL_TYPE_SSH_SHELL; + } else if (dlg.field_protocol === TP_PROTOCOL_TYPE_TELNET) { + dlg.dom.block_rdp_param.hide(); + dlg.dom.block_ssh_param.hide(); + dlg.dom.block_prompt.show(); + + html.push(''); + html.push(''); + + if (dlg.host.router_ip.length === 0) + dlg.dom.protocol_port.val(23); + + dlg.protocol_sub_type = TP_PROTOCOL_TYPE_TELNET_SHELL; + } else { + dlg.dom.protocol_port.val(''); + } + + dlg.dom.auth_type.empty().append($(html.join(''))); + dlg.dom.auth_type.val(dlg.account.auth_type); + dlg.on_auth_change(); + }; + + dlg.on_auth_change = function () { + dlg.field_auth = parseInt(dlg.dom.auth_type.val()); + if (dlg.field_auth === TP_AUTH_TYPE_PASSWORD) { + dlg.dom.block_password.show(); + dlg.dom.block_sshkey.hide(); + } else if (dlg.field_auth === TP_AUTH_TYPE_PRIVATE_KEY) { + dlg.dom.block_password.hide(); + dlg.dom.block_sshkey.show(); + } else if (dlg.field_auth === TP_AUTH_TYPE_NONE) { + dlg.dom.block_password.hide(); + dlg.dom.block_sshkey.hide(); + } + }; + + dlg.show_add = function (host_row_id) { + dlg.host_row_id = host_row_id; + dlg.host = $app.table_host.get_row(host_row_id); + dlg.init_fields(); + dlg.show(); + }; + + dlg.show_edit = function (host_row_id, account) { + dlg.host_row_id = host_row_id; + dlg.host = $app.table_host.get_row(host_row_id); + dlg.init_fields(account); + dlg.show(); + }; + + dlg.show = function () { + if ($(document.body).find('.modal-backdrop').length > 0) + dlg.dom.dialog.modal({backdrop: false}); + else + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.check_input = function () { + dlg.field_protocol = parseInt(dlg.dom.protocol_type.val()); + dlg.field_port = 0; + dlg.field_auth_type = parseInt(dlg.dom.auth_type.val()); + dlg.field_username = dlg.dom.username.val(); + 
dlg.field_password = dlg.dom.password.val(); + dlg.field_pri_key = dlg.dom.ssh_prikey.val(); + + if (dlg.host.router_ip.length === 0) { + if (dlg.dom.protocol_port.val().length === 0) { + $tp.notify_error('请设定远程访问的端口号!'); + dlg.dom.protocol_port.focus(); + return false; + } + + dlg.field_port = parseInt(dlg.dom.protocol_port.val()); + + if (_.isNaN(dlg.field_port) || dlg.field_port <= 0 || dlg.field_port > 65535) { + dlg.dom.protocol_port.focus(); + $tp.notify_error('端口有误!'); + return false; + } else { + dlg.dom.protocol_port.val('' + dlg.field_port); + } + } + + if (dlg.field_username.length === 0) { + dlg.dom.username.focus(); + $tp.notify_error('请填写登录远程主机的账号名称!'); + return false; + } + + if (dlg.field_auth_type === TP_AUTH_TYPE_PASSWORD) { + if (dlg.field_id === -1 && dlg.field_password.length === 0) { + dlg.dom.password.focus(); + $tp.notify_error('请填写登录远程主机的密码!'); + return false; + } + } else if (dlg.field_auth_type === TP_AUTH_TYPE_PRIVATE_KEY) { + if (dlg.field_id === -1 && dlg.field_pri_key.length === 0) { + dlg.dom.ssh_prikey.focus(); + $tp.notify_error('请填写登录远程主机的SSH私钥!'); + return false; + } + } + + return true; + }; + + dlg.on_save = function () { + if (!dlg.check_input()) + return; + + var action = (dlg.field_id === -1) ? '添加' : '更新'; + + // 如果id为-1表示创建,否则表示更新 + $tp.ajax_post_json('/asset/update-account', { + action: 'update', + host_id: dlg.host.id, + acc_id: dlg.field_id, + param: { + host_ip: dlg.host.ip, + router_ip: dlg.host.router_ip, + router_port: dlg.host.router_port, + protocol: dlg.field_protocol, + port: dlg.field_port, + auth_type: dlg.field_auth_type, + username: dlg.field_username, + password: dlg.field_password, + pri_key: dlg.field_pri_key + } + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('远程账号' + action + '成功!'); + + if (dlg.field_id === -1) { + // 新建账号成功了,更新界面上对应主机的账号数 + var update_args = { + acc_count: dlg.host.acc_count + 1 + }; + $app.table_host.update_row(dlg.host_row_id, update_args); + } + + // 更新上一级对话框中的数据 + $app.dlg_accounts.load_accounts(); + + dlg.dom.dialog.modal('hide'); + } else { + $tp.notify_error('远程账号' + action + '失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,远程账号' + action + '失败!'); + } + ); + }; + + dlg.on_test = function () { + if (!dlg.check_input()) + return; + + $assist.do_teleport( + { + acc_id: dlg.field_id, + host_id: dlg.host.id, + protocol_type: dlg.field_protocol, + protocol_sub_type: dlg.protocol_sub_type, + protocol_port: dlg.field_port, + auth_type: dlg.field_auth_type, + username: dlg.field_username, + password: dlg.field_password, + pri_key: dlg.field_pri_key + }, + function () { + // func_success + //$tp.notify_success('远程连接测试通过!'); + }, + function (code, message) { + if (code === TPE_NO_ASSIST) + $assist.alert_assist_not_found(); + else + $tp.notify_error('远程连接失败:' + tp_error_msg(code, message)); + } + ); + }; + + return dlg; +}; diff --git a/server/www/teleport/static/js/audit/record-list.js b/server/www/teleport/static/js/audit/record-list.js new file mode 100644 index 0000000..fb6b7e9 --- /dev/null +++ b/server/www/teleport/static/js/audit/record-list.js @@ -0,0 +1,427 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_record: $('#btn-refresh-record') + }; + + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + 
//------------------------------- + // 资产列表表格 + //------------------------------- + var table_record_options = { + dom_id: 'table-record', + data_source: { + type: 'ajax-post', + url: '/audit/get-records', + //exclude: {'state': [TP_SESS_STAT_RUNNING, TP_SESS_STAT_STARTED]} + }, + column_default: {sort: false, align: 'left'}, + columns: [ + // { + // // title: '', + // title: '', + // key: 'chkbox', + // sort: false, + // width: 36, + // align: 'center', + // render: 'make_check_box', + // fields: {id: 'id'} + // }, + { + title: 'ID', + key: 'id', + sort: true, + sort_asc: false, + fields: {id: 'id'} + }, + { + title: '用户', + key: 'user', + //sort: true, + //header_render: 'filter_search_host', + render: 'user', + fields: {user_username: 'user_username', user_surname: 'user_surname'} + }, + { + title: '来源', + key: 'client_ip', + //sort: true, + //header_render: 'filter_search_host', + //render: 'host_info', + fields: {client_ip: 'client_ip'} + }, + { + title: '远程连接', + key: 'remote', + //sort: true, + //header_render: 'filter_search_host', + render: 'remote', + fields: {acc_username: 'acc_username', host_ip: 'host_ip', conn_ip: 'conn_ip', conn_port: 'conn_port'} + }, + { + title: '远程协议', + key: 'protocol_type', + align: 'center', + width: 80, + // align: 'center', + // width: 36, + //sort: true + // header_render: 'filter_os', + render: 'protocol', + fields: {protocol_type: 'protocol_type', protocol_sub_type: 'protocol_sub_type'} + }, + { + title: '开始时间', + key: 'time_begin', + sort: true, + sort_asc: false, + render: 'time_begin', + fields: {time_begin: 'time_begin'} + }, + { + title: '耗时', + key: 'time_cost', + render: 'time_cost', + fields: {time_begin: 'time_begin', time_end: 'time_end', state: 'state'} + }, + { + title: "状态", + key: "state", + //sort: true, + width: 90, + align: 'center', + //header_render: 'filter_host_state', + render: 'state', + fields: {state: 'state'} + }, + { + title: '', + key: 'action', + //sort: false, + //align: 'center', + width: 160, + render: 'record_action', + fields: {id: 'id', state: 'state', time_end: 'time_end', protocol_sub_type: 'protocol_sub_type'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_host_header_created, + on_render_created: $app.on_table_host_render_created, + on_cell_created: $app.on_table_host_cell_created + }; + + $app.table_record = $tp.create_table(table_record_options); + cb_stack + .add($app.table_record.load_data) + .add($app.table_record.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_record, { + name: 'search', + place_holder: '搜索:主机IP/名称/描述/资产编号/等等...' 
+ }); + // $app.table_record_role_filter = $tp.create_table_filter_role($app.table_record, $app.role_list); + // $tp.create_table_header_filter_state($app.table_record, 'state', $app.obj_states, [TP_STATE_LOCKED]); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_record, 'table-record-paging', + { + per_page: Cookies.get($app.page_id('audit_record') + '_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('audit_record') + '_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_record, 'table-record-pagination'); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_refresh_record.click(function () { + $app.table_record.load_data(); + }); + // $app.dom.chkbox_host_select_all.click(function () { + // var _objects = $('#' + $app.table_record.dom_id + ' tbody').find('[data-check-box]'); + // if ($(this).is(':checked')) { + // $.each(_objects, function (i, _obj) { + // $(_obj).prop('checked', true); + // }); + // } else { + // $.each(_objects, function (i, _obj) { + // $(_obj).prop('checked', false); + // }); + // } + // }); + //$app.dom.btn_remove_record.click($app.on_btn_remove_record_click); + + cb_stack.exec(); +}; + +$app.on_table_host_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_host_all_selected(); + }); + } else if (col_key === 'action') { + // 绑定系统选择框事件 + cell_obj.find('[data-action]').click(function () { + + var row_data = tbl.get_row(row_id); + console.log('---', row_data); + var action = $(this).attr('data-action'); + + if (action === 'replay') { + //$app.dlg_edit_host.show_edit(row_id); + if(row_data.protocol_type === TP_PROTOCOL_TYPE_RDP) { + $tp.notify_error('sorry, not impl.'); + } else if(row_data.protocol_type === TP_PROTOCOL_TYPE_SSH) { + window.open('/audit/replay/' + row_data.protocol_type + '/' + row_data.id); + } + } else if (action === 'cmd') { + //$app.dlg_accounts.show(row_id); + window.open('/audit/command-log/' + row_data.protocol_type + '/' + row_data.id); + } + }); + } else if (col_key === 'ip') { + cell_obj.find('[data-toggle="popover"]').popover({trigger: 'hover'}); + // } else if (col_key === 'account') { + // cell_obj.find('[data-action="add-account"]').click(function () { + // $app.dlg_accounts.show(row_id); + // }); + } else if (col_key === 'account_count') { + cell_obj.find('[data-action="edit-account"]').click(function () { + $app.dlg_accounts.show(row_id); + }); + } +}; + +$app.on_table_host_render_created = function (render) { + render.filter_host_state = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('state'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.filter_search_host = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.user = function (row_id, fields) { + if (_.isNull(fields.user_surname) || fields.user_surname.length === 0 || fields.user_username === fields.user_surname) { + return fields.user_username; + } else { + return fields.user_username + ' (' + fields.user_surname + ')'; + } + }; + + render.remote = function (row_id, fields) { + if (fields.host_ip === fields.conn_ip) + return fields.acc_username + '@' + fields.host_ip + ':' + fields.conn_port; + else + return '
' + fields.acc_username + '@' + fields.host_ip + '
'; + }; + + // fields: {protocol_type: 'protocol_type', protocol_sub_type: 'protocol_sub_type'} + render.protocol = function (row_id, fields) { + switch (fields.protocol_sub_type) { + case 100: + return 'RDP'; + case 200: + return 'SSH'; + case 201: + return 'SFTP'; + case 300: + return 'TELNET'; + default: + return '未知'; + } + }; + + render.time_begin = function (row_id, fields) { + // return tp_format_datetime(tp_utc2local(fields.time_begin), 'MM-dd HH:mm:ss'); + return tp_format_datetime(tp_utc2local(fields.time_begin), 'MM-dd HH:mm:ss'); + }; + + render.time_cost = function (row_id, fields) { + if (fields.state === TP_SESS_STAT_RUNNING || fields.state === TP_SESS_STAT_STARTED) { + var _style = 'info'; + if (fields.state === TP_SESS_STAT_RUNNING) + _style = 'warning'; + else if (fields.state === TP_SESS_STAT_STARTED) + _style = 'primary'; + return ' ' + tp_second2str(tp_local2utc() - fields.time_begin) + ''; + } else { + if (fields.time_end === 0) + return ' 未知'; + else + return tp_second2str(fields.time_end - fields.time_begin); + } + + // if (fields.time_end === 0) { + // var _style = 'info'; + // if (fields.state === TP_SESS_STAT_RUNNING) + // _style = 'warning'; + // else if (fields.state === TP_SESS_STAT_STARTED) + // _style = 'primary'; + // return ' ' + tp_second2str(tp_local2utc() - fields.time_begin) + ''; + // } else { + // return tp_second2str(fields.time_end - fields.time_begin); + // } + }; + + render.state = function (row_id, fields) { + var msg = ''; + switch (fields.state) { + case TP_SESS_STAT_RUNNING: + return '正在连接'; + case TP_SESS_STAT_STARTED: + return '使用中'; + case TP_SESS_STAT_END: + return '已结束'; + case TP_SESS_STAT_ERR_AUTH_DENIED: + msg = '认证失败'; + break; + case TP_SESS_STAT_ERR_CONNECT: + msg = '连接失败'; + break; + case TP_SESS_STAT_ERR_BAD_SSH_KEY: + msg = '私钥错误'; + break; + case TP_SESS_STAT_ERR_START_INTERNAL: + case TP_SESS_STAT_ERR_INTERNAL: + msg = '内部错误'; + break; + case TP_SESS_STAT_ERR_UNSUPPORT_PROTOCOL: + msg = '协议不支持'; + break; + case TP_SESS_STAT_ERR_BAD_PKG: + case TP_SESS_STAT_ERR_START_BAD_PKG: + msg = '数据格式错误'; + break; + case TP_SESS_STAT_ERR_RESET: + case TP_SESS_STAT_ERR_START_RESET: + msg = '核心服务重置'; + break; + case TP_SESS_STAT_ERR_IO: + case TP_SESS_STAT_ERR_START_IO: + msg = '网络通讯故障'; + break; + case TP_SESS_STAT_ERR_SESSION: + msg = '无效会话'; + break; + default: + msg = '未知状态 [' + fields.state + ']'; + } + + return '' + msg + ''; + }; + + render.record_action = function (row_id, fields) { + var ret = []; + + if (fields.state >= TP_SESS_STAT_STARTED || fields.state === TP_SESS_STAT_ERR_RESET) { + //if (fields.time_end === 0) { + if (fields.state === TP_SESS_STAT_STARTED) { + ret.push(' 同步 '); + } else { + ret.push(' 播放 '); + } + if (fields.protocol_sub_type !== TP_PROTOCOL_TYPE_RDP_DESKTOP) { + ret.push(' 日志 '); + } + } + + return ret.join(''); + }; +}; + +$app.on_table_host_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); + // header._table_ctrl.get_filter_ctrl('role').on_created(); + // header._table_ctrl.get_filter_ctrl('state').on_created(); +}; + +$app.get_selected_record = function (tbl) { + var records = []; + var _objs = $('#' + $app.table_record.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if 
($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + records.push(_row_data.id); + } + }); + return records; +}; + +$app.on_btn_remove_record_click = function () { + // var records = $app.get_selected_record($app.table_record); + // if (records.length === 0) { + // $tp.notify_error('请选择要删除的会话记录!'); + // return; + // } + // + // var _fn_sure = function (cb_stack, cb_args) { + // $tp.ajax_post_json('/user/remove-user', {users: users}, + // function (ret) { + // if (ret.code === TPE_OK) { + // cb_stack.add($app.check_host_all_selected); + // cb_stack.add($app.table_record.load_data); + // $tp.notify_success('删除用户账号操作成功!'); + // } else { + // $tp.notify_error('删除用户账号操作失败:' + tp_error_msg(ret.code, ret.message)); + // } + // + // cb_stack.exec(); + // }, + // function () { + // $tp.notify_error('网络故障,删除用户账号操作失败!'); + // cb_stack.exec(); + // } + // ); + // }; + // + // var cb_stack = CALLBACK_STACK.create(); + // $tp.dlg_confirm(cb_stack, { + // msg: '

注意:删除操作不可恢复!!删除用户账号将同时将其从所在用户组中移除,并且删除所有分配给此用户的授权!如果您希望禁止某个用户登录本系统,可对其进行“禁用”操作!您确定要移除所有选定的 ' + user_list.length + '个 用户账号吗?
', + // fn_yes: _fn_sure + // }); +}; diff --git a/server/www/teleport/static/js/audit/replay.js b/server/www/teleport/static/js/audit/replay.js new file mode 100644 index 0000000..635ea02 --- /dev/null +++ b/server/www/teleport/static/js/audit/replay.js @@ -0,0 +1,300 @@ +/** + * Created by mi on 2016/7/27. + * Upgrade for new record-format by Apex on 2017-01-08 + */ + +"use strict"; + +var g_header = null; +var g_data = []; + +var g_data_offset = 0; + +var g_played_pkg_count = 0; + +var g_timer = null; + +var g_playing = false; +var g_need_stop = false; +var g_skip = true; +var g_console_term = null; +var g_current_time; +var g_finish = false; + +var g_record_tick = 50; + + +var speed_table = [ + {speed: 1, name: '正常速度'}, + {speed: 2, name: '快进 x2'}, + {speed: 4, name: '快进 x4'}, + {speed: 8, name: '快进 x8'}, + {speed: 16, name: '快进 x16'} +]; +var speed_offset = 0; + +$app.req_record_data = function (record_id, offset) { + $tp.ajax_post_json('/audit/get-record-data', {id: record_id, offset: offset}, + function (ret) { + if (ret.code === TPE_OK) { + console.log('data', ret.data); + g_data = g_data.concat(ret.data.data_list); + g_data_offset += ret.data.data_size; + + if (g_data.length < g_header.pkg_count) { + $app.req_record_data(record_id, g_data_offset); + } + // else if(g_header.pkg_count < g_data.length) { + // g_header.pkg_count = g_data.length; + // } + } else { + console.log('req_record_info error ', ret.code); + } + }, + function () { + console.log('req_record_info error'); + }, + 30 * 1000 + ); +}; + +$app.on_init = function (cb_stack, cb_args) { + var record_id = $app.options.record_id; + + $app.dom = { + time: $('#play-time'), + btn_play: $('#btn-play'), + btn_speed: $('#btn-speed'), + btn_skip: $('#btn-skip'), + btn_restart: $('#btn-restart'), + btn_big_font: $('#btn-big-font'), + btn_small_font: $('#btn-small-font'), + progress: $('#progress'), + status: $('#play-status'), + xterm_box: $('#xterm-box') + }; + + $app.dom.progress.width($('#toolbar').width()).val(0); + + Terminal.cursorBlink = false; + + $tp.ajax_post_json('/audit/get-record-header', {id: record_id}, + function (ret) { + if (ret.code === TPE_OK) { + g_header = ret.data; + console.log('header', g_header); + + $('#recorder-info').html(tp_format_datetime(g_header.start) + ': ' + g_header.user_name + '@' + g_header.client_ip + ' 访问 ' + g_header.account + '@' + g_header.conn_ip + ':' + g_header.conn_port); + + $app.req_record_data(record_id, 0); + + setTimeout(init, 1000); + } else { + $tp.notify_error('请求录像数据失败'); + console.log('load init info error ', ret.code); + } + }, + function () { + $tp.notify_error('网络通讯失败'); + } + ); + + $app.dom.btn_big_font.click(function () { + var obj = $('.terminal'); + obj.css('font-size', parseInt(obj.css('font-size')) + 2); + }); + $app.dom.btn_small_font.click(function () { + var obj = $('.terminal'); + obj.css('font-size', parseInt(obj.css('font-size')) - 2); + }); + + $app.dom.btn_play.click(function () { + if (g_playing) + pause(); + else + play(); + }); + + $app.dom.btn_skip.click(function () { + var obj = $('#btn-skip i'); + if (g_skip) { + g_skip = false; + obj.removeClass('fa-check-square-o').addClass('fa-square-o'); + } else { + g_skip = true; + obj.removeClass('fa-square-o').addClass('fa-check-square-o'); + } + + console.log('skip:', g_skip); + }); + + $app.dom.btn_restart.click(function () { + restart(); + }); + + speed_offset = 0; + $app.dom.btn_speed.text(speed_table[speed_offset].name); + + $app.dom.btn_speed.click(function () { + var length = speed_table.length; + 
speed_offset += 1; + if (speed_offset === length) { + speed_offset = 0; + } + $app.dom.btn_speed.text(speed_table[speed_offset].name); + }); + +// $app.dom.progress.change(function () { +// var process = g_dom_progress.val(); +// console.log('change.' + process); +// //var beginTime = parseInt(g_header.time_used * process / 100); +// speed_offset = 0; +// g_dom_btn_speed.text(speed_table[speed_offset].name); +// }); + + function init() { + if (_.isNull(g_console_term)) { + g_console_term = new Terminal({ + cols: g_header.width, + rows: g_header.height + }); + g_console_term.open(document.getElementById('xterm-box'), false); + + // g_console_term.on('resize', function (obj, x, y) { + // var y = window.getComputedStyle($('#xterm-box .terminal .xterm-rows')[0]); + // var w = parseInt(y.width); + // + // // $('#xterm-box .terminal .xterm-viewport').width(w+17); + // + // $app.dom.xterm_box.width(w + 17); + // // $app.dom.progress.width(w).val(g_process); + // }); + + } else { + g_console_term.reset(g_header.width, g_header.height); + // g_console_term.setOption('scrollback', g_header.height); + } + + $app.dom.progress.val(0); + $app.dom.status.text("正在播放"); + $app.dom.btn_play.children().removeClass().addClass('fa fa-pause').text(' 暂停'); + + g_need_stop = false; + g_playing = true; + g_finish = false; + g_current_time = 0; + g_played_pkg_count = 0; + setTimeout(done, g_record_tick); + } + + function done() { + if (g_need_stop) { + g_playing = false; + return; + } + + if (g_data.length <= g_played_pkg_count) { + $app.dom.status.text("正在缓存数据..."); + g_timer = setTimeout(done, g_record_tick); + return; + } + + $app.dom.status.text("正在播放"); + g_current_time += g_record_tick * speed_table[speed_offset].speed; + for (var i = g_played_pkg_count; i < g_data.length; i++) { + var play_data = g_data[i]; + + if (g_skip && play_data.a === 1) { + g_console_term.resize(play_data.w, play_data.h); + // g_console_term.setOption('scrollback', play_data.h); + + g_played_pkg_count++; + continue; + } + + console.log(play_data.t, g_current_time); + if (play_data.t < g_current_time) { + if(play_data.a === 1) { + g_console_term.resize(play_data.w, play_data.h); + // g_console_term.setOption('scrollback', play_data.h); + } else if (play_data.a === 2) { + g_console_term.write(play_data.d); + } + else { + g_console_term.write(tp_base64_decode(play_data.d)); + } + + if ((g_played_pkg_count + 1) === g_header.pkg_count) { + $app.dom.progress.val(100); + $app.dom.status.text('播放完成'); + $app.dom.time.text(parseInt(g_header.time_used / 1000) + '秒'); + g_finish = true; + g_playing = false; + $app.dom.btn_play.children().removeClass().addClass('fa fa-play').text(' 播放'); + + return; + } else { + g_played_pkg_count++; + } + + } else { + break; + } + } + if (g_skip) { + if (play_data.t - g_current_time > 500) { + g_current_time = play_data.t; // - g_record_tick * speed_table[speed_offset].speed; + } + } + + // sync progress bar. 
+ var _progress = parseInt((g_current_time) * 100 / g_header.time_used); + $app.dom.progress.val(_progress); + var temp = parseInt((g_current_time) / 1000); + $app.dom.time.text(temp + '/' + parseInt(g_header.time_used / 1000) + '秒'); + + // if all packages played + if (g_played_pkg_count >= g_header.pkg_count) { + $app.dom.progress.val(100); + $app.dom.status.text('播放完成'); + $app.dom.time.text(parseInt(g_header.time_used / 1000) + '秒'); + g_finish = true; + g_playing = false; + $app.dom.btn_play.children().removeClass().addClass('fa fa-play').text(' 播放'); + } else { + g_timer = setTimeout(done, g_record_tick); + } + } + + function play() { + if (g_playing) { + return; + } + + if (g_finish) { + restart(); + return; + } + + $app.dom.btn_play.children().removeClass().addClass('fa fa-pause').text(' 暂停'); + + g_need_stop = false; + g_playing = true; + g_timer = setTimeout(done, g_record_tick); + } + + function pause() { + $app.dom.btn_play.children().removeClass().addClass('fa fa-play').text(' 播放'); + g_need_stop = true; + g_playing = false; + $app.dom.status.text("已暂停"); + } + + function restart() { + if(!_.isNull(g_timer)) + clearTimeout(g_timer); + init(); + } + + cb_stack.exec(); +}; diff --git a/server/www/teleport/static/js/auth/login.js b/server/www/teleport/static/js/auth/login.js new file mode 100644 index 0000000..037fefc --- /dev/null +++ b/server/www/teleport/static/js/auth/login.js @@ -0,0 +1,237 @@ +"use strict"; + +var BLUR_BG_COUNT = 8; +var SLOGAN = [ + '我感谢那段时光,
因为不曾把我打倒的,最终让我变得更加强大!', +    '宁愿在做事中犯错,也不要为了不犯错而什么都不做。', +    '从出生到死,只有900个月,所以虚耗每一分钟,都是巨大的浪费!', +    '没有播种,何来收获;没有辛劳,何来成功;没有磨难,何来荣耀;没有挫折,何来辉煌。', +    '宝剑锋从磨砺出,梅花香自苦寒来。', +    '不登高山,不知天之高也;不临深溪,不知地之厚也。', +    '追求进步,
不求完美。' +]; + + +// $app.on_init = function (cb_stack, cb_args) { +$app.on_init = function (cb_stack) { + $app.login_type = LOGIN_TYPE_PASSWORD_CAPTCHA; + $app.dom = { + slogan: $('#msg-slogan'), + btn_login_type_password: $('#login-type-password'), + btn_login_type_oath: $('#login-type-oath'), + area_captcha: $('#login-area-captcha'), + area_oath: $('#login-area-oath'), + captcha_image: $('#captcha-image'), + + input_username: $('#login-area-username [data-field="username"]'), + input_password: $('#login-area-username [data-field="password"]'), + input_captcha: $('#captcha'), + input_oath: $('#oath-code'), + + remember: $('#remember-me'), + btn_login: $('#btn-login'), + + message: $('#message') + }; + + console.log($app.options); + if ($app.options.username.length > 0) { + $app.dom.input_username.val($app.options.username); + } + + $app.dom.captcha_image.attr('src', '/auth/captcha?' + Math.random()); + + window.onresize = $app.on_screen_resize; + $app.init_blur_bg(); + $app.init_slogan(); + + $app.dom.btn_login_type_password.click(function () { + $app.login_type = LOGIN_TYPE_PASSWORD_CAPTCHA; + $app.dom.btn_login_type_oath.removeClass('selected'); + $(this).addClass('selected'); + $app.dom.area_oath.slideUp(100); + $app.dom.area_captcha.slideDown(100); + }); + $app.dom.btn_login_type_oath.click(function () { + $app.login_type = LOGIN_TYPE_PASSWORD_OATH; + $app.dom.btn_login_type_password.removeClass('selected'); + $(this).addClass('selected'); + $app.dom.area_oath.slideDown(100); + $app.dom.area_captcha.slideUp(100); + }); + + + $app.dom.btn_login.click($app.login_account); + + $app.dom.captcha_image.click(function () { + $(this).attr('src', '/auth/captcha?' + Math.random()); + $app.dom.input_captcha.focus().val(''); + }); + $app.dom.input_username.keydown(function (event) { + $('[data-toggle="popover"]').popover('hide'); + if (event.which === 13) { + $app.dom.input_password.focus(); + } + }); + $app.dom.input_password.keydown(function (event) { + $('[data-toggle="popover"]').popover('hide'); + if (event.which === 13) { + if ($app.login_type === LOGIN_TYPE_PASSWORD_CAPTCHA) + $app.dom.input_captcha.focus(); + else if ($app.login_type === LOGIN_TYPE_PASSWORD_OATH) + $app.dom.input_oath.focus(); + } + }); + $app.dom.input_captcha.keydown(function (event) { + $('[data-toggle="popover"]').popover('hide'); + if (event.which === 13) { + $app.login_account(); + } + }); + $app.dom.input_oath.keydown(function (event) { + $('[data-toggle="popover"]').popover('hide'); + if (event.which === 13) { + $app.login_account(); + } + }); + + cb_stack.exec(); +}; + +$app.hide_op_box = function () { + $app.dom.message.hide(); +}; + +$app.show_op_box = function (op_type, op_msg) { + $app.dom.message.html(op_msg); + $app.dom.message.removeClass().addClass('op_box op_' + op_type); + $app.dom.message.show(); +}; + +$app.login_account = function () { + var str_username = $app.dom.input_username.val(); + var str_password = $app.dom.input_password.val(); + var str_captcha = $app.dom.input_captcha.val(); + var str_oath = $app.dom.input_oath.val(); + var is_remember = $app.dom.remember.is(':checked'); + + if (str_username.length === 0) { + $app.show_op_box('error', '缺少账号!'); + $app.dom.input_username.attr('data-content', "请填写您的账号!").popover('show'); + $app.dom.input_username.focus(); + return; + } + + if (str_password.length === 0) { + $app.show_op_box('error', '缺少密码!'); + $app.dom.input_password.attr('data-content', "请填写密码!").popover('show'); + $app.dom.input_password.focus(); + return; + } + + if ($app.login_type === 
LOGIN_TYPE_PASSWORD_CAPTCHA) { + if (str_captcha.length !== 4) { + $app.show_op_box('error', '验证码错误!'); + setTimeout(function () { + $app.dom.input_captcha.attr('data-content', "验证码为4位数字和字母的组合,请重新填写!").focus().select().popover('show'); + }, 150); + return; + } + } else if ($app.login_type === LOGIN_TYPE_PASSWORD_OATH) { + if (str_oath.length !== 6 || ('' + parseInt(str_oath)) !== str_oath) { + $app.show_op_box('error', '身份验证器动态验证码错误!'); + setTimeout(function () { + $app.dom.input_oath.attr('data-content', "身份验证器动态验证码为6位数字,请重新填写!").focus().select().popover('show'); + }, 150); + return; + } + } + + $app.dom.btn_login.attr('disabled', 'disabled'); + $app.show_op_box('wait', ' 正在进行身份认证,请稍候...'); + + // 先判断一下captcha是否正确,如果不正确,拒绝登录 + if ($app.login_type === LOGIN_TYPE_PASSWORD_CAPTCHA) { + $tp.ajax_post_json('/auth/verify-captcha', {captcha: str_captcha}, + function (ret) { + if (ret.code === TPE_OK) { + // 验证成功 + $app.hide_op_box(); + $app.show_op_box('wait', ' 正在登录TELEPORT,请稍候...'); + $app.do_account_login(str_username, str_password, str_captcha, str_oath, is_remember); + } + else { + $app.hide_op_box(); + $app.show_op_box('error', tp_error_msg(ret.code, ret.message)); + $app.dom.captcha_image.attr('src', '/auth/captcha?' + Math.random()); + $app.dom.input_captcha.focus().select().val(''); + } + + $app.dom.btn_login.removeAttr('disabled'); + }, + function () { + $app.hide_op_box(); + $app.show_op_box('error', '很抱歉,无法连接服务器!请稍后再试一次!'); + $app.dom.btn_login.removeAttr('disabled'); + } + ); + } else { + $app.do_account_login(str_username, str_password, str_captcha, str_oath, is_remember); + } +}; + +$app.do_account_login = function (username, password, captcha, oath, is_remember) { + var args = {type: $app.login_type, username: username, password: password, captcha: captcha, oath: oath, remember: is_remember}; + $tp.ajax_post_json('/auth/do-login', args, + function (ret) { + if (ret.code === TPE_OK) { + window.location.href = $app.options.ref; + } else { + $app.hide_op_box(); + $app.show_op_box('error', '无法登录TELEPORT:' + tp_error_msg(ret.code, ret.message)); + console.log(ret); + } + + $app.dom.btn_login.removeAttr('disabled'); + }, + function () { + $app.hide_op_box(); + $app.show_op_box('error', '很抱歉,无法连接服务器!请稍后再试!'); + $app.dom.btn_login.removeAttr('disabled'); + } + ); +}; + +$app.on_screen_resize = function () { + $('body').backgroundBlur('resize'); +}; + +$app.init_blur_bg = function () { + var img_id = Math.floor(Math.random() * (BLUR_BG_COUNT)); + $('body').backgroundBlur({ + imageURL: '/static/img/login/login-bg-' + img_id + '.png?' + Math.random(), + blurAmount: 10, + duration: 1000, + imageClass: 'bg-blur', + overlayClass: 'bg-blur-overlay' + }); + + setInterval($app._update_blur_bg, 20000); +}; + +$app._update_blur_bg = function () { + var img_id = Math.floor(Math.random() * (BLUR_BG_COUNT)); + $('body').backgroundBlur('/static/img/login/login-bg-' + img_id + '.png?' 
+ Math.random()); +}; + +$app.init_slogan = function () { + $app._update_slogan(); + setInterval($app._update_slogan, 8000); +}; + +$app._update_slogan = function () { + var msg_id = Math.floor(Math.random() * SLOGAN.length); + $app.dom.slogan.fadeOut(1000, function () { + $(this).html(SLOGAN[msg_id]).fadeIn(1000); + }); +}; diff --git a/server/www/teleport/static/js/common/xterm.js.map b/server/www/teleport/static/js/common/xterm.js.map deleted file mode 100644 index 1e47101..0000000 --- a/server/www/teleport/static/js/common/xterm.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"xterm.js","sources":["../src/xterm.js","../src/utils/Mouse.ts","../src/utils/Generic.ts","../src/utils/DomElementObjectPool.ts","../src/utils/CircularList.ts","../src/utils/CharMeasure.ts","../src/utils/Browser.ts","../src/handlers/Clipboard.ts","../src/Viewport.ts","../src/SelectionModel.ts","../src/SelectionManager.ts","../src/Renderer.ts","../src/Parser.ts","../src/Linkifier.ts","../src/InputHandler.ts","../src/EventEmitter.ts","../src/EscapeSequences.ts","../src/CompositionHelper.ts","../src/Charsets.ts","../node_modules/browserify/node_modules/browser-pack/_prelude.js"],"sourcesContent":["/**\n * xterm.js: xterm, in the browser\n * Originally forked from (with the author's permission):\n * Fabrice Bellard's javascript vt100 for jslinux:\n * http://bellard.org/jslinux/\n * Copyright (c) 2011 Fabrice Bellard\n * The original design remains. The terminal itself\n * has been extended to include xterm CSI codes, among\n * other features.\n * @license MIT\n */\n\nimport { CompositionHelper } from './CompositionHelper';\nimport { EventEmitter } from './EventEmitter';\nimport { Viewport } from './Viewport';\nimport { rightClickHandler, moveTextAreaUnderMouseCursor, pasteHandler, copyHandler } from './handlers/Clipboard';\nimport { CircularList } from './utils/CircularList';\nimport { C0 } from './EscapeSequences';\nimport { InputHandler } from './InputHandler';\nimport { Parser } from './Parser';\nimport { Renderer } from './Renderer';\nimport { Linkifier } from './Linkifier';\nimport { SelectionManager } from './SelectionManager';\nimport { CharMeasure } from './utils/CharMeasure';\nimport * as Browser from './utils/Browser';\nimport * as Mouse from './utils/Mouse';\nimport { CHARSETS } from './Charsets';\nimport { getRawByteCoords } from './utils/Mouse';\n\n/**\n * Terminal Emulation References:\n * http://vt100.net/\n * http://invisible-island.net/xterm/ctlseqs/ctlseqs.txt\n * http://invisible-island.net/xterm/ctlseqs/ctlseqs.html\n * http://invisible-island.net/vttest/\n * http://www.inwap.com/pdp10/ansicode.txt\n * http://linux.die.net/man/4/console_codes\n * http://linux.die.net/man/7/urxvt\n */\n\n// Let it work inside Node.js for automated testing purposes.\nvar document = (typeof window != 'undefined') ? window.document : null;\n\n/**\n * The amount of write requests to queue before sending an XOFF signal to the\n * pty process. This number must be small in order for ^C and similar sequences\n * to be responsive.\n */\nvar WRITE_BUFFER_PAUSE_THRESHOLD = 5;\n\n/**\n * The number of writes to perform in a single batch before allowing the\n * renderer to catch up with a 0ms setTimeout.\n */\nvar WRITE_BATCH_SIZE = 300;\n\n/**\n * The time between cursor blinks. 
This is driven by JS rather than a CSS\n * animation due to a bug in Chromium that causes it to use excessive CPU time.\n * See https://github.com/Microsoft/vscode/issues/22900\n */\nvar CURSOR_BLINK_INTERVAL = 600;\n\n/**\n * Terminal\n */\n\n/**\n * Creates a new `Terminal` object.\n *\n * @param {object} options An object containing a set of options, the available options are:\n * - `cursorBlink` (boolean): Whether the terminal cursor blinks\n * - `cols` (number): The number of columns of the terminal (horizontal size)\n * - `rows` (number): The number of rows of the terminal (vertical size)\n *\n * @public\n * @class Xterm Xterm\n * @alias module:xterm/src/xterm\n */\nfunction Terminal(options) {\n var self = this;\n\n if (!(this instanceof Terminal)) {\n return new Terminal(arguments[0], arguments[1], arguments[2]);\n }\n\n self.browser = Browser;\n self.cancel = Terminal.cancel;\n\n EventEmitter.call(this);\n\n if (typeof options === 'number') {\n options = {\n cols: arguments[0],\n rows: arguments[1],\n handler: arguments[2]\n };\n }\n\n options = options || {};\n\n\n Object.keys(Terminal.defaults).forEach(function(key) {\n if (options[key] == null) {\n options[key] = Terminal.options[key];\n\n if (Terminal[key] !== Terminal.defaults[key]) {\n options[key] = Terminal[key];\n }\n }\n self[key] = options[key];\n });\n\n if (options.colors.length === 8) {\n options.colors = options.colors.concat(Terminal._colors.slice(8));\n } else if (options.colors.length === 16) {\n options.colors = options.colors.concat(Terminal._colors.slice(16));\n } else if (options.colors.length === 10) {\n options.colors = options.colors.slice(0, -2).concat(\n Terminal._colors.slice(8, -2), options.colors.slice(-2));\n } else if (options.colors.length === 18) {\n options.colors = options.colors.concat(\n Terminal._colors.slice(16, -2), options.colors.slice(-2));\n }\n this.colors = options.colors;\n\n this.options = options;\n\n // this.context = options.context || window;\n // this.document = options.document || document;\n this.parent = options.body || options.parent || (\n document ? document.getElementsByTagName('body')[0] : null\n );\n\n this.cols = options.cols || options.geometry[0];\n this.rows = options.rows || options.geometry[1];\n this.geometry = [this.cols, this.rows];\n\n if (options.handler) {\n this.on('data', options.handler);\n }\n\n /**\n * The scroll position of the y cursor, ie. 
ybase + y = the y position within the entire\n * buffer\n */\n this.ybase = 0;\n\n /**\n * The scroll position of the viewport\n */\n this.ydisp = 0;\n\n /**\n * The cursor's x position after ybase\n */\n this.x = 0;\n\n /**\n * The cursor's y position after ybase\n */\n this.y = 0;\n\n this.cursorState = 0;\n this.cursorHidden = false;\n this.convertEol;\n this.queue = '';\n this.scrollTop = 0;\n this.scrollBottom = this.rows - 1;\n this.customKeyEventHandler = null;\n this.cursorBlinkInterval = null;\n\n // modes\n this.applicationKeypad = false;\n this.applicationCursor = false;\n this.originMode = false;\n this.insertMode = false;\n this.wraparoundMode = true; // defaults: xterm - true, vt100 - false\n this.normal = null;\n\n // charset\n this.charset = null;\n this.gcharset = null;\n this.glevel = 0;\n this.charsets = [null];\n\n // mouse properties\n this.decLocator;\n this.x10Mouse;\n this.vt200Mouse;\n this.vt300Mouse;\n this.normalMouse;\n this.mouseEvents;\n this.sendFocus;\n this.utfMouse;\n this.sgrMouse;\n this.urxvtMouse;\n\n // misc\n this.element;\n this.children;\n this.refreshStart;\n this.refreshEnd;\n this.savedX;\n this.savedY;\n this.savedCols;\n\n // stream\n this.readable = true;\n this.writable = true;\n\n this.defAttr = (0 << 18) | (257 << 9) | (256 << 0);\n this.curAttr = this.defAttr;\n\n this.params = [];\n this.currentParam = 0;\n this.prefix = '';\n this.postfix = '';\n\n this.inputHandler = new InputHandler(this);\n this.parser = new Parser(this.inputHandler, this);\n // Reuse renderer if the Terminal is being recreated via a Terminal.reset call.\n this.renderer = this.renderer || null;\n this.selectionManager = this.selectionManager || null;\n this.linkifier = this.linkifier || new Linkifier();\n\n // user input states\n this.writeBuffer = [];\n this.writeInProgress = false;\n\n /**\n * Whether _xterm.js_ sent XOFF in order to catch up with the pty process.\n * This is a distinct state from writeStopped so that if the user requested\n * XOFF via ^S that it will not automatically resume when the writeBuffer goes\n * below threshold.\n */\n this.xoffSentToCatchUp = false;\n\n /** Whether writing has been stopped as a result of XOFF */\n this.writeStopped = false;\n\n // leftover surrogate high from previous write invocation\n this.surrogate_high = '';\n\n /**\n * An array of all lines in the entire buffer, including the prompt. 
The lines are array of\n * characters which are 2-length arrays where [0] is an attribute and [1] is the character.\n */\n this.lines = new CircularList(this.scrollback);\n var i = this.rows;\n while (i--) {\n this.lines.push(this.blankLine());\n }\n // Ensure the selection manager has the correct buffer\n if (this.selectionManager) {\n this.selectionManager.setBuffer(this.lines);\n }\n\n this.tabs;\n this.setupStops();\n\n // Store if user went browsing history in scrollback\n this.userScrolling = false;\n}\n\ninherits(Terminal, EventEmitter);\n\n/**\n * back_color_erase feature for xterm.\n */\nTerminal.prototype.eraseAttr = function() {\n // if (this.is('screen')) return this.defAttr;\n return (this.defAttr & ~0x1ff) | (this.curAttr & 0x1ff);\n};\n\n/**\n * Colors\n */\n\n// Colors 0-15\nTerminal.tangoColors = [\n // dark:\n '#2e3436',\n '#cc0000',\n '#4e9a06',\n '#c4a000',\n '#3465a4',\n '#75507b',\n '#06989a',\n '#d3d7cf',\n // bright:\n '#555753',\n '#ef2929',\n '#8ae234',\n '#fce94f',\n '#729fcf',\n '#ad7fa8',\n '#34e2e2',\n '#eeeeec'\n];\n\n// Colors 0-15 + 16-255\n// Much thanks to TooTallNate for writing this.\nTerminal.colors = (function() {\n var colors = Terminal.tangoColors.slice()\n , r = [0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff]\n , i;\n\n // 16-231\n i = 0;\n for (; i < 216; i++) {\n out(r[(i / 36) % 6 | 0], r[(i / 6) % 6 | 0], r[i % 6]);\n }\n\n // 232-255 (grey)\n i = 0;\n for (; i < 24; i++) {\n r = 8 + i * 10;\n out(r, r, r);\n }\n\n function out(r, g, b) {\n colors.push('#' + hex(r) + hex(g) + hex(b));\n }\n\n function hex(c) {\n c = c.toString(16);\n return c.length < 2 ? '0' + c : c;\n }\n\n return colors;\n})();\n\nTerminal._colors = Terminal.colors.slice();\n\nTerminal.vcolors = (function() {\n var out = []\n , colors = Terminal.colors\n , i = 0\n , color;\n\n for (; i < 256; i++) {\n color = parseInt(colors[i].substring(1), 16);\n out.push([\n (color >> 16) & 0xff,\n (color >> 8) & 0xff,\n color & 0xff\n ]);\n }\n\n return out;\n})();\n\n/**\n * Options\n */\n\nTerminal.defaults = {\n colors: Terminal.colors,\n theme: 'default',\n convertEol: false,\n termName: 'xterm',\n geometry: [80, 24],\n cursorBlink: false,\n cursorStyle: 'block',\n visualBell: false,\n popOnBell: false,\n scrollback: 1000,\n screenKeys: false,\n debug: false,\n cancelEvents: false,\n disableStdin: false,\n useFlowControl: false,\n tabStopWidth: 8\n // programFeatures: false,\n // focusKeys: false,\n};\n\nTerminal.options = {};\n\nTerminal.focus = null;\n\neach(keys(Terminal.defaults), function(key) {\n Terminal[key] = Terminal.defaults[key];\n Terminal.options[key] = Terminal.defaults[key];\n});\n\n/**\n * Focus the terminal. 
Delegates focus handling to the terminal's DOM element.\n */\nTerminal.prototype.focus = function() {\n return this.textarea.focus();\n};\n\n/**\n * Retrieves an option's value from the terminal.\n * @param {string} key The option key.\n */\nTerminal.prototype.getOption = function(key, value) {\n if (!(key in Terminal.defaults)) {\n throw new Error('No option with key \"' + key + '\"');\n }\n\n if (typeof this.options[key] !== 'undefined') {\n return this.options[key];\n }\n\n return this[key];\n};\n\n/**\n * Sets an option on the terminal.\n * @param {string} key The option key.\n * @param {string} value The option value.\n */\nTerminal.prototype.setOption = function(key, value) {\n if (!(key in Terminal.defaults)) {\n throw new Error('No option with key \"' + key + '\"');\n }\n switch (key) {\n case 'scrollback':\n if (value < this.rows) {\n let msg = 'Setting the scrollback value less than the number of rows ';\n\n msg += `(${this.rows}) is not allowed.`;\n\n console.warn(msg);\n return false;\n }\n\n if (this.options[key] !== value) {\n if (this.lines.length > value) {\n const amountToTrim = this.lines.length - value;\n const needsRefresh = (this.ydisp - amountToTrim < 0);\n this.lines.trimStart(amountToTrim);\n this.ybase = Math.max(this.ybase - amountToTrim, 0);\n this.ydisp = Math.max(this.ydisp - amountToTrim, 0);\n if (needsRefresh) {\n this.refresh(0, this.rows - 1);\n }\n }\n this.lines.maxLength = value;\n this.viewport.syncScrollArea();\n }\n break;\n }\n this[key] = value;\n this.options[key] = value;\n switch (key) {\n case 'cursorBlink': this.setCursorBlinking(value); break;\n case 'cursorStyle':\n // Style 'block' applies with no class\n this.element.classList.toggle(`xterm-cursor-style-underline`, value === 'underline');\n this.element.classList.toggle(`xterm-cursor-style-bar`, value === 'bar');\n break;\n case 'tabStopWidth': this.setupStops(); break;\n }\n};\n\nTerminal.prototype.restartCursorBlinking = function () {\n this.setCursorBlinking(this.options.cursorBlink);\n};\n\nTerminal.prototype.setCursorBlinking = function (enabled) {\n this.element.classList.toggle('xterm-cursor-blink', enabled);\n this.clearCursorBlinkingInterval();\n if (enabled) {\n var self = this;\n this.cursorBlinkInterval = setInterval(function () {\n self.element.classList.toggle('xterm-cursor-blink-on');\n }, CURSOR_BLINK_INTERVAL);\n }\n};\n\nTerminal.prototype.clearCursorBlinkingInterval = function () {\n this.element.classList.remove('xterm-cursor-blink-on');\n if (this.cursorBlinkInterval) {\n clearInterval(this.cursorBlinkInterval);\n this.cursorBlinkInterval = null;\n }\n};\n\n/**\n * Binds the desired focus behavior on a given terminal object.\n *\n * @static\n */\nTerminal.bindFocus = function (term) {\n on(term.textarea, 'focus', function (ev) {\n if (term.sendFocus) {\n term.send(C0.ESC + '[I');\n }\n term.element.classList.add('focus');\n term.showCursor();\n term.restartCursorBlinking.apply(term);\n Terminal.focus = term;\n term.emit('focus', {terminal: term});\n });\n};\n\n/**\n * Blur the terminal. 
Delegates blur handling to the terminal's DOM element.\n */\nTerminal.prototype.blur = function() {\n return this.textarea.blur();\n};\n\n/**\n * Binds the desired blur behavior on a given terminal object.\n *\n * @static\n */\nTerminal.bindBlur = function (term) {\n on(term.textarea, 'blur', function (ev) {\n term.refresh(term.y, term.y);\n if (term.sendFocus) {\n term.send(C0.ESC + '[O');\n }\n term.element.classList.remove('focus');\n term.clearCursorBlinkingInterval.apply(term);\n Terminal.focus = null;\n term.emit('blur', {terminal: term});\n });\n};\n\n/**\n * Initialize default behavior\n */\nTerminal.prototype.initGlobal = function() {\n var term = this;\n\n Terminal.bindKeys(this);\n Terminal.bindFocus(this);\n Terminal.bindBlur(this);\n\n // Bind clipboard functionality\n on(this.element, 'copy', event => {\n // If mouse events are active it means the selection manager is disabled and\n // copy should be handled by the host program.\n if (this.mouseEvents) {\n return;\n }\n copyHandler(event, term, this.selectionManager);\n });\n const pasteHandlerWrapper = event => pasteHandler(event, term);\n on(this.textarea, 'paste', pasteHandlerWrapper);\n on(this.element, 'paste', pasteHandlerWrapper);\n\n // Handle right click context menus\n if (term.browser.isFirefox) {\n // Firefox doesn't appear to fire the contextmenu event on right click\n on(this.element, 'mousedown', event => {\n if (event.button == 2) {\n rightClickHandler(event, this.textarea, this.selectionManager);\n }\n });\n } else {\n on(this.element, 'contextmenu', event => {\n rightClickHandler(event, this.textarea, this.selectionManager);\n });\n }\n\n // Move the textarea under the cursor when middle clicking on Linux to ensure\n // middle click to paste selection works. This only appears to work in Chrome\n // at the time is writing.\n if (term.browser.isLinux) {\n // Use auxclick event over mousedown the latter doesn't seem to work. Note\n // that the regular click event doesn't fire for the middle mouse button.\n on(this.element, 'auxclick', event => {\n if (event.button === 1) {\n moveTextAreaUnderMouseCursor(event, this.textarea, this.selectionManager);\n }\n });\n }\n};\n\n/**\n * Apply key handling to the terminal\n */\nTerminal.bindKeys = function(term) {\n on(term.element, 'keydown', function(ev) {\n if (document.activeElement != this) {\n return;\n }\n term.keyDown(ev);\n }, true);\n\n on(term.element, 'keypress', function(ev) {\n if (document.activeElement != this) {\n return;\n }\n term.keyPress(ev);\n }, true);\n\n on(term.element, 'keyup', function(ev) {\n if (!wasMondifierKeyOnlyEvent(ev)) {\n term.focus(term);\n }\n }, true);\n\n on(term.textarea, 'keydown', function(ev) {\n term.keyDown(ev);\n }, true);\n\n on(term.textarea, 'keypress', function(ev) {\n term.keyPress(ev);\n // Truncate the textarea's value, since it is not needed\n this.value = '';\n }, true);\n\n on(term.textarea, 'compositionstart', term.compositionHelper.compositionstart.bind(term.compositionHelper));\n on(term.textarea, 'compositionupdate', term.compositionHelper.compositionupdate.bind(term.compositionHelper));\n on(term.textarea, 'compositionend', term.compositionHelper.compositionend.bind(term.compositionHelper));\n term.on('refresh', term.compositionHelper.updateCompositionElements.bind(term.compositionHelper));\n term.on('refresh', function (data) {\n term.queueLinkification(data.start, data.end)\n });\n};\n\n\n/**\n * Insert the given row to the terminal or produce a new one\n * if no row argument is passed. 
Return the inserted row.\n * @param {HTMLElement} row (optional) The row to append to the terminal.\n */\nTerminal.prototype.insertRow = function (row) {\n if (typeof row != 'object') {\n row = document.createElement('div');\n }\n\n this.rowContainer.appendChild(row);\n this.children.push(row);\n\n return row;\n};\n\n/**\n * Opens the terminal within an element.\n *\n * @param {HTMLElement} parent The element to create the terminal within.\n * @param {boolean} focus Focus the terminal, after it gets instantiated in the DOM\n */\nTerminal.prototype.open = function(parent, focus) {\n var self=this, i=0, div;\n\n this.parent = parent || this.parent;\n\n if (!this.parent) {\n throw new Error('Terminal requires a parent element.');\n }\n\n // Grab global elements\n this.context = this.parent.ownerDocument.defaultView;\n this.document = this.parent.ownerDocument;\n this.body = this.document.getElementsByTagName('body')[0];\n\n //Create main element container\n this.element = this.document.createElement('div');\n this.element.classList.add('terminal');\n this.element.classList.add('xterm');\n this.element.classList.add('xterm-theme-' + this.theme);\n this.setCursorBlinking(this.options.cursorBlink);\n\n this.element.setAttribute('tabindex', 0);\n\n this.viewportElement = document.createElement('div');\n this.viewportElement.classList.add('xterm-viewport');\n this.element.appendChild(this.viewportElement);\n this.viewportScrollArea = document.createElement('div');\n this.viewportScrollArea.classList.add('xterm-scroll-area');\n this.viewportElement.appendChild(this.viewportScrollArea);\n\n // Create the selection container.\n this.selectionContainer = document.createElement('div');\n this.selectionContainer.classList.add('xterm-selection');\n this.element.appendChild(this.selectionContainer);\n\n // Create the container that will hold the lines of the terminal and then\n // produce the lines the lines.\n this.rowContainer = document.createElement('div');\n this.rowContainer.classList.add('xterm-rows');\n this.element.appendChild(this.rowContainer);\n this.children = [];\n this.linkifier.attachToDom(document, this.children);\n\n // Create the container that will hold helpers like the textarea for\n // capturing DOM Events. 
Then produce the helpers.\n this.helperContainer = document.createElement('div');\n this.helperContainer.classList.add('xterm-helpers');\n // TODO: This should probably be inserted once it's filled to prevent an additional layout\n this.element.appendChild(this.helperContainer);\n this.textarea = document.createElement('textarea');\n this.textarea.classList.add('xterm-helper-textarea');\n this.textarea.setAttribute('autocorrect', 'off');\n this.textarea.setAttribute('autocapitalize', 'off');\n this.textarea.setAttribute('spellcheck', 'false');\n this.textarea.tabIndex = 0;\n this.textarea.addEventListener('focus', function() {\n self.emit('focus', {terminal: self});\n });\n this.textarea.addEventListener('blur', function() {\n self.emit('blur', {terminal: self});\n });\n this.helperContainer.appendChild(this.textarea);\n\n this.compositionView = document.createElement('div');\n this.compositionView.classList.add('composition-view');\n this.compositionHelper = new CompositionHelper(this.textarea, this.compositionView, this);\n this.helperContainer.appendChild(this.compositionView);\n\n this.charSizeStyleElement = document.createElement('style');\n this.helperContainer.appendChild(this.charSizeStyleElement);\n\n for (; i < this.rows; i++) {\n this.insertRow();\n }\n this.parent.appendChild(this.element);\n\n this.charMeasure = new CharMeasure(document, this.helperContainer);\n this.charMeasure.on('charsizechanged', function () {\n self.updateCharSizeStyles();\n });\n this.charMeasure.measure();\n\n this.viewport = new Viewport(this, this.viewportElement, this.viewportScrollArea, this.charMeasure);\n this.renderer = new Renderer(this);\n this.selectionManager = new SelectionManager(this, this.lines, this.rowContainer, this.charMeasure);\n this.selectionManager.on('refresh', data => {\n this.renderer.refreshSelection(data.start, data.end);\n });\n this.selectionManager.on('newselection', text => {\n // If there's a new selection, put it into the textarea, focus and select it\n // in order to register it as a selection on the OS. This event is fired\n // only on Linux to enable middle click to paste selection.\n this.textarea.value = text;\n this.textarea.focus();\n this.textarea.select();\n });\n this.on('scroll', () => this.selectionManager.refresh());\n this.viewportElement.addEventListener('scroll', () => this.selectionManager.refresh());\n\n // Setup loop that draws to screen\n this.refresh(0, this.rows - 1);\n\n // Initialize global actions that\n // need to be taken on the document.\n this.initGlobal();\n\n /**\n * Automatic focus functionality.\n * TODO: Default to `false` starting with xterm.js 3.0.\n */\n if (typeof focus == 'undefined') {\n let message = 'You did not pass the `focus` argument in `Terminal.prototype.open()`.\\n';\n\n message += 'The `focus` argument now defaults to `true` but starting with xterm.js 3.0 ';\n message += 'it will default to `false`.';\n\n console.warn(message);\n focus = true;\n }\n\n if (focus) {\n this.focus();\n }\n\n on(this.element, 'click', function() {\n var selection = document.getSelection(),\n collapsed = selection.isCollapsed,\n isRange = typeof collapsed == 'boolean' ? 
!collapsed : selection.type == 'Range';\n if (!isRange) {\n self.focus();\n }\n });\n\n // Listen for mouse events and translate\n // them into terminal mouse protocols.\n this.bindMouse();\n\n /**\n * This event is emitted when terminal has completed opening.\n *\n * @event open\n */\n this.emit('open');\n};\n\n\n/**\n * Attempts to load an add-on using CommonJS or RequireJS (whichever is available).\n * @param {string} addon The name of the addon to load\n * @static\n */\nTerminal.loadAddon = function(addon, callback) {\n if (typeof exports === 'object' && typeof module === 'object') {\n // CommonJS\n return require('./addons/' + addon + '/' + addon);\n } else if (typeof define == 'function') {\n // RequireJS\n return require(['./addons/' + addon + '/' + addon], callback);\n } else {\n console.error('Cannot load a module without a CommonJS or RequireJS environment.');\n return false;\n }\n};\n\n/**\n * Updates the helper CSS class with any changes necessary after the terminal's\n * character width has been changed.\n */\nTerminal.prototype.updateCharSizeStyles = function() {\n this.charSizeStyleElement.textContent =\n `.xterm-wide-char{width:${this.charMeasure.width * 2}px;}` +\n `.xterm-normal-char{width:${this.charMeasure.width}px;}` +\n `.xterm-rows > div{height:${this.charMeasure.height}px;}`;\n}\n\n/**\n * XTerm mouse events\n * http://invisible-island.net/xterm/ctlseqs/ctlseqs.html#Mouse%20Tracking\n * To better understand these\n * the xterm code is very helpful:\n * Relevant files:\n * button.c, charproc.c, misc.c\n * Relevant functions in xterm/button.c:\n * BtnCode, EmitButtonCode, EditorButton, SendMousePosition\n */\nTerminal.prototype.bindMouse = function() {\n var el = this.element, self = this, pressed = 32;\n\n // mouseup, mousedown, wheel\n // left click: ^[[M 3<^[[M#3<\n // wheel up: ^[[M`3>\n function sendButton(ev) {\n var button\n , pos;\n\n // get the xterm-style button\n button = getButton(ev);\n\n // get mouse coordinates\n pos = getRawByteCoords(ev, self.rowContainer, self.charMeasure, self.cols, self.rows);\n if (!pos) return;\n\n sendEvent(button, pos);\n\n switch (ev.overrideType || ev.type) {\n case 'mousedown':\n pressed = button;\n break;\n case 'mouseup':\n // keep it at the left\n // button, just in case.\n pressed = 32;\n break;\n case 'wheel':\n // nothing. 
don't\n // interfere with\n // `pressed`.\n break;\n }\n }\n\n // motion example of a left click:\n // ^[[M 3<^[[M@4<^[[M@5<^[[M@6<^[[M@7<^[[M#7<\n function sendMove(ev) {\n var button = pressed\n , pos;\n\n pos = getRawByteCoords(ev, self.rowContainer, self.charMeasure, self.cols, self.rows);\n if (!pos) return;\n\n // buttons marked as motions\n // are incremented by 32\n button += 32;\n\n sendEvent(button, pos);\n }\n\n // encode button and\n // position to characters\n function encode(data, ch) {\n if (!self.utfMouse) {\n if (ch === 255) return data.push(0);\n if (ch > 127) ch = 127;\n data.push(ch);\n } else {\n if (ch === 2047) return data.push(0);\n if (ch < 127) {\n data.push(ch);\n } else {\n if (ch > 2047) ch = 2047;\n data.push(0xC0 | (ch >> 6));\n data.push(0x80 | (ch & 0x3F));\n }\n }\n }\n\n // send a mouse event:\n // regular/utf8: ^[[M Cb Cx Cy\n // urxvt: ^[[ Cb ; Cx ; Cy M\n // sgr: ^[[ Cb ; Cx ; Cy M/m\n // vt300: ^[[ 24(1/3/5)~ [ Cx , Cy ] \\r\n // locator: CSI P e ; P b ; P r ; P c ; P p & w\n function sendEvent(button, pos) {\n // self.emit('mouse', {\n // x: pos.x - 32,\n // y: pos.x - 32,\n // button: button\n // });\n\n if (self.vt300Mouse) {\n // NOTE: Unstable.\n // http://www.vt100.net/docs/vt3xx-gp/chapter15.html\n button &= 3;\n pos.x -= 32;\n pos.y -= 32;\n var data = C0.ESC + '[24';\n if (button === 0) data += '1';\n else if (button === 1) data += '3';\n else if (button === 2) data += '5';\n else if (button === 3) return;\n else data += '0';\n data += '~[' + pos.x + ',' + pos.y + ']\\r';\n self.send(data);\n return;\n }\n\n if (self.decLocator) {\n // NOTE: Unstable.\n button &= 3;\n pos.x -= 32;\n pos.y -= 32;\n if (button === 0) button = 2;\n else if (button === 1) button = 4;\n else if (button === 2) button = 6;\n else if (button === 3) button = 3;\n self.send(C0.ESC + '['\n + button\n + ';'\n + (button === 3 ? 4 : 0)\n + ';'\n + pos.y\n + ';'\n + pos.x\n + ';'\n + (pos.page || 0)\n + '&w');\n return;\n }\n\n if (self.urxvtMouse) {\n pos.x -= 32;\n pos.y -= 32;\n pos.x++;\n pos.y++;\n self.send(C0.ESC + '[' + button + ';' + pos.x + ';' + pos.y + 'M');\n return;\n }\n\n if (self.sgrMouse) {\n pos.x -= 32;\n pos.y -= 32;\n self.send(C0.ESC + '[<'\n + (((button & 3) === 3 ? button & ~3 : button) - 32)\n + ';'\n + pos.x\n + ';'\n + pos.y\n + ((button & 3) === 3 ? 'm' : 'M'));\n return;\n }\n\n var data = [];\n\n encode(data, button);\n encode(data, pos.x);\n encode(data, pos.y);\n\n self.send(C0.ESC + '[M' + String.fromCharCode.apply(String, data));\n }\n\n function getButton(ev) {\n var button\n , shift\n , meta\n , ctrl\n , mod;\n\n // two low bits:\n // 0 = left\n // 1 = middle\n // 2 = right\n // 3 = release\n // wheel up/down:\n // 1, and 2 - with 64 added\n switch (ev.overrideType || ev.type) {\n case 'mousedown':\n button = ev.button != null\n ? +ev.button\n : ev.which != null\n ? ev.which - 1\n : null;\n\n if (self.browser.isMSIE) {\n button = button === 1 ? 0 : button === 4 ? 1 : button;\n }\n break;\n case 'mouseup':\n button = 3;\n break;\n case 'DOMMouseScroll':\n button = ev.detail < 0\n ? 64\n : 65;\n break;\n case 'wheel':\n button = ev.wheelDeltaY > 0\n ? 64\n : 65;\n break;\n }\n\n // next three bits are the modifiers:\n // 4 = shift, 8 = meta, 16 = control\n shift = ev.shiftKey ? 4 : 0;\n meta = ev.metaKey ? 8 : 0;\n ctrl = ev.ctrlKey ? 
16 : 0;\n mod = shift | meta | ctrl;\n\n // no mods\n if (self.vt200Mouse) {\n // ctrl only\n mod &= ctrl;\n } else if (!self.normalMouse) {\n mod = 0;\n }\n\n // increment to SP\n button = (32 + (mod << 2)) + button;\n\n return button;\n }\n\n on(el, 'mousedown', function(ev) {\n if (!self.mouseEvents) return;\n\n // send the button\n sendButton(ev);\n\n // ensure focus\n self.focus();\n\n // fix for odd bug\n //if (self.vt200Mouse && !self.normalMouse) {\n if (self.vt200Mouse) {\n ev.overrideType = 'mouseup';\n sendButton(ev);\n return self.cancel(ev);\n }\n\n // bind events\n if (self.normalMouse) on(self.document, 'mousemove', sendMove);\n\n // x10 compatibility mode can't send button releases\n if (!self.x10Mouse) {\n on(self.document, 'mouseup', function up(ev) {\n sendButton(ev);\n if (self.normalMouse) off(self.document, 'mousemove', sendMove);\n off(self.document, 'mouseup', up);\n return self.cancel(ev);\n });\n }\n\n return self.cancel(ev);\n });\n\n //if (self.normalMouse) {\n // on(self.document, 'mousemove', sendMove);\n //}\n\n on(el, 'wheel', function(ev) {\n if (!self.mouseEvents) return;\n if (self.x10Mouse\n || self.vt300Mouse\n || self.decLocator) return;\n sendButton(ev);\n return self.cancel(ev);\n });\n\n // allow wheel scrolling in\n // the shell for example\n on(el, 'wheel', function(ev) {\n if (self.mouseEvents) return;\n self.viewport.onWheel(ev);\n return self.cancel(ev);\n });\n\n on(el, 'touchstart', function(ev) {\n if (self.mouseEvents) return;\n self.viewport.onTouchStart(ev);\n return self.cancel(ev);\n });\n\n on(el, 'touchmove', function(ev) {\n if (self.mouseEvents) return;\n self.viewport.onTouchMove(ev);\n return self.cancel(ev);\n });\n};\n\n/**\n * Destroys the terminal.\n */\nTerminal.prototype.destroy = function() {\n this.readable = false;\n this.writable = false;\n this._events = {};\n this.handler = function() {};\n this.write = function() {};\n if (this.element && this.element.parentNode) {\n this.element.parentNode.removeChild(this.element);\n }\n //this.emit('close');\n};\n\n/**\n * Tells the renderer to refresh terminal content between two rows (inclusive) at the next\n * opportunity.\n * @param {number} start The row to start from (between 0 and this.rows - 1).\n * @param {number} end The row to end at (between start and this.rows - 1).\n */\nTerminal.prototype.refresh = function(start, end) {\n if (this.renderer) {\n this.renderer.queueRefresh(start, end);\n }\n};\n\n/**\n * Queues linkification for the specified rows.\n * @param {number} start The row to start from (between 0 and this.rows - 1).\n * @param {number} end The row to end at (between start and this.rows - 1).\n */\nTerminal.prototype.queueLinkification = function(start, end) {\n if (this.linkifier) {\n for (let i = start; i <= end; i++) {\n this.linkifier.linkifyRow(i);\n }\n }\n};\n\n/**\n * Display the cursor element\n */\nTerminal.prototype.showCursor = function() {\n if (!this.cursorState) {\n this.cursorState = 1;\n this.refresh(this.y, this.y);\n }\n};\n\n/**\n * Scroll the terminal down 1 row, creating a blank line.\n * @param {boolean} isWrapped Whether the new line is wrapped from the previous\n * line.\n */\nTerminal.prototype.scroll = function(isWrapped) {\n var row;\n\n // Make room for the new row in lines\n if (this.lines.length === this.lines.maxLength) {\n this.lines.trimStart(1);\n this.ybase--;\n if (this.ydisp !== 0) {\n this.ydisp--;\n }\n }\n\n this.ybase++;\n\n // TODO: Why is this done twice?\n if (!this.userScrolling) {\n this.ydisp = this.ybase;\n 
}\n\n // last line\n row = this.ybase + this.rows - 1;\n\n // subtract the bottom scroll region\n row -= this.rows - 1 - this.scrollBottom;\n\n if (row === this.lines.length) {\n // Optimization: pushing is faster than splicing when they amount to the same behavior\n this.lines.push(this.blankLine(undefined, isWrapped));\n } else {\n // add our new line\n this.lines.splice(row, 0, this.blankLine(undefined, isWrapped));\n }\n\n if (this.scrollTop !== 0) {\n if (this.ybase !== 0) {\n this.ybase--;\n if (!this.userScrolling) {\n this.ydisp = this.ybase;\n }\n }\n this.lines.splice(this.ybase + this.scrollTop, 1);\n }\n\n // this.maxRange();\n this.updateRange(this.scrollTop);\n this.updateRange(this.scrollBottom);\n\n /**\n * This event is emitted whenever the terminal is scrolled.\n * The one parameter passed is the new y display position.\n *\n * @event scroll\n */\n this.emit('scroll', this.ydisp);\n};\n\n/**\n * Scroll the display of the terminal\n * @param {number} disp The number of lines to scroll down (negatives scroll up).\n * @param {boolean} suppressScrollEvent Don't emit the scroll event as scrollDisp. This is used\n * to avoid unwanted events being handled by the veiwport when the event was triggered from the\n * viewport originally.\n */\nTerminal.prototype.scrollDisp = function(disp, suppressScrollEvent) {\n if (disp < 0) {\n if (this.ydisp === 0) {\n return;\n }\n this.userScrolling = true;\n } else if (disp + this.ydisp >= this.ybase) {\n this.userScrolling = false;\n }\n\n this.ydisp += disp;\n\n if (this.ydisp > this.ybase) {\n this.ydisp = this.ybase;\n } else if (this.ydisp < 0) {\n this.ydisp = 0;\n }\n\n if (!suppressScrollEvent) {\n this.emit('scroll', this.ydisp);\n }\n\n this.refresh(0, this.rows - 1);\n};\n\n/**\n * Scroll the display of the terminal by a number of pages.\n * @param {number} pageCount The number of pages to scroll (negative scrolls up).\n */\nTerminal.prototype.scrollPages = function(pageCount) {\n this.scrollDisp(pageCount * (this.rows - 1));\n};\n\n/**\n * Scrolls the display of the terminal to the top.\n */\nTerminal.prototype.scrollToTop = function() {\n this.scrollDisp(-this.ydisp);\n};\n\n/**\n * Scrolls the display of the terminal to the bottom.\n */\nTerminal.prototype.scrollToBottom = function() {\n this.scrollDisp(this.ybase - this.ydisp);\n};\n\n/**\n * Writes text to the terminal.\n * @param {string} data The text to write to the terminal.\n */\nTerminal.prototype.write = function(data) {\n this.writeBuffer.push(data);\n\n // Send XOFF to pause the pty process if the write buffer becomes too large so\n // xterm.js can catch up before more data is sent. 
This is necessary in order\n // to keep signals such as ^C responsive.\n if (this.options.useFlowControl && !this.xoffSentToCatchUp && this.writeBuffer.length >= WRITE_BUFFER_PAUSE_THRESHOLD) {\n // XOFF - stop pty pipe\n // XON will be triggered by emulator before processing data chunk\n this.send(C0.DC3);\n this.xoffSentToCatchUp = true;\n }\n\n if (!this.writeInProgress && this.writeBuffer.length > 0) {\n // Kick off a write which will write all data in sequence recursively\n this.writeInProgress = true;\n // Kick off an async innerWrite so more writes can come in while processing data\n var self = this;\n setTimeout(function () {\n self.innerWrite();\n });\n }\n};\n\nTerminal.prototype.innerWrite = function() {\n var writeBatch = this.writeBuffer.splice(0, WRITE_BATCH_SIZE);\n while (writeBatch.length > 0) {\n var data = writeBatch.shift();\n var l = data.length, i = 0, j, cs, ch, code, low, ch_width, row;\n\n // If XOFF was sent in order to catch up with the pty process, resume it if\n // the writeBuffer is empty to allow more data to come in.\n if (this.xoffSentToCatchUp && writeBatch.length === 0 && this.writeBuffer.length === 0) {\n this.send(C0.DC1);\n this.xoffSentToCatchUp = false;\n }\n\n this.refreshStart = this.y;\n this.refreshEnd = this.y;\n\n // HACK: Set the parser state based on it's state at the time of return.\n // This works around the bug #662 which saw the parser state reset in the\n // middle of parsing escape sequence in two chunks. For some reason the\n // state of the parser resets to 0 after exiting parser.parse. This change\n // just sets the state back based on the correct return statement.\n var state = this.parser.parse(data);\n this.parser.setState(state);\n\n this.updateRange(this.y);\n this.refresh(this.refreshStart, this.refreshEnd);\n }\n if (this.writeBuffer.length > 0) {\n // Allow renderer to catch up before processing the next batch\n var self = this;\n setTimeout(function () {\n self.innerWrite();\n }, 0);\n } else {\n this.writeInProgress = false;\n }\n};\n\n/**\n * Writes text to the terminal, followed by a break line character (\\n).\n * @param {string} data The text to write to the terminal.\n */\nTerminal.prototype.writeln = function(data) {\n this.write(data + '\\r\\n');\n};\n\n/**\n * DEPRECATED: only for backward compatibility. Please use attachCustomKeyEventHandler() instead.\n * @param {function} customKeydownHandler The custom KeyboardEvent handler to attach. This is a\n * function that takes a KeyboardEvent, allowing consumers to stop propogation and/or prevent\n * the default action. The function returns whether the event should be processed by xterm.js.\n */\nTerminal.prototype.attachCustomKeydownHandler = function(customKeydownHandler) {\n let message = 'attachCustomKeydownHandler() is DEPRECATED and will be removed soon. Please use attachCustomKeyEventHandler() instead.';\n console.warn(message);\n this.attachCustomKeyEventHandler(customKeydownHandler);\n};\n\n/**\n * Attaches a custom key event handler which is run before keys are processed, giving consumers of\n * xterm.js ultimate control as to what keys should be processed by the terminal and what keys\n * should not.\n * @param {function} customKeyEventHandler The custom KeyboardEvent handler to attach. This is a\n * function that takes a KeyboardEvent, allowing consumers to stop propogation and/or prevent\n * the default action. 
The function returns whether the event should be processed by xterm.js.\n */\nTerminal.prototype.attachCustomKeyEventHandler = function(customKeyEventHandler) {\n this.customKeyEventHandler = customKeyEventHandler;\n};\n\n/**\n * Attaches a http(s) link handler, forcing web links to behave differently to\n * regular tags. This will trigger a refresh as links potentially need to be\n * reconstructed. Calling this with null will remove the handler.\n * @param {LinkMatcherHandler} handler The handler callback function.\n */\nTerminal.prototype.setHypertextLinkHandler = function(handler) {\n if (!this.linkifier) {\n throw new Error('Cannot attach a hypertext link handler before Terminal.open is called');\n }\n this.linkifier.setHypertextLinkHandler(handler);\n // Refresh to force links to refresh\n this.refresh(0, this.rows - 1);\n};\n\n/**\n * Attaches a validation callback for hypertext links. This is useful to use\n * validation logic or to do something with the link's element and url.\n * @param {LinkMatcherValidationCallback} callback The callback to use, this can\n * be cleared with null.\n */\nTerminal.prototype.setHypertextValidationCallback = function(callback) {\n if (!this.linkifier) {\n throw new Error('Cannot attach a hypertext validation callback before Terminal.open is called');\n }\n this.linkifier.setHypertextValidationCallback(callback);\n // Refresh to force links to refresh\n this.refresh(0, this.rows - 1);\n};\n\n/**\n * Registers a link matcher, allowing custom link patterns to be matched and\n * handled.\n * @param {RegExp} regex The regular expression to search for, specifically\n * this searches the textContent of the rows. You will want to use \\s to match\n * a space ' ' character for example.\n * @param {LinkMatcherHandler} handler The callback when the link is called.\n * @param {LinkMatcherOptions} [options] Options for the link matcher.\n * @return {number} The ID of the new matcher, this can be used to deregister.\n */\nTerminal.prototype.registerLinkMatcher = function(regex, handler, options) {\n if (this.linkifier) {\n var matcherId = this.linkifier.registerLinkMatcher(regex, handler, options);\n this.refresh(0, this.rows - 1);\n return matcherId;\n }\n};\n\n/**\n * Deregisters a link matcher if it has been registered.\n * @param {number} matcherId The link matcher's ID (returned after register)\n */\nTerminal.prototype.deregisterLinkMatcher = function(matcherId) {\n if (this.linkifier) {\n if (this.linkifier.deregisterLinkMatcher(matcherId)) {\n this.refresh(0, this.rows - 1);\n }\n }\n};\n\n/**\n * Gets whether the terminal has an active selection.\n */\nTerminal.prototype.hasSelection = function() {\n return this.selectionManager.hasSelection;\n};\n\n/**\n * Gets the terminal's current selection, this is useful for implementing copy\n * behavior outside of xterm.js.\n */\nTerminal.prototype.getSelection = function() {\n return this.selectionManager.selectionText;\n};\n\n/**\n * Clears the current terminal selection.\n */\nTerminal.prototype.clearSelection = function() {\n this.selectionManager.clearSelection();\n};\n\n/**\n * Selects all text within the terminal.\n */\nTerminal.prototype.selectAll = function() {\n this.selectionManager.selectAll();\n};\n\n/**\n * Handle a keydown event\n * Key Resources:\n * - https://developer.mozilla.org/en-US/docs/DOM/KeyboardEvent\n * @param {KeyboardEvent} ev The keydown event to be handled.\n */\nTerminal.prototype.keyDown = function(ev) {\n if (this.customKeyEventHandler && this.customKeyEventHandler(ev) === false) 
{\n return false;\n }\n\n this.restartCursorBlinking();\n\n if (!this.compositionHelper.keydown.bind(this.compositionHelper)(ev)) {\n if (this.ybase !== this.ydisp) {\n this.scrollToBottom();\n }\n return false;\n }\n\n var self = this;\n var result = this.evaluateKeyEscapeSequence(ev);\n\n if (result.key === C0.DC3) { // XOFF\n this.writeStopped = true;\n } else if (result.key === C0.DC1) { // XON\n this.writeStopped = false;\n }\n\n if (result.scrollDisp) {\n this.scrollDisp(result.scrollDisp);\n return this.cancel(ev, true);\n }\n\n if (isThirdLevelShift(this, ev)) {\n return true;\n }\n\n if (result.cancel) {\n // The event is canceled at the end already, is this necessary?\n this.cancel(ev, true);\n }\n\n if (!result.key) {\n return true;\n }\n\n this.emit('keydown', ev);\n this.emit('key', result.key, ev);\n this.showCursor();\n this.handler(result.key);\n\n return this.cancel(ev, true);\n};\n\n/**\n * Returns an object that determines how a KeyboardEvent should be handled. The key of the\n * returned value is the new key code to pass to the PTY.\n *\n * Reference: http://invisible-island.net/xterm/ctlseqs/ctlseqs.html\n * @param {KeyboardEvent} ev The keyboard event to be translated to key escape sequence.\n */\nTerminal.prototype.evaluateKeyEscapeSequence = function(ev) {\n var result = {\n // Whether to cancel event propogation (NOTE: this may not be needed since the event is\n // canceled at the end of keyDown\n cancel: false,\n // The new key even to emit\n key: undefined,\n // The number of characters to scroll, if this is defined it will cancel the event\n scrollDisp: undefined\n };\n var modifiers = ev.shiftKey << 0 | ev.altKey << 1 | ev.ctrlKey << 2 | ev.metaKey << 3;\n switch (ev.keyCode) {\n case 8:\n // backspace\n if (ev.shiftKey) {\n result.key = C0.BS; // ^H\n break;\n }\n result.key = C0.DEL; // ^?\n break;\n case 9:\n // tab\n if (ev.shiftKey) {\n result.key = C0.ESC + '[Z';\n break;\n }\n result.key = C0.HT;\n result.cancel = true;\n break;\n case 13:\n // return/enter\n result.key = C0.CR;\n result.cancel = true;\n break;\n case 27:\n // escape\n result.key = C0.ESC;\n result.cancel = true;\n break;\n case 37:\n // left-arrow\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'D';\n // HACK: Make Alt + left-arrow behave like Ctrl + left-arrow: move one word backwards\n // http://unix.stackexchange.com/a/108106\n // macOS uses different escape sequences than linux\n if (result.key == C0.ESC + '[1;3D') {\n result.key = (this.browser.isMac) ? C0.ESC + 'b' : C0.ESC + '[1;5D';\n }\n } else if (this.applicationCursor) {\n result.key = C0.ESC + 'OD';\n } else {\n result.key = C0.ESC + '[D';\n }\n break;\n case 39:\n // right-arrow\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'C';\n // HACK: Make Alt + right-arrow behave like Ctrl + right-arrow: move one word forward\n // http://unix.stackexchange.com/a/108106\n // macOS uses different escape sequences than linux\n if (result.key == C0.ESC + '[1;3C') {\n result.key = (this.browser.isMac) ? 
C0.ESC + 'f' : C0.ESC + '[1;5C';\n }\n } else if (this.applicationCursor) {\n result.key = C0.ESC + 'OC';\n } else {\n result.key = C0.ESC + '[C';\n }\n break;\n case 38:\n // up-arrow\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'A';\n // HACK: Make Alt + up-arrow behave like Ctrl + up-arrow\n // http://unix.stackexchange.com/a/108106\n if (result.key == C0.ESC + '[1;3A') {\n result.key = C0.ESC + '[1;5A';\n }\n } else if (this.applicationCursor) {\n result.key = C0.ESC + 'OA';\n } else {\n result.key = C0.ESC + '[A';\n }\n break;\n case 40:\n // down-arrow\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'B';\n // HACK: Make Alt + down-arrow behave like Ctrl + down-arrow\n // http://unix.stackexchange.com/a/108106\n if (result.key == C0.ESC + '[1;3B') {\n result.key = C0.ESC + '[1;5B';\n }\n } else if (this.applicationCursor) {\n result.key = C0.ESC + 'OB';\n } else {\n result.key = C0.ESC + '[B';\n }\n break;\n case 45:\n // insert\n if (!ev.shiftKey && !ev.ctrlKey) {\n // or + are used to\n // copy-paste on some systems.\n result.key = C0.ESC + '[2~';\n }\n break;\n case 46:\n // delete\n if (modifiers) {\n result.key = C0.ESC + '[3;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[3~';\n }\n break;\n case 36:\n // home\n if (modifiers)\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'H';\n else if (this.applicationCursor)\n result.key = C0.ESC + 'OH';\n else\n result.key = C0.ESC + '[H';\n break;\n case 35:\n // end\n if (modifiers)\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'F';\n else if (this.applicationCursor)\n result.key = C0.ESC + 'OF';\n else\n result.key = C0.ESC + '[F';\n break;\n case 33:\n // page up\n if (ev.shiftKey) {\n result.scrollDisp = -(this.rows - 1);\n } else {\n result.key = C0.ESC + '[5~';\n }\n break;\n case 34:\n // page down\n if (ev.shiftKey) {\n result.scrollDisp = this.rows - 1;\n } else {\n result.key = C0.ESC + '[6~';\n }\n break;\n case 112:\n // F1-F12\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'P';\n } else {\n result.key = C0.ESC + 'OP';\n }\n break;\n case 113:\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'Q';\n } else {\n result.key = C0.ESC + 'OQ';\n }\n break;\n case 114:\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'R';\n } else {\n result.key = C0.ESC + 'OR';\n }\n break;\n case 115:\n if (modifiers) {\n result.key = C0.ESC + '[1;' + (modifiers + 1) + 'S';\n } else {\n result.key = C0.ESC + 'OS';\n }\n break;\n case 116:\n if (modifiers) {\n result.key = C0.ESC + '[15;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[15~';\n }\n break;\n case 117:\n if (modifiers) {\n result.key = C0.ESC + '[17;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[17~';\n }\n break;\n case 118:\n if (modifiers) {\n result.key = C0.ESC + '[18;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[18~';\n }\n break;\n case 119:\n if (modifiers) {\n result.key = C0.ESC + '[19;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[19~';\n }\n break;\n case 120:\n if (modifiers) {\n result.key = C0.ESC + '[20;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[20~';\n }\n break;\n case 121:\n if (modifiers) {\n result.key = C0.ESC + '[21;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[21~';\n }\n break;\n case 122:\n if (modifiers) {\n result.key = C0.ESC + '[23;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[23~';\n }\n break;\n case 123:\n 
if (modifiers) {\n result.key = C0.ESC + '[24;' + (modifiers + 1) + '~';\n } else {\n result.key = C0.ESC + '[24~';\n }\n break;\n default:\n // a-z and space\n if (ev.ctrlKey && !ev.shiftKey && !ev.altKey && !ev.metaKey) {\n if (ev.keyCode >= 65 && ev.keyCode <= 90) {\n result.key = String.fromCharCode(ev.keyCode - 64);\n } else if (ev.keyCode === 32) {\n // NUL\n result.key = String.fromCharCode(0);\n } else if (ev.keyCode >= 51 && ev.keyCode <= 55) {\n // escape, file sep, group sep, record sep, unit sep\n result.key = String.fromCharCode(ev.keyCode - 51 + 27);\n } else if (ev.keyCode === 56) {\n // delete\n result.key = String.fromCharCode(127);\n } else if (ev.keyCode === 219) {\n // ^[ - Control Sequence Introducer (CSI)\n result.key = String.fromCharCode(27);\n } else if (ev.keyCode === 220) {\n // ^\\ - String Terminator (ST)\n result.key = String.fromCharCode(28);\n } else if (ev.keyCode === 221) {\n // ^] - Operating System Command (OSC)\n result.key = String.fromCharCode(29);\n }\n } else if (!this.browser.isMac && ev.altKey && !ev.ctrlKey && !ev.metaKey) {\n // On Mac this is a third level shift. Use instead.\n if (ev.keyCode >= 65 && ev.keyCode <= 90) {\n result.key = C0.ESC + String.fromCharCode(ev.keyCode + 32);\n } else if (ev.keyCode === 192) {\n result.key = C0.ESC + '`';\n } else if (ev.keyCode >= 48 && ev.keyCode <= 57) {\n result.key = C0.ESC + (ev.keyCode - 48);\n }\n } else if (this.browser.isMac && !ev.altKey && !ev.ctrlKey && ev.metaKey) {\n if (ev.keyCode === 65) { // cmd + a\n this.selectAll();\n }\n }\n break;\n }\n\n return result;\n};\n\n/**\n * Set the G level of the terminal\n * @param g\n */\nTerminal.prototype.setgLevel = function(g) {\n this.glevel = g;\n this.charset = this.charsets[g];\n};\n\n/**\n * Set the charset for the given G level of the terminal\n * @param g\n * @param charset\n */\nTerminal.prototype.setgCharset = function(g, charset) {\n this.charsets[g] = charset;\n if (this.glevel === g) {\n this.charset = charset;\n }\n};\n\n/**\n * Handle a keypress event.\n * Key Resources:\n * - https://developer.mozilla.org/en-US/docs/DOM/KeyboardEvent\n * @param {KeyboardEvent} ev The keypress event to be handled.\n */\nTerminal.prototype.keyPress = function(ev) {\n var key;\n\n if (this.customKeyEventHandler && this.customKeyEventHandler(ev) === false) {\n return false;\n }\n\n this.cancel(ev);\n\n if (ev.charCode) {\n key = ev.charCode;\n } else if (ev.which == null) {\n key = ev.keyCode;\n } else if (ev.which !== 0 && ev.charCode !== 0) {\n key = ev.which;\n } else {\n return false;\n }\n\n if (!key || (\n (ev.altKey || ev.ctrlKey || ev.metaKey) && !isThirdLevelShift(this, ev)\n )) {\n return false;\n }\n\n key = String.fromCharCode(key);\n\n this.emit('keypress', key, ev);\n this.emit('key', key, ev);\n this.showCursor();\n this.handler(key);\n\n return true;\n};\n\n/**\n * Send data for handling to the terminal\n * @param {string} data\n */\nTerminal.prototype.send = function(data) {\n var self = this;\n\n if (!this.queue) {\n setTimeout(function() {\n self.handler(self.queue);\n self.queue = '';\n }, 1);\n }\n\n this.queue += data;\n};\n\n/**\n * Ring the bell.\n * Note: We could do sweet things with webaudio here\n */\nTerminal.prototype.bell = function() {\n if (!this.visualBell) return;\n var self = this;\n this.element.style.borderColor = 'white';\n setTimeout(function() {\n self.element.style.borderColor = '';\n }, 10);\n if (this.popOnBell) this.focus();\n};\n\n/**\n * Log the current state to the console.\n */\nTerminal.prototype.log = 
function() {\n if (!this.debug) return;\n if (!this.context.console || !this.context.console.log) return;\n var args = Array.prototype.slice.call(arguments);\n this.context.console.log.apply(this.context.console, args);\n};\n\n/**\n * Log the current state as error to the console.\n */\nTerminal.prototype.error = function() {\n if (!this.debug) return;\n if (!this.context.console || !this.context.console.error) return;\n var args = Array.prototype.slice.call(arguments);\n this.context.console.error.apply(this.context.console, args);\n};\n\n/**\n * Resizes the terminal.\n *\n * @param {number} x The number of columns to resize to.\n * @param {number} y The number of rows to resize to.\n */\nTerminal.prototype.resize = function(x, y) {\n if (isNaN(x) || isNaN(y)) {\n return;\n }\n\n if (y > this.getOption('scrollback')) {\n this.setOption('scrollback', y)\n }\n\n var line\n , el\n , i\n , j\n , ch\n , addToY;\n\n if (x === this.cols && y === this.rows) {\n return;\n }\n\n if (x < 1) x = 1;\n if (y < 1) y = 1;\n\n // resize cols\n j = this.cols;\n if (j < x) {\n ch = [this.defAttr, ' ', 1]; // does xterm use the default attr?\n i = this.lines.length;\n while (i--) {\n while (this.lines.get(i).length < x) {\n this.lines.get(i).push(ch);\n }\n }\n }\n\n this.cols = x;\n this.setupStops(this.cols);\n\n // resize rows\n j = this.rows;\n addToY = 0;\n if (j < y) {\n el = this.element;\n while (j++ < y) {\n // y is rows, not this.y\n if (this.lines.length < y + this.ybase) {\n if (this.ybase > 0 && this.lines.length <= this.ybase + this.y + addToY + 1) {\n // There is room above the buffer and there are no empty elements below the line,\n // scroll up\n this.ybase--;\n addToY++;\n if (this.ydisp > 0) {\n // Viewport is at the top of the buffer, must increase downwards\n this.ydisp--;\n }\n } else {\n // Add a blank line if there is no buffer left at the top to scroll to, or if there\n // are blank lines after the cursor\n this.lines.push(this.blankLine());\n }\n }\n if (this.children.length < y) {\n this.insertRow();\n }\n }\n } else { // (j > y)\n while (j-- > y) {\n if (this.lines.length > y + this.ybase) {\n if (this.lines.length > this.ybase + this.y + 1) {\n // The line is a blank line below the cursor, remove it\n this.lines.pop();\n } else {\n // The line is the cursor, scroll down\n this.ybase++;\n this.ydisp++;\n }\n }\n if (this.children.length > y) {\n el = this.children.shift();\n if (!el) continue;\n el.parentNode.removeChild(el);\n }\n }\n }\n this.rows = y;\n\n // Make sure that the cursor stays on screen\n if (this.y >= y) {\n this.y = y - 1;\n }\n if (addToY) {\n this.y += addToY;\n }\n\n if (this.x >= x) {\n this.x = x - 1;\n }\n\n this.scrollTop = 0;\n this.scrollBottom = y - 1;\n\n this.charMeasure.measure();\n\n this.refresh(0, this.rows - 1);\n\n this.normal = null;\n\n this.geometry = [this.cols, this.rows];\n this.emit('resize', {terminal: this, cols: x, rows: y});\n};\n\n/**\n * Updates the range of rows to refresh\n * @param {number} y The number of rows to refresh next.\n */\nTerminal.prototype.updateRange = function(y) {\n if (y < this.refreshStart) this.refreshStart = y;\n if (y > this.refreshEnd) this.refreshEnd = y;\n // if (y > this.refreshEnd) {\n // this.refreshEnd = y;\n // if (y > this.rows - 1) {\n // this.refreshEnd = this.rows - 1;\n // }\n // }\n};\n\n/**\n * Set the range of refreshing to the maximum value\n */\nTerminal.prototype.maxRange = function() {\n this.refreshStart = 0;\n this.refreshEnd = this.rows - 1;\n};\n\n\n\n/**\n * Setup the tab stops.\n * 
@param {number} i\n */\nTerminal.prototype.setupStops = function(i) {\n if (i != null) {\n if (!this.tabs[i]) {\n i = this.prevStop(i);\n }\n } else {\n this.tabs = {};\n i = 0;\n }\n\n for (; i < this.cols; i += this.getOption('tabStopWidth')) {\n this.tabs[i] = true;\n }\n};\n\n\n/**\n * Move the cursor to the previous tab stop from the given position (default is current).\n * @param {number} x The position to move the cursor to the previous tab stop.\n */\nTerminal.prototype.prevStop = function(x) {\n if (x == null) x = this.x;\n while (!this.tabs[--x] && x > 0);\n return x >= this.cols\n ? this.cols - 1\n : x < 0 ? 0 : x;\n};\n\n\n/**\n * Move the cursor one tab stop forward from the given position (default is current).\n * @param {number} x The position to move the cursor one tab stop forward.\n */\nTerminal.prototype.nextStop = function(x) {\n if (x == null) x = this.x;\n while (!this.tabs[++x] && x < this.cols);\n return x >= this.cols\n ? this.cols - 1\n : x < 0 ? 0 : x;\n};\n\n\n/**\n * Erase in the identified line everything from \"x\" to the end of the line (right).\n * @param {number} x The column from which to start erasing to the end of the line.\n * @param {number} y The line in which to operate.\n */\nTerminal.prototype.eraseRight = function(x, y) {\n var line = this.lines.get(this.ybase + y);\n if (!line) {\n return;\n }\n var ch = [this.eraseAttr(), ' ', 1]; // xterm\n for (; x < this.cols; x++) {\n line[x] = ch;\n }\n this.updateRange(y);\n};\n\n\n\n/**\n * Erase in the identified line everything from \"x\" to the start of the line (left).\n * @param {number} x The column from which to start erasing to the start of the line.\n * @param {number} y The line in which to operate.\n */\nTerminal.prototype.eraseLeft = function(x, y) {\n var line = this.lines.get(this.ybase + y);\n if (!line) {\n return;\n }\n var ch = [this.eraseAttr(), ' ', 1]; // xterm\n x++;\n while (x--) {\n line[x] = ch;\n }\n this.updateRange(y);\n};\n\n/**\n * Clears the entire buffer, making the prompt line the new first line.\n */\nTerminal.prototype.clear = function() {\n if (this.ybase === 0 && this.y === 0) {\n // Don't clear if it's already clear\n return;\n }\n this.lines.set(0, this.lines.get(this.ybase + this.y));\n this.lines.length = 1;\n this.ydisp = 0;\n this.ybase = 0;\n this.y = 0;\n for (var i = 1; i < this.rows; i++) {\n this.lines.push(this.blankLine());\n }\n this.refresh(0, this.rows - 1);\n this.emit('scroll', this.ydisp);\n};\n\n/**\n * Erase all content in the given line\n * @param {number} y The line to erase all of its contents.\n */\nTerminal.prototype.eraseLine = function(y) {\n this.eraseRight(0, y);\n};\n\n\n/**\n * Return the data array of a blank line\n * @param {number} cur First bunch of data for each \"blank\" character.\n * @param {boolean} isWrapped Whether the new line is wrapped from the previous line.\n */\nTerminal.prototype.blankLine = function(cur, isWrapped) {\n var attr = cur\n ? this.eraseAttr()\n : this.defAttr;\n\n var ch = [attr, ' ', 1] // width defaults to 1 halfwidth character\n , line = []\n , i = 0;\n\n // TODO: It is not ideal that this is a property on an array, a buffer line\n // class should be added that will hold this data and other useful functions.\n if (isWrapped) {\n line.isWrapped = isWrapped;\n }\n\n for (; i < this.cols; i++) {\n line[i] = ch;\n }\n\n return line;\n};\n\n\n/**\n * If cur return the back color xterm feature attribute. 
Else return defAttr.\n * @param {object} cur\n */\nTerminal.prototype.ch = function(cur) {\n return cur\n ? [this.eraseAttr(), ' ', 1]\n : [this.defAttr, ' ', 1];\n};\n\n\n/**\n * Evaluate if the current erminal is the given argument.\n * @param {object} term The terminal to evaluate\n */\nTerminal.prototype.is = function(term) {\n var name = this.termName;\n return (name + '').indexOf(term) === 0;\n};\n\n\n/**\n * Emit the 'data' event and populate the given data.\n * @param {string} data The data to populate in the event.\n */\nTerminal.prototype.handler = function(data) {\n // Prevents all events to pty process if stdin is disabled\n if (this.options.disableStdin) {\n return;\n }\n\n // Input is being sent to the terminal, the terminal should focus the prompt.\n if (this.ybase !== this.ydisp) {\n this.scrollToBottom();\n }\n this.emit('data', data);\n};\n\n\n/**\n * Emit the 'title' event and populate the given title.\n * @param {string} title The title to populate in the event.\n */\nTerminal.prototype.handleTitle = function(title) {\n /**\n * This event is emitted when the title of the terminal is changed\n * from inside the terminal. The parameter is the new title.\n *\n * @event title\n */\n this.emit('title', title);\n};\n\n\n/**\n * ESC\n */\n\n/**\n * ESC D Index (IND is 0x84).\n */\nTerminal.prototype.index = function() {\n this.y++;\n if (this.y > this.scrollBottom) {\n this.y--;\n this.scroll();\n }\n // If the end of the line is hit, prevent this action from wrapping around to the next line.\n if (this.x >= this.cols) {\n this.x--;\n }\n};\n\n\n/**\n * ESC M Reverse Index (RI is 0x8d).\n *\n * Move the cursor up one row, inserting a new blank line if necessary.\n */\nTerminal.prototype.reverseIndex = function() {\n var j;\n if (this.y === this.scrollTop) {\n // possibly move the code below to term.reverseScroll();\n // test: echo -ne '\\e[1;1H\\e[44m\\eM\\e[0m'\n // blankLine(true) is xterm/linux behavior\n this.lines.shiftElements(this.y + this.ybase, this.rows - 1, 1);\n this.lines.set(this.y + this.ybase, this.blankLine(true));\n this.updateRange(this.scrollTop);\n this.updateRange(this.scrollBottom);\n } else {\n this.y--;\n }\n};\n\n\n/**\n * ESC c Full Reset (RIS).\n */\nTerminal.prototype.reset = function() {\n this.options.rows = this.rows;\n this.options.cols = this.cols;\n var customKeyEventHandler = this.customKeyEventHandler;\n var cursorBlinkInterval = this.cursorBlinkInterval;\n Terminal.call(this, this.options);\n this.customKeyEventHandler = customKeyEventHandler;\n this.cursorBlinkInterval = cursorBlinkInterval;\n this.refresh(0, this.rows - 1);\n this.viewport.syncScrollArea();\n};\n\n\n/**\n * ESC H Tab Set (HTS is 0x88).\n */\nTerminal.prototype.tabSet = function() {\n this.tabs[this.x] = true;\n};\n\n/**\n * Helpers\n */\n\nfunction on(el, type, handler, capture) {\n if (!Array.isArray(el)) {\n el = [el];\n }\n el.forEach(function (element) {\n element.addEventListener(type, handler, capture || false);\n });\n}\n\nfunction off(el, type, handler, capture) {\n el.removeEventListener(type, handler, capture || false);\n}\n\nfunction cancel(ev, force) {\n if (!this.cancelEvents && !force) {\n return;\n }\n ev.preventDefault();\n ev.stopPropagation();\n return false;\n}\n\nfunction inherits(child, parent) {\n function f() {\n this.constructor = child;\n }\n f.prototype = parent.prototype;\n child.prototype = new f;\n}\n\nfunction indexOf(obj, el) {\n var i = obj.length;\n while (i--) {\n if (obj[i] === el) return i;\n }\n return -1;\n}\n\nfunction 
isThirdLevelShift(term, ev) {\n var thirdLevelKey =\n (term.browser.isMac && ev.altKey && !ev.ctrlKey && !ev.metaKey) ||\n (term.browser.isMSWindows && ev.altKey && ev.ctrlKey && !ev.metaKey);\n\n if (ev.type == 'keypress') {\n return thirdLevelKey;\n }\n\n // Don't invoke for arrows, pageDown, home, backspace, etc. (on non-keypress events)\n return thirdLevelKey && (!ev.keyCode || ev.keyCode > 47);\n}\n\n// Expose to InputHandler (temporary)\nTerminal.prototype.matchColor = matchColor;\n\nfunction matchColor(r1, g1, b1) {\n var hash = (r1 << 16) | (g1 << 8) | b1;\n\n if (matchColor._cache[hash] != null) {\n return matchColor._cache[hash];\n }\n\n var ldiff = Infinity\n , li = -1\n , i = 0\n , c\n , r2\n , g2\n , b2\n , diff;\n\n for (; i < Terminal.vcolors.length; i++) {\n c = Terminal.vcolors[i];\n r2 = c[0];\n g2 = c[1];\n b2 = c[2];\n\n diff = matchColor.distance(r1, g1, b1, r2, g2, b2);\n\n if (diff === 0) {\n li = i;\n break;\n }\n\n if (diff < ldiff) {\n ldiff = diff;\n li = i;\n }\n }\n\n return matchColor._cache[hash] = li;\n}\n\nmatchColor._cache = {};\n\n// http://stackoverflow.com/questions/1633828\nmatchColor.distance = function(r1, g1, b1, r2, g2, b2) {\n return Math.pow(30 * (r1 - r2), 2)\n + Math.pow(59 * (g1 - g2), 2)\n + Math.pow(11 * (b1 - b2), 2);\n};\n\nfunction each(obj, iter, con) {\n if (obj.forEach) return obj.forEach(iter, con);\n for (var i = 0; i < obj.length; i++) {\n iter.call(con, obj[i], i, obj);\n }\n}\n\nfunction wasMondifierKeyOnlyEvent(ev) {\n return ev.keyCode === 16 || // Shift\n ev.keyCode === 17 || // Ctrl\n ev.keyCode === 18; // Alt\n}\n\nfunction keys(obj) {\n if (Object.keys) return Object.keys(obj);\n var key, keys = [];\n for (key in obj) {\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n keys.push(key);\n }\n }\n return keys;\n}\n\n/**\n * Expose\n */\n\nTerminal.EventEmitter = EventEmitter;\nTerminal.inherits = inherits;\n\n/**\n * Adds an event listener to the terminal.\n *\n * @param {string} event The name of the event. TODO: Document all event types\n * @param {function} callback The function to call when the event is triggered.\n */\nTerminal.on = on;\nTerminal.off = off;\nTerminal.cancel = cancel;\n\nmodule.exports = Terminal;\n","/**\n * @license MIT\n */\n\nimport { CharMeasure } from './CharMeasure';\n\nexport function getCoordsRelativeToElement(event: MouseEvent, element: HTMLElement): [number, number] {\n // Ignore browsers that don't support MouseEvent.pageX\n if (event.pageX == null) {\n return null;\n }\n\n let x = event.pageX;\n let y = event.pageY;\n\n // Converts the coordinates from being relative to the document to being\n // relative to the terminal.\n while (element && element !== self.document.documentElement) {\n x -= element.offsetLeft;\n y -= element.offsetTop;\n element = 'offsetParent' in element ? element.offsetParent : element.parentElement;\n }\n return [x, y];\n}\n\n/**\n * Gets coordinates within the terminal for a particular mouse event. The result\n * is returned as an array in the form [x, y] instead of an object as it's a\n * little faster and this function is used in some low level code.\n * @param event The mouse event.\n * @param rowContainer The terminal's row container.\n * @param charMeasure The char measure object used to determine character sizes.\n * @param colCount The number of columns in the terminal.\n * @param rowCount The number of rows n the terminal.\n * @param isSelection Whether the request is for the selection or not. 
This will\n * apply an offset to the x value such that the left half of the cell will\n * select that cell and the right half will select the next cell.\n */\nexport function getCoords(event: MouseEvent, rowContainer: HTMLElement, charMeasure: CharMeasure, colCount: number, rowCount: number, isSelection?: boolean): [number, number] {\n const coords = getCoordsRelativeToElement(event, rowContainer);\n\n // Convert to cols/rows.\n coords[0] = Math.ceil((coords[0] + (isSelection ? charMeasure.width / 2 : 0)) / charMeasure.width);\n coords[1] = Math.ceil(coords[1] / charMeasure.height);\n\n // Ensure coordinates are within the terminal viewport.\n coords[0] = Math.min(Math.max(coords[0], 1), colCount + 1);\n coords[1] = Math.min(Math.max(coords[1], 1), rowCount + 1);\n\n return coords;\n}\n\n/**\n * Gets coordinates within the terminal for a particular mouse event, wrapping\n * them to the bounds of the terminal and adding 32 to both the x and y values\n * as expected by xterm.\n * @param event The mouse event.\n * @param rowContainer The terminal's row container.\n * @param charMeasure The char measure object used to determine character sizes.\n * @param colCount The number of columns in the terminal.\n * @param rowCount The number of rows in the terminal.\n */\nexport function getRawByteCoords(event: MouseEvent, rowContainer: HTMLElement, charMeasure: CharMeasure, colCount: number, rowCount: number): { x: number, y: number } {\n const coords = getCoords(event, rowContainer, charMeasure, colCount, rowCount);\n let x = coords[0];\n let y = coords[1];\n\n // xterm sends raw bytes and starts at 32 (SP) for each.\n x += 32;\n y += 32;\n\n return { x, y };\n}\n","/**\n * Generic utilities module with methods that can be helpful at different parts of the code base.\n * @module xterm/utils/Generic\n * @license MIT\n */\n\n/**\n * Return if the given array contains the given element\n * @param {Array} array The array to search for the given element.\n * @param {Object} el The element to look for into the array\n */\nexport function contains(arr: any[], el: any) {\n return arr.indexOf(el) >= 0;\n};\n","/**\n * @module xterm/utils/DomElementObjectPool\n * @license MIT\n */\n\n/**\n * An object pool that manages acquisition and releasing of DOM elements for\n * when reuse is desirable.\n */\nexport class DomElementObjectPool {\n private static readonly OBJECT_ID_ATTRIBUTE = 'data-obj-id';\n\n private static _objectCount = 0;\n\n private _type: string;\n private _pool: HTMLElement[];\n private _inUse: {[key: string]: HTMLElement};\n\n /**\n * @param type The DOM element type (div, span, etc.).\n */\n constructor(private type: string) {\n this._type = type;\n this._pool = [];\n this._inUse = {};\n }\n\n /**\n * Acquire an element from the pool, creating it if the pool is empty.\n */\n public acquire(): HTMLElement {\n let element: HTMLElement;\n if (this._pool.length === 0) {\n element = this._createNew();\n } else {\n element = this._pool.pop();\n }\n this._inUse[element.getAttribute(DomElementObjectPool.OBJECT_ID_ATTRIBUTE)] = element;\n return element;\n }\n\n /**\n * Release an element back into the pool. 
It's up to the caller of this\n * function to ensure that all external references to the element have been\n * removed.\n * @param element The element being released.\n */\n public release(element: HTMLElement): void {\n if (!this._inUse[element.getAttribute(DomElementObjectPool.OBJECT_ID_ATTRIBUTE)]) {\n throw new Error('Could not release an element not yet acquired');\n }\n delete this._inUse[element.getAttribute(DomElementObjectPool.OBJECT_ID_ATTRIBUTE)];\n this._cleanElement(element);\n this._pool.push(element);\n }\n\n /**\n * Creates a new element for the pool.\n */\n private _createNew(): HTMLElement {\n const element = document.createElement(this._type);\n const id = DomElementObjectPool._objectCount++;\n element.setAttribute(DomElementObjectPool.OBJECT_ID_ATTRIBUTE, id.toString(10));\n return element;\n }\n\n /**\n * Resets an element back to a \"clean state\".\n * @param element The element to be cleaned.\n */\n private _cleanElement(element: HTMLElement): void {\n element.className = '';\n element.innerHTML = '';\n }\n}\n","/**\n * Represents a circular list; a list with a maximum size that wraps around when push is called,\n * overriding values at the start of the list.\n * @module xterm/utils/CircularList\n * @license MIT\n */\nimport { EventEmitter } from '../EventEmitter';\n\nexport class CircularList extends EventEmitter {\n private _array: T[];\n private _startIndex: number;\n private _length: number;\n\n constructor(maxLength: number) {\n super();\n this._array = new Array(maxLength);\n this._startIndex = 0;\n this._length = 0;\n }\n\n public get maxLength(): number {\n return this._array.length;\n }\n\n public set maxLength(newMaxLength: number) {\n // Reconstruct array, starting at index 0. Only transfer values from the\n // indexes 0 to length.\n let newArray = new Array(newMaxLength);\n for (let i = 0; i < Math.min(newMaxLength, this.length); i++) {\n newArray[i] = this._array[this._getCyclicIndex(i)];\n }\n this._array = newArray;\n this._startIndex = 0;\n }\n\n public get length(): number {\n return this._length;\n }\n\n public set length(newLength: number) {\n if (newLength > this._length) {\n for (let i = this._length; i < newLength; i++) {\n this._array[i] = undefined;\n }\n }\n this._length = newLength;\n }\n\n public get forEach(): (callbackfn: (value: T, index: number) => void) => void {\n return (callbackfn: (value: T, index: number) => void) => {\n let i = 0;\n let length = this.length;\n for (let i = 0; i < length; i++) {\n callbackfn(this.get(i), i);\n }\n };\n }\n\n /**\n * Gets the value at an index.\n *\n * Note that for performance reasons there is no bounds checking here, the index reference is\n * circular so this should always return a value and never throw.\n * @param index The index of the value to get.\n * @return The value corresponding to the index.\n */\n public get(index: number): T {\n return this._array[this._getCyclicIndex(index)];\n }\n\n /**\n * Sets the value at an index.\n *\n * Note that for performance reasons there is no bounds checking here, the index reference is\n * circular so this should always return a value and never throw.\n * @param index The index to set.\n * @param value The value to set.\n */\n public set(index: number, value: T): void {\n this._array[this._getCyclicIndex(index)] = value;\n }\n\n /**\n * Pushes a new value onto the list, wrapping around to the start of the array, overriding index 0\n * if the maximum length is reached.\n * @param value The value to push onto the list.\n */\n public push(value: T): void 
{\n this._array[this._getCyclicIndex(this._length)] = value;\n if (this._length === this.maxLength) {\n this._startIndex++;\n if (this._startIndex === this.maxLength) {\n this._startIndex = 0;\n }\n this.emit('trim', 1);\n } else {\n this._length++;\n }\n }\n\n /**\n * Removes and returns the last value on the list.\n * @return The popped value.\n */\n public pop(): T {\n return this._array[this._getCyclicIndex(this._length-- - 1)];\n }\n\n /**\n * Deletes and/or inserts items at a particular index (in that order). Unlike\n * Array.prototype.splice, this operation does not return the deleted items as a new array in\n * order to save creating a new array. Note that this operation may shift all values in the list\n * in the worst case.\n * @param start The index to delete and/or insert.\n * @param deleteCount The number of elements to delete.\n * @param items The items to insert.\n */\n public splice(start: number, deleteCount: number, ...items: T[]): void {\n // Delete items\n if (deleteCount) {\n for (let i = start; i < this._length - deleteCount; i++) {\n this._array[this._getCyclicIndex(i)] = this._array[this._getCyclicIndex(i + deleteCount)];\n }\n this._length -= deleteCount;\n }\n\n if (items && items.length) {\n // Add items\n for (let i = this._length - 1; i >= start; i--) {\n this._array[this._getCyclicIndex(i + items.length)] = this._array[this._getCyclicIndex(i)];\n }\n for (let i = 0; i < items.length; i++) {\n this._array[this._getCyclicIndex(start + i)] = items[i];\n }\n\n // Adjust length as needed\n if (this._length + items.length > this.maxLength) {\n const countToTrim = (this._length + items.length) - this.maxLength;\n this._startIndex += countToTrim;\n this._length = this.maxLength;\n this.emit('trim', countToTrim);\n } else {\n this._length += items.length;\n }\n }\n }\n\n /**\n * Trims a number of items from the start of the list.\n * @param count The number of items to remove.\n */\n public trimStart(count: number): void {\n if (count > this._length) {\n count = this._length;\n }\n this._startIndex += count;\n this._length -= count;\n this.emit('trim', count);\n }\n\n public shiftElements(start: number, count: number, offset: number): void {\n if (count <= 0) {\n return;\n }\n if (start < 0 || start >= this._length) {\n throw new Error('start argument out of range');\n }\n if (start + offset < 0) {\n throw new Error('Cannot shift elements in list beyond index 0');\n }\n\n if (offset > 0) {\n for (let i = count - 1; i >= 0; i--) {\n this.set(start + i + offset, this.get(start + i));\n }\n const expandListBy = (start + count + offset) - this._length;\n if (expandListBy > 0) {\n this._length += expandListBy;\n while (this._length > this.maxLength) {\n this._length--;\n this._startIndex++;\n this.emit('trim', 1);\n }\n }\n } else {\n for (let i = 0; i < count; i++) {\n this.set(start + i + offset, this.get(start + i));\n }\n }\n }\n\n /**\n * Gets the cyclic index for the specified regular index. 
The cyclic index can then be used on the\n * backing array to get the element associated with the regular index.\n * @param index The regular index.\n * @returns The cyclic index.\n */\n private _getCyclicIndex(index: number): number {\n return (this._startIndex + index) % this.maxLength;\n }\n}\n","/**\n * @module xterm/utils/CharMeasure\n * @license MIT\n */\n\nimport { EventEmitter } from '../EventEmitter.js';\n\n/**\n * Utility class that measures the size of a character.\n */\nexport class CharMeasure extends EventEmitter {\n private _document: Document;\n private _parentElement: HTMLElement;\n private _measureElement: HTMLElement;\n private _width: number;\n private _height: number;\n\n constructor(document: Document, parentElement: HTMLElement) {\n super();\n this._document = document;\n this._parentElement = parentElement;\n }\n\n public get width(): number {\n return this._width;\n }\n\n public get height(): number {\n return this._height;\n }\n\n public measure(): void {\n if (!this._measureElement) {\n this._measureElement = this._document.createElement('span');\n this._measureElement.style.position = 'absolute';\n this._measureElement.style.top = '0';\n this._measureElement.style.left = '-9999em';\n this._measureElement.textContent = 'W';\n this._measureElement.setAttribute('aria-hidden', 'true');\n this._parentElement.appendChild(this._measureElement);\n // Perform _doMeasure async if the element was just attached as sometimes\n // getBoundingClientRect does not return accurate values without this.\n setTimeout(() => this._doMeasure(), 0);\n } else {\n this._doMeasure();\n }\n }\n\n private _doMeasure(): void {\n const geometry = this._measureElement.getBoundingClientRect();\n // The element is likely currently display:none, we should retain the\n // previous value.\n if (geometry.width === 0 || geometry.height === 0) {\n return;\n }\n if (this._width !== geometry.width || this._height !== geometry.height) {\n this._width = geometry.width;\n this._height = geometry.height;\n this.emit('charsizechanged');\n }\n }\n}\n","/**\n * Attributes and methods to help with identifying the current browser and platform.\n * @module xterm/utils/Browser\n * @license MIT\n */\n\nimport { contains } from './Generic';\n\nconst isNode = (typeof navigator === 'undefined') ? true : false;\nconst userAgent = (isNode) ? 'node' : navigator.userAgent;\nconst platform = (isNode) ? 'node' : navigator.platform;\n\nexport const isFirefox = !!~userAgent.indexOf('Firefox');\nexport const isMSIE = !!~userAgent.indexOf('MSIE') || !!~userAgent.indexOf('Trident');\n\n// Find the users platform. 
We use this to interpret the meta key\n// and ISO third level shifts.\n// http://stackoverflow.com/q/19877924/577598\nexport const isMac = contains(['Macintosh', 'MacIntel', 'MacPPC', 'Mac68K'], platform);\nexport const isIpad = platform === 'iPad';\nexport const isIphone = platform === 'iPhone';\nexport const isMSWindows = contains(['Windows', 'Win16', 'Win32', 'WinCE'], platform);\nexport const isLinux = platform.indexOf('Linux') >= 0;\n","/**\n * Clipboard handler module: exports methods for handling all clipboard-related events in the\n * terminal.\n * @module xterm/handlers/Clipboard\n * @license MIT\n */\n\nimport { ITerminal, ISelectionManager } from '../Interfaces';\n\ninterface IWindow extends Window {\n clipboardData?: {\n getData(format: string): string;\n setData(format: string, data: string);\n };\n}\n\ndeclare var window: IWindow;\n\n/**\n * Prepares text to be pasted into the terminal by normalizing the line endings\n * @param text The pasted text that needs processing before inserting into the terminal\n */\nexport function prepareTextForTerminal(text: string, isMSWindows: boolean): string {\n if (isMSWindows) {\n return text.replace(/\\r?\\n/g, '\\r');\n }\n return text;\n}\n\n/**\n * Binds copy functionality to the given terminal.\n * @param {ClipboardEvent} ev The original copy event to be handled\n */\nexport function copyHandler(ev: ClipboardEvent, term: ITerminal, selectionManager: ISelectionManager) {\n if (term.browser.isMSIE) {\n window.clipboardData.setData('Text', selectionManager.selectionText);\n } else {\n ev.clipboardData.setData('text/plain', selectionManager.selectionText);\n }\n\n // Prevent or the original text will be copied.\n ev.preventDefault();\n}\n\n/**\n * Redirect the clipboard's data to the terminal's input handler.\n * @param {ClipboardEvent} ev The original paste event to be handled\n * @param {Terminal} term The terminal on which to apply the handled paste event\n */\nexport function pasteHandler(ev: ClipboardEvent, term: ITerminal) {\n ev.stopPropagation();\n\n let text: string;\n\n let dispatchPaste = function(text) {\n text = prepareTextForTerminal(text, term.browser.isMSWindows);\n term.handler(text);\n term.textarea.value = '';\n term.emit('paste', text);\n\n return term.cancel(ev);\n };\n\n if (term.browser.isMSIE) {\n if (window.clipboardData) {\n text = window.clipboardData.getData('Text');\n dispatchPaste(text);\n }\n } else {\n if (ev.clipboardData) {\n text = ev.clipboardData.getData('text/plain');\n dispatchPaste(text);\n }\n }\n}\n\n/**\n * Moves the textarea under the mouse cursor and focuses it.\n * @param ev The original right click event to be handled.\n * @param textarea The terminal's textarea.\n */\nexport function moveTextAreaUnderMouseCursor(ev: MouseEvent, textarea: HTMLTextAreaElement) {\n // Bring textarea at the cursor position\n textarea.style.position = 'fixed';\n textarea.style.width = '20px';\n textarea.style.height = '20px';\n textarea.style.left = (ev.clientX - 10) + 'px';\n textarea.style.top = (ev.clientY - 10) + 'px';\n textarea.style.zIndex = '1000';\n\n textarea.focus();\n\n // Reset the terminal textarea's styling\n setTimeout(function () {\n textarea.style.position = null;\n textarea.style.width = null;\n textarea.style.height = null;\n textarea.style.left = null;\n textarea.style.top = null;\n textarea.style.zIndex = null;\n }, 4);\n}\n\n/**\n * Bind to right-click event and allow right-click copy and paste.\n * @param ev The original right click event to be handled.\n * @param textarea The terminal's 
textarea.\n * @param selectionManager The terminal's selection manager.\n */\nexport function rightClickHandler(ev: MouseEvent, textarea: HTMLTextAreaElement, selectionManager: ISelectionManager) {\n moveTextAreaUnderMouseCursor(ev, textarea);\n\n // Get textarea ready to copy from the context menu\n textarea.value = selectionManager.selectionText;\n textarea.select();\n}\n","/**\n * @license MIT\n */\n\nimport { ITerminal } from './Interfaces';\nimport { CharMeasure } from './utils/CharMeasure';\n\n/**\n * Represents the viewport of a terminal, the visible area within the larger buffer of output.\n * Logic for the virtual scroll bar is included in this object.\n */\nexport class Viewport {\n private currentRowHeight: number;\n private lastRecordedBufferLength: number;\n private lastRecordedViewportHeight: number;\n private lastTouchY: number;\n\n /**\n * Creates a new Viewport.\n * @param terminal The terminal this viewport belongs to.\n * @param viewportElement The DOM element acting as the viewport.\n * @param scrollArea The DOM element acting as the scroll area.\n * @param charMeasureElement A DOM element used to measure the character size of. the terminal.\n */\n constructor(\n private terminal: ITerminal,\n private viewportElement: HTMLElement,\n private scrollArea: HTMLElement,\n private charMeasure: CharMeasure\n ) {\n this.currentRowHeight = 0;\n this.lastRecordedBufferLength = 0;\n this.lastRecordedViewportHeight = 0;\n\n this.terminal.on('scroll', this.syncScrollArea.bind(this));\n this.terminal.on('resize', this.syncScrollArea.bind(this));\n this.viewportElement.addEventListener('scroll', this.onScroll.bind(this));\n\n // Perform this async to ensure the CharMeasure is ready.\n setTimeout(() => this.syncScrollArea(), 0);\n }\n\n /**\n * Refreshes row height, setting line-height, viewport height and scroll area height if\n * necessary.\n * @param charSize A character size measurement bounding rect object, if it doesn't exist it will\n * be created.\n */\n private refresh(): void {\n if (this.charMeasure.height > 0) {\n const rowHeightChanged = this.charMeasure.height !== this.currentRowHeight;\n if (rowHeightChanged) {\n this.currentRowHeight = this.charMeasure.height;\n this.viewportElement.style.lineHeight = this.charMeasure.height + 'px';\n this.terminal.rowContainer.style.lineHeight = this.charMeasure.height + 'px';\n }\n const viewportHeightChanged = this.lastRecordedViewportHeight !== this.terminal.rows;\n if (rowHeightChanged || viewportHeightChanged) {\n this.lastRecordedViewportHeight = this.terminal.rows;\n this.viewportElement.style.height = this.charMeasure.height * this.terminal.rows + 'px';\n this.terminal.selectionContainer.style.height = this.viewportElement.style.height;\n }\n this.scrollArea.style.height = (this.charMeasure.height * this.lastRecordedBufferLength) + 'px';\n }\n }\n\n /**\n * Updates dimensions and synchronizes the scroll area if necessary.\n */\n public syncScrollArea(): void {\n if (this.lastRecordedBufferLength !== this.terminal.lines.length) {\n // If buffer height changed\n this.lastRecordedBufferLength = this.terminal.lines.length;\n this.refresh();\n } else if (this.lastRecordedViewportHeight !== this.terminal.rows) {\n // If viewport height changed\n this.refresh();\n } else {\n // If size has changed, refresh viewport\n if (this.charMeasure.height !== this.currentRowHeight) {\n this.refresh();\n }\n }\n\n // Sync scrollTop\n const scrollTop = this.terminal.ydisp * this.currentRowHeight;\n if (this.viewportElement.scrollTop !== scrollTop) 
{\n this.viewportElement.scrollTop = scrollTop;\n }\n }\n\n /**\n * Handles scroll events on the viewport, calculating the new viewport and requesting the\n * terminal to scroll to it.\n * @param ev The scroll event.\n */\n private onScroll(ev: Event) {\n const newRow = Math.round(this.viewportElement.scrollTop / this.currentRowHeight);\n const diff = newRow - this.terminal.ydisp;\n this.terminal.scrollDisp(diff, true);\n }\n\n /**\n * Handles mouse wheel events by adjusting the viewport's scrollTop and delegating the actual\n * scrolling to `onScroll`, this event needs to be attached manually by the consumer of\n * `Viewport`.\n * @param ev The mouse wheel event.\n */\n public onWheel(ev: WheelEvent) {\n if (ev.deltaY === 0) {\n // Do nothing if it's not a vertical scroll event\n return;\n }\n // Fallback to WheelEvent.DOM_DELTA_PIXEL\n let multiplier = 1;\n if (ev.deltaMode === WheelEvent.DOM_DELTA_LINE) {\n multiplier = this.currentRowHeight;\n } else if (ev.deltaMode === WheelEvent.DOM_DELTA_PAGE) {\n multiplier = this.currentRowHeight * this.terminal.rows;\n }\n this.viewportElement.scrollTop += ev.deltaY * multiplier;\n // Prevent the page from scrolling when the terminal scrolls\n ev.preventDefault();\n };\n\n /**\n * Handles the touchstart event, recording the touch occurred.\n * @param ev The touch event.\n */\n public onTouchStart(ev: TouchEvent) {\n this.lastTouchY = ev.touches[0].pageY;\n };\n\n /**\n * Handles the touchmove event, scrolling the viewport if the position shifted.\n * @param ev The touch event.\n */\n public onTouchMove(ev: TouchEvent) {\n let deltaY = this.lastTouchY - ev.touches[0].pageY;\n this.lastTouchY = ev.touches[0].pageY;\n if (deltaY === 0) {\n return;\n }\n this.viewportElement.scrollTop += deltaY;\n ev.preventDefault();\n };\n}\n","/**\n * @license MIT\n */\n\nimport { ITerminal } from './Interfaces';\n\n/**\n * Represents a selection within the buffer. This model only cares about column\n * and row coordinates, not wide characters.\n */\nexport class SelectionModel {\n /**\n * Whether select all is currently active.\n */\n public isSelectAllActive: boolean;\n\n /**\n * The [x, y] position the selection starts at.\n */\n public selectionStart: [number, number];\n\n /**\n * The minimal length of the selection from the start position. When double\n * clicking on a word, the word will be selected which makes the selection\n * start at the start of the word and makes this variable the length.\n */\n public selectionStartLength: number;\n\n /**\n * The [x, y] position the selection ends at.\n */\n public selectionEnd: [number, number];\n\n constructor(\n private _terminal: ITerminal\n ) {\n this.clearSelection();\n }\n\n /**\n * Clears the current selection.\n */\n public clearSelection(): void {\n this.selectionStart = null;\n this.selectionEnd = null;\n this.isSelectAllActive = false;\n this.selectionStartLength = 0;\n }\n\n /**\n * The final selection start, taking into consideration select all.\n */\n public get finalSelectionStart(): [number, number] {\n if (this.isSelectAllActive) {\n return [0, 0];\n }\n\n if (!this.selectionEnd || !this.selectionStart) {\n return this.selectionStart;\n }\n\n return this.areSelectionValuesReversed() ? 
this.selectionEnd : this.selectionStart;\n }\n\n /**\n * The final selection end, taking into consideration select all, double click\n * word selection and triple click line selection.\n */\n public get finalSelectionEnd(): [number, number] {\n if (this.isSelectAllActive) {\n return [this._terminal.cols, this._terminal.ybase + this._terminal.rows - 1];\n }\n\n if (!this.selectionStart) {\n return null;\n }\n\n // Use the selection start if the end doesn't exist or they're reversed\n if (!this.selectionEnd || this.areSelectionValuesReversed()) {\n return [this.selectionStart[0] + this.selectionStartLength, this.selectionStart[1]];\n }\n\n // Ensure the the word/line is selected after a double/triple click\n if (this.selectionStartLength) {\n // Select the larger of the two when start and end are on the same line\n if (this.selectionEnd[1] === this.selectionStart[1]) {\n return [Math.max(this.selectionStart[0] + this.selectionStartLength, this.selectionEnd[0]), this.selectionEnd[1]];\n }\n }\n return this.selectionEnd;\n }\n\n /**\n * Returns whether the selection start and end are reversed.\n */\n public areSelectionValuesReversed(): boolean {\n const start = this.selectionStart;\n const end = this.selectionEnd;\n return start[1] > end[1] || (start[1] === end[1] && start[0] > end[0]);\n }\n\n /**\n * Handle the buffer being trimmed, adjust the selection position.\n * @param amount The amount the buffer is being trimmed.\n * @return Whether a refresh is necessary.\n */\n public onTrim(amount: number): boolean {\n // Adjust the selection position based on the trimmed amount.\n if (this.selectionStart) {\n this.selectionStart[1] -= amount;\n }\n if (this.selectionEnd) {\n this.selectionEnd[1] -= amount;\n }\n\n // The selection has moved off the buffer, clear it.\n if (this.selectionEnd && this.selectionEnd[1] < 0) {\n this.clearSelection();\n return true;\n }\n\n // If the selection start is trimmed, ensure the start column is 0.\n if (this.selectionStart && this.selectionStart[1] < 0) {\n this.selectionStart[1] = 0;\n }\n return false;\n }\n}\n","/**\n * @license MIT\n */\n\nimport * as Mouse from './utils/Mouse';\nimport * as Browser from './utils/Browser';\nimport { CharMeasure } from './utils/CharMeasure';\nimport { CircularList } from './utils/CircularList';\nimport { EventEmitter } from './EventEmitter';\nimport { ITerminal } from './Interfaces';\nimport { SelectionModel } from './SelectionModel';\n\n/**\n * The number of pixels the mouse needs to be above or below the viewport in\n * order to scroll at the maximum speed.\n */\nconst DRAG_SCROLL_MAX_THRESHOLD = 50;\n\n/**\n * The maximum scrolling speed\n */\nconst DRAG_SCROLL_MAX_SPEED = 15;\n\n/**\n * The number of milliseconds between drag scroll updates.\n */\nconst DRAG_SCROLL_INTERVAL = 50;\n\n/**\n * The amount of time before mousedown events are no longer stacked to create\n * double/triple click events.\n */\nconst CLEAR_MOUSE_DOWN_TIME = 400;\n\n/**\n * The number of pixels in each direction that the mouse must move before\n * mousedown events are no longer stacked to create double/triple click events.\n */\nconst CLEAR_MOUSE_DISTANCE = 10;\n\n/**\n * A string containing all characters that are considered word separated by the\n * double click to select work logic.\n */\nconst WORD_SEPARATORS = ' ()[]{}\\'\"';\n\n// TODO: Move these constants elsewhere, they belong in a buffer or buffer\n// data/line class.\nconst LINE_DATA_CHAR_INDEX = 1;\nconst LINE_DATA_WIDTH_INDEX = 2;\n\nconst NON_BREAKING_SPACE_CHAR = 
String.fromCharCode(160);\nconst ALL_NON_BREAKING_SPACE_REGEX = new RegExp(NON_BREAKING_SPACE_CHAR, 'g');\n\n/**\n * Represents a position of a word on a line.\n */\ninterface IWordPosition {\n start: number;\n length: number;\n}\n\n/**\n * A selection mode, this drives how the selection behaves on mouse move.\n */\nenum SelectionMode {\n NORMAL,\n WORD,\n LINE\n}\n\n/**\n * A class that manages the selection of the terminal. With help from\n * SelectionModel, SelectionManager handles with all logic associated with\n * dealing with the selection, including handling mouse interaction, wide\n * characters and fetching the actual text within the selection. Rendering is\n * not handled by the SelectionManager but a 'refresh' event is fired when the\n * selection is ready to be redrawn.\n */\nexport class SelectionManager extends EventEmitter {\n protected _model: SelectionModel;\n\n /**\n * The amount to scroll every drag scroll update (depends on how far the mouse\n * drag is above or below the terminal).\n */\n private _dragScrollAmount: number;\n\n /**\n * The last time the mousedown event fired, this is used to track double and\n * triple clicks.\n */\n private _lastMouseDownTime: number;\n\n /**\n * The last position the mouse was clicked [x, y].\n */\n private _lastMousePosition: [number, number];\n\n /**\n * The number of clicks of the mousedown event. This is used to keep track of\n * double and triple clicks.\n */\n private _clickCount: number;\n\n /**\n * The current selection mode.\n */\n private _activeSelectionMode: SelectionMode;\n\n /**\n * A setInterval timer that is active while the mouse is down whose callback\n * scrolls the viewport when necessary.\n */\n private _dragScrollIntervalTimer: NodeJS.Timer;\n\n /**\n * The animation frame ID used for refreshing the selection.\n */\n private _refreshAnimationFrame: number;\n\n private _bufferTrimListener: any;\n private _mouseMoveListener: EventListener;\n private _mouseDownListener: EventListener;\n private _mouseUpListener: EventListener;\n\n constructor(\n private _terminal: ITerminal,\n private _buffer: CircularList,\n private _rowContainer: HTMLElement,\n private _charMeasure: CharMeasure\n ) {\n super();\n this._initListeners();\n this.enable();\n\n this._model = new SelectionModel(_terminal);\n this._lastMouseDownTime = 0;\n this._activeSelectionMode = SelectionMode.NORMAL;\n }\n\n /**\n * Initializes listener variables.\n */\n private _initListeners() {\n this._bufferTrimListener = (amount: number) => this._onTrim(amount);\n this._mouseMoveListener = event => this._onMouseMove(event);\n this._mouseDownListener = event => this._onMouseDown(event);\n this._mouseUpListener = event => this._onMouseUp(event);\n }\n\n /**\n * Disables the selection manager. This is useful for when terminal mouse\n * are enabled.\n */\n public disable() {\n this.clearSelection();\n this._buffer.off('trim', this._bufferTrimListener);\n this._rowContainer.removeEventListener('mousedown', this._mouseDownListener);\n }\n\n /**\n * Enable the selection manager.\n */\n public enable() {\n // Only adjust the selection on trim, shiftElements is rarely used (only in\n // reverseIndex) and delete in a splice is only ever used when the same\n // number of elements was just added. 
Given this is could actually be\n // beneficial to leave the selection as is for these cases.\n this._buffer.on('trim', this._bufferTrimListener);\n this._rowContainer.addEventListener('mousedown', this._mouseDownListener);\n }\n\n /**\n * Sets the active buffer, this should be called when the alt buffer is\n * switched in or out.\n * @param buffer The active buffer.\n */\n public setBuffer(buffer: CircularList): void {\n this._buffer = buffer;\n this.clearSelection();\n }\n\n /**\n * Gets whether there is an active text selection.\n */\n public get hasSelection(): boolean {\n const start = this._model.finalSelectionStart;\n const end = this._model.finalSelectionEnd;\n if (!start || !end) {\n return false;\n }\n return start[0] !== end[0] || start[1] !== end[1];\n }\n\n /**\n * Gets the text currently selected.\n */\n public get selectionText(): string {\n const start = this._model.finalSelectionStart;\n const end = this._model.finalSelectionEnd;\n if (!start || !end) {\n return '';\n }\n\n // Get first row\n const startRowEndCol = start[1] === end[1] ? end[0] : null;\n let result: string[] = [];\n result.push(this._translateBufferLineToString(this._buffer.get(start[1]), true, start[0], startRowEndCol));\n\n // Get middle rows\n for (let i = start[1] + 1; i <= end[1] - 1; i++) {\n const bufferLine = this._buffer.get(i);\n const lineText = this._translateBufferLineToString(bufferLine, true);\n if (bufferLine.isWrapped) {\n result[result.length - 1] += lineText;\n } else {\n result.push(lineText);\n }\n }\n\n // Get final row\n if (start[1] !== end[1]) {\n const bufferLine = this._buffer.get(end[1]);\n const lineText = this._translateBufferLineToString(bufferLine, true, 0, end[0]);\n if (bufferLine.isWrapped) {\n result[result.length - 1] += lineText;\n } else {\n result.push(lineText);\n }\n }\n\n // Format string by replacing non-breaking space chars with regular spaces\n // and joining the array into a multi-line string.\n const formattedResult = result.map(line => {\n return line.replace(ALL_NON_BREAKING_SPACE_REGEX, ' ');\n }).join(Browser.isMSWindows ? '\\r\\n' : '\\n');\n\n return formattedResult;\n }\n\n /**\n * Clears the current terminal selection.\n */\n public clearSelection(): void {\n this._model.clearSelection();\n this._removeMouseDownListeners();\n this.refresh();\n }\n\n /**\n * Translates a buffer line to a string, with optional start and end columns.\n * Wide characters will count as two columns in the resulting string. 
This\n * function is useful for getting the actual text underneath the raw selection\n * position.\n * @param line The line being translated.\n * @param trimRight Whether to trim whitespace to the right.\n * @param startCol The column to start at.\n * @param endCol The column to end at.\n */\n private _translateBufferLineToString(line: any, trimRight: boolean, startCol: number = 0, endCol: number = null): string {\n // TODO: This function should live in a buffer or buffer line class\n\n // Get full line\n let lineString = '';\n let widthAdjustedStartCol = startCol;\n let widthAdjustedEndCol = endCol;\n for (let i = 0; i < line.length; i++) {\n const char = line[i];\n lineString += char[LINE_DATA_CHAR_INDEX];\n // Adjust start and end cols for wide characters if they affect their\n // column indexes\n if (char[LINE_DATA_WIDTH_INDEX] === 0) {\n if (startCol >= i) {\n widthAdjustedStartCol--;\n }\n if (endCol >= i) {\n widthAdjustedEndCol--;\n }\n }\n }\n\n // Calculate the final end col by trimming whitespace on the right of the\n // line if needed.\n let finalEndCol = widthAdjustedEndCol || line.length;\n if (trimRight) {\n const rightWhitespaceIndex = lineString.search(/\\s+$/);\n if (rightWhitespaceIndex !== -1) {\n finalEndCol = Math.min(finalEndCol, rightWhitespaceIndex);\n }\n // Return the empty string if only trimmed whitespace is selected\n if (finalEndCol <= widthAdjustedStartCol) {\n return '';\n }\n }\n\n return lineString.substring(widthAdjustedStartCol, finalEndCol);\n }\n\n /**\n * Queues a refresh, redrawing the selection on the next opportunity.\n * @param isNewSelection Whether the selection should be registered as a new\n * selection on Linux.\n */\n public refresh(isNewSelection?: boolean): void {\n // Queue the refresh for the renderer\n if (!this._refreshAnimationFrame) {\n this._refreshAnimationFrame = window.requestAnimationFrame(() => this._refresh());\n }\n\n // If the platform is Linux and the refresh call comes from a mouse event,\n // we need to update the selection for middle click to paste selection.\n if (Browser.isLinux && isNewSelection) {\n const selectionText = this.selectionText;\n if (selectionText.length) {\n this.emit('newselection', this.selectionText);\n }\n }\n }\n\n /**\n * Fires the refresh event, causing consumers to pick it up and redraw the\n * selection state.\n */\n private _refresh(): void {\n this._refreshAnimationFrame = null;\n this.emit('refresh', { start: this._model.finalSelectionStart, end: this._model.finalSelectionEnd });\n }\n\n /**\n * Selects all text within the terminal.\n */\n public selectAll(): void {\n this._model.isSelectAllActive = true;\n this.refresh();\n }\n\n /**\n * Handle the buffer being trimmed, adjust the selection position.\n * @param amount The amount the buffer is being trimmed.\n */\n private _onTrim(amount: number) {\n const needsRefresh = this._model.onTrim(amount);\n if (needsRefresh) {\n this.refresh();\n }\n }\n\n /**\n * Gets the 0-based [x, y] buffer coordinates of the current mouse event.\n * @param event The mouse event.\n */\n private _getMouseBufferCoords(event: MouseEvent): [number, number] {\n const coords = Mouse.getCoords(event, this._rowContainer, this._charMeasure, this._terminal.cols, this._terminal.rows, true);\n // Convert to 0-based\n coords[0]--;\n coords[1]--;\n // Convert viewport coords to buffer coords\n coords[1] += this._terminal.ydisp;\n return coords;\n }\n\n /**\n * Gets the amount the viewport should be scrolled based on how far out of the\n * terminal the mouse is.\n * @param 
event The mouse event.\n */\n private _getMouseEventScrollAmount(event: MouseEvent): number {\n let offset = Mouse.getCoordsRelativeToElement(event, this._rowContainer)[1];\n const terminalHeight = this._terminal.rows * this._charMeasure.height;\n if (offset >= 0 && offset <= terminalHeight) {\n return 0;\n }\n if (offset > terminalHeight) {\n offset -= terminalHeight;\n }\n\n offset = Math.min(Math.max(offset, -DRAG_SCROLL_MAX_THRESHOLD), DRAG_SCROLL_MAX_THRESHOLD);\n offset /= DRAG_SCROLL_MAX_THRESHOLD;\n return (offset / Math.abs(offset)) + Math.round(offset * (DRAG_SCROLL_MAX_SPEED - 1));\n }\n\n /**\n * Handles te mousedown event, setting up for a new selection.\n * @param event The mousedown event.\n */\n private _onMouseDown(event: MouseEvent) {\n // Only action the primary button\n if (event.button !== 0) {\n return;\n }\n\n // Tell the browser not to start a regular selection\n event.preventDefault();\n\n // Reset drag scroll state\n this._dragScrollAmount = 0;\n\n this._setMouseClickCount(event);\n\n if (event.shiftKey) {\n this._onShiftClick(event);\n } else {\n if (this._clickCount === 1) {\n this._onSingleClick(event);\n } else if (this._clickCount === 2) {\n this._onDoubleClick(event);\n } else if (this._clickCount === 3) {\n this._onTripleClick(event);\n }\n }\n\n this._addMouseDownListeners();\n this.refresh(true);\n }\n\n /**\n * Adds listeners when mousedown is triggered.\n */\n private _addMouseDownListeners(): void {\n // Listen on the document so that dragging outside of viewport works\n this._rowContainer.ownerDocument.addEventListener('mousemove', this._mouseMoveListener);\n this._rowContainer.ownerDocument.addEventListener('mouseup', this._mouseUpListener);\n this._dragScrollIntervalTimer = setInterval(() => this._dragScroll(), DRAG_SCROLL_INTERVAL);\n }\n\n /**\n * Removes the listeners that are registered when mousedown is triggered.\n */\n private _removeMouseDownListeners(): void {\n this._rowContainer.ownerDocument.removeEventListener('mousemove', this._mouseMoveListener);\n this._rowContainer.ownerDocument.removeEventListener('mouseup', this._mouseUpListener);\n clearInterval(this._dragScrollIntervalTimer);\n this._dragScrollIntervalTimer = null;\n }\n\n /**\n * Performs a shift click, setting the selection end position to the mouse\n * position.\n * @param event The mouse event.\n */\n private _onShiftClick(event: MouseEvent): void {\n if (this._model.selectionStart) {\n this._model.selectionEnd = this._getMouseBufferCoords(event);\n }\n }\n\n /**\n * Performs a single click, resetting relevant state and setting the selection\n * start position.\n * @param event The mouse event.\n */\n private _onSingleClick(event: MouseEvent): void {\n this._model.selectionStartLength = 0;\n this._model.isSelectAllActive = false;\n this._activeSelectionMode = SelectionMode.NORMAL;\n this._model.selectionStart = this._getMouseBufferCoords(event);\n if (this._model.selectionStart) {\n this._model.selectionEnd = null;\n // If the mouse is over the second half of a wide character, adjust the\n // selection to cover the whole character\n const char = this._buffer.get(this._model.selectionStart[1])[this._model.selectionStart[0]];\n if (char[LINE_DATA_WIDTH_INDEX] === 0) {\n this._model.selectionStart[0]++;\n }\n }\n }\n\n /**\n * Performs a double click, selecting the current work.\n * @param event The mouse event.\n */\n private _onDoubleClick(event: MouseEvent): void {\n const coords = this._getMouseBufferCoords(event);\n if (coords) {\n this._activeSelectionMode = 
SelectionMode.WORD;\n this._selectWordAt(coords);\n }\n }\n\n /**\n * Performs a triple click, selecting the current line and activating line\n * select mode.\n * @param event The mouse event.\n */\n private _onTripleClick(event: MouseEvent): void {\n const coords = this._getMouseBufferCoords(event);\n if (coords) {\n this._activeSelectionMode = SelectionMode.LINE;\n this._selectLineAt(coords[1]);\n }\n }\n\n /**\n * Sets the number of clicks for the current mousedown event based on the time\n * and position of the last mousedown event.\n * @param event The mouse event.\n */\n private _setMouseClickCount(event: MouseEvent): void {\n let currentTime = (new Date()).getTime();\n if (currentTime - this._lastMouseDownTime > CLEAR_MOUSE_DOWN_TIME || this._distanceFromLastMousePosition(event) > CLEAR_MOUSE_DISTANCE) {\n this._clickCount = 0;\n }\n this._lastMouseDownTime = currentTime;\n this._lastMousePosition = [event.pageX, event.pageY];\n this._clickCount++;\n }\n\n /**\n * Gets the maximum number of pixels in each direction the mouse has moved.\n * @param event The mouse event.\n */\n private _distanceFromLastMousePosition(event: MouseEvent): number {\n const result = Math.max(\n Math.abs(this._lastMousePosition[0] - event.pageX),\n Math.abs(this._lastMousePosition[1] - event.pageY));\n return result;\n }\n\n /**\n * Handles the mousemove event when the mouse button is down, recording the\n * end of the selection and refreshing the selection.\n * @param event The mousemove event.\n */\n private _onMouseMove(event: MouseEvent) {\n // Record the previous position so we know whether to redraw the selection\n // at the end.\n const previousSelectionEnd = this._model.selectionEnd ? [this._model.selectionEnd[0], this._model.selectionEnd[1]] : null;\n\n // Set the initial selection end based on the mouse coordinates\n this._model.selectionEnd = this._getMouseBufferCoords(event);\n\n // Select the entire line if line select mode is active.\n if (this._activeSelectionMode === SelectionMode.LINE) {\n if (this._model.selectionEnd[1] < this._model.selectionStart[1]) {\n this._model.selectionEnd[0] = 0;\n } else {\n this._model.selectionEnd[0] = this._terminal.cols;\n }\n } else if (this._activeSelectionMode === SelectionMode.WORD) {\n this._selectToWordAt(this._model.selectionEnd);\n }\n\n // Determine the amount of scrolling that will happen.\n this._dragScrollAmount = this._getMouseEventScrollAmount(event);\n\n // If the cursor was above or below the viewport, make sure it's at the\n // start or end of the viewport respectively.\n if (this._dragScrollAmount > 0) {\n this._model.selectionEnd[0] = this._terminal.cols - 1;\n } else if (this._dragScrollAmount < 0) {\n this._model.selectionEnd[0] = 0;\n }\n\n // If the character is a wide character include the cell to the right in the\n // selection. 
Note that selections at the very end of the line will never\n // have a character.\n if (this._model.selectionEnd[1] < this._buffer.length) {\n const char = this._buffer.get(this._model.selectionEnd[1])[this._model.selectionEnd[0]];\n if (char && char[2] === 0) {\n this._model.selectionEnd[0]++;\n }\n }\n\n // Only draw here if the selection changes.\n if (!previousSelectionEnd ||\n previousSelectionEnd[0] !== this._model.selectionEnd[0] ||\n previousSelectionEnd[1] !== this._model.selectionEnd[1]) {\n this.refresh(true);\n }\n }\n\n /**\n * The callback that occurs every DRAG_SCROLL_INTERVAL ms that does the\n * scrolling of the viewport.\n */\n private _dragScroll() {\n if (this._dragScrollAmount) {\n this._terminal.scrollDisp(this._dragScrollAmount, false);\n // Re-evaluate selection\n if (this._dragScrollAmount > 0) {\n this._model.selectionEnd = [this._terminal.cols - 1, this._terminal.ydisp + this._terminal.rows];\n } else {\n this._model.selectionEnd = [0, this._terminal.ydisp];\n }\n this.refresh();\n }\n }\n\n /**\n * Handles the mouseup event, removing the mousedown listeners.\n * @param event The mouseup event.\n */\n private _onMouseUp(event: MouseEvent) {\n this._removeMouseDownListeners();\n }\n\n /**\n * Converts a viewport column to the character index on the buffer line, the\n * latter takes into account wide characters.\n * @param coords The coordinates to find the 2 index for.\n */\n private _convertViewportColToCharacterIndex(bufferLine: any, coords: [number, number]): number {\n let charIndex = coords[0];\n for (let i = 0; coords[0] >= i; i++) {\n const char = bufferLine[i];\n if (char[LINE_DATA_WIDTH_INDEX] === 0) {\n charIndex--;\n }\n }\n return charIndex;\n }\n\n /**\n * Gets positional information for the word at the coordinated specified.\n * @param coords The coordinates to get the word at.\n */\n private _getWordAt(coords: [number, number]): IWordPosition {\n const bufferLine = this._buffer.get(coords[1]);\n const line = this._translateBufferLineToString(bufferLine, false);\n\n // Get actual index, taking into consideration wide characters\n let endIndex = this._convertViewportColToCharacterIndex(bufferLine, coords);\n let startIndex = endIndex;\n\n // Record offset to be used later\n const charOffset = coords[0] - startIndex;\n let leftWideCharCount = 0;\n let rightWideCharCount = 0;\n\n if (line.charAt(startIndex) === ' ') {\n // Expand until non-whitespace is hit\n while (startIndex > 0 && line.charAt(startIndex - 1) === ' ') {\n startIndex--;\n }\n while (endIndex < line.length && line.charAt(endIndex + 1) === ' ') {\n endIndex++;\n }\n } else {\n // Expand until whitespace is hit. This algorithm works by scanning left\n // and right from the starting position, keeping both the index format\n // (line) and the column format (bufferLine) in sync. 
When a wide\n // character is hit, it is recorded and the column index is adjusted.\n let startCol = coords[0];\n let endCol = coords[0];\n // Consider the initial position, skip it and increment the wide char\n // variable\n if (bufferLine[startCol][LINE_DATA_WIDTH_INDEX] === 0) {\n leftWideCharCount++;\n startCol--;\n }\n if (bufferLine[endCol][LINE_DATA_WIDTH_INDEX] === 2) {\n rightWideCharCount++;\n endCol++;\n }\n // Expand the string in both directions until a space is hit\n while (startIndex > 0 && !this._isCharWordSeparator(line.charAt(startIndex - 1))) {\n if (bufferLine[startCol - 1][LINE_DATA_WIDTH_INDEX] === 0) {\n // If the next character is a wide char, record it and skip the column\n leftWideCharCount++;\n startCol--;\n }\n startIndex--;\n startCol--;\n }\n while (endIndex + 1 < line.length && !this._isCharWordSeparator(line.charAt(endIndex + 1))) {\n if (bufferLine[endCol + 1][LINE_DATA_WIDTH_INDEX] === 2) {\n // If the next character is a wide char, record it and skip the column\n rightWideCharCount++;\n endCol++;\n }\n endIndex++;\n endCol++;\n }\n }\n\n const start = startIndex + charOffset - leftWideCharCount;\n const length = Math.min(endIndex - startIndex + leftWideCharCount + rightWideCharCount + 1/*include endIndex char*/, this._terminal.cols);\n return {start, length};\n }\n\n /**\n * Selects the word at the coordinates specified.\n * @param coords The coordinates to get the word at.\n */\n protected _selectWordAt(coords: [number, number]): void {\n const wordPosition = this._getWordAt(coords);\n this._model.selectionStart = [wordPosition.start, coords[1]];\n this._model.selectionStartLength = wordPosition.length;\n }\n\n /**\n * Sets the selection end to the word at the coordinated specified.\n * @param coords The coordinates to get the word at.\n */\n private _selectToWordAt(coords: [number, number]): void {\n const wordPosition = this._getWordAt(coords);\n this._model.selectionEnd = [this._model.areSelectionValuesReversed() ? wordPosition.start : (wordPosition.start + wordPosition.length), coords[1]];\n }\n\n /**\n * Gets whether the character is considered a word separator by the select\n * word logic.\n * @param char The character to check.\n */\n private _isCharWordSeparator(char: string): boolean {\n return WORD_SEPARATORS.indexOf(char) >= 0;\n }\n\n /**\n * Selects the line specified.\n * @param line The line index.\n */\n protected _selectLineAt(line: number): void {\n this._model.selectionStart = [0, line];\n this._model.selectionStartLength = this._terminal.cols;\n }\n}\n","/**\n * @license MIT\n */\n\nimport { ITerminal } from './Interfaces';\nimport { DomElementObjectPool } from './utils/DomElementObjectPool';\n\n/**\n * The maximum number of refresh frames to skip when the write buffer is non-\n * empty. 
Note that these frames may be intermingled with frames that are\n * skipped via requestAnimationFrame's mechanism.\n */\nconst MAX_REFRESH_FRAME_SKIP = 5;\n\n/**\n * Flags used to render terminal text properly.\n */\nenum FLAGS {\n BOLD = 1,\n UNDERLINE = 2,\n BLINK = 4,\n INVERSE = 8,\n INVISIBLE = 16\n};\n\nlet brokenBold: boolean = null;\n\nexport class Renderer {\n /** A queue of the rows to be refreshed */\n private _refreshRowsQueue: {start: number, end: number}[] = [];\n private _refreshFramesSkipped = 0;\n private _refreshAnimationFrame = null;\n\n private _spanElementObjectPool = new DomElementObjectPool('span');\n\n constructor(private _terminal: ITerminal) {\n // Figure out whether boldness affects\n // the character width of monospace fonts.\n if (brokenBold === null) {\n brokenBold = checkBoldBroken((this._terminal).element);\n }\n this._spanElementObjectPool = new DomElementObjectPool('span');\n\n // TODO: Pull more DOM interactions into Renderer.constructor, element for\n // example should be owned by Renderer (and also exposed by Terminal due to\n // to established public API).\n }\n\n /**\n * Queues a refresh between two rows (inclusive), to be done on next animation\n * frame.\n * @param {number} start The start row.\n * @param {number} end The end row.\n */\n public queueRefresh(start: number, end: number): void {\n this._refreshRowsQueue.push({ start: start, end: end });\n if (!this._refreshAnimationFrame) {\n this._refreshAnimationFrame = window.requestAnimationFrame(this._refreshLoop.bind(this));\n }\n }\n\n /**\n * Performs the refresh loop callback, calling refresh only if a refresh is\n * necessary before queueing up the next one.\n */\n private _refreshLoop(): void {\n // Skip MAX_REFRESH_FRAME_SKIP frames if the writeBuffer is non-empty as it\n // will need to be immediately refreshed anyway. This saves a lot of\n // rendering time as the viewport DOM does not need to be refreshed, no\n // scroll events, no layouts, etc.\n const skipFrame = this._terminal.writeBuffer.length > 0 && this._refreshFramesSkipped++ <= MAX_REFRESH_FRAME_SKIP;\n if (skipFrame) {\n this._refreshAnimationFrame = window.requestAnimationFrame(this._refreshLoop.bind(this));\n return;\n }\n\n this._refreshFramesSkipped = 0;\n let start;\n let end;\n if (this._refreshRowsQueue.length > 4) {\n // Just do a full refresh when 5+ refreshes are queued\n start = 0;\n end = this._terminal.rows - 1;\n } else {\n // Get start and end rows that need refreshing\n start = this._refreshRowsQueue[0].start;\n end = this._refreshRowsQueue[0].end;\n for (let i = 1; i < this._refreshRowsQueue.length; i++) {\n if (this._refreshRowsQueue[i].start < start) {\n start = this._refreshRowsQueue[i].start;\n }\n if (this._refreshRowsQueue[i].end > end) {\n end = this._refreshRowsQueue[i].end;\n }\n }\n }\n this._refreshRowsQueue = [];\n this._refreshAnimationFrame = null;\n this._refresh(start, end);\n }\n\n /**\n * Refreshes (re-renders) terminal content within two rows (inclusive)\n *\n * Rendering Engine:\n *\n * In the screen buffer, each character is stored as a an array with a character\n * and a 32-bit integer:\n * - First value: a utf-16 character.\n * - Second value:\n * - Next 9 bits: background color (0-511).\n * - Next 9 bits: foreground color (0-511).\n * - Next 14 bits: a mask for misc. 
flags:\n * - 1=bold\n * - 2=underline\n * - 4=blink\n * - 8=inverse\n * - 16=invisible\n *\n * @param {number} start The row to start from (between 0 and terminal's height terminal - 1)\n * @param {number} end The row to end at (between fromRow and terminal's height terminal - 1)\n */\n private _refresh(start: number, end: number): void {\n // If this is a big refresh, remove the terminal rows from the DOM for faster calculations\n let parent;\n if (end - start >= this._terminal.rows / 2) {\n parent = this._terminal.element.parentNode;\n if (parent) {\n this._terminal.element.removeChild(this._terminal.rowContainer);\n }\n }\n\n let width = this._terminal.cols;\n let y = start;\n\n if (end >= this._terminal.rows) {\n this._terminal.log('`end` is too large. Most likely a bad CSR.');\n end = this._terminal.rows - 1;\n }\n\n for (; y <= end; y++) {\n let row = y + this._terminal.ydisp;\n\n let line = this._terminal.lines.get(row);\n\n let x;\n if (this._terminal.y === y - (this._terminal.ybase - this._terminal.ydisp) &&\n this._terminal.cursorState &&\n !this._terminal.cursorHidden) {\n x = this._terminal.x;\n } else {\n x = -1;\n }\n\n let attr = this._terminal.defAttr;\n\n const documentFragment = document.createDocumentFragment();\n let innerHTML = '';\n let currentElement;\n\n // Return the row's spans to the pool\n while (this._terminal.children[y].children.length) {\n const child = this._terminal.children[y].children[0];\n this._terminal.children[y].removeChild(child);\n this._spanElementObjectPool.release(child);\n }\n\n for (let i = 0; i < width; i++) {\n // TODO: Could data be a more specific type?\n let data: any = line[i][0];\n const ch = line[i][1];\n const ch_width: any = line[i][2];\n if (!ch_width) {\n continue;\n }\n\n if (i === x) {\n data = -1;\n }\n\n if (data !== attr) {\n if (attr !== this._terminal.defAttr) {\n if (innerHTML) {\n currentElement.innerHTML = innerHTML;\n innerHTML = '';\n }\n documentFragment.appendChild(currentElement);\n currentElement = null;\n }\n if (data !== this._terminal.defAttr) {\n if (innerHTML && !currentElement) {\n currentElement = this._spanElementObjectPool.acquire();\n }\n if (currentElement) {\n if (innerHTML) {\n currentElement.innerHTML = innerHTML;\n innerHTML = '';\n }\n documentFragment.appendChild(currentElement);\n }\n currentElement = this._spanElementObjectPool.acquire();\n if (data === -1) {\n currentElement.classList.add('reverse-video');\n currentElement.classList.add('terminal-cursor');\n } else {\n let bg = data & 0x1ff;\n let fg = (data >> 9) & 0x1ff;\n let flags = data >> 18;\n\n if (flags & FLAGS.BOLD) {\n if (!brokenBold) {\n currentElement.classList.add('xterm-bold');\n }\n // See: XTerm*boldColors\n if (fg < 8) {\n fg += 8;\n }\n }\n\n if (flags & FLAGS.UNDERLINE) {\n currentElement.classList.add('xterm-underline');\n }\n\n if (flags & FLAGS.BLINK) {\n currentElement.classList.add('xterm-blink');\n }\n\n // If inverse flag is on, then swap the foreground and background variables.\n if (flags & FLAGS.INVERSE) {\n let temp = bg;\n bg = fg;\n fg = temp;\n // Should inverse just be before the above boldColors effect instead?\n if ((flags & 1) && fg < 8) {\n fg += 8;\n }\n }\n\n if (flags & FLAGS.INVISIBLE) {\n currentElement.classList.add('xterm-hidden');\n }\n\n /**\n * Weird situation: Invert flag used black foreground and white background results\n * in invalid background color, positioned at the 256 index of the 256 terminal\n * color map. 
Pin the colors manually in such a case.\n *\n * Source: https://github.com/sourcelair/xterm.js/issues/57\n */\n if (flags & FLAGS.INVERSE) {\n if (bg === 257) {\n bg = 15;\n }\n if (fg === 256) {\n fg = 0;\n }\n }\n\n if (bg < 256) {\n currentElement.classList.add(`xterm-bg-color-${bg}`);\n }\n\n if (fg < 256) {\n currentElement.classList.add(`xterm-color-${fg}`);\n }\n }\n }\n }\n\n if (ch_width === 2) {\n // Wrap wide characters so they're sized correctly. It's more difficult to release these\n // from the object pool so just create new ones via innerHTML.\n innerHTML += `${ch}`;\n } else if (ch.charCodeAt(0) > 255) {\n // Wrap any non-wide unicode character as some fonts size them badly\n innerHTML += `${ch}`;\n } else {\n switch (ch) {\n case '&':\n innerHTML += '&';\n break;\n case '<':\n innerHTML += '<';\n break;\n case '>':\n innerHTML += '>';\n break;\n default:\n if (ch <= ' ') {\n innerHTML += ' ';\n } else {\n innerHTML += ch;\n }\n break;\n }\n }\n\n attr = data;\n }\n\n if (innerHTML && !currentElement) {\n currentElement = this._spanElementObjectPool.acquire();\n }\n if (currentElement) {\n if (innerHTML) {\n currentElement.innerHTML = innerHTML;\n innerHTML = '';\n }\n documentFragment.appendChild(currentElement);\n currentElement = null;\n }\n\n this._terminal.children[y].appendChild(documentFragment);\n }\n\n if (parent) {\n this._terminal.element.appendChild(this._terminal.rowContainer);\n }\n\n this._terminal.emit('refresh', {element: this._terminal.element, start: start, end: end});\n };\n\n /**\n * Refreshes the selection in the DOM.\n * @param start The selection start.\n * @param end The selection end.\n */\n public refreshSelection(start: [number, number], end: [number, number]) {\n // Remove all selections\n while (this._terminal.selectionContainer.children.length) {\n this._terminal.selectionContainer.removeChild(this._terminal.selectionContainer.children[0]);\n }\n\n // Selection does not exist\n if (!start || !end) {\n return;\n }\n\n // Translate from buffer position to viewport position\n const viewportStartRow = start[1] - this._terminal.ydisp;\n const viewportEndRow = end[1] - this._terminal.ydisp;\n const viewportCappedStartRow = Math.max(viewportStartRow, 0);\n const viewportCappedEndRow = Math.min(viewportEndRow, this._terminal.rows - 1);\n\n // No need to draw the selection\n if (viewportCappedStartRow >= this._terminal.rows || viewportCappedEndRow < 0) {\n return;\n }\n\n // Create the selections\n const documentFragment = document.createDocumentFragment();\n // Draw first row\n const startCol = viewportStartRow === viewportCappedStartRow ? start[0] : 0;\n const endCol = viewportCappedStartRow === viewportCappedEndRow ? end[0] : this._terminal.cols;\n documentFragment.appendChild(this._createSelectionElement(viewportCappedStartRow, startCol, endCol));\n // Draw middle rows\n const middleRowsCount = viewportCappedEndRow - viewportCappedStartRow - 1;\n documentFragment.appendChild(this._createSelectionElement(viewportCappedStartRow + 1, 0, this._terminal.cols, middleRowsCount));\n // Draw final row\n if (viewportCappedStartRow !== viewportCappedEndRow) {\n // Only draw viewportEndRow if it's not the same as viewporttartRow\n const endCol = viewportEndRow === viewportCappedEndRow ? 
end[0] : this._terminal.cols;\n documentFragment.appendChild(this._createSelectionElement(viewportCappedEndRow, 0, endCol));\n }\n this._terminal.selectionContainer.appendChild(documentFragment);\n }\n\n /**\n * Creates a selection element at the specified position.\n * @param row The row of the selection.\n * @param colStart The start column.\n * @param colEnd The end columns.\n */\n private _createSelectionElement(row: number, colStart: number, colEnd: number, rowCount: number = 1): HTMLElement {\n const element = document.createElement('div');\n element.style.height = `${rowCount * this._terminal.charMeasure.height}px`;\n element.style.top = `${row * this._terminal.charMeasure.height}px`;\n element.style.left = `${colStart * this._terminal.charMeasure.width}px`;\n element.style.width = `${this._terminal.charMeasure.width * (colEnd - colStart)}px`;\n return element;\n }\n}\n\n\n// If bold is broken, we can't use it in the terminal.\nfunction checkBoldBroken(terminal) {\n const document = terminal.ownerDocument;\n const el = document.createElement('span');\n el.innerHTML = 'hello world';\n terminal.appendChild(el);\n const w1 = el.offsetWidth;\n const h1 = el.offsetHeight;\n el.style.fontWeight = 'bold';\n const w2 = el.offsetWidth;\n const h2 = el.offsetHeight;\n terminal.removeChild(el);\n return w1 !== w2 || h1 !== h2;\n}\n","/**\n * @license MIT\n */\n\nimport { C0 } from './EscapeSequences';\nimport { IInputHandler } from './Interfaces';\nimport { CHARSETS, DEFAULT_CHARSET } from './Charsets';\n\nconst normalStateHandler: {[key: string]: (parser: Parser, handler: IInputHandler) => void} = {};\nnormalStateHandler[C0.BEL] = (parser, handler) => handler.bell();\nnormalStateHandler[C0.LF] = (parser, handler) => handler.lineFeed();\nnormalStateHandler[C0.VT] = normalStateHandler[C0.LF];\nnormalStateHandler[C0.FF] = normalStateHandler[C0.LF];\nnormalStateHandler[C0.CR] = (parser, handler) => handler.carriageReturn();\nnormalStateHandler[C0.BS] = (parser, handler) => handler.backspace();\nnormalStateHandler[C0.HT] = (parser, handler) => handler.tab();\nnormalStateHandler[C0.SO] = (parser, handler) => handler.shiftOut();\nnormalStateHandler[C0.SI] = (parser, handler) => handler.shiftIn();\nnormalStateHandler[C0.ESC] = (parser, handler) => parser.setState(ParserState.ESCAPED);\n\n// TODO: Remove terminal when parser owns params and currentParam\nconst escapedStateHandler: {[key: string]: (parser: Parser, terminal: any) => void} = {};\nescapedStateHandler['['] = (parser, terminal) => {\n // ESC [ Control Sequence Introducer (CSI is 0x9b)\n terminal.params = [];\n terminal.currentParam = 0;\n parser.setState(ParserState.CSI_PARAM);\n};\nescapedStateHandler[']'] = (parser, terminal) => {\n // ESC ] Operating System Command (OSC is 0x9d)\n terminal.params = [];\n terminal.currentParam = 0;\n parser.setState(ParserState.OSC);\n};\nescapedStateHandler['P'] = (parser, terminal) => {\n // ESC P Device Control String (DCS is 0x90)\n terminal.params = [];\n terminal.currentParam = 0;\n parser.setState(ParserState.DCS);\n};\nescapedStateHandler['_'] = (parser, terminal) => {\n // ESC _ Application Program Command ( APC is 0x9f).\n parser.setState(ParserState.IGNORE);\n};\nescapedStateHandler['^'] = (parser, terminal) => {\n // ESC ^ Privacy Message ( PM is 0x9e).\n parser.setState(ParserState.IGNORE);\n};\nescapedStateHandler['c'] = (parser, terminal) => {\n // ESC c Full Reset (RIS).\n terminal.reset();\n};\nescapedStateHandler['E'] = (parser, terminal) => {\n // ESC E Next Line ( NEL is 0x85).\n 
terminal.x = 0;\n terminal.index();\n parser.setState(ParserState.NORMAL);\n};\nescapedStateHandler['D'] = (parser, terminal) => {\n // ESC D Index ( IND is 0x84).\n terminal.index();\n parser.setState(ParserState.NORMAL);\n};\nescapedStateHandler['M'] = (parser, terminal) => {\n // ESC M Reverse Index ( RI is 0x8d).\n terminal.reverseIndex();\n parser.setState(ParserState.NORMAL);\n};\nescapedStateHandler['%'] = (parser, terminal) => {\n // ESC % Select default/utf-8 character set.\n // @ = default, G = utf-8\n terminal.setgLevel(0);\n terminal.setgCharset(0, DEFAULT_CHARSET); // US (default)\n parser.setState(ParserState.NORMAL);\n parser.skipNextChar();\n};\nescapedStateHandler[C0.CAN] = (parser) => parser.setState(ParserState.NORMAL);\n\nconst csiParamStateHandler: {[key: string]: (parser: Parser) => void} = {};\ncsiParamStateHandler['?'] = (parser) => parser.setPrefix('?');\ncsiParamStateHandler['>'] = (parser) => parser.setPrefix('>');\ncsiParamStateHandler['!'] = (parser) => parser.setPrefix('!');\ncsiParamStateHandler['0'] = (parser) => parser.setParam(parser.getParam() * 10);\ncsiParamStateHandler['1'] = (parser) => parser.setParam(parser.getParam() * 10 + 1);\ncsiParamStateHandler['2'] = (parser) => parser.setParam(parser.getParam() * 10 + 2);\ncsiParamStateHandler['3'] = (parser) => parser.setParam(parser.getParam() * 10 + 3);\ncsiParamStateHandler['4'] = (parser) => parser.setParam(parser.getParam() * 10 + 4);\ncsiParamStateHandler['5'] = (parser) => parser.setParam(parser.getParam() * 10 + 5);\ncsiParamStateHandler['6'] = (parser) => parser.setParam(parser.getParam() * 10 + 6);\ncsiParamStateHandler['7'] = (parser) => parser.setParam(parser.getParam() * 10 + 7);\ncsiParamStateHandler['8'] = (parser) => parser.setParam(parser.getParam() * 10 + 8);\ncsiParamStateHandler['9'] = (parser) => parser.setParam(parser.getParam() * 10 + 9);\ncsiParamStateHandler['$'] = (parser) => parser.setPostfix('$');\ncsiParamStateHandler['\"'] = (parser) => parser.setPostfix('\"');\ncsiParamStateHandler[' '] = (parser) => parser.setPostfix(' ');\ncsiParamStateHandler['\\''] = (parser) => parser.setPostfix('\\'');\ncsiParamStateHandler[';'] = (parser) => parser.finalizeParam();\ncsiParamStateHandler[C0.CAN] = (parser) => parser.setState(ParserState.NORMAL);\n\nconst csiStateHandler: {[key: string]: (handler: IInputHandler, params: number[], prefix: string, postfix: string, parser: Parser) => void} = {};\ncsiStateHandler['@'] = (handler, params, prefix) => handler.insertChars(params);\ncsiStateHandler['A'] = (handler, params, prefix) => handler.cursorUp(params);\ncsiStateHandler['B'] = (handler, params, prefix) => handler.cursorDown(params);\ncsiStateHandler['C'] = (handler, params, prefix) => handler.cursorForward(params);\ncsiStateHandler['D'] = (handler, params, prefix) => handler.cursorBackward(params);\ncsiStateHandler['E'] = (handler, params, prefix) => handler.cursorNextLine(params);\ncsiStateHandler['F'] = (handler, params, prefix) => handler.cursorPrecedingLine(params);\ncsiStateHandler['G'] = (handler, params, prefix) => handler.cursorCharAbsolute(params);\ncsiStateHandler['H'] = (handler, params, prefix) => handler.cursorPosition(params);\ncsiStateHandler['I'] = (handler, params, prefix) => handler.cursorForwardTab(params);\ncsiStateHandler['J'] = (handler, params, prefix) => handler.eraseInDisplay(params);\ncsiStateHandler['K'] = (handler, params, prefix) => handler.eraseInLine(params);\ncsiStateHandler['L'] = (handler, params, prefix) => handler.insertLines(params);\ncsiStateHandler['M'] 
= (handler, params, prefix) => handler.deleteLines(params);\ncsiStateHandler['P'] = (handler, params, prefix) => handler.deleteChars(params);\ncsiStateHandler['S'] = (handler, params, prefix) => handler.scrollUp(params);\ncsiStateHandler['T'] = (handler, params, prefix) => {\n if (params.length < 2 && !prefix) {\n handler.scrollDown(params);\n }\n};\ncsiStateHandler['X'] = (handler, params, prefix) => handler.eraseChars(params);\ncsiStateHandler['Z'] = (handler, params, prefix) => handler.cursorBackwardTab(params);\ncsiStateHandler['`'] = (handler, params, prefix) => handler.charPosAbsolute(params);\ncsiStateHandler['a'] = (handler, params, prefix) => handler.HPositionRelative(params);\ncsiStateHandler['b'] = (handler, params, prefix) => handler.repeatPrecedingCharacter(params);\ncsiStateHandler['c'] = (handler, params, prefix) => handler.sendDeviceAttributes(params);\ncsiStateHandler['d'] = (handler, params, prefix) => handler.linePosAbsolute(params);\ncsiStateHandler['e'] = (handler, params, prefix) => handler.VPositionRelative(params);\ncsiStateHandler['f'] = (handler, params, prefix) => handler.HVPosition(params);\ncsiStateHandler['g'] = (handler, params, prefix) => handler.tabClear(params);\ncsiStateHandler['h'] = (handler, params, prefix) => handler.setMode(params);\ncsiStateHandler['l'] = (handler, params, prefix) => handler.resetMode(params);\ncsiStateHandler['m'] = (handler, params, prefix) => handler.charAttributes(params);\ncsiStateHandler['n'] = (handler, params, prefix) => handler.deviceStatus(params);\ncsiStateHandler['p'] = (handler, params, prefix) => {\n switch (prefix) {\n case '!': handler.softReset(params); break;\n }\n};\ncsiStateHandler['q'] = (handler, params, prefix, postfix) => {\n if (postfix === ' ') {\n handler.setCursorStyle(params);\n }\n};\ncsiStateHandler['r'] = (handler, params) => handler.setScrollRegion(params);\ncsiStateHandler['s'] = (handler, params) => handler.saveCursor(params);\ncsiStateHandler['u'] = (handler, params) => handler.restoreCursor(params);\ncsiStateHandler[C0.CAN] = (handler, params, prefix, postfix, parser) => parser.setState(ParserState.NORMAL);\n\nenum ParserState {\n NORMAL = 0,\n ESCAPED = 1,\n CSI_PARAM = 2,\n CSI = 3,\n OSC = 4,\n CHARSET = 5,\n DCS = 6,\n IGNORE = 7\n}\n\n/**\n * The terminal's parser, all input into the terminal goes through the parser\n * which parses and defers the actual input handling the the IInputHandler\n * specified in the constructor.\n */\nexport class Parser {\n private _state: ParserState;\n private _position: number;\n\n // TODO: Remove terminal when handler can do everything\n constructor(\n private _inputHandler: IInputHandler,\n private _terminal: any\n ) {\n this._state = ParserState.NORMAL;\n }\n\n /**\n * Parse and handle data.\n *\n * @param data The data to parse.\n */\n public parse(data: string): ParserState {\n let l = data.length, j, cs, ch, code, low;\n\n this._position = 0;\n // apply leftover surrogate high from last write\n if (this._terminal.surrogate_high) {\n data = this._terminal.surrogate_high + data;\n this._terminal.surrogate_high = '';\n }\n\n for (; this._position < l; this._position++) {\n ch = data[this._position];\n\n // FIXME: higher chars than 0xa0 are not allowed in escape sequences\n // --> maybe move to default\n code = data.charCodeAt(this._position);\n if (0xD800 <= code && code <= 0xDBFF) {\n // we got a surrogate high\n // get surrogate low (next 2 bytes)\n low = data.charCodeAt(this._position + 1);\n if (isNaN(low)) {\n // end of data stream, save surrogate 
high\n this._terminal.surrogate_high = ch;\n continue;\n }\n code = ((code - 0xD800) * 0x400) + (low - 0xDC00) + 0x10000;\n ch += data.charAt(this._position + 1);\n }\n // surrogate low - already handled above\n if (0xDC00 <= code && code <= 0xDFFF)\n continue;\n\n switch (this._state) {\n case ParserState.NORMAL:\n if (ch in normalStateHandler) {\n normalStateHandler[ch](this, this._inputHandler);\n } else {\n this._inputHandler.addChar(ch, code);\n }\n break;\n case ParserState.ESCAPED:\n if (ch in escapedStateHandler) {\n escapedStateHandler[ch](this, this._terminal);\n // Skip switch as it was just handled\n break;\n }\n switch (ch) {\n\n // ESC (,),*,+,-,. Designate G0-G2 Character Set.\n case '(': // <-- this seems to get all the attention\n case ')':\n case '*':\n case '+':\n case '-':\n case '.':\n switch (ch) {\n case '(':\n this._terminal.gcharset = 0;\n break;\n case ')':\n this._terminal.gcharset = 1;\n break;\n case '*':\n this._terminal.gcharset = 2;\n break;\n case '+':\n this._terminal.gcharset = 3;\n break;\n case '-':\n this._terminal.gcharset = 1;\n break;\n case '.':\n this._terminal.gcharset = 2;\n break;\n }\n this._state = ParserState.CHARSET;\n break;\n\n // Designate G3 Character Set (VT300).\n // A = ISO Latin-1 Supplemental.\n // Not implemented.\n case '/':\n this._terminal.gcharset = 3;\n this._state = ParserState.CHARSET;\n this._position--;\n break;\n\n // ESC N\n // Single Shift Select of G2 Character Set\n // ( SS2 is 0x8e). This affects next character only.\n case 'N':\n break;\n // ESC O\n // Single Shift Select of G3 Character Set\n // ( SS3 is 0x8f). This affects next character only.\n case 'O':\n break;\n // ESC n\n // Invoke the G2 Character Set as GL (LS2).\n case 'n':\n this._terminal.setgLevel(2);\n break;\n // ESC o\n // Invoke the G3 Character Set as GL (LS3).\n case 'o':\n this._terminal.setgLevel(3);\n break;\n // ESC |\n // Invoke the G3 Character Set as GR (LS3R).\n case '|':\n this._terminal.setgLevel(3);\n break;\n // ESC }\n // Invoke the G2 Character Set as GR (LS2R).\n case '}':\n this._terminal.setgLevel(2);\n break;\n // ESC ~\n // Invoke the G1 Character Set as GR (LS1R).\n case '~':\n this._terminal.setgLevel(1);\n break;\n\n // ESC 7 Save Cursor (DECSC).\n case '7':\n this._inputHandler.saveCursor();\n this._state = ParserState.NORMAL;\n break;\n\n // ESC 8 Restore Cursor (DECRC).\n case '8':\n this._inputHandler.restoreCursor();\n this._state = ParserState.NORMAL;\n break;\n\n // ESC # 3 DEC line height/width\n case '#':\n this._state = ParserState.NORMAL;\n this._position++;\n break;\n\n // ESC H Tab Set (HTS is 0x88).\n case 'H':\n this._terminal.tabSet();\n this._state = ParserState.NORMAL;\n break;\n\n // ESC = Application Keypad (DECKPAM).\n case '=':\n this._terminal.log('Serial port requested application keypad.');\n this._terminal.applicationKeypad = true;\n this._terminal.viewport.syncScrollArea();\n this._state = ParserState.NORMAL;\n break;\n\n // ESC > Normal Keypad (DECKPNM).\n case '>':\n this._terminal.log('Switching back to normal keypad.');\n this._terminal.applicationKeypad = false;\n this._terminal.viewport.syncScrollArea();\n this._state = ParserState.NORMAL;\n break;\n\n default:\n this._state = ParserState.NORMAL;\n this._terminal.error('Unknown ESC control: %s.', ch);\n break;\n }\n break;\n\n case ParserState.CHARSET:\n if (ch in CHARSETS) {\n cs = CHARSETS[ch];\n if (ch === '/') { // ISOLatin is actually /A\n this.skipNextChar();\n }\n } else {\n cs = DEFAULT_CHARSET;\n }\n 
this._terminal.setgCharset(this._terminal.gcharset, cs);\n this._terminal.gcharset = null;\n this._state = ParserState.NORMAL;\n break;\n\n case ParserState.OSC:\n // OSC Ps ; Pt ST\n // OSC Ps ; Pt BEL\n // Set Text Parameters.\n if (ch === C0.ESC || ch === C0.BEL) {\n if (ch === C0.ESC) this._position++;\n\n this._terminal.params.push(this._terminal.currentParam);\n\n switch (this._terminal.params[0]) {\n case 0:\n case 1:\n case 2:\n if (this._terminal.params[1]) {\n this._terminal.title = this._terminal.params[1];\n this._terminal.handleTitle(this._terminal.title);\n }\n break;\n case 3:\n // set X property\n break;\n case 4:\n case 5:\n // change dynamic colors\n break;\n case 10:\n case 11:\n case 12:\n case 13:\n case 14:\n case 15:\n case 16:\n case 17:\n case 18:\n case 19:\n // change dynamic ui colors\n break;\n case 46:\n // change log file\n break;\n case 50:\n // dynamic font\n break;\n case 51:\n // emacs shell\n break;\n case 52:\n // manipulate selection data\n break;\n case 104:\n case 105:\n case 110:\n case 111:\n case 112:\n case 113:\n case 114:\n case 115:\n case 116:\n case 117:\n case 118:\n // reset colors\n break;\n }\n\n this._terminal.params = [];\n this._terminal.currentParam = 0;\n this._state = ParserState.NORMAL;\n } else {\n if (!this._terminal.params.length) {\n if (ch >= '0' && ch <= '9') {\n this._terminal.currentParam =\n this._terminal.currentParam * 10 + ch.charCodeAt(0) - 48;\n } else if (ch === ';') {\n this._terminal.params.push(this._terminal.currentParam);\n this._terminal.currentParam = '';\n }\n } else {\n this._terminal.currentParam += ch;\n }\n }\n break;\n\n case ParserState.CSI_PARAM:\n if (ch in csiParamStateHandler) {\n csiParamStateHandler[ch](this);\n break;\n }\n this.finalizeParam();\n // Fall through the CSI as this character should be the CSI code.\n this._state = ParserState.CSI;\n\n case ParserState.CSI:\n if (ch in csiStateHandler) {\n csiStateHandler[ch](this._inputHandler, this._terminal.params, this._terminal.prefix, this._terminal.postfix, this);\n } else {\n this._terminal.error('Unknown CSI code: %s.', ch);\n }\n\n this._state = ParserState.NORMAL;\n this._terminal.prefix = '';\n this._terminal.postfix = '';\n break;\n\n case ParserState.DCS:\n if (ch === C0.ESC || ch === C0.BEL) {\n if (ch === C0.ESC) this._position++;\n let pt;\n let valid: boolean;\n\n switch (this._terminal.prefix) {\n // User-Defined Keys (DECUDK).\n case '':\n break;\n\n // Request Status String (DECRQSS).\n // test: echo -e '\\eP$q\"p\\e\\\\'\n case '$q':\n pt = this._terminal.currentParam;\n valid = false;\n\n switch (pt) {\n // DECSCA\n case '\"q':\n pt = '0\"q';\n break;\n\n // DECSCL\n case '\"p':\n pt = '61\"p';\n break;\n\n // DECSTBM\n case 'r':\n pt = ''\n + (this._terminal.scrollTop + 1)\n + ';'\n + (this._terminal.scrollBottom + 1)\n + 'r';\n break;\n\n // SGR\n case 'm':\n pt = '0m';\n break;\n\n default:\n this._terminal.error('Unknown DCS Pt: %s.', pt);\n pt = '';\n break;\n }\n\n this._terminal.send(C0.ESC + 'P' + +valid + '$r' + pt + C0.ESC + '\\\\');\n break;\n\n // Set Termcap/Terminfo Data (xterm, experimental).\n case '+p':\n break;\n\n // Request Termcap/Terminfo String (xterm, experimental)\n // Regular xterm does not even respond to this sequence.\n // This can cause a small glitch in vim.\n // test: echo -ne '\\eP+q6b64\\e\\\\'\n case '+q':\n pt = this._terminal.currentParam;\n valid = false;\n\n this._terminal.send(C0.ESC + 'P' + +valid + '+r' + pt + C0.ESC + '\\\\');\n break;\n\n default:\n this._terminal.error('Unknown DCS 
prefix: %s.', this._terminal.prefix);\n break;\n }\n\n this._terminal.currentParam = 0;\n this._terminal.prefix = '';\n this._state = ParserState.NORMAL;\n } else if (!this._terminal.currentParam) {\n if (!this._terminal.prefix && ch !== '$' && ch !== '+') {\n this._terminal.currentParam = ch;\n } else if (this._terminal.prefix.length === 2) {\n this._terminal.currentParam = ch;\n } else {\n this._terminal.prefix += ch;\n }\n } else {\n this._terminal.currentParam += ch;\n }\n break;\n\n case ParserState.IGNORE:\n // For PM and APC.\n if (ch === C0.ESC || ch === C0.BEL) {\n if (ch === C0.ESC) this._position++;\n this._state = ParserState.NORMAL;\n }\n break;\n }\n }\n return this._state;\n }\n\n /**\n * Set the parser's current parsing state.\n *\n * @param state The new state.\n */\n public setState(state: ParserState): void {\n this._state = state;\n }\n\n /**\n * Sets the parsier's current prefix. CSI codes can have prefixes of '?', '>'\n * or '!'.\n *\n * @param prefix The prefix.\n */\n public setPrefix(prefix: string): void {\n this._terminal.prefix = prefix;\n }\n\n /**\n * Sets the parsier's current prefix. CSI codes can have postfixes of '$',\n * '\"', ' ', '\\''.\n *\n * @param postfix The postfix.\n */\n public setPostfix(postfix: string): void {\n this._terminal.postfix = postfix;\n }\n\n /**\n * Sets the parser's current parameter.\n *\n * @param param the parameter.\n */\n public setParam(param: number) {\n this._terminal.currentParam = param;\n }\n\n /**\n * Gets the parser's current parameter.\n */\n public getParam(): number {\n return this._terminal.currentParam;\n }\n\n /**\n * Finalizes the parser's current parameter, adding it to the list of\n * parameters and setting the new current parameter to 0.\n */\n public finalizeParam(): void {\n this._terminal.params.push(this._terminal.currentParam);\n this._terminal.currentParam = 0;\n }\n\n /**\n * Tell the parser to skip the next character.\n */\n public skipNextChar(): void {\n this._position++;\n }\n\n /**\n * Tell the parser to repeat parsing the current character (for example if it\n * needs parsing using a different state.\n */\n // public repeatChar(): void {\n // this._position--;\n // }\n}\n","/**\n * @license MIT\n */\n\nimport { LinkMatcherOptions } from './Interfaces';\nimport { LinkMatcher, LinkMatcherHandler, LinkMatcherValidationCallback } from './Types';\n\nconst INVALID_LINK_CLASS = 'xterm-invalid-link';\n\nconst protocolClause = '(https?:\\\\/\\\\/)';\nconst domainCharacterSet = '[\\\\da-z\\\\.-]+';\nconst negatedDomainCharacterSet = '[^\\\\da-z\\\\.-]+';\nconst domainBodyClause = '(' + domainCharacterSet + ')';\nconst tldClause = '([a-z\\\\.]{2,6})';\nconst ipClause = '((\\\\d{1,3}\\\\.){3}\\\\d{1,3})';\nconst localHostClause = '(localhost)';\nconst portClause = '(:\\\\d{1,5})';\nconst hostClause = '((' + domainBodyClause + '\\\\.' + tldClause + ')|' + ipClause + '|' + localHostClause + ')' + portClause + '?';\nconst pathClause = '(\\\\/[\\\\/\\\\w\\\\.\\\\-%~]*)*';\nconst queryStringHashFragmentCharacterSet = '[0-9\\\\w\\\\[\\\\]\\\\(\\\\)\\\\/\\\\?\\\\!#@$%&\\'*+,:;~\\\\=\\\\.\\\\-]*';\nconst queryStringClause = '(\\\\?' 
+ queryStringHashFragmentCharacterSet + ')?';\nconst hashFragmentClause = '(#' + queryStringHashFragmentCharacterSet + ')?';\nconst negatedPathCharacterSet = '[^\\\\/\\\\w\\\\.\\\\-%]+';\nconst bodyClause = hostClause + pathClause + queryStringClause + hashFragmentClause;\nconst start = '(?:^|' + negatedDomainCharacterSet + ')(';\nconst end = ')($|' + negatedPathCharacterSet + ')';\nconst strictUrlRegex = new RegExp(start + protocolClause + bodyClause + end);\n\n/**\n * The ID of the built in http(s) link matcher.\n */\nconst HYPERTEXT_LINK_MATCHER_ID = 0;\n\n/**\n * The Linkifier applies links to rows shortly after they have been refreshed.\n */\nexport class Linkifier {\n /**\n * The time to wait after a row is changed before it is linkified. This prevents\n * the costly operation of searching every row multiple times, potentially a\n * huge amount of times.\n */\n protected static TIME_BEFORE_LINKIFY = 200;\n\n protected _linkMatchers: LinkMatcher[];\n\n private _document: Document;\n private _rows: HTMLElement[];\n private _rowTimeoutIds: number[];\n private _nextLinkMatcherId = HYPERTEXT_LINK_MATCHER_ID;\n\n constructor() {\n this._rowTimeoutIds = [];\n this._linkMatchers = [];\n this.registerLinkMatcher(strictUrlRegex, null, { matchIndex: 1 });\n }\n\n /**\n * Attaches the linkifier to the DOM, enabling linkification.\n * @param document The document object.\n * @param rows The array of rows to apply links to.\n */\n public attachToDom(document: Document, rows: HTMLElement[]) {\n this._document = document;\n this._rows = rows;\n }\n\n /**\n * Queues a row for linkification.\n * @param {number} rowIndex The index of the row to linkify.\n */\n public linkifyRow(rowIndex: number): void {\n // Don't attempt linkify if not yet attached to DOM\n if (!this._document) {\n return;\n }\n\n const timeoutId = this._rowTimeoutIds[rowIndex];\n if (timeoutId) {\n clearTimeout(timeoutId);\n }\n this._rowTimeoutIds[rowIndex] = setTimeout(this._linkifyRow.bind(this, rowIndex), Linkifier.TIME_BEFORE_LINKIFY);\n }\n\n /**\n * Attaches a handler for hypertext links, overriding default behavior\n * for standard http(s) links.\n * @param {LinkHandler} handler The handler to use, this can be cleared with\n * null.\n */\n public setHypertextLinkHandler(handler: LinkMatcherHandler): void {\n this._linkMatchers[HYPERTEXT_LINK_MATCHER_ID].handler = handler;\n }\n\n /**\n * Attaches a validation callback for hypertext links.\n * @param {LinkMatcherValidationCallback} callback The callback to use, this\n * can be cleared with null.\n */\n public setHypertextValidationCallback(callback: LinkMatcherValidationCallback): void {\n this._linkMatchers[HYPERTEXT_LINK_MATCHER_ID].validationCallback = callback;\n }\n\n /**\n * Registers a link matcher, allowing custom link patterns to be matched and\n * handled.\n * @param {RegExp} regex The regular expression to search for, specifically\n * this searches the textContent of the rows. 
You will want to use \\s to match\n * a space ' ' character for example.\n * @param {LinkHandler} handler The callback when the link is called.\n * @param {LinkMatcherOptions} [options] Options for the link matcher.\n * @return {number} The ID of the new matcher, this can be used to deregister.\n */\n public registerLinkMatcher(regex: RegExp, handler: LinkMatcherHandler, options: LinkMatcherOptions = {}): number {\n if (this._nextLinkMatcherId !== HYPERTEXT_LINK_MATCHER_ID && !handler) {\n throw new Error('handler must be defined');\n }\n const matcher: LinkMatcher = {\n id: this._nextLinkMatcherId++,\n regex,\n handler,\n matchIndex: options.matchIndex,\n validationCallback: options.validationCallback,\n priority: options.priority || 0\n };\n this._addLinkMatcherToList(matcher);\n return matcher.id;\n }\n\n /**\n * Inserts a link matcher to the list in the correct position based on the\n * priority of each link matcher. New link matchers of equal priority are\n * considered after older link matchers.\n * @param matcher The link matcher to be added.\n */\n private _addLinkMatcherToList(matcher: LinkMatcher): void {\n if (this._linkMatchers.length === 0) {\n this._linkMatchers.push(matcher);\n return;\n }\n\n for (let i = this._linkMatchers.length - 1; i >= 0; i--) {\n if (matcher.priority <= this._linkMatchers[i].priority) {\n this._linkMatchers.splice(i + 1, 0, matcher);\n return;\n }\n }\n\n this._linkMatchers.splice(0, 0, matcher);\n }\n\n /**\n * Deregisters a link matcher if it has been registered.\n * @param {number} matcherId The link matcher's ID (returned after register)\n * @return {boolean} Whether a link matcher was found and deregistered.\n */\n public deregisterLinkMatcher(matcherId: number): boolean {\n // ID 0 is the hypertext link matcher which cannot be deregistered\n for (let i = 1; i < this._linkMatchers.length; i++) {\n if (this._linkMatchers[i].id === matcherId) {\n this._linkMatchers.splice(i, 1);\n return true;\n }\n }\n return false;\n }\n\n /**\n * Linkifies a row.\n * @param {number} rowIndex The index of the row to linkify.\n */\n private _linkifyRow(rowIndex: number): void {\n const row = this._rows[rowIndex];\n if (!row) {\n return;\n }\n const text = row.textContent;\n for (let i = 0; i < this._linkMatchers.length; i++) {\n const matcher = this._linkMatchers[i];\n const linkElements = this._doLinkifyRow(row, matcher);\n if (linkElements.length > 0) {\n // Fire validation callback\n if (matcher.validationCallback) {\n for (let j = 0; j < linkElements.length; j++) {\n const element = linkElements[j];\n matcher.validationCallback(element.textContent, element, isValid => {\n if (!isValid) {\n element.classList.add(INVALID_LINK_CLASS);\n }\n });\n }\n }\n // Only allow a single LinkMatcher to trigger on any given row.\n return;\n }\n }\n }\n\n /**\n * Linkifies a row given a specific handler.\n * @param {HTMLElement} row The row to linkify.\n * @param {LinkMatcher} matcher The link matcher for this line.\n * @return The link element(s) that were added.\n */\n private _doLinkifyRow(row: HTMLElement, matcher: LinkMatcher): HTMLElement[] {\n // Iterate over nodes as we want to consider text nodes\n let result = [];\n const isHttpLinkMatcher = matcher.id === HYPERTEXT_LINK_MATCHER_ID;\n const nodes = row.childNodes;\n\n // Find the first match\n let match = row.textContent.match(matcher.regex);\n if (!match || match.length === 0) {\n return result;\n }\n let uri = match[typeof matcher.matchIndex !== 'number' ? 
0 : matcher.matchIndex];\n // Set the next searches start index\n let rowStartIndex = match.index + uri.length;\n\n for (let i = 0; i < nodes.length; i++) {\n const node = nodes[i];\n const searchIndex = node.textContent.indexOf(uri);\n if (searchIndex >= 0) {\n const linkElement = this._createAnchorElement(uri, matcher.handler, isHttpLinkMatcher);\n if (node.textContent.length === uri.length) {\n // Matches entire string\n if (node.nodeType === 3 /*Node.TEXT_NODE*/) {\n this._replaceNode(node, linkElement);\n } else {\n const element = (node);\n if (element.nodeName === 'A') {\n // This row has already been linkified\n return result;\n }\n element.innerHTML = '';\n element.appendChild(linkElement);\n }\n } else if (node.childNodes.length > 1) {\n // Matches part of string in an element with multiple child nodes\n for (let j = 0; j < node.childNodes.length; j++) {\n const childNode = node.childNodes[j];\n const childSearchIndex = childNode.textContent.indexOf(uri);\n if (childSearchIndex !== -1) {\n // Match found in currentNode\n this._replaceNodeSubstringWithNode(childNode, linkElement, uri, childSearchIndex);\n // Don't need to count nodesAdded by replacing the node as this\n // is a child node, not a top-level node.\n break;\n }\n }\n } else {\n // Matches part of string in a single text node\n const nodesAdded = this._replaceNodeSubstringWithNode(node, linkElement, uri, searchIndex);\n // No need to consider the new nodes\n i += nodesAdded;\n }\n result.push(linkElement);\n\n // Find the next match\n match = row.textContent.substring(rowStartIndex).match(matcher.regex);\n if (!match || match.length === 0) {\n return result;\n }\n uri = match[typeof matcher.matchIndex !== 'number' ? 0 : matcher.matchIndex];\n rowStartIndex += match.index + uri.length;\n }\n }\n return result;\n }\n\n /**\n * Creates a link anchor element.\n * @param {string} uri The uri of the link.\n * @return {HTMLAnchorElement} The link.\n */\n private _createAnchorElement(uri: string, handler: LinkMatcherHandler, isHypertextLinkHandler: boolean): HTMLAnchorElement {\n const element = this._document.createElement('a');\n element.textContent = uri;\n element.draggable = false;\n if (isHypertextLinkHandler) {\n element.href = uri;\n // Force link on another tab so work is not lost\n element.target = '_blank';\n element.addEventListener('click', (event: MouseEvent) => {\n if (handler) {\n return handler(event, uri);\n }\n });\n } else {\n element.addEventListener('click', (event: MouseEvent) => {\n // Don't execute the handler if the link is flagged as invalid\n if (element.classList.contains(INVALID_LINK_CLASS)) {\n return;\n }\n return handler(event, uri);\n });\n }\n return element;\n }\n\n /**\n * Replace a node with 1 or more other nodes.\n * @param {Node} oldNode The node to replace.\n * @param {Node[]} newNodes The new nodes to insert in order.\n */\n private _replaceNode(oldNode: Node, ...newNodes: Node[]): void {\n const parent = oldNode.parentNode;\n for (let i = 0; i < newNodes.length; i++) {\n parent.insertBefore(newNodes[i], oldNode);\n }\n parent.removeChild(oldNode);\n }\n\n /**\n * Replace a substring within a node with a new node.\n * @param {Node} targetNode The target node; either a text node or a \n * containing a single text node.\n * @param {Node} newNode The new node to insert.\n * @param {string} substring The substring to replace.\n * @param {number} substringIndex The index of the substring within the string.\n * @return The number of nodes to skip when searching for the next uri.\n */\n 
private _replaceNodeSubstringWithNode(targetNode: Node, newNode: Node, substring: string, substringIndex: number): number {\n // If the targetNode is a non-text node with a single child, make the child\n // the new targetNode.\n if (targetNode.childNodes.length === 1) {\n targetNode = targetNode.childNodes[0];\n }\n\n // The targetNode will be either a text node or a . The text node\n // (targetNode or its only-child) needs to be replaced with newNode plus new\n // text nodes potentially on either side.\n if (targetNode.nodeType !== 3/*Node.TEXT_NODE*/) {\n throw new Error('targetNode must be a text node or only contain a single text node');\n }\n\n const fullText = targetNode.textContent;\n\n if (substringIndex === 0) {\n // Replace with \n const rightText = fullText.substring(substring.length);\n const rightTextNode = this._document.createTextNode(rightText);\n this._replaceNode(targetNode, newNode, rightTextNode);\n return 0;\n }\n\n if (substringIndex === targetNode.textContent.length - substring.length) {\n // Replace with \n const leftText = fullText.substring(0, substringIndex);\n const leftTextNode = this._document.createTextNode(leftText);\n this._replaceNode(targetNode, leftTextNode, newNode);\n return 0;\n }\n\n // Replace with \n const leftText = fullText.substring(0, substringIndex);\n const leftTextNode = this._document.createTextNode(leftText);\n const rightText = fullText.substring(substringIndex + substring.length);\n const rightTextNode = this._document.createTextNode(rightText);\n this._replaceNode(targetNode, leftTextNode, newNode, rightTextNode);\n return 1;\n }\n}\n","/**\n * @license MIT\n */\n\nimport { IInputHandler, ITerminal } from './Interfaces';\nimport { C0 } from './EscapeSequences';\nimport { DEFAULT_CHARSET } from './Charsets';\n\n/**\n * The terminal's standard implementation of IInputHandler, this handles all\n * input from the Parser.\n *\n * Refer to http://invisible-island.net/xterm/ctlseqs/ctlseqs.html to understand\n * each function's header comment.\n */\nexport class InputHandler implements IInputHandler {\n // TODO: We want to type _terminal when it's pulled into TS\n constructor(private _terminal: any) { }\n\n public addChar(char: string, code: number): void {\n if (char >= ' ') {\n // calculate print space\n // expensive call, therefore we save width in line buffer\n const ch_width = wcwidth(code);\n\n if (this._terminal.charset && this._terminal.charset[char]) {\n char = this._terminal.charset[char];\n }\n\n let row = this._terminal.y + this._terminal.ybase;\n\n // insert combining char in last cell\n // FIXME: needs handling after cursor jumps\n if (!ch_width && this._terminal.x) {\n // dont overflow left\n if (this._terminal.lines.get(row)[this._terminal.x - 1]) {\n if (!this._terminal.lines.get(row)[this._terminal.x - 1][2]) {\n\n // found empty cell after fullwidth, need to go 2 cells back\n if (this._terminal.lines.get(row)[this._terminal.x - 2])\n this._terminal.lines.get(row)[this._terminal.x - 2][1] += char;\n\n } else {\n this._terminal.lines.get(row)[this._terminal.x - 1][1] += char;\n }\n this._terminal.updateRange(this._terminal.y);\n }\n return;\n }\n\n // goto next line if ch would overflow\n // TODO: needs a global min terminal width of 2\n if (this._terminal.x + ch_width - 1 >= this._terminal.cols) {\n // autowrap - DECAWM\n if (this._terminal.wraparoundMode) {\n this._terminal.x = 0;\n this._terminal.y++;\n if (this._terminal.y > this._terminal.scrollBottom) {\n // Insert a new line, scroll and mark as a wrapped line\n 
this._terminal.y--;\n this._terminal.scroll(true);\n } else {\n // The line already exists (eg. the initial viewport), mark it as a\n // wrapped line\n this._terminal.lines.get(this._terminal.y).isWrapped = true;\n }\n } else {\n if (ch_width === 2) // FIXME: check for xterm behavior\n return;\n }\n }\n row = this._terminal.y + this._terminal.ybase;\n\n // insert mode: move characters to right\n if (this._terminal.insertMode) {\n // do this twice for a fullwidth char\n for (let moves = 0; moves < ch_width; ++moves) {\n // remove last cell, if it's width is 0\n // we have to adjust the second last cell as well\n const removed = this._terminal.lines.get(this._terminal.y + this._terminal.ybase).pop();\n if (removed[2] === 0\n && this._terminal.lines.get(row)[this._terminal.cols - 2]\n && this._terminal.lines.get(row)[this._terminal.cols - 2][2] === 2) {\n this._terminal.lines.get(row)[this._terminal.cols - 2] = [this._terminal.curAttr, ' ', 1];\n }\n\n // insert empty cell at cursor\n this._terminal.lines.get(row).splice(this._terminal.x, 0, [this._terminal.curAttr, ' ', 1]);\n }\n }\n\n this._terminal.lines.get(row)[this._terminal.x] = [this._terminal.curAttr, char, ch_width];\n this._terminal.x++;\n this._terminal.updateRange(this._terminal.y);\n\n // fullwidth char - set next cell width to zero and advance cursor\n if (ch_width === 2) {\n this._terminal.lines.get(row)[this._terminal.x] = [this._terminal.curAttr, '', 0];\n this._terminal.x++;\n }\n }\n }\n\n /**\n * BEL\n * Bell (Ctrl-G).\n */\n public bell(): void {\n if (!this._terminal.visualBell) {\n return;\n }\n this._terminal.element.style.borderColor = 'white';\n setTimeout(() => this._terminal.element.style.borderColor = '', 10);\n if (this._terminal.popOnBell) {\n this._terminal.focus();\n }\n }\n\n /**\n * LF\n * Line Feed or New Line (NL). (LF is Ctrl-J).\n */\n public lineFeed(): void {\n if (this._terminal.convertEol) {\n this._terminal.x = 0;\n }\n this._terminal.y++;\n if (this._terminal.y > this._terminal.scrollBottom) {\n this._terminal.y--;\n this._terminal.scroll();\n }\n // If the end of the line is hit, prevent this action from wrapping around to the next line.\n if (this._terminal.x >= this._terminal.cols) {\n this._terminal.x--;\n }\n }\n\n /**\n * CR\n * Carriage Return (Ctrl-M).\n */\n public carriageReturn(): void {\n this._terminal.x = 0;\n }\n\n /**\n * BS\n * Backspace (Ctrl-H).\n */\n public backspace(): void {\n if (this._terminal.x > 0) {\n this._terminal.x--;\n }\n }\n\n /**\n * TAB\n * Horizontal Tab (HT) (Ctrl-I).\n */\n public tab(): void {\n this._terminal.x = this._terminal.nextStop();\n }\n\n /**\n * SO\n * Shift Out (Ctrl-N) -> Switch to Alternate Character Set. This invokes the\n * G1 character set.\n */\n public shiftOut(): void {\n this._terminal.setgLevel(1);\n }\n\n /**\n * SI\n * Shift In (Ctrl-O) -> Switch to Standard Character Set. 
This invokes the G0\n * character set (the default).\n */\n public shiftIn(): void {\n this._terminal.setgLevel(0);\n }\n\n /**\n * CSI Ps @\n * Insert Ps (Blank) Character(s) (default = 1) (ICH).\n */\n public insertChars(params: number[]): void {\n let param, row, j, ch;\n\n param = params[0];\n if (param < 1) param = 1;\n\n row = this._terminal.y + this._terminal.ybase;\n j = this._terminal.x;\n ch = [this._terminal.eraseAttr(), ' ', 1]; // xterm\n\n while (param-- && j < this._terminal.cols) {\n this._terminal.lines.get(row).splice(j++, 0, ch);\n this._terminal.lines.get(row).pop();\n }\n }\n\n /**\n * CSI Ps A\n * Cursor Up Ps Times (default = 1) (CUU).\n */\n public cursorUp(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.y -= param;\n if (this._terminal.y < 0) {\n this._terminal.y = 0;\n }\n }\n\n /**\n * CSI Ps B\n * Cursor Down Ps Times (default = 1) (CUD).\n */\n public cursorDown(params: number[]) {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.y += param;\n if (this._terminal.y >= this._terminal.rows) {\n this._terminal.y = this._terminal.rows - 1;\n }\n // If the end of the line is hit, prevent this action from wrapping around to the next line.\n if (this._terminal.x >= this._terminal.cols) {\n this._terminal.x--;\n }\n }\n\n /**\n * CSI Ps C\n * Cursor Forward Ps Times (default = 1) (CUF).\n */\n public cursorForward(params: number[]) {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.x += param;\n if (this._terminal.x >= this._terminal.cols) {\n this._terminal.x = this._terminal.cols - 1;\n }\n }\n\n /**\n * CSI Ps D\n * Cursor Backward Ps Times (default = 1) (CUB).\n */\n public cursorBackward(params: number[]) {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n // If the end of the line is hit, prevent this action from wrapping around to the next line.\n if (this._terminal.x >= this._terminal.cols) {\n this._terminal.x--;\n }\n this._terminal.x -= param;\n if (this._terminal.x < 0) {\n this._terminal.x = 0;\n }\n }\n\n /**\n * CSI Ps E\n * Cursor Next Line Ps Times (default = 1) (CNL).\n * same as CSI Ps B ?\n */\n public cursorNextLine(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.y += param;\n if (this._terminal.y >= this._terminal.rows) {\n this._terminal.y = this._terminal.rows - 1;\n }\n this._terminal.x = 0;\n };\n\n\n /**\n * CSI Ps F\n * Cursor Preceding Line Ps Times (default = 1) (CNL).\n * reuse CSI Ps A ?\n */\n public cursorPrecedingLine(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.y -= param;\n if (this._terminal.y < 0) {\n this._terminal.y = 0;\n }\n this._terminal.x = 0;\n };\n\n\n /**\n * CSI Ps G\n * Cursor Character Absolute [column] (default = [row,1]) (CHA).\n */\n public cursorCharAbsolute(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.x = param - 1;\n }\n\n /**\n * CSI Ps ; Ps H\n * Cursor Position [row;column] (default = [1,1]) (CUP).\n */\n public cursorPosition(params: number[]): void {\n let row, col;\n\n row = params[0] - 1;\n\n if (params.length >= 2) {\n col = params[1] - 1;\n } else {\n col = 0;\n }\n\n if (row < 0) {\n row = 0;\n } else if (row >= this._terminal.rows) {\n row = this._terminal.rows - 1;\n }\n\n if (col < 0) {\n col = 0;\n } else if (col >= this._terminal.cols) {\n col = this._terminal.cols - 1;\n }\n\n this._terminal.x = col;\n 
this._terminal.y = row;\n }\n\n /**\n * CSI Ps I\n * Cursor Forward Tabulation Ps tab stops (default = 1) (CHT).\n */\n public cursorForwardTab(params: number[]): void {\n let param = params[0] || 1;\n while (param--) {\n this._terminal.x = this._terminal.nextStop();\n }\n }\n\n /**\n * CSI Ps J Erase in Display (ED).\n * Ps = 0 -> Erase Below (default).\n * Ps = 1 -> Erase Above.\n * Ps = 2 -> Erase All.\n * Ps = 3 -> Erase Saved Lines (xterm).\n * CSI ? Ps J\n * Erase in Display (DECSED).\n * Ps = 0 -> Selective Erase Below (default).\n * Ps = 1 -> Selective Erase Above.\n * Ps = 2 -> Selective Erase All.\n */\n public eraseInDisplay(params: number[]): void {\n let j;\n switch (params[0]) {\n case 0:\n this._terminal.eraseRight(this._terminal.x, this._terminal.y);\n j = this._terminal.y + 1;\n for (; j < this._terminal.rows; j++) {\n this._terminal.eraseLine(j);\n }\n break;\n case 1:\n this._terminal.eraseLeft(this._terminal.x, this._terminal.y);\n j = this._terminal.y;\n while (j--) {\n this._terminal.eraseLine(j);\n }\n break;\n case 2:\n j = this._terminal.rows;\n while (j--) this._terminal.eraseLine(j);\n break;\n case 3:\n // Clear scrollback (everything not in viewport)\n const scrollBackSize = this._terminal.lines.length - this._terminal.rows;\n if (scrollBackSize > 0) {\n this._terminal.lines.trimStart(scrollBackSize);\n this._terminal.ybase = Math.max(this._terminal.ybase - scrollBackSize, 0);\n this._terminal.ydisp = Math.max(this._terminal.ydisp - scrollBackSize, 0);\n }\n break;\n }\n }\n\n /**\n * CSI Ps K Erase in Line (EL).\n * Ps = 0 -> Erase to Right (default).\n * Ps = 1 -> Erase to Left.\n * Ps = 2 -> Erase All.\n * CSI ? Ps K\n * Erase in Line (DECSEL).\n * Ps = 0 -> Selective Erase to Right (default).\n * Ps = 1 -> Selective Erase to Left.\n * Ps = 2 -> Selective Erase All.\n */\n public eraseInLine(params: number[]): void {\n switch (params[0]) {\n case 0:\n this._terminal.eraseRight(this._terminal.x, this._terminal.y);\n break;\n case 1:\n this._terminal.eraseLeft(this._terminal.x, this._terminal.y);\n break;\n case 2:\n this._terminal.eraseLine(this._terminal.y);\n break;\n }\n }\n\n /**\n * CSI Ps L\n * Insert Ps Line(s) (default = 1) (IL).\n */\n public insertLines(params: number[]): void {\n let param, row, j;\n\n param = params[0];\n if (param < 1) {\n param = 1;\n }\n row = this._terminal.y + this._terminal.ybase;\n\n j = this._terminal.rows - 1 - this._terminal.scrollBottom;\n j = this._terminal.rows - 1 + this._terminal.ybase - j + 1;\n\n while (param--) {\n if (this._terminal.lines.length === this._terminal.lines.maxLength) {\n // Trim the start of lines to make room for the new line\n this._terminal.lines.trimStart(1);\n this._terminal.ybase--;\n this._terminal.ydisp--;\n row--;\n j--;\n }\n // test: echo -e '\\e[44m\\e[1L\\e[0m'\n // blankLine(true) - xterm/linux behavior\n this._terminal.lines.splice(row, 0, this._terminal.blankLine(true));\n this._terminal.lines.splice(j, 1);\n }\n\n // this.maxRange();\n this._terminal.updateRange(this._terminal.y);\n this._terminal.updateRange(this._terminal.scrollBottom);\n }\n\n /**\n * CSI Ps M\n * Delete Ps Line(s) (default = 1) (DL).\n */\n public deleteLines(params: number[]): void {\n let param, row, j;\n\n param = params[0];\n if (param < 1) {\n param = 1;\n }\n row = this._terminal.y + this._terminal.ybase;\n\n j = this._terminal.rows - 1 - this._terminal.scrollBottom;\n j = this._terminal.rows - 1 + this._terminal.ybase - j;\n\n while (param--) {\n if (this._terminal.lines.length === 
this._terminal.lines.maxLength) {\n // Trim the start of lines to make room for the new line\n this._terminal.lines.trimStart(1);\n this._terminal.ybase -= 1;\n this._terminal.ydisp -= 1;\n }\n // test: echo -e '\\e[44m\\e[1M\\e[0m'\n // blankLine(true) - xterm/linux behavior\n this._terminal.lines.splice(j + 1, 0, this._terminal.blankLine(true));\n this._terminal.lines.splice(row, 1);\n }\n\n // this.maxRange();\n this._terminal.updateRange(this._terminal.y);\n this._terminal.updateRange(this._terminal.scrollBottom);\n }\n\n /**\n * CSI Ps P\n * Delete Ps Character(s) (default = 1) (DCH).\n */\n public deleteChars(params: number[]): void {\n let param, row, ch;\n\n param = params[0];\n if (param < 1) {\n param = 1;\n }\n\n row = this._terminal.y + this._terminal.ybase;\n ch = [this._terminal.eraseAttr(), ' ', 1]; // xterm\n\n while (param--) {\n this._terminal.lines.get(row).splice(this._terminal.x, 1);\n this._terminal.lines.get(row).push(ch);\n }\n }\n\n /**\n * CSI Ps S Scroll up Ps lines (default = 1) (SU).\n */\n public scrollUp(params: number[]): void {\n let param = params[0] || 1;\n while (param--) {\n this._terminal.lines.splice(this._terminal.ybase + this._terminal.scrollTop, 1);\n this._terminal.lines.splice(this._terminal.ybase + this._terminal.scrollBottom, 0, this._terminal.blankLine());\n }\n // this.maxRange();\n this._terminal.updateRange(this._terminal.scrollTop);\n this._terminal.updateRange(this._terminal.scrollBottom);\n }\n\n /**\n * CSI Ps T Scroll down Ps lines (default = 1) (SD).\n */\n public scrollDown(params: number[]): void {\n let param = params[0] || 1;\n while (param--) {\n this._terminal.lines.splice(this._terminal.ybase + this._terminal.scrollBottom, 1);\n this._terminal.lines.splice(this._terminal.ybase + this._terminal.scrollTop, 0, this._terminal.blankLine());\n }\n // this.maxRange();\n this._terminal.updateRange(this._terminal.scrollTop);\n this._terminal.updateRange(this._terminal.scrollBottom);\n }\n\n /**\n * CSI Ps X\n * Erase Ps Character(s) (default = 1) (ECH).\n */\n public eraseChars(params: number[]): void {\n let param, row, j, ch;\n\n param = params[0];\n if (param < 1) {\n param = 1;\n }\n\n row = this._terminal.y + this._terminal.ybase;\n j = this._terminal.x;\n ch = [this._terminal.eraseAttr(), ' ', 1]; // xterm\n\n while (param-- && j < this._terminal.cols) {\n this._terminal.lines.get(row)[j++] = ch;\n }\n }\n\n /**\n * CSI Ps Z Cursor Backward Tabulation Ps tab stops (default = 1) (CBT).\n */\n public cursorBackwardTab(params: number[]): void {\n let param = params[0] || 1;\n while (param--) {\n this._terminal.x = this._terminal.prevStop();\n }\n }\n\n /**\n * CSI Pm ` Character Position Absolute\n * [column] (default = [row,1]) (HPA).\n */\n public charPosAbsolute(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.x = param - 1;\n if (this._terminal.x >= this._terminal.cols) {\n this._terminal.x = this._terminal.cols - 1;\n }\n }\n\n /**\n * CSI Pm a Character Position Relative\n * [columns] (default = [row,col+1]) (HPR)\n * reuse CSI Ps C ?\n */\n public HPositionRelative(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.x += param;\n if (this._terminal.x >= this._terminal.cols) {\n this._terminal.x = this._terminal.cols - 1;\n }\n }\n\n /**\n * CSI Ps b Repeat the preceding graphic character Ps times (REP).\n */\n public repeatPrecedingCharacter(params: number[]): void {\n let param = params[0] || 1\n , line = 
this._terminal.lines.get(this._terminal.ybase + this._terminal.y)\n , ch = line[this._terminal.x - 1] || [this._terminal.defAttr, ' ', 1];\n\n while (param--) {\n line[this._terminal.x++] = ch;\n }\n }\n\n /**\n * CSI Ps c Send Device Attributes (Primary DA).\n * Ps = 0 or omitted -> request attributes from terminal. The\n * response depends on the decTerminalID resource setting.\n * -> CSI ? 1 ; 2 c (``VT100 with Advanced Video Option'')\n * -> CSI ? 1 ; 0 c (``VT101 with No Options'')\n * -> CSI ? 6 c (``VT102'')\n * -> CSI ? 6 0 ; 1 ; 2 ; 6 ; 8 ; 9 ; 1 5 ; c (``VT220'')\n * The VT100-style response parameters do not mean anything by\n * themselves. VT220 parameters do, telling the host what fea-\n * tures the terminal supports:\n * Ps = 1 -> 132-columns.\n * Ps = 2 -> Printer.\n * Ps = 6 -> Selective erase.\n * Ps = 8 -> User-defined keys.\n * Ps = 9 -> National replacement character sets.\n * Ps = 1 5 -> Technical characters.\n * Ps = 2 2 -> ANSI color, e.g., VT525.\n * Ps = 2 9 -> ANSI text locator (i.e., DEC Locator mode).\n * CSI > Ps c\n * Send Device Attributes (Secondary DA).\n * Ps = 0 or omitted -> request the terminal's identification\n * code. The response depends on the decTerminalID resource set-\n * ting. It should apply only to VT220 and up, but xterm extends\n * this to VT100.\n * -> CSI > Pp ; Pv ; Pc c\n * where Pp denotes the terminal type\n * Pp = 0 -> ``VT100''.\n * Pp = 1 -> ``VT220''.\n * and Pv is the firmware version (for xterm, this was originally\n * the XFree86 patch number, starting with 95). In a DEC termi-\n * nal, Pc indicates the ROM cartridge registration number and is\n * always zero.\n * More information:\n * xterm/charproc.c - line 2012, for more information.\n * vim responds with ^[[?0c or ^[[?1c after the terminal's response (?)\n */\n public sendDeviceAttributes(params: number[]): void {\n if (params[0] > 0) {\n return;\n }\n\n if (!this._terminal.prefix) {\n if (this._terminal.is('xterm') || this._terminal.is('rxvt-unicode') || this._terminal.is('screen')) {\n this._terminal.send(C0.ESC + '[?1;2c');\n } else if (this._terminal.is('linux')) {\n this._terminal.send(C0.ESC + '[?6c');\n }\n } else if (this._terminal.prefix === '>') {\n // xterm and urxvt\n // seem to spit this\n // out around ~370 times (?).\n if (this._terminal.is('xterm')) {\n this._terminal.send(C0.ESC + '[>0;276;0c');\n } else if (this._terminal.is('rxvt-unicode')) {\n this._terminal.send(C0.ESC + '[>85;95;0c');\n } else if (this._terminal.is('linux')) {\n // not supported by linux console.\n // linux console echoes parameters.\n this._terminal.send(params[0] + 'c');\n } else if (this._terminal.is('screen')) {\n this._terminal.send(C0.ESC + '[>83;40003;0c');\n }\n }\n }\n\n /**\n * CSI Pm d Vertical Position Absolute (VPA)\n * [row] (default = [1,column])\n */\n public linePosAbsolute(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.y = param - 1;\n if (this._terminal.y >= this._terminal.rows) {\n this._terminal.y = this._terminal.rows - 1;\n }\n }\n\n /**\n * CSI Pm e Vertical Position Relative (VPR)\n * [rows] (default = [row+1,column])\n * reuse CSI Ps B ?\n */\n public VPositionRelative(params: number[]): void {\n let param = params[0];\n if (param < 1) {\n param = 1;\n }\n this._terminal.y += param;\n if (this._terminal.y >= this._terminal.rows) {\n this._terminal.y = this._terminal.rows - 1;\n }\n // If the end of the line is hit, prevent this action from wrapping around to the next line.\n if (this._terminal.x >= 
this._terminal.cols) {\n this._terminal.x--;\n }\n }\n\n /**\n * CSI Ps ; Ps f\n * Horizontal and Vertical Position [row;column] (default =\n * [1,1]) (HVP).\n */\n public HVPosition(params: number[]): void {\n if (params[0] < 1) params[0] = 1;\n if (params[1] < 1) params[1] = 1;\n\n this._terminal.y = params[0] - 1;\n if (this._terminal.y >= this._terminal.rows) {\n this._terminal.y = this._terminal.rows - 1;\n }\n\n this._terminal.x = params[1] - 1;\n if (this._terminal.x >= this._terminal.cols) {\n this._terminal.x = this._terminal.cols - 1;\n }\n }\n\n /**\n * CSI Ps g Tab Clear (TBC).\n * Ps = 0 -> Clear Current Column (default).\n * Ps = 3 -> Clear All.\n * Potentially:\n * Ps = 2 -> Clear Stops on Line.\n * http://vt100.net/annarbor/aaa-ug/section6.html\n */\n public tabClear(params: number[]): void {\n let param = params[0];\n if (param <= 0) {\n delete this._terminal.tabs[this._terminal.x];\n } else if (param === 3) {\n this._terminal.tabs = {};\n }\n }\n\n /**\n * CSI Pm h Set Mode (SM).\n * Ps = 2 -> Keyboard Action Mode (AM).\n * Ps = 4 -> Insert Mode (IRM).\n * Ps = 1 2 -> Send/receive (SRM).\n * Ps = 2 0 -> Automatic Newline (LNM).\n * CSI ? Pm h\n * DEC Private Mode Set (DECSET).\n * Ps = 1 -> Application Cursor Keys (DECCKM).\n * Ps = 2 -> Designate USASCII for character sets G0-G3\n * (DECANM), and set VT100 mode.\n * Ps = 3 -> 132 Column Mode (DECCOLM).\n * Ps = 4 -> Smooth (Slow) Scroll (DECSCLM).\n * Ps = 5 -> Reverse Video (DECSCNM).\n * Ps = 6 -> Origin Mode (DECOM).\n * Ps = 7 -> Wraparound Mode (DECAWM).\n * Ps = 8 -> Auto-repeat Keys (DECARM).\n * Ps = 9 -> Send Mouse X & Y on button press. See the sec-\n * tion Mouse Tracking.\n * Ps = 1 0 -> Show toolbar (rxvt).\n * Ps = 1 2 -> Start Blinking Cursor (att610).\n * Ps = 1 8 -> Print form feed (DECPFF).\n * Ps = 1 9 -> Set print extent to full screen (DECPEX).\n * Ps = 2 5 -> Show Cursor (DECTCEM).\n * Ps = 3 0 -> Show scrollbar (rxvt).\n * Ps = 3 5 -> Enable font-shifting functions (rxvt).\n * Ps = 3 8 -> Enter Tektronix Mode (DECTEK).\n * Ps = 4 0 -> Allow 80 -> 132 Mode.\n * Ps = 4 1 -> more(1) fix (see curses resource).\n * Ps = 4 2 -> Enable Nation Replacement Character sets (DECN-\n * RCM).\n * Ps = 4 4 -> Turn On Margin Bell.\n * Ps = 4 5 -> Reverse-wraparound Mode.\n * Ps = 4 6 -> Start Logging. This is normally disabled by a\n * compile-time option.\n * Ps = 4 7 -> Use Alternate Screen Buffer. (This may be dis-\n * abled by the titeInhibit resource).\n * Ps = 6 6 -> Application keypad (DECNKM).\n * Ps = 6 7 -> Backarrow key sends backspace (DECBKM).\n * Ps = 1 0 0 0 -> Send Mouse X & Y on button press and\n * release. See the section Mouse Tracking.\n * Ps = 1 0 0 1 -> Use Hilite Mouse Tracking.\n * Ps = 1 0 0 2 -> Use Cell Motion Mouse Tracking.\n * Ps = 1 0 0 3 -> Use All Motion Mouse Tracking.\n * Ps = 1 0 0 4 -> Send FocusIn/FocusOut events.\n * Ps = 1 0 0 5 -> Enable Extended Mouse Mode.\n * Ps = 1 0 1 0 -> Scroll to bottom on tty output (rxvt).\n * Ps = 1 0 1 1 -> Scroll to bottom on key press (rxvt).\n * Ps = 1 0 3 4 -> Interpret \"meta\" key, sets eighth bit.\n * (enables the eightBitInput resource).\n * Ps = 1 0 3 5 -> Enable special modifiers for Alt and Num-\n * Lock keys. (This enables the numLock resource).\n * Ps = 1 0 3 6 -> Send ESC when Meta modifies a key. (This\n * enables the metaSendsEscape resource).\n * Ps = 1 0 3 7 -> Send DEL from the editing-keypad Delete\n * key.\n * Ps = 1 0 3 9 -> Send ESC when Alt modifies a key. 
(This\n * enables the altSendsEscape resource).\n * Ps = 1 0 4 0 -> Keep selection even if not highlighted.\n * (This enables the keepSelection resource).\n * Ps = 1 0 4 1 -> Use the CLIPBOARD selection. (This enables\n * the selectToClipboard resource).\n * Ps = 1 0 4 2 -> Enable Urgency window manager hint when\n * Control-G is received. (This enables the bellIsUrgent\n * resource).\n * Ps = 1 0 4 3 -> Enable raising of the window when Control-G\n * is received. (enables the popOnBell resource).\n * Ps = 1 0 4 7 -> Use Alternate Screen Buffer. (This may be\n * disabled by the titeInhibit resource).\n * Ps = 1 0 4 8 -> Save cursor as in DECSC. (This may be dis-\n * abled by the titeInhibit resource).\n * Ps = 1 0 4 9 -> Save cursor as in DECSC and use Alternate\n * Screen Buffer, clearing it first. (This may be disabled by\n * the titeInhibit resource). This combines the effects of the 1\n * 0 4 7 and 1 0 4 8 modes. Use this with terminfo-based\n * applications rather than the 4 7 mode.\n * Ps = 1 0 5 0 -> Set terminfo/termcap function-key mode.\n * Ps = 1 0 5 1 -> Set Sun function-key mode.\n * Ps = 1 0 5 2 -> Set HP function-key mode.\n * Ps = 1 0 5 3 -> Set SCO function-key mode.\n * Ps = 1 0 6 0 -> Set legacy keyboard emulation (X11R6).\n * Ps = 1 0 6 1 -> Set VT220 keyboard emulation.\n * Ps = 2 0 0 4 -> Set bracketed paste mode.\n * Modes:\n * http: *vt100.net/docs/vt220-rm/chapter4.html\n */\n public setMode(params: number[]): void {\n if (params.length > 1) {\n for (let i = 0; i < params.length; i++) {\n this.setMode([params[i]]);\n }\n\n return;\n }\n\n if (!this._terminal.prefix) {\n switch (params[0]) {\n case 4:\n this._terminal.insertMode = true;\n break;\n case 20:\n // this._terminal.convertEol = true;\n break;\n }\n } else if (this._terminal.prefix === '?') {\n switch (params[0]) {\n case 1:\n this._terminal.applicationCursor = true;\n break;\n case 2:\n this._terminal.setgCharset(0, DEFAULT_CHARSET);\n this._terminal.setgCharset(1, DEFAULT_CHARSET);\n this._terminal.setgCharset(2, DEFAULT_CHARSET);\n this._terminal.setgCharset(3, DEFAULT_CHARSET);\n // set VT100 mode here\n break;\n case 3: // 132 col mode\n this._terminal.savedCols = this._terminal.cols;\n this._terminal.resize(132, this._terminal.rows);\n break;\n case 6:\n this._terminal.originMode = true;\n break;\n case 7:\n this._terminal.wraparoundMode = true;\n break;\n case 12:\n // this.cursorBlink = true;\n break;\n case 66:\n this._terminal.log('Serial port requested application keypad.');\n this._terminal.applicationKeypad = true;\n this._terminal.viewport.syncScrollArea();\n break;\n case 9: // X10 Mouse\n // no release, no motion, no wheel, no modifiers.\n case 1000: // vt200 mouse\n // no motion.\n // no modifiers, except control on the wheel.\n case 1002: // button event mouse\n case 1003: // any event mouse\n // any event - sends motion events,\n // even if there is no button held down.\n\n // TODO: Why are params[0] compares nested within a switch for params[0]?\n\n this._terminal.x10Mouse = params[0] === 9;\n this._terminal.vt200Mouse = params[0] === 1000;\n this._terminal.normalMouse = params[0] > 1000;\n this._terminal.mouseEvents = true;\n this._terminal.element.classList.add('enable-mouse-events');\n this._terminal.selectionManager.disable();\n this._terminal.log('Binding to mouse events.');\n break;\n case 1004: // send focusin/focusout events\n // focusin: ^[[I\n // focusout: ^[[O\n this._terminal.sendFocus = true;\n break;\n case 1005: // utf8 ext mode mouse\n this._terminal.utfMouse = true;\n 
// for wide terminals\n // simply encodes large values as utf8 characters\n break;\n case 1006: // sgr ext mode mouse\n this._terminal.sgrMouse = true;\n // for wide terminals\n // does not add 32 to fields\n // press: ^[[ Keyboard Action Mode (AM).\n * Ps = 4 -> Replace Mode (IRM).\n * Ps = 1 2 -> Send/receive (SRM).\n * Ps = 2 0 -> Normal Linefeed (LNM).\n * CSI ? Pm l\n * DEC Private Mode Reset (DECRST).\n * Ps = 1 -> Normal Cursor Keys (DECCKM).\n * Ps = 2 -> Designate VT52 mode (DECANM).\n * Ps = 3 -> 80 Column Mode (DECCOLM).\n * Ps = 4 -> Jump (Fast) Scroll (DECSCLM).\n * Ps = 5 -> Normal Video (DECSCNM).\n * Ps = 6 -> Normal Cursor Mode (DECOM).\n * Ps = 7 -> No Wraparound Mode (DECAWM).\n * Ps = 8 -> No Auto-repeat Keys (DECARM).\n * Ps = 9 -> Don't send Mouse X & Y on button press.\n * Ps = 1 0 -> Hide toolbar (rxvt).\n * Ps = 1 2 -> Stop Blinking Cursor (att610).\n * Ps = 1 8 -> Don't print form feed (DECPFF).\n * Ps = 1 9 -> Limit print to scrolling region (DECPEX).\n * Ps = 2 5 -> Hide Cursor (DECTCEM).\n * Ps = 3 0 -> Don't show scrollbar (rxvt).\n * Ps = 3 5 -> Disable font-shifting functions (rxvt).\n * Ps = 4 0 -> Disallow 80 -> 132 Mode.\n * Ps = 4 1 -> No more(1) fix (see curses resource).\n * Ps = 4 2 -> Disable Nation Replacement Character sets (DEC-\n * NRCM).\n * Ps = 4 4 -> Turn Off Margin Bell.\n * Ps = 4 5 -> No Reverse-wraparound Mode.\n * Ps = 4 6 -> Stop Logging. (This is normally disabled by a\n * compile-time option).\n * Ps = 4 7 -> Use Normal Screen Buffer.\n * Ps = 6 6 -> Numeric keypad (DECNKM).\n * Ps = 6 7 -> Backarrow key sends delete (DECBKM).\n * Ps = 1 0 0 0 -> Don't send Mouse X & Y on button press and\n * release. See the section Mouse Tracking.\n * Ps = 1 0 0 1 -> Don't use Hilite Mouse Tracking.\n * Ps = 1 0 0 2 -> Don't use Cell Motion Mouse Tracking.\n * Ps = 1 0 0 3 -> Don't use All Motion Mouse Tracking.\n * Ps = 1 0 0 4 -> Don't send FocusIn/FocusOut events.\n * Ps = 1 0 0 5 -> Disable Extended Mouse Mode.\n * Ps = 1 0 1 0 -> Don't scroll to bottom on tty output\n * (rxvt).\n * Ps = 1 0 1 1 -> Don't scroll to bottom on key press (rxvt).\n * Ps = 1 0 3 4 -> Don't interpret \"meta\" key. (This disables\n * the eightBitInput resource).\n * Ps = 1 0 3 5 -> Disable special modifiers for Alt and Num-\n * Lock keys. (This disables the numLock resource).\n * Ps = 1 0 3 6 -> Don't send ESC when Meta modifies a key.\n * (This disables the metaSendsEscape resource).\n * Ps = 1 0 3 7 -> Send VT220 Remove from the editing-keypad\n * Delete key.\n * Ps = 1 0 3 9 -> Don't send ESC when Alt modifies a key.\n * (This disables the altSendsEscape resource).\n * Ps = 1 0 4 0 -> Do not keep selection when not highlighted.\n * (This disables the keepSelection resource).\n * Ps = 1 0 4 1 -> Use the PRIMARY selection. (This disables\n * the selectToClipboard resource).\n * Ps = 1 0 4 2 -> Disable Urgency window manager hint when\n * Control-G is received. (This disables the bellIsUrgent\n * resource).\n * Ps = 1 0 4 3 -> Disable raising of the window when Control-\n * G is received. (This disables the popOnBell resource).\n * Ps = 1 0 4 7 -> Use Normal Screen Buffer, clearing screen\n * first if in the Alternate Screen. (This may be disabled by\n * the titeInhibit resource).\n * Ps = 1 0 4 8 -> Restore cursor as in DECRC. (This may be\n * disabled by the titeInhibit resource).\n * Ps = 1 0 4 9 -> Use Normal Screen Buffer and restore cursor\n * as in DECRC. (This may be disabled by the titeInhibit\n * resource). 
This combines the effects of the 1 0 4 7 and 1 0\n * 4 8 modes. Use this with terminfo-based applications rather\n * than the 4 7 mode.\n * Ps = 1 0 5 0 -> Reset terminfo/termcap function-key mode.\n * Ps = 1 0 5 1 -> Reset Sun function-key mode.\n * Ps = 1 0 5 2 -> Reset HP function-key mode.\n * Ps = 1 0 5 3 -> Reset SCO function-key mode.\n * Ps = 1 0 6 0 -> Reset legacy keyboard emulation (X11R6).\n * Ps = 1 0 6 1 -> Reset keyboard emulation to Sun/PC style.\n * Ps = 2 0 0 4 -> Reset bracketed paste mode.\n */\n public resetMode(params: number[]): void {\n if (params.length > 1) {\n for (let i = 0; i < params.length; i++) {\n this.resetMode([params[i]]);\n }\n\n return;\n }\n\n if (!this._terminal.prefix) {\n switch (params[0]) {\n case 4:\n this._terminal.insertMode = false;\n break;\n case 20:\n // this._terminal.convertEol = false;\n break;\n }\n } else if (this._terminal.prefix === '?') {\n switch (params[0]) {\n case 1:\n this._terminal.applicationCursor = false;\n break;\n case 3:\n if (this._terminal.cols === 132 && this._terminal.savedCols) {\n this._terminal.resize(this._terminal.savedCols, this._terminal.rows);\n }\n delete this._terminal.savedCols;\n break;\n case 6:\n this._terminal.originMode = false;\n break;\n case 7:\n this._terminal.wraparoundMode = false;\n break;\n case 12:\n // this.cursorBlink = false;\n break;\n case 66:\n this._terminal.log('Switching back to normal keypad.');\n this._terminal.applicationKeypad = false;\n this._terminal.viewport.syncScrollArea();\n break;\n case 9: // X10 Mouse\n case 1000: // vt200 mouse\n case 1002: // button event mouse\n case 1003: // any event mouse\n this._terminal.x10Mouse = false;\n this._terminal.vt200Mouse = false;\n this._terminal.normalMouse = false;\n this._terminal.mouseEvents = false;\n this._terminal.element.classList.remove('enable-mouse-events');\n this._terminal.selectionManager.enable();\n break;\n case 1004: // send focusin/focusout events\n this._terminal.sendFocus = false;\n break;\n case 1005: // utf8 ext mode mouse\n this._terminal.utfMouse = false;\n break;\n case 1006: // sgr ext mode mouse\n this._terminal.sgrMouse = false;\n break;\n case 1015: // urxvt ext mode mouse\n this._terminal.urxvtMouse = false;\n break;\n case 25: // hide cursor\n this._terminal.cursorHidden = true;\n break;\n case 1049: // alt screen buffer cursor\n ; // FALL-THROUGH\n case 47: // normal screen buffer\n case 1047: // normal screen buffer - clearing it first\n if (this._terminal.normal) {\n this._terminal.lines = this._terminal.normal.lines;\n this._terminal.ybase = this._terminal.normal.ybase;\n this._terminal.ydisp = this._terminal.normal.ydisp;\n this._terminal.x = this._terminal.normal.x;\n this._terminal.y = this._terminal.normal.y;\n this._terminal.scrollTop = this._terminal.normal.scrollTop;\n this._terminal.scrollBottom = this._terminal.normal.scrollBottom;\n this._terminal.tabs = this._terminal.normal.tabs;\n this._terminal.normal = null;\n // Ensure the selection manager has the correct buffer\n this._terminal.selectionManager.setBuffer(this._terminal.lines);\n // if (params === 1049) {\n // this.x = this.savedX;\n // this.y = this.savedY;\n // }\n this._terminal.refresh(0, this._terminal.rows - 1);\n this._terminal.viewport.syncScrollArea();\n this._terminal.showCursor();\n }\n break;\n }\n }\n }\n\n /**\n * CSI Pm m Character Attributes (SGR).\n * Ps = 0 -> Normal (default).\n * Ps = 1 -> Bold.\n * Ps = 4 -> Underlined.\n * Ps = 5 -> Blink (appears as Bold).\n * Ps = 7 -> Inverse.\n * Ps = 8 -> Invisible, i.e., 
hidden (VT300).\n * Ps = 2 2 -> Normal (neither bold nor faint).\n * Ps = 2 4 -> Not underlined.\n * Ps = 2 5 -> Steady (not blinking).\n * Ps = 2 7 -> Positive (not inverse).\n * Ps = 2 8 -> Visible, i.e., not hidden (VT300).\n * Ps = 3 0 -> Set foreground color to Black.\n * Ps = 3 1 -> Set foreground color to Red.\n * Ps = 3 2 -> Set foreground color to Green.\n * Ps = 3 3 -> Set foreground color to Yellow.\n * Ps = 3 4 -> Set foreground color to Blue.\n * Ps = 3 5 -> Set foreground color to Magenta.\n * Ps = 3 6 -> Set foreground color to Cyan.\n * Ps = 3 7 -> Set foreground color to White.\n * Ps = 3 9 -> Set foreground color to default (original).\n * Ps = 4 0 -> Set background color to Black.\n * Ps = 4 1 -> Set background color to Red.\n * Ps = 4 2 -> Set background color to Green.\n * Ps = 4 3 -> Set background color to Yellow.\n * Ps = 4 4 -> Set background color to Blue.\n * Ps = 4 5 -> Set background color to Magenta.\n * Ps = 4 6 -> Set background color to Cyan.\n * Ps = 4 7 -> Set background color to White.\n * Ps = 4 9 -> Set background color to default (original).\n *\n * If 16-color support is compiled, the following apply. Assume\n * that xterm's resources are set so that the ISO color codes are\n * the first 8 of a set of 16. Then the aixterm colors are the\n * bright versions of the ISO colors:\n * Ps = 9 0 -> Set foreground color to Black.\n * Ps = 9 1 -> Set foreground color to Red.\n * Ps = 9 2 -> Set foreground color to Green.\n * Ps = 9 3 -> Set foreground color to Yellow.\n * Ps = 9 4 -> Set foreground color to Blue.\n * Ps = 9 5 -> Set foreground color to Magenta.\n * Ps = 9 6 -> Set foreground color to Cyan.\n * Ps = 9 7 -> Set foreground color to White.\n * Ps = 1 0 0 -> Set background color to Black.\n * Ps = 1 0 1 -> Set background color to Red.\n * Ps = 1 0 2 -> Set background color to Green.\n * Ps = 1 0 3 -> Set background color to Yellow.\n * Ps = 1 0 4 -> Set background color to Blue.\n * Ps = 1 0 5 -> Set background color to Magenta.\n * Ps = 1 0 6 -> Set background color to Cyan.\n * Ps = 1 0 7 -> Set background color to White.\n *\n * If xterm is compiled with the 16-color support disabled, it\n * supports the following, from rxvt:\n * Ps = 1 0 0 -> Set foreground and background color to\n * default.\n *\n * If 88- or 256-color support is compiled, the following apply.\n * Ps = 3 8 ; 5 ; Ps -> Set foreground color to the second\n * Ps.\n * Ps = 4 8 ; 5 ; Ps -> Set background color to the second\n * Ps.\n */\n public charAttributes(params: number[]): void {\n // Optimize a single SGR0.\n if (params.length === 1 && params[0] === 0) {\n this._terminal.curAttr = this._terminal.defAttr;\n return;\n }\n\n let l = params.length\n , i = 0\n , flags = this._terminal.curAttr >> 18\n , fg = (this._terminal.curAttr >> 9) & 0x1ff\n , bg = this._terminal.curAttr & 0x1ff\n , p;\n\n for (; i < l; i++) {\n p = params[i];\n if (p >= 30 && p <= 37) {\n // fg color 8\n fg = p - 30;\n } else if (p >= 40 && p <= 47) {\n // bg color 8\n bg = p - 40;\n } else if (p >= 90 && p <= 97) {\n // fg color 16\n p += 8;\n fg = p - 90;\n } else if (p >= 100 && p <= 107) {\n // bg color 16\n p += 8;\n bg = p - 100;\n } else if (p === 0) {\n // default\n flags = this._terminal.defAttr >> 18;\n fg = (this._terminal.defAttr >> 9) & 0x1ff;\n bg = this._terminal.defAttr & 0x1ff;\n // flags = 0;\n // fg = 0x1ff;\n // bg = 0x1ff;\n } else if (p === 1) {\n // bold text\n flags |= 1;\n } else if (p === 4) {\n // underlined text\n flags |= 2;\n } else if (p === 5) {\n // blink\n flags |= 4;\n } 
else if (p === 7) {\n // inverse and positive\n // test with: echo -e '\\e[31m\\e[42mhello\\e[7mworld\\e[27mhi\\e[m'\n flags |= 8;\n } else if (p === 8) {\n // invisible\n flags |= 16;\n } else if (p === 22) {\n // not bold\n flags &= ~1;\n } else if (p === 24) {\n // not underlined\n flags &= ~2;\n } else if (p === 25) {\n // not blink\n flags &= ~4;\n } else if (p === 27) {\n // not inverse\n flags &= ~8;\n } else if (p === 28) {\n // not invisible\n flags &= ~16;\n } else if (p === 39) {\n // reset fg\n fg = (this._terminal.defAttr >> 9) & 0x1ff;\n } else if (p === 49) {\n // reset bg\n bg = this._terminal.defAttr & 0x1ff;\n } else if (p === 38) {\n // fg color 256\n if (params[i + 1] === 2) {\n i += 2;\n fg = this._terminal.matchColor(\n params[i] & 0xff,\n params[i + 1] & 0xff,\n params[i + 2] & 0xff);\n if (fg === -1) fg = 0x1ff;\n i += 2;\n } else if (params[i + 1] === 5) {\n i += 2;\n p = params[i] & 0xff;\n fg = p;\n }\n } else if (p === 48) {\n // bg color 256\n if (params[i + 1] === 2) {\n i += 2;\n bg = this._terminal.matchColor(\n params[i] & 0xff,\n params[i + 1] & 0xff,\n params[i + 2] & 0xff);\n if (bg === -1) bg = 0x1ff;\n i += 2;\n } else if (params[i + 1] === 5) {\n i += 2;\n p = params[i] & 0xff;\n bg = p;\n }\n } else if (p === 100) {\n // reset fg/bg\n fg = (this._terminal.defAttr >> 9) & 0x1ff;\n bg = this._terminal.defAttr & 0x1ff;\n } else {\n this._terminal.error('Unknown SGR attribute: %d.', p);\n }\n }\n\n this._terminal.curAttr = (flags << 18) | (fg << 9) | bg;\n }\n\n /**\n * CSI Ps n Device Status Report (DSR).\n * Ps = 5 -> Status Report. Result (``OK'') is\n * CSI 0 n\n * Ps = 6 -> Report Cursor Position (CPR) [row;column].\n * Result is\n * CSI r ; c R\n * CSI ? Ps n\n * Device Status Report (DSR, DEC-specific).\n * Ps = 6 -> Report Cursor Position (CPR) [row;column] as CSI\n * ? r ; c R (assumes page is zero).\n * Ps = 1 5 -> Report Printer status as CSI ? 1 0 n (ready).\n * or CSI ? 1 1 n (not ready).\n * Ps = 2 5 -> Report UDK status as CSI ? 2 0 n (unlocked)\n * or CSI ? 2 1 n (locked).\n * Ps = 2 6 -> Report Keyboard status as\n * CSI ? 2 7 ; 1 ; 0 ; 0 n (North American).\n * The last two parameters apply to VT400 & up, and denote key-\n * board ready and LK01 respectively.\n * Ps = 5 3 -> Report Locator status as\n * CSI ? 5 3 n Locator available, if compiled-in, or\n * CSI ? 5 0 n No Locator, if not.\n */\n public deviceStatus(params: number[]): void {\n if (!this._terminal.prefix) {\n switch (params[0]) {\n case 5:\n // status report\n this._terminal.send(C0.ESC + '[0n');\n break;\n case 6:\n // cursor position\n this._terminal.send(C0.ESC + '['\n + (this._terminal.y + 1)\n + ';'\n + (this._terminal.x + 1)\n + 'R');\n break;\n }\n } else if (this._terminal.prefix === '?') {\n // modern xterm doesnt seem to\n // respond to any of these except ?6, 6, and 5\n switch (params[0]) {\n case 6:\n // cursor position\n this._terminal.send(C0.ESC + '[?'\n + (this._terminal.y + 1)\n + ';'\n + (this._terminal.x + 1)\n + 'R');\n break;\n case 15:\n // no printer\n // this.send(C0.ESC + '[?11n');\n break;\n case 25:\n // dont support user defined keys\n // this.send(C0.ESC + '[?21n');\n break;\n case 26:\n // north american keyboard\n // this.send(C0.ESC + '[?27;1;0;0n');\n break;\n case 53:\n // no dec locator/mouse\n // this.send(C0.ESC + '[?50n');\n break;\n }\n }\n }\n\n /**\n * CSI ! 
p Soft terminal reset (DECSTR).\n * http://vt100.net/docs/vt220-rm/table4-10.html\n */\n public softReset(params: number[]): void {\n this._terminal.cursorHidden = false;\n this._terminal.insertMode = false;\n this._terminal.originMode = false;\n this._terminal.wraparoundMode = true; // defaults: xterm - true, vt100 - false\n this._terminal.applicationKeypad = false; // ?\n this._terminal.viewport.syncScrollArea();\n this._terminal.applicationCursor = false;\n this._terminal.scrollTop = 0;\n this._terminal.scrollBottom = this._terminal.rows - 1;\n this._terminal.curAttr = this._terminal.defAttr;\n this._terminal.x = this._terminal.y = 0; // ?\n this._terminal.charset = null;\n this._terminal.glevel = 0; // ??\n this._terminal.charsets = [null]; // ??\n }\n\n /**\n * CSI Ps SP q Set cursor style (DECSCUSR, VT520).\n * Ps = 0 -> blinking block.\n * Ps = 1 -> blinking block (default).\n * Ps = 2 -> steady block.\n * Ps = 3 -> blinking underline.\n * Ps = 4 -> steady underline.\n * Ps = 5 -> blinking bar (xterm).\n * Ps = 6 -> steady bar (xterm).\n */\n public setCursorStyle(params?: number[]): void {\n const param = params[0] < 1 ? 1 : params[0];\n switch (param) {\n case 1:\n case 2:\n this._terminal.setOption('cursorStyle', 'block');\n break;\n case 3:\n case 4:\n this._terminal.setOption('cursorStyle', 'underline');\n break;\n case 5:\n case 6:\n this._terminal.setOption('cursorStyle', 'bar');\n break;\n }\n const isBlinking = param % 2 === 1;\n this._terminal.setOption('cursorBlink', isBlinking);\n }\n\n /**\n * CSI Ps ; Ps r\n * Set Scrolling Region [top;bottom] (default = full size of win-\n * dow) (DECSTBM).\n * CSI ? Pm r\n */\n public setScrollRegion(params: number[]): void {\n if (this._terminal.prefix) return;\n this._terminal.scrollTop = (params[0] || 1) - 1;\n this._terminal.scrollBottom = (params[1] && params[1] <= this._terminal.rows ? 
params[1] : this._terminal.rows) - 1;\n this._terminal.x = 0;\n this._terminal.y = 0;\n }\n\n\n /**\n * CSI s\n * Save cursor (ANSI.SYS).\n */\n public saveCursor(params: number[]): void {\n this._terminal.savedX = this._terminal.x;\n this._terminal.savedY = this._terminal.y;\n }\n\n\n /**\n * CSI u\n * Restore cursor (ANSI.SYS).\n */\n public restoreCursor(params: number[]): void {\n this._terminal.x = this._terminal.savedX || 0;\n this._terminal.y = this._terminal.savedY || 0;\n }\n}\n\nconst wcwidth = (function(opts) {\n // extracted from https://www.cl.cam.ac.uk/%7Emgk25/ucs/wcwidth.c\n // combining characters\n const COMBINING = [\n [0x0300, 0x036F], [0x0483, 0x0486], [0x0488, 0x0489],\n [0x0591, 0x05BD], [0x05BF, 0x05BF], [0x05C1, 0x05C2],\n [0x05C4, 0x05C5], [0x05C7, 0x05C7], [0x0600, 0x0603],\n [0x0610, 0x0615], [0x064B, 0x065E], [0x0670, 0x0670],\n [0x06D6, 0x06E4], [0x06E7, 0x06E8], [0x06EA, 0x06ED],\n [0x070F, 0x070F], [0x0711, 0x0711], [0x0730, 0x074A],\n [0x07A6, 0x07B0], [0x07EB, 0x07F3], [0x0901, 0x0902],\n [0x093C, 0x093C], [0x0941, 0x0948], [0x094D, 0x094D],\n [0x0951, 0x0954], [0x0962, 0x0963], [0x0981, 0x0981],\n [0x09BC, 0x09BC], [0x09C1, 0x09C4], [0x09CD, 0x09CD],\n [0x09E2, 0x09E3], [0x0A01, 0x0A02], [0x0A3C, 0x0A3C],\n [0x0A41, 0x0A42], [0x0A47, 0x0A48], [0x0A4B, 0x0A4D],\n [0x0A70, 0x0A71], [0x0A81, 0x0A82], [0x0ABC, 0x0ABC],\n [0x0AC1, 0x0AC5], [0x0AC7, 0x0AC8], [0x0ACD, 0x0ACD],\n [0x0AE2, 0x0AE3], [0x0B01, 0x0B01], [0x0B3C, 0x0B3C],\n [0x0B3F, 0x0B3F], [0x0B41, 0x0B43], [0x0B4D, 0x0B4D],\n [0x0B56, 0x0B56], [0x0B82, 0x0B82], [0x0BC0, 0x0BC0],\n [0x0BCD, 0x0BCD], [0x0C3E, 0x0C40], [0x0C46, 0x0C48],\n [0x0C4A, 0x0C4D], [0x0C55, 0x0C56], [0x0CBC, 0x0CBC],\n [0x0CBF, 0x0CBF], [0x0CC6, 0x0CC6], [0x0CCC, 0x0CCD],\n [0x0CE2, 0x0CE3], [0x0D41, 0x0D43], [0x0D4D, 0x0D4D],\n [0x0DCA, 0x0DCA], [0x0DD2, 0x0DD4], [0x0DD6, 0x0DD6],\n [0x0E31, 0x0E31], [0x0E34, 0x0E3A], [0x0E47, 0x0E4E],\n [0x0EB1, 0x0EB1], [0x0EB4, 0x0EB9], [0x0EBB, 0x0EBC],\n [0x0EC8, 0x0ECD], [0x0F18, 0x0F19], [0x0F35, 0x0F35],\n [0x0F37, 0x0F37], [0x0F39, 0x0F39], [0x0F71, 0x0F7E],\n [0x0F80, 0x0F84], [0x0F86, 0x0F87], [0x0F90, 0x0F97],\n [0x0F99, 0x0FBC], [0x0FC6, 0x0FC6], [0x102D, 0x1030],\n [0x1032, 0x1032], [0x1036, 0x1037], [0x1039, 0x1039],\n [0x1058, 0x1059], [0x1160, 0x11FF], [0x135F, 0x135F],\n [0x1712, 0x1714], [0x1732, 0x1734], [0x1752, 0x1753],\n [0x1772, 0x1773], [0x17B4, 0x17B5], [0x17B7, 0x17BD],\n [0x17C6, 0x17C6], [0x17C9, 0x17D3], [0x17DD, 0x17DD],\n [0x180B, 0x180D], [0x18A9, 0x18A9], [0x1920, 0x1922],\n [0x1927, 0x1928], [0x1932, 0x1932], [0x1939, 0x193B],\n [0x1A17, 0x1A18], [0x1B00, 0x1B03], [0x1B34, 0x1B34],\n [0x1B36, 0x1B3A], [0x1B3C, 0x1B3C], [0x1B42, 0x1B42],\n [0x1B6B, 0x1B73], [0x1DC0, 0x1DCA], [0x1DFE, 0x1DFF],\n [0x200B, 0x200F], [0x202A, 0x202E], [0x2060, 0x2063],\n [0x206A, 0x206F], [0x20D0, 0x20EF], [0x302A, 0x302F],\n [0x3099, 0x309A], [0xA806, 0xA806], [0xA80B, 0xA80B],\n [0xA825, 0xA826], [0xFB1E, 0xFB1E], [0xFE00, 0xFE0F],\n [0xFE20, 0xFE23], [0xFEFF, 0xFEFF], [0xFFF9, 0xFFFB],\n [0x10A01, 0x10A03], [0x10A05, 0x10A06], [0x10A0C, 0x10A0F],\n [0x10A38, 0x10A3A], [0x10A3F, 0x10A3F], [0x1D167, 0x1D169],\n [0x1D173, 0x1D182], [0x1D185, 0x1D18B], [0x1D1AA, 0x1D1AD],\n [0x1D242, 0x1D244], [0xE0001, 0xE0001], [0xE0020, 0xE007F],\n [0xE0100, 0xE01EF]\n ];\n // binary search\n function bisearch(ucs) {\n let min = 0;\n let max = COMBINING.length - 1;\n let mid;\n if (ucs < COMBINING[0][0] || ucs > COMBINING[max][1])\n return false;\n while (max >= min) {\n mid = Math.floor((min + 
max) / 2);\n if (ucs > COMBINING[mid][1])\n min = mid + 1;\n else if (ucs < COMBINING[mid][0])\n max = mid - 1;\n else\n return true;\n }\n return false;\n }\n function wcwidth(ucs) {\n // test for 8-bit control characters\n if (ucs === 0)\n return opts.nul;\n if (ucs < 32 || (ucs >= 0x7f && ucs < 0xa0))\n return opts.control;\n // binary search in table of non-spacing characters\n if (bisearch(ucs))\n return 0;\n // if we arrive here, ucs is not a combining or C0/C1 control character\n if (isWide(ucs)) {\n return 2;\n }\n return 1;\n }\n function isWide(ucs) {\n return (\n ucs >= 0x1100 && (\n ucs <= 0x115f || // Hangul Jamo init. consonants\n ucs === 0x2329 ||\n ucs === 0x232a ||\n (ucs >= 0x2e80 && ucs <= 0xa4cf && ucs !== 0x303f) || // CJK..Yi\n (ucs >= 0xac00 && ucs <= 0xd7a3) || // Hangul Syllables\n (ucs >= 0xf900 && ucs <= 0xfaff) || // CJK Compat Ideographs\n (ucs >= 0xfe10 && ucs <= 0xfe19) || // Vertical forms\n (ucs >= 0xfe30 && ucs <= 0xfe6f) || // CJK Compat Forms\n (ucs >= 0xff00 && ucs <= 0xff60) || // Fullwidth Forms\n (ucs >= 0xffe0 && ucs <= 0xffe6) ||\n (ucs >= 0x20000 && ucs <= 0x2fffd) ||\n (ucs >= 0x30000 && ucs <= 0x3fffd)));\n }\n return wcwidth;\n})({nul: 0, control: 0}); // configurable options\n","/**\n * @license MIT\n */\n\ninterface ListenerType {\n (): void;\n listener?: () => void;\n};\n\nexport class EventEmitter {\n private _events: {[type: string]: ListenerType[]};\n\n constructor() {\n // Restore the previous events if available, this will happen if the\n // constructor is called multiple times on the same object (terminal reset).\n this._events = this._events || {};\n }\n\n public on(type, listener): void {\n this._events[type] = this._events[type] || [];\n this._events[type].push(listener);\n }\n\n public off(type, listener): void {\n if (!this._events[type]) {\n return;\n }\n\n let obj = this._events[type];\n let i = obj.length;\n\n while (i--) {\n if (obj[i] === listener || obj[i].listener === listener) {\n obj.splice(i, 1);\n return;\n }\n }\n }\n\n public removeAllListeners(type): void {\n if (this._events[type]) {\n delete this._events[type];\n }\n }\n\n public once(type, listener): any {\n function on() {\n let args = Array.prototype.slice.call(arguments);\n this.off(type, on);\n return listener.apply(this, args);\n }\n (on).listener = listener;\n return this.on(type, on);\n }\n\n public emit(type: string, ...args: any[]): void {\n if (!this._events[type]) {\n return;\n }\n let obj = this._events[type];\n for (let i = 0; i < obj.length; i++) {\n obj[i].apply(this, args);\n }\n }\n\n public listeners(type): ListenerType[] {\n return this._events[type] || [];\n }\n}\n","/**\n * @license MIT\n */\n\n/**\n * C0 control codes\n * See = https://en.wikipedia.org/wiki/C0_and_C1_control_codes\n */\nexport namespace C0 {\n /** Null (Caret = ^@, C = \\0) */\n export const NUL = '\\x00';\n /** Start of Heading (Caret = ^A) */\n export const SOH = '\\x01';\n /** Start of Text (Caret = ^B) */\n export const STX = '\\x02';\n /** End of Text (Caret = ^C) */\n export const ETX = '\\x03';\n /** End of Transmission (Caret = ^D) */\n export const EOT = '\\x04';\n /** Enquiry (Caret = ^E) */\n export const ENQ = '\\x05';\n /** Acknowledge (Caret = ^F) */\n export const ACK = '\\x06';\n /** Bell (Caret = ^G, C = \\a) */\n export const BEL = '\\x07';\n /** Backspace (Caret = ^H, C = \\b) */\n export const BS = '\\x08';\n /** Character Tabulation, Horizontal Tabulation (Caret = ^I, C = \\t) */\n export const HT = '\\x09';\n /** Line Feed (Caret = ^J, C = \\n) */\n 
export const LF = '\\x0a';\n /** Line Tabulation, Vertical Tabulation (Caret = ^K, C = \\v) */\n export const VT = '\\x0b';\n /** Form Feed (Caret = ^L, C = \\f) */\n export const FF = '\\x0c';\n /** Carriage Return (Caret = ^M, C = \\r) */\n export const CR = '\\x0d';\n /** Shift Out (Caret = ^N) */\n export const SO = '\\x0e';\n /** Shift In (Caret = ^O) */\n export const SI = '\\x0f';\n /** Data Link Escape (Caret = ^P) */\n export const DLE = '\\x10';\n /** Device Control One (XON) (Caret = ^Q) */\n export const DC1 = '\\x11';\n /** Device Control Two (Caret = ^R) */\n export const DC2 = '\\x12';\n /** Device Control Three (XOFF) (Caret = ^S) */\n export const DC3 = '\\x13';\n /** Device Control Four (Caret = ^T) */\n export const DC4 = '\\x14';\n /** Negative Acknowledge (Caret = ^U) */\n export const NAK = '\\x15';\n /** Synchronous Idle (Caret = ^V) */\n export const SYN = '\\x16';\n /** End of Transmission Block (Caret = ^W) */\n export const ETB = '\\x17';\n /** Cancel (Caret = ^X) */\n export const CAN = '\\x18';\n /** End of Medium (Caret = ^Y) */\n export const EM = '\\x19';\n /** Substitute (Caret = ^Z) */\n export const SUB = '\\x1a';\n /** Escape (Caret = ^[, C = \\e) */\n export const ESC = '\\x1b';\n /** File Separator (Caret = ^\\) */\n export const FS = '\\x1c';\n /** Group Separator (Caret = ^]) */\n export const GS = '\\x1d';\n /** Record Separator (Caret = ^^) */\n export const RS = '\\x1e';\n /** Unit Separator (Caret = ^_) */\n export const US = '\\x1f';\n /** Space */\n export const SP = '\\x20';\n /** Delete (Caret = ^?) */\n export const DEL = '\\x7f';\n};\n","/**\n * @license MIT\n */\n\nimport { ITerminal } from './Interfaces';\n\ninterface IPosition {\n start: number;\n end: number;\n}\n\n/**\n * Encapsulates the logic for handling compositionstart, compositionupdate and compositionend\n * events, displaying the in-progress composition to the UI and forwarding the final composition\n * to the handler.\n */\nexport class CompositionHelper {\n /**\n * Whether input composition is currently happening, eg. via a mobile keyboard, speech input or\n * IME. 
This variable determines whether the compositionText should be displayed on the UI.\n */\n private isComposing: boolean;\n\n /**\n * The position within the input textarea's value of the current composition.\n */\n private compositionPosition: IPosition;\n\n /**\n * Whether a composition is in the process of being sent, setting this to false will cancel any\n * in-progress composition.\n */\n private isSendingComposition: boolean;\n\n /**\n * Creates a new CompositionHelper.\n * @param textarea The textarea that xterm uses for input.\n * @param compositionView The element to display the in-progress composition in.\n * @param terminal The Terminal to forward the finished composition to.\n */\n constructor(\n private textarea: HTMLTextAreaElement,\n private compositionView: HTMLElement,\n private terminal: ITerminal\n ) {\n this.isComposing = false;\n this.isSendingComposition = false;\n this.compositionPosition = { start: null, end: null };\n }\n\n /**\n * Handles the compositionstart event, activating the composition view.\n */\n public compositionstart() {\n this.isComposing = true;\n this.compositionPosition.start = this.textarea.value.length;\n this.compositionView.textContent = '';\n this.compositionView.classList.add('active');\n }\n\n /**\n * Handles the compositionupdate event, updating the composition view.\n * @param {CompositionEvent} ev The event.\n */\n public compositionupdate(ev: CompositionEvent) {\n this.compositionView.textContent = ev.data;\n this.updateCompositionElements();\n setTimeout(() => {\n this.compositionPosition.end = this.textarea.value.length;\n }, 0);\n }\n\n /**\n * Handles the compositionend event, hiding the composition view and sending the composition to\n * the handler.\n */\n public compositionend() {\n this.finalizeComposition(true);\n }\n\n /**\n * Handles the keydown event, routing any necessary events to the CompositionHelper functions.\n * @param ev The keydown event.\n * @return Whether the Terminal should continue processing the keydown event.\n */\n public keydown(ev: KeyboardEvent) {\n if (this.isComposing || this.isSendingComposition) {\n if (ev.keyCode === 229) {\n // Continue composing if the keyCode is the \"composition character\"\n return false;\n } else if (ev.keyCode === 16 || ev.keyCode === 17 || ev.keyCode === 18) {\n // Continue composing if the keyCode is a modifier key\n return false;\n } else {\n // Finish composition immediately. This is mainly here for the case where enter is\n // pressed and the handler needs to be triggered before the command is executed.\n this.finalizeComposition(false);\n }\n }\n\n if (ev.keyCode === 229) {\n // If the \"composition character\" is used but gets to this point it means a non-composition\n // character (eg. numbers and punctuation) was pressed when the IME was active.\n this.handleAnyTextareaChanges();\n return false;\n }\n\n return true;\n }\n\n /**\n * Finalizes the composition, resuming regular input actions. This is called when a composition\n * is ending.\n * @param waitForPropogation Whether to wait for events to propogate before sending\n * the input. 
This should be false if a non-composition keystroke is entered before the\n * compositionend event is triggered, such as enter, so that the composition is send before\n * the command is executed.\n */\n private finalizeComposition(waitForPropogation: boolean) {\n this.compositionView.classList.remove('active');\n this.isComposing = false;\n this.clearTextareaPosition();\n\n if (!waitForPropogation) {\n // Cancel any delayed composition send requests and send the input immediately.\n this.isSendingComposition = false;\n const input = this.textarea.value.substring(this.compositionPosition.start, this.compositionPosition.end);\n this.terminal.handler(input);\n } else {\n // Make a deep copy of the composition position here as a new compositionstart event may\n // fire before the setTimeout executes.\n const currentCompositionPosition = {\n start: this.compositionPosition.start,\n end: this.compositionPosition.end,\n };\n\n // Since composition* events happen before the changes take place in the textarea on most\n // browsers, use a setTimeout with 0ms time to allow the native compositionend event to\n // complete. This ensures the correct character is retrieved, this solution was used\n // because:\n // - The compositionend event's data property is unreliable, at least on Chromium\n // - The last compositionupdate event's data property does not always accurately describe\n // the character, a counter example being Korean where an ending consonsant can move to\n // the following character if the following input is a vowel.\n this.isSendingComposition = true;\n setTimeout(() => {\n // Ensure that the input has not already been sent\n if (this.isSendingComposition) {\n this.isSendingComposition = false;\n let input;\n if (this.isComposing) {\n // Use the end position to get the string if a new composition has started.\n input = this.textarea.value.substring(currentCompositionPosition.start, currentCompositionPosition.end);\n } else {\n // Don't use the end position here in order to pick up any characters after the\n // composition has finished, for example when typing a non-composition character\n // (eg. 
2) after a composition character.\n input = this.textarea.value.substring(currentCompositionPosition.start);\n }\n this.terminal.handler(input);\n }\n }, 0);\n }\n }\n\n /**\n * Apply any changes made to the textarea after the current event chain is allowed to complete.\n * This should be called when not currently composing but a keydown event with the \"composition\n * character\" (229) is triggered, in order to allow non-composition text to be entered when an\n * IME is active.\n */\n private handleAnyTextareaChanges() {\n const oldValue = this.textarea.value;\n setTimeout(() => {\n // Ignore if a composition has started since the timeout\n if (!this.isComposing) {\n const newValue = this.textarea.value;\n const diff = newValue.replace(oldValue, '');\n if (diff.length > 0) {\n this.terminal.handler(diff);\n }\n }\n }, 0);\n }\n\n /**\n * Positions the composition view on top of the cursor and the textarea just below it (so the\n * IME helper dialog is positioned correctly).\n * @param dontRecurse Whether to use setTimeout to recursively trigger another update, this is\n * necessary as the IME events across browsers are not consistently triggered.\n */\n public updateCompositionElements(dontRecurse?: boolean) {\n if (!this.isComposing) {\n return;\n }\n const cursor = this.terminal.element.querySelector('.terminal-cursor');\n if (cursor) {\n // Take .xterm-rows offsetTop into account as well in case it's positioned absolutely within\n // the .xterm element.\n const xtermRows = this.terminal.element.querySelector('.xterm-rows');\n const cursorTop = xtermRows.offsetTop + cursor.offsetTop;\n\n this.compositionView.style.left = cursor.offsetLeft + 'px';\n this.compositionView.style.top = cursorTop + 'px';\n this.compositionView.style.height = cursor.offsetHeight + 'px';\n this.compositionView.style.lineHeight = cursor.offsetHeight + 'px';\n // Sync the textarea to the exact position of the composition view so the IME knows where the\n // text is.\n const compositionViewBounds = this.compositionView.getBoundingClientRect();\n this.textarea.style.left = cursor.offsetLeft + 'px';\n this.textarea.style.top = cursorTop + 'px';\n this.textarea.style.width = compositionViewBounds.width + 'px';\n this.textarea.style.height = compositionViewBounds.height + 'px';\n this.textarea.style.lineHeight = compositionViewBounds.height + 'px';\n }\n if (!dontRecurse) {\n setTimeout(() => this.updateCompositionElements(true), 0);\n }\n };\n\n /**\n * Clears the textarea's position so that the cursor does not blink on IE.\n * @private\n */\n private clearTextareaPosition() {\n this.textarea.style.left = '';\n this.textarea.style.top = '';\n };\n}\n","/**\n * @license MIT\n */\n\n/**\n * The character sets supported by the terminal. These enable several languages\n * to be represented within the terminal with only 8-bit encoding. See ISO 2022\n * for a discussion on character sets. Only VT100 character sets are supported.\n */\nexport const CHARSETS: {[key: string]: {[key: string]: string}} = {};\n\n/**\n * The default character set, US.\n */\nexport const DEFAULT_CHARSET = CHARSETS['B'];\n\n/**\n * DEC Special Character and Line Drawing Set.\n * Reference: http://vt100.net/docs/vt102-ug/table5-13.html\n * A lot of curses apps use this if they see TERM=xterm.\n * testing: echo -e '\\e(0a\\e(B'\n * The xterm output sometimes seems to conflict with the\n * reference above. 
xterm seems in line with the reference\n * when running vttest however.\n * The table below now uses xterm's output from vttest.\n */\nCHARSETS['0'] = {\n '`': '\\u25c6', // '◆'\n 'a': '\\u2592', // '▒'\n 'b': '\\u0009', // '\\t'\n 'c': '\\u000c', // '\\f'\n 'd': '\\u000d', // '\\r'\n 'e': '\\u000a', // '\\n'\n 'f': '\\u00b0', // '°'\n 'g': '\\u00b1', // '±'\n 'h': '\\u2424', // '\\u2424' (NL)\n 'i': '\\u000b', // '\\v'\n 'j': '\\u2518', // '┘'\n 'k': '\\u2510', // '┐'\n 'l': '\\u250c', // '┌'\n 'm': '\\u2514', // '└'\n 'n': '\\u253c', // '┼'\n 'o': '\\u23ba', // '⎺'\n 'p': '\\u23bb', // '⎻'\n 'q': '\\u2500', // '─'\n 'r': '\\u23bc', // '⎼'\n 's': '\\u23bd', // '⎽'\n 't': '\\u251c', // '├'\n 'u': '\\u2524', // '┤'\n 'v': '\\u2534', // '┴'\n 'w': '\\u252c', // '┬'\n 'x': '\\u2502', // '│'\n 'y': '\\u2264', // '≤'\n 'z': '\\u2265', // '≥'\n '{': '\\u03c0', // 'π'\n '|': '\\u2260', // '≠'\n '}': '\\u00a3', // '£'\n '~': '\\u00b7' // '·'\n};\n\n/**\n * British character set\n * ESC (A\n * Reference: http://vt100.net/docs/vt220-rm/table2-5.html\n */\nCHARSETS['A'] = {\n '#': '£'\n};\n\n/**\n * United States character set\n * ESC (B\n */\nCHARSETS['B'] = null;\n\n/**\n * Dutch character set\n * ESC (4\n * Reference: http://vt100.net/docs/vt220-rm/table2-6.html\n */\nCHARSETS['4'] = {\n '#': '£',\n '@': '¾',\n '[': 'ij',\n '\\\\': '½',\n ']': '|',\n '{': '¨',\n '|': 'f',\n '}': '¼',\n '~': '´'\n};\n\n/**\n * Finnish character set\n * ESC (C or ESC (5\n * Reference: http://vt100.net/docs/vt220-rm/table2-7.html\n */\nCHARSETS['C'] =\nCHARSETS['5'] = {\n '[': 'Ä',\n '\\\\': 'Ö',\n ']': 'Å',\n '^': 'Ü',\n '`': 'é',\n '{': 'ä',\n '|': 'ö',\n '}': 'å',\n '~': 'ü'\n};\n\n/**\n * French character set\n * ESC (R\n * Reference: http://vt100.net/docs/vt220-rm/table2-8.html\n */\nCHARSETS['R'] = {\n '#': '£',\n '@': 'à',\n '[': '°',\n '\\\\': 'ç',\n ']': '§',\n '{': 'é',\n '|': 'ù',\n '}': 'è',\n '~': '¨'\n};\n\n/**\n * French Canadian character set\n * ESC (Q\n * Reference: http://vt100.net/docs/vt220-rm/table2-9.html\n */\nCHARSETS['Q'] = {\n '@': 'à',\n '[': 'â',\n '\\\\': 'ç',\n ']': 'ê',\n '^': 'î',\n '`': 'ô',\n '{': 'é',\n '|': 'ù',\n '}': 'è',\n '~': 'û'\n};\n\n/**\n * German character set\n * ESC (K\n * Reference: http://vt100.net/docs/vt220-rm/table2-10.html\n */\nCHARSETS['K'] = {\n '@': '§',\n '[': 'Ä',\n '\\\\': 'Ö',\n ']': 'Ü',\n '{': 'ä',\n '|': 'ö',\n '}': 'ü',\n '~': 'ß'\n};\n\n/**\n * Italian character set\n * ESC (Y\n * Reference: http://vt100.net/docs/vt220-rm/table2-11.html\n */\nCHARSETS['Y'] = {\n '#': '£',\n '@': '§',\n '[': '°',\n '\\\\': 'ç',\n ']': 'é',\n '`': 'ù',\n '{': 'à',\n '|': 'ò',\n '}': 'è',\n '~': 'ì'\n};\n\n/**\n * Norwegian/Danish character set\n * ESC (E or ESC (6\n * Reference: http://vt100.net/docs/vt220-rm/table2-12.html\n */\nCHARSETS['E'] =\nCHARSETS['6'] = {\n '@': 'Ä',\n '[': 'Æ',\n '\\\\': 'Ø',\n ']': 'Å',\n '^': 'Ü',\n '`': 'ä',\n '{': 'æ',\n '|': 'ø',\n '}': 'å',\n '~': 'ü'\n};\n\n/**\n * Spanish character set\n * ESC (Z\n * Reference: http://vt100.net/docs/vt220-rm/table2-13.html\n */\nCHARSETS['Z'] = {\n '#': '£',\n '@': '§',\n '[': '¡',\n '\\\\': 'Ñ',\n ']': '¿',\n '{': '°',\n '|': 'ñ',\n '}': 'ç'\n};\n\n/**\n * Swedish character set\n * ESC (H or ESC (7\n * Reference: http://vt100.net/docs/vt220-rm/table2-14.html\n */\nCHARSETS['H'] =\nCHARSETS['7'] = {\n '@': 'É',\n '[': 'Ä',\n '\\\\': 'Ö',\n ']': 'Å',\n '^': 'Ü',\n '`': 'é',\n '{': 'ä',\n '|': 'ö',\n '}': 'å',\n '~': 'ü'\n};\n\n/**\n * Swiss character set\n * ESC (=\n * Reference: 
http://vt100.net/docs/vt220-rm/table2-15.html\n */\nCHARSETS['='] = {\n '#': 'ù',\n '@': 'à',\n '[': 'é',\n '\\\\': 'ç',\n ']': 'ê',\n '^': 'î',\n '_': 'è',\n '`': 'ô',\n '{': 'ä',\n '|': 'ö',\n '}': 'ü',\n '~': 'û'\n};\n",null],"names":[],"mappings":"AmBAA;;;ADSa;AAKA;AAYb;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AAMA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AD3OA;AAwBA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAMA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAOA;AACA;AACA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAGA;AACA;AACA;AAEA;AAGA;AACA;AACA;AAEA;AACA;AAUA;AAAA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AAAA;AAGA;AACA;AACA;AACA;AAUA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAAA;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AAAA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAEA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAMA;AACA;AACA;AACA;AAAA;AACA;AAAA;AApNa;;;;;;;ADRb;AAAA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AAEA;AACA;AAAC;;;;;;;ADtEA;AAED;AAGA;AAGA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AAAA;AAAA;;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAAA;AA3Da;;;;;;;ADJb;AACA;AASA;AAEA;AAAA;AAAA;AAEA;AACA;AAGA;AAEA;AACA;AACA;AAEA;AAIA;AAEA;AACA;AAGA;AACA;AAEA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAIA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AAGA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAGA;AAEA;AAGA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AAEA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAMA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAMA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAOA;AACA;AACA;AAOA;AACA;AACA;AAMA;AACA;AAEA;AACA;AAAA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AAEA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AAcA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAaA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAEA;AACA;
AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AAMA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AAMA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAMA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AAIA;AACA;AACA;AACA;AAuCA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAAA;AAIA;AACA;AACA;AAAA;AACA;AACA;AAAA;AAGA;AACA;AAAA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAOA;AACA;AAAA;AACA;AAAA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAUA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAwFA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAGA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AAKA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAoFA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAkEA;AAEA;AACA;AACA;AACA;AAEA;AAOA;AACA;AACA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AACA;AAAA;AAEA;AACA;AACA;AAAA;AAEA;AACA;AACA;AAIA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAGA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AACA;AAIA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAAA;AAEA;AACA;AACA;AAIA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAAA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AAyBA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAGA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAGA;AACA;AAGA;AACA;AAGA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AAAA;AAj8Ca;AAm8Cb;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ADljDA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AAKA;AAeA;AAFA;AAGA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AAMA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AAOA;AACA;AACA;AAYA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAOA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAPA;AAAA;AAOA;AACA;AAEA;AACA;AACA;AACA;AAQA;AAEA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAAA;AAEA;AACA;AACA;AACA;AAEA;AAGA;AACA;AACA;AACA;AAAA;AAEA;AAEA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AAAA;AAAA;AAAA;;AACA;AACA;AACA;AACA;AACA;AACA;AAWA;AAGA;AACA;AACA;AAKA;AACA;AACA;AAEA;AAEA;AAEA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAhUmB;AANN;;;;;;;ADhCb;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AAKA;AACA;AACA;AAEA;AACA;AAOA;AACA;AAEA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAIA;AACA;AAGA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AAKA;AACA;AAIA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAIA;AACA;AAAA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAEA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAEA;AAEA;AACA;AAIA;AACA;AACA;AAEA;AAEA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AAGA;AACA;AAMA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AAEA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AAQA;AACA;AACA;AAQA;AACA;AACA;AAOA;AACA;AACA;AAKA;AACA;AACA;AAMA;AACA;AACA;AACA;AAKA;AACA;AACA;AASA;AAAA;AAvda;;;;;;;ADjKb;AAOA;AAKA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAAC;AAED;AAEA;AAQA;AAAA;AANA;AACA;AACA;AAEA;AAKA;AACA;AACA;AACA;AAKA;AAQA;AACA;AACA;AACA;AACA;AACA;AAMA;AAKA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAuBA;AAEA;AACA
;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAEA;AAEA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AAGA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AAGA;AACA;AAAA;AAEA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAAA;AAOA;AAEA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AAGA;AAEA;AACA;AACA;AAEA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AAQA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAlWa;AAsWb;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;ADzYA;AACA;AAGA;AAEA;AAMA;AAKA;AAKA;AAMA;AAMA;AAMA;AAIA;AACA;AAEA;AACA;AAaA;AAAA;AACA;AACA;AACA;AACA;AAUA;AAAA;AA+CA;AAAA;AACA;AACA;AACA;AACA;AAGA;AACA;AAEA;AACA;AACA;;AACA;AAKA;AAAA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AAKA;AAKA;AACA;AACA;AAOA;AACA;AACA;AACA;AAKA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAAA;AAKA;AAAA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAIA;AACA;AACA;AAEA;AACA;;;AAAA;AAKA;AACA;AACA;AACA;AACA;AAYA;AAAA;AAAA;AAIA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAIA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAOA;AAAA;AAEA;AACA;AACA;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AAEA;AACA;AAEA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAMA;AAEA;AACA;AACA;AAGA;AAGA;AAEA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AACA;AAKA;AAAA;AAEA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AAGA;AACA;AAOA;AAGA;AAGA;AAGA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAAA;AACA;AACA;AAGA;AAIA;AACA;AACA;AAAA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAGA;AACA;AAGA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAKA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AAOA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AAAA;AA9nBa;;;;;;;ADrEb;AAuBA;AACA;AAEA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAKA;AAAA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;;;AAAA;AAMA;AAAA;AACA;AACA;AACA;AAEA;AACA;AACA;AAGA;AACA;AACA;AAGA;AAEA;AACA;AACA;AACA;AACA;AACA;;;AAAA;AAKA;AACA;AACA;AACA;AACA;AAOA;AAEA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAAA;AArHa;;;;;;;ADCb;AAaA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AAGA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AAEA;AACA;AACA;AAA
A;AAEA;AACA;AAAA;AAEA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AAQA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AAAA;AAMA;AACA;AACA;AAAA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAAA;AAvIa;;;;;;;ADWb;AACA;AACA;AACA;AACA;AACA;AALA;AAWA;AACA;AACA;AACA;AAAA;AACA;AACA;AAGA;AACA;AATA;AAgBA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAzBA;AAgCA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AApBA;AA4BA;AACA;AAGA;AACA;AACA;AANA;;;;;;;ADvGA;AAEA;AACA;AACA;AAEa;AACA;AAKA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;ADjBb;AAKA;AAAA;AAOA;AAAA;AAEA;AACA;;AACA;AAEA;AAAA;AACA;AACA;;;AAAA;AAEA;AAAA;AACA;AACA;;;AAAA;AAEA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAnDa;;;;;;;;;;;;;;;;;ADJb;AAEA;AAAA;AAKA;AAAA;AAEA;AACA;AACA;;AACA;AAEA;AAAA;AACA;AACA;AAEA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAXA;AAaA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AATA;AAWA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAAA;AAUA;AACA;AACA;AAUA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAMA;AACA;AACA;AAWA;AAAA;AAAA;AAAA;;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AAAA;AAhMa;;;;;;;ADCb;AAYA;AAAA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AAAA;AAhE0B;AAET;AAHJ;;;;;;;ADEb;AACA;AACA;AAFA;AAEC;;;;;;;ADPD;AAEA;AACA;AACA;AAEA;AACA;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AAjBA;AAgCA;AACA;AAGA;AACA;AAGA;AACA;AAEA;AACA;AAZA;AAwBA;AACA;AACA;AACA;AAGA;AACA;AAEA;AACA;AAVA;;;;;;;ADlDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAcA;AAOA;AAMA;AAOA;AAkBA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAGA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AAEA;AAAA;AACA;AAEA;AACA;AAEA;AAIA;AAIA;AACA;AACA;AAEA;AACA;AACA;AAMA;AAKA;AAKA;AAKA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AAGA;AACA;AAQA;AAGA;AAGA;AAMA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAGA;AACA;AAEA;AAKA;AAEA;AACA;AAOA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAIA;AACA;AAKA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAEA;AAEA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAEA;AAEA;AAEA;AACA;AACA;AACA;AAKA;AACA;AACA;AAMA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AAAA;AACA;AAEA;AACA;AACA;AACA;AAAA;AAAA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AAKA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AAAA;AACA;AAEA;AACA;AACA;AAGA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AAEA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAKA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAQA;AAAA;AACA;AAEA;AAEA;AACA;AACA;AAGA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AAIA;AACA;AACA;AACA;AACA;AAIA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAIA;AACA;AACA;AACA;AACA;AACA;AAGA;AAIA;AAMA;AACA;AAEA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAGA;AACA;AACA;AACA;AAIA;AAOA;AACA;AAQA;AACA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AAYA;AACA;AAKA;AACA;AAIA;AAGA;AACA;AAAA;AAEA;AAEA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AAIA;AACA;AACA;AAIA;AACA;AAGA;AACA;AAAA;AAIA;AAEA;AACA;AAIA;AACA;AACA;AAAA;AACA;AAAA;AACA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAQA;AAOA;AAGA;AACA;AACA;AACA;AACA;AAAA;AACA;AAAA;AACA;AAAA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AAAA;AACA;AAAA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AAEA;AACA;AAEA;AACA;AAaA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAIA;AACA;AACA;AACA;AAGA;AAEA;AACA;AAAA;AACA;AACA;AAGA;AAEA;AACA;AAEA;AACA;AAAA;AAGA;AAGA;AAIA;AACA;AACA;AACA;AACA;AAGA;AAAA;AAGA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AAMA;AACA;AAAA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAIA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AAAA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAQA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AAOA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AAGA;AACA;AACA;AAGA;AAGA;AAEA;AAEA;AACA;AAAA;AAEA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AAQA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AAEA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAMA;AACA;AACA;AAKA;AACA;AACA;AAKA;AACA;AACA;AAMA;AACA;AAKA;AAGA;AACA;AACA;AAEA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAIA;AACA;AACA;AACA;AAEA;AACA;AAOA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAMA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AAUA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AAEA;AACA;AAQA;AACA;AACA;AACA;AACA;AAEA;AACA;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AACA;AAMA;AACA;AACA;AAKA;AACA;AACA;AAKA;AACA;AACA;AAQA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AAEA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AASA;AACA;AAGA;AAEA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAIA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AAIA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AAGA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;
AACA;AACA;AAEA;AACA;AAGA;AACA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AAGA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AAAA;AAEA;AACA;AACA;AAAA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAMA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AACA;AACA;AAQA;AACA;AAEA;AACA;AACA;AAEA;AAEA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AAAA;AACA;AACA;AAEA;AAGA;AACA;AAEA;AAEA;AACA;AACA;AACA;AAEA;AACA;AAMA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAMA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AAKA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AAKA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AAOA;AACA;AACA;AAEA;AAAA;AACA;AAAA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AAGA;AACA;AACA;AACA;AACA;AAEA;AACA;AAGA;AACA;AACA;AAEA;AACA;AACA;AAAA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AAAA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAEA;AAEA;AAEA;AAEA;AACA;AACA;AAMA;AACA;AAAA;AACA;AAAA;AAOA;AAKA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAOA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AACA;AAOA;AACA;AAAA;AACA;AAAA;AACA;AACA;AACA;AACA;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAKA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAQA;AACA;AACA;AACA;AAEA;AAMA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AAOA;AACA;AACA;AACA;AACA;AAOA;AACA;AACA;AACA;AAOA;AAEA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AAOA;AAOA;AACA;AAUA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAQA;AACA;AACA;AAIA;AACA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AAAA;AACA;AACA;AACA;AAEA;AACA;AAEA;AAEA;AACA;AACA;AAGA;AACA;AAGA;AAEA;AACA;AAEA;AACA;AACA;AAEA;AASA;AACA;AACA;AACA;AACA;AAEA;AAEA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AAEA;AAGA;AACA;AACA;AACA;AACA;AAEA;AACA;AAAA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AAEA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAMA;AACA;AAQA;AACA;AACA;AAEA;;;"} \ No newline at end of file diff --git a/server/www/teleport/static/js/maintenance/install.js b/server/www/teleport/static/js/maintenance/install.js new file mode 100644 index 0000000..7ed5f4a --- /dev/null +++ b/server/www/teleport/static/js/maintenance/install.js @@ -0,0 +1,197 @@ +"use strict"; + +$app.on_init = function (cb_stack, cb_args) { + $app.dom = { + btn_config: $('#btn-config'), + steps_detail: $('#steps-detail'), + db_info: $('#db-info'), + account: 
$('#sysadmin-account'), + email: $('#sysadmin-email'), + password: $('#password'), + password2: $('#password-again'), + message: $('#message'), + step2: $('#step2') + }; + + $app._make_info = function (key, value) { + return '' + key + ':' + value + ''; + }; + + var html = []; + if ($app.options.db.type === DB_TYPE_SQLITE) { + html.push($app._make_info('数据库类型', 'SQLite')); + html.push($app._make_info('数据库文件', $app.options.db.sqlite_file)); + } else if ($app.options.db.type === DB_TYPE_MYSQL) { + html.push($app._make_info('数据库类型', 'MySQL')); + html.push($app._make_info('MySQL主机', $app.options.db.mysql_host)); + html.push($app._make_info('MySQL端口', $app.options.db.mysql_port)); + html.push($app._make_info('数据库名称', $app.options.db.mysql_db)); + html.push($app._make_info('用户名', $app.options.db.mysql_user)); + + var _t = []; + _t.push('
'); + _t.push('注意:请确保您在执行后续创建操作之前,已经在MySQL数据库中创建了数据库"'); + _t.push($app.options.db.mysql_db); + _t.push('"和用户"'); + _t.push($app.options.db.mysql_user); + _t.push('",并为用户"'); + _t.push($app.options.db.mysql_user); + _t.push('"设置了在数据库"'); + _t.push($app.options.db.mysql_db); + _t.push('"创建表的权限!'); + _t.push('
'); + $app.dom.db_info.after(_t.join('')); + } else { + html.push($app._make_info('数据库类型', '未知的数据库类型,请检查您的配置文件!')); + $app.dom.btn_config.attr('disabled', 'disabled').hide(); + } + $app.dom.db_info.append(html.join('')); + + $app.hide_op_box = function () { + $app.dom.message.hide(); + }; + + $app.show_op_box = function (op_type, op_msg) { + $app.dom.message.html(op_msg); + $app.dom.message.removeClass().addClass('op_box op_' + op_type); + $app.dom.message.show(); + }; + + $app.dom.btn_config.click(function () { + var str_account = $app.dom.account.val(); + var str_email = $app.dom.email.val(); + var str_password = $app.dom.password.val(); + var str_password2 = $app.dom.password2.val(); + + if (str_account.length === 0) { + $app.show_op_box('error', '请填写系统管理员登录账号名称!'); + $app.dom.account.focus(); + return; + } + if (str_email.length === 0) { + $app.show_op_box('error', '请填写系统管理员的电子邮件地址!'); + $app.dom.email.focus(); + return; + } + if(!tp_check_email(str_email)) { + $app.show_op_box('error', '电子邮件地址格式错啦,你会收不到邮件的!'); + $app.dom.email.focus(); + return; + } + if (str_password.length === 0) { + $app.show_op_box('error', '请设置系统管理员登录密码!'); + $app.dom.password.focus(); + return; + } + if (str_password2.length === 0) { + $app.show_op_box('error', '请再次输入系统管理员登录密码!'); + $app.dom.password.focus(); + return; + } + if (str_password !== str_password2) { + $app.show_op_box('error', '两次输入的密码不一致!'); + $app.dom.password2.focus().select(); + return; + } + + $app.dom.btn_config.attr('disabled', 'disabled').hide(); + $app.hide_op_box(); + $app.dom.steps_detail.show(); + + $tp.ajax_post_json('/maintenance/rpc', {cmd: 'install', sysadmin: str_account, email: str_email, password: str_password}, + function (ret) { + if (ret.code === TPE_OK) { + + var cb_stack = CALLBACK_STACK.create(); + cb_stack + .add_delay(500, $app.get_task_ret, {task_id: ret.data.task_id}) + .exec(); + } + + }, + function () { +// $app.show_message('error', '无法连接到服务器!'); + $app.show_op_box('error', '无法连接到服务器!'); + } + ); + + }); + + $app.get_task_ret = function (cb_stack, cb_args) { + var task_id = cb_args.task_id || 0; + if (task_id === 0) { + console.log('task-id', task_id); + return; + } + + $tp.ajax_post_json('/maintenance/rpc', {cmd: 'get_task_ret', 'tid': task_id}, + function (ret) { + if (ret.code === TPE_OK) { + + // show step progress. + var all_ok = true; + var steps = ret.data.steps; + $app.dom.steps_detail.empty(); + + var html = []; + var icon_class = ''; + var err_class = ''; + for (var i = 0; i < steps.length; ++i) { + if (steps[i].stat === 0) + icon_class = 'fa-check'; + else + icon_class = 'fa-cog fa-spin'; + + if (steps[i].code !== 0) { + icon_class = 'fa-exclamation-circle'; + err_class = ' class="error"'; + steps[i].msg += ' 失败!'; + all_ok = false; + } + else { + err_class = ''; + } + + html.push(' '); + html.push(steps[i].msg); + html.push('

') + } + $app.dom.steps_detail.html(html.join('')); + + if (!ret.data.running) { + if (all_ok) { + + $tp.ajax_post_json('/auth/do-logout', {}, + function () { + }, + function () { + } + ); + + $app.dom.step2.show('fast', function () { + // 确保页面滚动到最低端,使得下一步提示能够被看到。 + document.body.scrollTop = document.body.scrollHeight; + }); + } + return; + } + + cb_stack + .add_delay(500, $app.get_task_ret, {task_id: task_id}) + .exec(); + } + + }, + function () { + $app.show_op_box('error', '无法连接到服务器!'); + } + ); + + }; + + cb_stack.exec(); +}; \ No newline at end of file diff --git a/server/www/teleport/static/js/ops/auz-info.js b/server/www/teleport/static/js/ops/auz-info.js new file mode 100644 index 0000000..23436e3 --- /dev/null +++ b/server/www/teleport/static/js/ops/auz-info.js @@ -0,0 +1,1990 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + area_operator: $('#area-operator'), + area_asset: $('#area-asset'), + + btn_refresh_operator: $('#btn-refresh-operator'), + btn_add_user: $('#btn-add-user'), + btn_add_user_group: $('#btn-add-user-group'), + select_all_operator: $('#table-operator-select-all'), + btn_remove_operator: $('#btn-remove-operator'), + + btn_refresh_asset: $('#btn-refresh-asset'), + btn_add_acc: $('#btn-add-acc'), + btn_add_acc_group: $('#btn-add-acc-group'), + btn_add_host: $('#btn-add-host'), + btn_add_host_group: $('#btn-add-host-group'), + select_all_asset: $('#table-asset-select-all'), + btn_remove_asset: $('#btn-remove-asset'), + + flag_checkboxes: $('#tab-config div.tp-checkbox.tp-editable'), + flag_record_allow_replay: $('#record-allow-replay'), + flag_rdp_allow_clipboard: $('#rdp-allow-clipboard'), + flag_rdp_allow_disk: $('#rdp-allow-disk'), + flag_rdp_allow_console: $('#rdp-allow-console'), + flag_ssh_allow_shell: $('#ssh-allow-shell'), + flag_ssh_allow_sftp: $('#ssh-allow-sftp'), + btn_save_flags: $('#btn-save-flags') + }; + + $app.init_flags(); + + if ($app.options.policy_id !== 0) { + window.onresize = $app.on_win_resize; + cb_stack + .add($app.sync_height) + .add($app.create_controls); + } + + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 操作者列表表格 + //------------------------------- + var table_operator_options = { + dom_id: 'table-operator', + data_source: { + type: 'ajax-post', + url: '/ops/policy/get-operators' + }, + message_no_data: '还没有授权的操作者...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '
', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: '类型', + key: 'rtype', + sort: true, + width: 80, + render: 'ref_type', + fields: {rtype: 'rtype'} + }, + { + title: '操作者', + key: 'name', + sort: true, + header_render: 'filter_search', + fields: {name: 'name'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_operator_header_created, + on_render_created: $app.on_table_operator_render_created, + on_cell_created: $app.on_table_operator_cell_created + }; + + $app.table_operator = $tp.create_table(table_operator_options); + cb_stack + .add($app.table_operator.load_data) + .add($app.table_operator.init); + + $tp.create_table_header_filter_search($app.table_operator, { + name: 'search', + place_holder: '搜索:用户名/用户组名' + }); + $tp.create_table_filter_fixed_value($app.table_operator, {policy_id: $app.options.policy_id}); + + $tp.create_table_paging($app.table_operator, 'table-operator-paging', + { + per_page: Cookies.get($app.page_id('ops_auz') + '_operator_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz') + '_operator_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_operator, 'table-operator-pagination'); + + $app.dom.btn_refresh_operator.click(function () { + $app.table_operator.load_data(); + }); + $app.dom.select_all_operator.click(function () { + var _objects = $('#' + $app.table_operator.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + $app.dom.btn_remove_operator.click($app.on_btn_remove_operator_click); + + //------------------------------- + // 资产列表表格 + //------------------------------- + var table_asset_options = { + dom_id: 'table-asset', + data_source: { + type: 'ajax-post', + url: '/ops/policy/get-asset' + }, + message_no_data: '还没有分配被授权访问的资产哦...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: '类型', + key: 'rtype', + sort: true, + width: 80, + render: 'ref_type', + fields: {rtype: 'rtype'} + }, + { + title: '资产', + key: 'name', + sort: true, + header_render: 'filter_search', + fields: {name: 'name'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_asset_header_created, + on_render_created: $app.on_table_asset_render_created, + on_cell_created: $app.on_table_asset_cell_created + }; + + $app.table_asset = $tp.create_table(table_asset_options); + cb_stack + .add($app.table_asset.load_data) + .add($app.table_asset.init); + + $tp.create_table_header_filter_search($app.table_asset, { + name: 'search', + place_holder: '搜索:账号名/账号组名/主机名/主机组名' + }); + $tp.create_table_filter_fixed_value($app.table_asset, {policy_id: $app.options.policy_id}); + + $tp.create_table_paging($app.table_asset, 'table-asset-paging', + { + per_page: Cookies.get($app.page_id('ops_auz') + '_asset_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz') + '_asset_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_asset, 'table-asset-pagination'); + + $app.dom.btn_refresh_asset.click(function () { + $app.table_asset.load_data(); + }); + $app.dom.select_all_asset.click(function () { + var _objects = 
$('#' + $app.table_asset.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + $app.dom.btn_remove_asset.click($app.on_btn_remove_asset_click); + + //------------------------------- + // 选择用户对话框 + //------------------------------- + var table_sel_user_options = { + dom_id: 'table-sel-user', + data_source: { + type: 'ajax-post', + url: '/user/get-users', + exclude: {'ops_policy_id': $app.options.policy_id} + }, + message_no_data: '所有用户都被授权了哦...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "用户", + key: "username", + sort: true, + header_render: 'filter_search', + render: 'user_info', + fields: {id: 'id', username: 'username', surname: 'surname', email: 'email'} + }, + { + title: "角色", + key: "role_id", + width: 120, + sort: true, + header_render: 'filter_role', + render: 'role', + fields: {role_id: 'role_id'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 120, + align: 'center', + header_render: 'filter_state', + render: 'state', + fields: {state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_sel_user_header_created, + on_render_created: $app.on_table_sel_user_render_created, + on_cell_created: $app.on_table_sel_user_cell_created + }; + $app.table_sel_user = $tp.create_table(table_sel_user_options); + cb_stack.add($app.table_sel_user.init); + + $tp.create_table_header_filter_search($app.table_sel_user, { + name: 'search', + place_holder: '搜索:用户账号/姓名/邮箱/描述/等等...' 
+ }); + $tp.create_table_filter_role($app.table_sel_user, $app.role_list); + $tp.create_table_header_filter_state($app.table_sel_user, 'state', $app.obj_states); + + $tp.create_table_paging($app.table_sel_user, 'table-sel-user-paging', + { + per_page: Cookies.get($app.page_id('ops_auz_detail') + '_sel_user_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz_detail') + '_sel_user_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_sel_user, 'table-sel-user-pagination'); + + $app.dlg_sel_user = $app.create_dlg_sel_user(); + cb_stack.add($app.dlg_sel_user.init); + cb_stack.add($app.load_role_list); + + + //------------------------------- + // 选择用户组对话框 + //------------------------------- + var table_sel_user_group_options = { + dom_id: 'table-sel-user-group', + data_source: { + type: 'ajax-post', + url: '/group/get-groups', + exclude: {'ops_policy_id': {pid: $app.options.policy_id, gtype: TP_GROUP_USER}} // 排除指定成员 + }, + message_no_data: '所有用户组都被授权了哦...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "用户组", + key: "name", + sort: true, + header_render: 'filter_search', + render: 'name', + fields: {name: 'name', desc: 'desc'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_sel_user_group_header_created, + on_render_created: $app.on_table_sel_user_group_render_created, + on_cell_created: $app.on_table_sel_user_group_cell_created + }; + $app.table_sel_user_group = $tp.create_table(table_sel_user_group_options); + cb_stack.add($app.table_sel_user_group.init); + + $tp.create_table_header_filter_search($app.table_sel_user_group, { + name: 'search', + place_holder: '搜索:用户组名称/描述/等等...' 
+ }); + $tp.create_table_filter_fixed_value($app.table_sel_user_group, {type: TP_GROUP_USER}); + $tp.create_table_paging($app.table_sel_user_group, 'table-sel-user-group-paging', + { + per_page: Cookies.get($app.page_id('ops_auz_detail') + '_user_group_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz_detail') + '_user_group_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_sel_user_group, 'table-sel-user-group-pagination'); + + $app.dlg_sel_user_group = $app.create_dlg_sel_user_group(); + cb_stack.add($app.dlg_sel_user_group.init); + + + //------------------------------- + // 选择账号对话框 + //------------------------------- + var table_sel_acc_options = { + dom_id: 'table-sel-acc', + data_source: { + type: 'ajax-post', + url: '/asset/get-accounts', + exclude: {'ops_policy_id': $app.options.policy_id} // 排除指定成员 + }, + message_no_data: '所有账号都被授权了哦...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "账号", + key: "username", + sort: true, + header_render: 'filter_search', + render: 'acc_info', + fields: {id: 'id', username: 'username', host_ip: 'host_ip', router_ip: 'router_ip', router_port: 'router_port'} + }, + { + title: "远程连接协议", + key: "protocol_type", + sort: true, + width: 120, + align: 'center', + render: 'protocol', + fields: {protocol_type: 'protocol_type'} + }, + { + title: "认证方式", + key: "auth_type", + width: 80, + align: 'center', + render: 'auth_type', + fields: {auth_type: 'auth_type'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 80, + align: 'center', + render: 'state', + fields: {state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_sel_acc_header_created, + on_render_created: $app.on_table_sel_acc_render_created, + on_cell_created: $app.on_table_sel_acc_cell_created + }; + $app.table_sel_acc = $tp.create_table(table_sel_acc_options); + cb_stack.add($app.table_sel_acc.init); + + $tp.create_table_header_filter_search($app.table_sel_acc, { + name: 'search', + place_holder: '搜索:账号/主机IP/等等...' 
+ }); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_sel_acc, 'table-sel-acc-paging', + { + per_page: Cookies.get($app.page_id('ops_auz_detail') + '_sel_acc_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz_detail') + '_sel_acc_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_sel_acc, 'table-sel-acc-pagination'); + + $app.dlg_sel_acc = $app.create_dlg_sel_acc(); + cb_stack.add($app.dlg_sel_acc.init); + + + //------------------------------- + // 选择账号组对话框 + //------------------------------- + var table_sel_acc_group_options = { + dom_id: 'table-sel-acc-group', + data_source: { + type: 'ajax-post', + url: '/group/get-groups', + exclude: {'ops_policy_id': {pid: $app.options.policy_id, gtype: TP_GROUP_ACCOUNT}} // 排除指定成员 + }, + message_no_data: '所有账号组都被授权了哦...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "账号组", + key: "name", + sort: true, + header_render: 'filter_search', + render: 'name', + fields: {name: 'name', desc: 'desc'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_sel_acc_group_header_created, + on_render_created: $app.on_table_sel_acc_group_render_created, + on_cell_created: $app.on_table_sel_acc_group_cell_created + }; + $app.table_sel_acc_group = $tp.create_table(table_sel_acc_group_options); + cb_stack.add($app.table_sel_acc_group.init); + + $tp.create_table_header_filter_search($app.table_sel_acc_group, { + name: 'search', + place_holder: '搜索:账号组名称/描述/等等...' + }); + $tp.create_table_filter_fixed_value($app.table_sel_acc_group, {type: TP_GROUP_ACCOUNT}); + $tp.create_table_paging($app.table_sel_acc_group, 'table-sel-acc-group-paging', + { + per_page: Cookies.get($app.page_id('ops_auz_detail') + '_acc_group_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz_detail') + '_acc_group_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_sel_acc_group, 'table-sel-acc-group-pagination'); + + $app.dlg_sel_acc_group = $app.create_dlg_sel_acc_group(); + cb_stack.add($app.dlg_sel_acc_group.init); + + + //------------------------------- + // 选择主机对话框 + //------------------------------- + var table_sel_host_options = { + dom_id: 'table-sel-host', + data_source: { + type: 'ajax-post', + url: '/asset/get-hosts', + exclude: {'ops_policy_id': $app.options.policy_id} // 排除指定成员 + }, + message_no_data: '所有主机都被授权了哦...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "主机", + key: "ip", + sort: true, + // width: 240, + header_render: 'filter_search', + render: 'host_info', + fields: {id: 'id', ip: 'ip', name: 'name', router_ip: 'router_ip', router_port: 'router_port'} + }, + { + title: "系统", + key: "os_type", + width: 36, + align: 'center', + sort: true, + render: 'os_type', + fields: {os_type: 'os_type'} + }, + { + title: "资产编号", + key: "cid", + sort: true, + // width: 80, + // align: 'center', + //render: 'auth_type', + fields: {cid: 'cid'} + }, + { + title: "状态", + key: "state", + sort: true, + width: 90, + align: 'center', + render: 'state', + fields: {state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_sel_host_header_created, + on_render_created: 
$app.on_table_sel_host_render_created, + on_cell_created: $app.on_table_sel_host_cell_created + }; + $app.table_sel_host = $tp.create_table(table_sel_host_options); + cb_stack.add($app.table_sel_host.init); + + $tp.create_table_header_filter_search($app.table_sel_host, { + name: 'search', + place_holder: '搜索:主机IP/名称/等等...' + }); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_sel_host, 'table-sel-host-paging', + { + per_page: Cookies.get($app.page_id('ops_auz_detail') + '_sel_host_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz_detail') + '_sel_host_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_sel_host, 'table-sel-host-pagination'); + + $app.dlg_sel_host = $app.create_dlg_sel_host(); + cb_stack.add($app.dlg_sel_host.init); + + + //------------------------------- + // 选择主机组对话框 + //------------------------------- + var table_sel_host_group_options = { + dom_id: 'table-sel-host-group', + data_source: { + type: 'ajax-post', + url: '/group/get-groups', + exclude: {'ops_policy_id': {pid: $app.options.policy_id, gtype: TP_GROUP_HOST}} // 排除指定成员 + }, + message_no_data: '所有主机组都被授权了哦...', + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: "主机组", + key: "name", + sort: true, + header_render: 'filter_search', + render: 'name', + fields: {name: 'name', desc: 'desc'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_sel_host_group_header_created, + on_render_created: $app.on_table_sel_host_group_render_created, + on_cell_created: $app.on_table_sel_host_group_cell_created + }; + $app.table_sel_host_group = $tp.create_table(table_sel_host_group_options); + cb_stack.add($app.table_sel_host_group.init); + + $tp.create_table_header_filter_search($app.table_sel_host_group, { + name: 'search', + place_holder: '搜索:主机组名称/描述/等等...' 
+ }); + $tp.create_table_filter_fixed_value($app.table_sel_host_group, {type: TP_GROUP_HOST}); + $tp.create_table_paging($app.table_sel_host_group, 'table-sel-host-group-paging', + { + per_page: Cookies.get($app.page_id('ops_auz_detail') + '_host_group_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz_detail') + '_host_group_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_sel_host_group, 'table-sel-host-group-pagination'); + + $app.dlg_sel_host_group = $app.create_dlg_sel_host_group(); + cb_stack.add($app.dlg_sel_host_group.init); + + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_add_user.click(function () { + $app.dlg_sel_user.show(); + }); + $app.dom.btn_add_user_group.click(function () { + $app.dlg_sel_user_group.show(); + }); + $app.dom.btn_add_acc.click(function () { + $app.dlg_sel_acc.show(); + }); + $app.dom.btn_add_acc_group.click(function () { + $app.dlg_sel_acc_group.show(); + }); + $app.dom.btn_add_host.click(function () { + $app.dlg_sel_host.show(); + }); + $app.dom.btn_add_host_group.click(function () { + $app.dlg_sel_host_group.show(); + }); + + $app.dom.flag_checkboxes.click(function (e) { + $app.on_click_flag(e); + }); + $app.dom.btn_save_flags.click(function () { + $app.on_save_flags(); + }); + + cb_stack.exec(); +}; + +// 为保证界面美观,两个表格的高度不一致时,自动调整到一致。 +$app.on_win_resize = function () { + $app.sync_height(); +}; +$app.sync_height = function (cb_stack) { + var o_top = $app.dom.area_operator.offset().top; + var a_top = $app.dom.area_asset.offset().top; + + // 如果两个表格的top不一致,说明是页面宽度缩小到一定程度后,两个表格上下排列了。 + if (o_top !== a_top) { + $app.dom.area_operator.css({height: 'auto', minHeight: 'auto'}); + $app.dom.area_asset.css({height: 'auto', minHeight: 'auto'}); + return; + } + + $app.dom.area_operator.css({height: 'auto', minHeight: 'auto'}); + $app.dom.area_asset.css({height: 'auto', minHeight: 'auto'}); + + var o_height = $app.dom.area_operator.height(); + var a_height = $app.dom.area_asset.height(); + + var h = _.max([o_height, a_height]); + + if (o_height <= h) { + $app.dom.area_operator.css({minHeight: h}); + } else { + $app.dom.area_operator.css({height: 'auto', minHeight: 'auto'}); + } + if (a_height <= h) { + $app.dom.area_asset.css({minHeight: h}); + } else { + $app.dom.area_asset.css({height: 'auto', minHeight: 'auto'}); + } + + if (cb_stack) + cb_stack.exec(); +}; + +//------------------------------- +// 通用渲染器 +//------------------------------- +$app._add_common_render = function (render) { + render.filter_search = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.filter_state = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('state'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.state = function (row_id, fields) { + var _style, _state; + + for (var i = 0; i < $app.obj_states.length; ++i) { + if ($app.obj_states[i].id === fields.state) { + _style = $app.obj_states[i].style; + _state = $app.obj_states[i].name; + break; + } + } + if (i === $app.obj_states.length) { + _style = 'info'; + _state = ' 未知'; + } + + return '' + _state + '' + }; + + render.ref_type = function (row_id, fields) { + switch (fields.rtype) { + case TP_USER: + return ' 用户'; + case TP_GROUP_USER: + return ' 用户组'; + case TP_ACCOUNT: + return ' 账号'; + case TP_GROUP_ACCOUNT: + return ' 账号组'; + case TP_HOST: + return ' 主机'; + case TP_GROUP_HOST: + return ' 主机组'; + default: + return '未知' + } + }; +}; + +//------------------------------- +// 操作者列表 +//------------------------------- + +$app.check_operator_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_operator.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + $app.dom.select_all_operator.prop('checked', true); + } else { + $app.dom.select_all_operator.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app.on_table_operator_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_operator_all_selected(); + }); + } +}; + +$app.on_table_operator_render_created = function (render) { + + $app._add_common_render(render); + +}; + +$app.on_table_operator_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.get_selected_operator = function (tbl) { + var items = []; + var _objs = $('#' + $app.table_operator.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + items.push(_row_data.id); + } + }); + return items; +}; + +$app.on_btn_remove_operator_click = function () { + var items = $app.get_selected_operator($app.table_operator); + if (items.length === 0) { + $tp.notify_error('请选择要移除的操作者!'); + return; + } + + var _fn_sure = function (cb_stack) { + $tp.ajax_post_json('/ops/policy/remove-members', {policy_id: $app.options.policy_id, policy_type: TP_POLICY_OPERATOR, ids: items}, + function (ret) { + if (ret.code === TPE_OK) { + cb_stack + .add($app.sync_height) + .add($app.check_operator_all_selected) + .add($app.check_operator_all_selected) + .add($app.table_operator.load_data); + $tp.notify_success('移除授权操作者成功!'); + } else { + $tp.notify_error('移除授权操作者失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,移除授权操作者失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + $tp.dlg_confirm(cb_stack, { + msg: '

注意:移除操作不可恢复!!

您确定要移除选定的' + items.length + '个授权操作者吗?

', + fn_yes: _fn_sure + }); + +}; + + +//------------------------------- +// 资产列表 +//------------------------------- + +$app.check_asset_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_asset.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + $app.dom.select_all_asset.prop('checked', true); + } else { + $app.dom.select_all_asset.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app.on_table_asset_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_asset_all_selected(); + }); + } +}; + +$app.on_table_asset_render_created = function (render) { + $app._add_common_render(render); +}; + +$app.on_table_asset_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.get_selected_asset = function (tbl) { + var items = []; + var _objs = $('#' + $app.table_asset.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + items.push(_row_data.id); + } + }); + return items; +}; + +$app.on_btn_remove_asset_click = function () { + var items = $app.get_selected_asset($app.table_asset); + if (items.length === 0) { + $tp.notify_error('请选择要移除的被授权资产!'); + return; + } + + var _fn_sure = function (cb_stack) { + $tp.ajax_post_json('/ops/policy/remove-members', {policy_id: $app.options.policy_id, policy_type: TP_POLICY_ASSET, ids: items}, + function (ret) { + if (ret.code === TPE_OK) { + cb_stack + .add($app.sync_height) + .add($app.check_asset_all_selected) + .add($app.check_asset_all_selected) + .add($app.table_asset.load_data); + $tp.notify_success('移除被授权资产成功!'); + } else { + $tp.notify_error('移除被授权资产失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,移除被授权资产失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + $tp.dlg_confirm(cb_stack, { + msg: '

注意:移除操作不可恢复!!

您确定要移除选定的' + items.length + '个被授权资产吗?

', + fn_yes: _fn_sure + }); + +}; + + +//------------------------------- +// 选择用户对话框 +//------------------------------- + +$app.on_table_sel_user_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.dlg_sel_user.check_all_selected(); + }); + } +}; + +$app.on_table_sel_user_render_created = function (render) { + $app._add_common_render(render); + + render.filter_role = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('role'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.user_info = function (row_id, fields) { + var ret = []; + ret.push('' + fields.surname + ''); + ret.push(''); + ret.push(fields.username); + if (fields.email.length > 0) + ret.push(' <' + fields.email + '>'); + ret.push(''); + return ret.join('') + }; + + render.role = function (row_id, fields) { + for (var i = 0; i < $app.role_list.length; ++i) { + if ($app.role_list[i].id === fields.role_id) + return $app.role_list[i].name; + } + return ' 未设置'; + }; +}; + +$app.on_table_sel_user_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); + header._table_ctrl.get_filter_ctrl('role').on_created(); + header._table_ctrl.get_filter_ctrl('state').on_created(); +}; + +$app.create_dlg_sel_user = function () { + var dlg = {}; + dlg.dom_id = 'dlg-sel-user'; + dlg.field_id = -1; + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_sel_all: $('#' + dlg.dom_id + ' input[data-action="sel-all"]'), + btn_add: $('#' + dlg.dom_id + ' button[data-action="use-selected"]') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + dlg.dom.btn_sel_all.click(dlg.on_sel_all); + cb_stack.exec(); + }; + + dlg.show = function () { + $app.table_sel_user.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.on_sel_all = function () { + var _objects = $('#' + $app.table_sel_user.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }; + + dlg.check_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + dlg.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + dlg.dom.btn_sel_all.prop('checked', true); + } else { + dlg.dom.btn_sel_all.prop('checked', false); + } + if (cb_stack) + cb_stack.exec(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_sel_user.get_row(_obj); + + var name = _row_data.username; + if (_row_data.surname.length > 0 && _row_data.surname !== name) + name += '(' + _row_data.surname + ')'; + + items.push({id: _row_data.id, name: name}); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + + $tp.ajax_post_json('/ops/policy/add-members', { + policy_id: $app.options.policy_id, + type: TP_POLICY_OPERATOR, // 授权操作者 + rtype: TP_USER, // 用户 + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('授权操作者添加成功!'); + CALLBACK_STACK.create() + .add($app.sync_height) + .add(dlg.check_all_selected) + .add($app.table_operator.load_data) + .add($app.table_sel_user.load_data) + .exec(); + } else { + $tp.notify_error('授权操作者添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,授权操作者添加失败!'); 
+ } + ); + + }; + + return dlg; +}; + + +//------------------------------- +// 选择用户组对话框 +//------------------------------- + +$app.on_table_sel_user_group_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.dlg_sel_user_group.check_all_selected(); + }); + } +}; + +$app.on_table_sel_user_group_render_created = function (render) { + + $app._add_common_render(render); + + render.name = function (row_id, fields) { + return '' + fields.name + '' + fields.desc + ''; + }; +}; + +$app.on_table_sel_user_group_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.create_dlg_sel_user_group = function () { + var dlg = {}; + dlg.dom_id = 'dlg-sel-user-group'; + dlg.field_id = -1; // 用户id + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_sel_all: $('#' + dlg.dom_id + ' input[data-action="sel-all"]'), + btn_add: $('#' + dlg.dom_id + ' button[data-action="use-selected"]') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + dlg.dom.btn_sel_all.click(dlg.on_sel_all); + cb_stack.exec(); + }; + + dlg.show = function () { + $app.table_sel_user_group.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.on_sel_all = function () { + var _objects = $('#' + $app.table_sel_user_group.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }; + + dlg.check_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + dlg.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + dlg.dom.btn_sel_all.prop('checked', true); + } else { + dlg.dom.btn_sel_all.prop('checked', false); + } + if (cb_stack) + cb_stack.exec(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_sel_user_group.get_row(_obj); + items.push({id: _row_data.id, name: _row_data.name}); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + + $tp.ajax_post_json('/ops/policy/add-members', { + policy_id: $app.options.policy_id, + type: TP_POLICY_OPERATOR, // 授权操作者 + rtype: TP_GROUP_USER, // 用户组 + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('授权操作者添加成功!'); + CALLBACK_STACK.create() + .add($app.sync_height) + .add(dlg.check_all_selected) + .add($app.table_operator.load_data) + .add($app.table_sel_user_group.load_data) + .exec(); + } else { + $tp.notify_error('授权操作者添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,授权操作者添加失败!'); + } + ); + + }; + + return dlg; +}; + +//------------------------------- +// 选择账号对话框 +//------------------------------- + 
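+// Note: the account / account-group / host / host-group dialogs that follow use the
+// same structure as the user and user-group dialogs above: show() reloads the backing
+// table and opens the modal, check_all_selected() keeps the "select all" checkbox in
+// sync, get_selected_items() collects {id, name} pairs from the checked rows, and
+// on_add() posts them to /ops/policy/add-members before reloading both tables.
+// A sketch of that request body, as assembled by each dlg.on_add() in this file:
+//   {
+//       policy_id: $app.options.policy_id,
+//       type: TP_POLICY_OPERATOR or TP_POLICY_ASSET,
+//       rtype: TP_USER / TP_GROUP_USER / TP_ACCOUNT / TP_GROUP_ACCOUNT / TP_HOST / TP_GROUP_HOST,
+//       members: [{id: ..., name: ...}, ...]
+//   }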
+$app.on_table_sel_acc_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.dlg_sel_acc.check_all_selected(); + }); + } +}; + +$app.on_table_sel_acc_render_created = function (render) { + + $app._add_common_render(render); + + render.acc_info = function (row_id, fields) { + var ret = []; + + ret.push('' + fields.username + '@' + fields.host_ip + ''); + if (fields.router_ip.length > 0) + ret.push('由 ' + fields.router_ip + ':' + fields.router_port + ' 路由'); + + return ret.join(''); + }; + + render.protocol = function (row_id, fields) { + switch (fields.protocol_type) { + case TP_PROTOCOL_TYPE_RDP: + return ' RDP'; + case TP_PROTOCOL_TYPE_SSH: + return ' SSH'; + case TP_PROTOCOL_TYPE_TELNET: + return ' TELNET'; + default: + return ' 未设置'; + } + }; + + render.auth_type = function (row_id, fields) { + switch (fields.auth_type) { + case TP_AUTH_TYPE_NONE: + return ''; + case TP_AUTH_TYPE_PASSWORD: + return '密码'; + case TP_AUTH_TYPE_PRIVATE_KEY: + return '私钥'; + default: + return '未设置'; + } + }; +}; + +$app.on_table_sel_acc_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.create_dlg_sel_acc = function () { + var dlg = {}; + dlg.dom_id = 'dlg-sel-acc'; + dlg.field_id = -1; + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_sel_all: $('#' + dlg.dom_id + ' input[data-action="sel-all"]'), + btn_add: $('#' + dlg.dom_id + ' button[data-action="use-selected"]') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + dlg.dom.btn_sel_all.click(dlg.on_sel_all); + cb_stack.exec(); + }; + + dlg.show = function () { + // dlg.init_fields(); + $app.table_sel_acc.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.on_sel_all = function () { + var _objects = $('#' + $app.table_sel_acc.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }; + + dlg.check_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + dlg.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + dlg.dom.btn_sel_all.prop('checked', true); + } else { + dlg.dom.btn_sel_all.prop('checked', false); + } + if (cb_stack) + cb_stack.exec(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_sel_acc.get_row(_obj); + + var name = _row_data.username + '@' + _row_data.host_ip; + if (_row_data.router_ip.length > 0) + name += ' (由 ' + _row_data.router_ip + ':' + _row_data.router_port + ' 路由)'; + + + items.push({id: _row_data.id, name: name}); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + + 
$tp.ajax_post_json('/ops/policy/add-members', { + policy_id: $app.options.policy_id, + type: TP_POLICY_ASSET, // 被授权资产 + rtype: TP_ACCOUNT, // 账号 + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('被授权资产添加成功!'); + CALLBACK_STACK.create() + .add($app.sync_height) + .add(dlg.check_all_selected) + .add($app.table_asset.load_data) + .add($app.table_sel_acc.load_data) + .exec(); + } else { + $tp.notify_error('被授权资产添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,被授权资产添加失败!'); + } + ); + + }; + + return dlg; +}; + +//------------------------------- +// 选择账号组对话框 +//------------------------------- + +$app.on_table_sel_acc_group_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + // $app.check_users_all_selected(); + $app.dlg_sel_acc_group.check_all_selected(); + }); + } +}; + +$app.on_table_sel_acc_group_render_created = function (render) { + + $app._add_common_render(render); + + render.name = function (row_id, fields) { + return '' + fields.name + '' + fields.desc + ''; + }; +}; + +$app.on_table_sel_acc_group_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.create_dlg_sel_acc_group = function () { + var dlg = {}; + dlg.dom_id = 'dlg-sel-acc-group'; + dlg.field_id = -1; // 用户id + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_sel_all: $('#' + dlg.dom_id + ' input[data-action="sel-all"]'), + btn_add: $('#' + dlg.dom_id + ' button[data-action="use-selected"]') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + dlg.dom.btn_sel_all.click(dlg.on_sel_all); + cb_stack.exec(); + }; + + dlg.show = function () { + // dlg.init_fields(); + $app.table_sel_acc_group.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.on_sel_all = function () { + var _objects = $('#' + $app.table_sel_acc_group.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }; + + dlg.check_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + dlg.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + dlg.dom.btn_sel_all.prop('checked', true); + } else { + dlg.dom.btn_sel_all.prop('checked', false); + } + if (cb_stack) + cb_stack.exec(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_sel_acc_group.get_row(_obj); + items.push({id: _row_data.id, name: _row_data.name}); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + + $tp.ajax_post_json('/ops/policy/add-members', { + policy_id: $app.options.policy_id, + type: 
TP_POLICY_ASSET, // 被授权资产 + rtype: TP_GROUP_ACCOUNT, // 账号组 + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('被授权资产添加成功!'); + CALLBACK_STACK.create() + .add($app.sync_height) + .add(dlg.check_all_selected) + .add($app.table_asset.load_data) + .add($app.table_sel_acc_group.load_data) + .exec(); + } else { + $tp.notify_error('被授权资产添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,被授权资产添加失败!'); + } + ); + + }; + + return dlg; +}; + +//------------------------------- +// 选择主机对话框 +//------------------------------- + +$app.on_table_sel_host_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.dlg_sel_host.check_all_selected(); + }); + } +}; + +$app.on_table_sel_host_render_created = function (render) { + + $app._add_common_render(render); + + render.host_info = function (row_id, fields) { + var ret = []; + + var name = fields.name; + if (name.length === 0) + name = fields.ip; + var ip = fields.ip; + ret.push('' + name + '
[' + ip + ']'); + if (fields.router_ip.length > 0) + ret.push(' 由 ' + fields.router_ip + ':' + fields.router_port + ' 路由'); + ret.push('
'); + + return ret.join(''); + }; + +}; + +$app.on_table_sel_host_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.create_dlg_sel_host = function () { + var dlg = {}; + dlg.dom_id = 'dlg-sel-host'; + dlg.field_id = -1; + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_sel_all: $('#' + dlg.dom_id + ' input[data-action="sel-all"]'), + btn_add: $('#' + dlg.dom_id + ' button[data-action="use-selected"]') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + dlg.dom.btn_sel_all.click(dlg.on_sel_all); + cb_stack.exec(); + }; + + dlg.show = function () { + $app.table_sel_host.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.on_sel_all = function () { + var _objects = $('#' + $app.table_sel_host.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }; + + dlg.check_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + dlg.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + dlg.dom.btn_sel_all.prop('checked', true); + } else { + dlg.dom.btn_sel_all.prop('checked', false); + } + if (cb_stack) + cb_stack.exec(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_sel_host.get_row(_obj); + + var name = ''; + if (_row_data.name.length > 0) + name = _row_data.name + ' [' + _row_data.ip + ']'; + else + name = _row_data.ip; + + if (_row_data.router_ip.length > 0) + name += ' (由 ' + _row_data.router_ip + ':' + _row_data.router_port + ' 路由)'; + + + items.push({id: _row_data.id, name: name}); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + + $tp.ajax_post_json('/ops/policy/add-members', { + policy_id: $app.options.policy_id, + type: TP_POLICY_ASSET, // 被授权资产 + rtype: TP_HOST, // 主机 + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('被授权资产添加成功!'); + CALLBACK_STACK.create() + .add($app.sync_height) + .add(dlg.check_all_selected) + .add($app.table_asset.load_data) + .add($app.table_sel_host.load_data) + .exec(); + } else { + $tp.notify_error('被授权资产添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,被授权资产添加失败!'); + } + ); + + }; + + return dlg; +}; + +//------------------------------- +// 选择主机组对话框 +//------------------------------- + +$app.on_table_sel_host_group_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + // $app.check_users_all_selected(); + $app.dlg_sel_host_group.check_all_selected(); + }); + } +}; + +$app.on_table_sel_host_group_render_created = function (render) { + + 
$app._add_common_render(render); + + render.name = function (row_id, fields) { + return '' + fields.name + '' + fields.desc + ''; + }; +}; + +$app.on_table_sel_host_group_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); +}; + +$app.create_dlg_sel_host_group = function () { + var dlg = {}; + dlg.dom_id = 'dlg-sel-host-group'; + dlg.field_id = -1; + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + btn_sel_all: $('#' + dlg.dom_id + ' input[data-action="sel-all"]'), + btn_add: $('#' + dlg.dom_id + ' button[data-action="use-selected"]') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_add.click(dlg.on_add); + dlg.dom.btn_sel_all.click(dlg.on_sel_all); + cb_stack.exec(); + }; + + dlg.show = function () { + $app.table_sel_host_group.load_data(); + dlg.dom.dialog.modal(); + }; + + dlg.on_sel_all = function () { + var _objects = $('#' + $app.table_sel_host_group.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }; + + dlg.check_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + dlg.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + dlg.dom.btn_sel_all.prop('checked', true); + } else { + dlg.dom.btn_sel_all.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); + }; + + dlg.get_selected_items = function () { + var items = []; + var _objs = $('#' + dlg.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = $app.table_sel_host_group.get_row(_obj); + items.push({id: _row_data.id, name: _row_data.name}); + } + }); + + return items; + }; + + dlg.on_add = function () { + var items = dlg.get_selected_items(); + + $tp.ajax_post_json('/ops/policy/add-members', { + policy_id: $app.options.policy_id, + type: TP_POLICY_ASSET, // 授权操作者 + rtype: TP_GROUP_HOST, // 主机组 + members: items + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('被授权资产添加成功!'); + CALLBACK_STACK.create() + .add($app.sync_height) + .add(dlg.check_all_selected) + .add($app.table_asset.load_data) + .add($app.table_sel_host_group.load_data) + .exec(); + } else { + $tp.notify_error('被授权资产添加失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,被授权资产添加失败!'); + } + ); + + }; + + return dlg; +}; + +$app.init_flags = function() { + console.log($app.options); + + if(($app.options.policy_flags.record & TP_FLAG_RECORD_REPLAY) !== 0) + $app.dom.flag_record_allow_replay.addClass('tp-selected'); + + if(($app.options.policy_flags.rdp & TP_FLAG_RDP_CLIPBOARD) !== 0) + $app.dom.flag_rdp_allow_clipboard.addClass('tp-selected'); + if(($app.options.policy_flags.rdp & TP_FLAG_RDP_DISK) !== 0) + $app.dom.flag_rdp_allow_disk.addClass('tp-selected'); + if(($app.options.policy_flags.rdp & TP_FLAG_RDP_CONSOLE) !== 0) + 
$app.dom.flag_rdp_allow_console.addClass('tp-selected'); + + if(($app.options.policy_flags.ssh & TP_FLAG_SSH_SHELL) !== 0) + $app.dom.flag_ssh_allow_shell.addClass('tp-selected'); + if(($app.options.policy_flags.ssh & TP_FLAG_SSH_SFTP) !== 0) + $app.dom.flag_ssh_allow_sftp.addClass('tp-selected'); +}; + +$app.on_click_flag = function (e) { + var obj = $(e.target); + if (obj.hasClass('tp-selected')) { + obj.removeClass('tp-selected'); + } else { + obj.addClass('tp-selected'); + } +}; + +$app.on_save_flags = function () { + var flag_record = 0; + flag_record |= TP_FLAG_RECORD_REAL_TIME; // not implemented yet; set this flag by default. + if ($app.dom.flag_record_allow_replay.hasClass('tp-selected')) + flag_record |= TP_FLAG_RECORD_REPLAY; + + var flag_rdp = 0; + flag_rdp |= TP_FLAG_RDP_DESKTOP; // until remote-app is supported, remote desktop is the only way to access a remote host. + if ($app.dom.flag_rdp_allow_clipboard.hasClass('tp-selected')) + flag_rdp |= TP_FLAG_RDP_CLIPBOARD; + if ($app.dom.flag_rdp_allow_disk.hasClass('tp-selected')) + flag_rdp |= TP_FLAG_RDP_DISK; + if ($app.dom.flag_rdp_allow_console.hasClass('tp-selected')) + flag_rdp |= TP_FLAG_RDP_CONSOLE; + + var flag_ssh = 0; + if ($app.dom.flag_ssh_allow_shell.hasClass('tp-selected')) + flag_ssh |= TP_FLAG_SSH_SHELL; + if ($app.dom.flag_ssh_allow_sftp.hasClass('tp-selected')) + flag_ssh |= TP_FLAG_SSH_SFTP; + + if (flag_ssh === 0) { + $tp.notify_error('SSH选项都未选择,无法进行SSH连接哦!'); + return; + } + + + $tp.ajax_post_json('/ops/policy/set-flags', { + policy_id: $app.options.policy_id, + flag_record: flag_record, + flag_rdp: flag_rdp, + flag_ssh: flag_ssh + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('选项设置成功!'); + } else { + $tp.notify_error('选项设置失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,选项设置失败!'); + } + ); +}; diff --git a/server/www/teleport/static/js/ops/auz-list.js b/server/www/teleport/static/js/ops/auz-list.js new file mode 100644 index 0000000..db745b8 --- /dev/null +++ b/server/www/teleport/static/js/ops/auz-list.js @@ -0,0 +1,633 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_policy: $('#btn-refresh-policy'), + btn_create_policy: $('#btn-create-policy'), + select_all_policy: $('#table-auz-select-all'), + + btn_lock: $('#btn-lock'), + btn_unlock: $('#btn-unlock'), + btn_remove: $('#btn-remove') + }; + + $app.drag = { + dragging: false, + drag_row_id: '0', + hover_row_id: '0', + drag_index: -1, + hover_index: -1, + insert_before: true, // 是插入到拖放目标之前还是之后 + items: [], + + dom: {} + }; + + $('#btn-test').click(function () { + $app.on_test(); + }); + + // $app.dragging = false; + // $app.drag_row_id = 0; + // $app.drag_to_insert = []; + $(document).mousemove(function (e) { + $app.on_dragging(e); + }).mouseup(function (e) { + $app.on_drag_end(e); + }); + + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + + cb_stack.exec(); +}; + +$app.on_test = function () { + $tp.ajax_post_json('/ops/build-auz-map', {}, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('重建授权映射成功!'); + } else { + $tp.notify_error('重建授权映射失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,重建授权映射失败!'); + } + ); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 授权策略列表表格 + //------------------------------- + var table_policy_options = { +
dom_id: 'table-policy', + data_source: { + type: 'ajax-post', + url: '/ops/get-policies' + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + //title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: '顺序', + key: 'rank', + // sort: true, + align: 'center', + width: 60, + // header_render: 'filter_search', + render: 'rank', + fields: {rank: 'rank'} + }, + { + title: '授权策略', + key: 'name', + // sort: true, + // header_render: 'filter_search', + render: 'policy_info', + fields: {id: 'id', name: 'name', desc: 'desc'} + }, + { + title: "状态", + key: "state", + // sort: true, + width: 90, + align: 'center', + //header_render: 'filter_state', + render: 'state', + fields: {state: 'state'} + }, + { + title: '', + key: 'action', + // sort: false, + align: 'center', + width: 80, + render: 'make_action_btn', + fields: {id: 'id', state: 'state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_policy_header_created, + on_render_created: $app.on_table_policy_render_created, + on_cell_created: $app.on_table_policy_cell_created + }; + + $app.table_policy = $tp.create_table(table_policy_options); + cb_stack + .add($app.table_policy.load_data) + .add($app.table_policy.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_policy, { + name: 'search', + place_holder: '搜索:授权策略名称/描述/等等...' + }); + $tp.create_table_header_filter_state($app.table_policy, 'state', $app.obj_states, [TP_STATE_LOCKED]); + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_policy, 'table-auz-paging', + { + per_page: Cookies.get($app.page_id('ops_auz') + '_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_auz') + '_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_policy, 'table-auz-pagination'); + + //------------------------------- + // 对话框 + //------------------------------- + $app.dlg_edit_policy = $app.create_dlg_edit_policy(); + cb_stack.add($app.dlg_edit_policy.init); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_create_policy.click(function () { + // $app.dom.dlg_edit_user.modal(); + $app.dlg_edit_policy.show_add(); + }); + $app.dom.btn_refresh_policy.click(function () { + $app.table_policy.load_data(); + }); + $app.dom.select_all_policy.click(function () { + var _objects = $('#' + $app.table_policy.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + $app.dom.btn_lock.click($app.on_btn_lock_click); + $app.dom.btn_unlock.click($app.on_btn_unlock_click); + $app.dom.btn_remove.click($app.on_btn_remove_click); + + cb_stack.exec(); +}; + +$app.on_table_policy_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_host_all_selected(); + }); + } else if (col_key === 'rank') { + cell_obj.find('.reorder').mousedown(function (e) { + $app.on_drag_begin(e, row_id); + }); + } else if (col_key === 'action') { + // 绑定系统选择框事件 + cell_obj.find('[data-action]').click(function () { + var action = $(this).attr('data-action'); + if (action === 'edit') { + 
$app.dlg_edit_policy.show_edit(row_id); + // } else if (action === 'account') { + // $app.dlg_accounts.show(row_id); + } + }); + } else if (col_key === 'name') { + cell_obj.find('[data-action="edit-policy"]').click(function () { + $app.dlg_accounts.show(row_id); + }); + } +}; + +$app.on_drag_begin = function (e, row_id) { + $(document).bind('selectstart', function () { + return false; + }); + + $app.drag = { + dragging: false, + drag_row_id: '0', + hover_row_id: '0', + drag_index: -1, + hover_index: -1, + items: [], + + dom: {} + }; + + $app.drag.drag_row_id = row_id; + + var body = $('body'); + // create a drag-div + var policy = $app.table_policy.get_row(row_id); + + body.after($('')); + + $app.drag.dom.move_box = $('#tp-drag-move-box'); + $app.drag.move_box_height = $app.drag.dom.move_box.height(); + $app.drag.dom.move_box.css({left: e.pageX - 5, top: e.pageY - $app.drag.move_box_height / 2}).show(); + + // create a location-pointer + body.after($('')); + $app.drag.dom.loc_insert = $('#tp-drag-insert'); + + var tr_item = $('tr[data-row-id]'); + for (var i = 0; i < tr_item.length; ++i) { + var item = $(tr_item[i]); + var _row_id = item.attr('data-row-id'); + if (_row_id === row_id) + $app.drag.drag_index = i; + $app.drag.items.push([item.offset().top, item.offset().top + item.height(), _row_id]); + } + + $app.drag.dragging = true; +}; + +$app.on_dragging = function (e) { + if (!$app.drag.dragging) + return; + + $app.drag.dom.move_box.css({left: e.pageX - 5, top: e.pageY - $app.drag.move_box_height / 2}); + + // check which we are moving on. + $app.drag.hover_row_id = null; + for (var i = 0; i < $app.drag.items.length; ++i) { + if (e.pageY < $app.drag.items[i][0]) + continue; + if (e.pageY > $app.drag.items[i][1]) + continue; + if ($app.drag_row_id === $app.drag.items[i][2]) + continue; + + if ($app.drag.drag_row_id === $app.drag.items[i][2]) + break; + + var idx = -1; + if (e.pageY <= $app.drag.items[i][0] + ($app.drag.items[i][1] - $app.drag.items[i][0]) / 2) { + $app.drag.insert_before = true; + idx = i - 1; + } + else { + $app.drag.insert_before = false; + idx = i + 1; + } + + if (idx === $app.drag.drag_index) + break; + + $app.drag.hover_row_id = $app.drag.items[i][2]; + + break; + } + + if ($app.drag.hover_row_id === null) { + $app.drag.dom.loc_insert.hide(); + return; + } else { + $app.drag.dom.loc_insert.show(); + } + + var hover_obj = $('tr[data-row-id="' + $app.drag.hover_row_id + '"]'); + + + var x = hover_obj.offset().left - $app.drag.dom.loc_insert.width(); + var y = 0; + if ($app.drag.insert_before) + y = hover_obj.offset().top - $app.drag.dom.loc_insert.height() / 2; + else + y = hover_obj.offset().top + hover_obj.height() - $app.drag.dom.loc_insert.height() / 2; + $app.drag.dom.loc_insert.css({left: x, top: y}); +}; + +$app.on_drag_end = function (e) { + if (!$app.drag.dragging) + return; + + $app.drag.dom.move_box.remove(); + $app.drag.dom.loc_insert.remove(); + $(document).unbind('selectstart'); + $app.drag.dragging = false; + + if ($app.drag.hover_row_id === null) + return; + + var policy_drag = $app.table_policy.get_row($app.drag.drag_row_id); + var policy_target = $app.table_policy.get_row($app.drag.hover_row_id); + + var direct = -1; // 移动方向,-1=向前移动,1=向后移动 + var start_rank = 0, end_rank = 0; // 导致rank变化的范围: start_rank <= rank <= end_rank + var new_rank = 0;//policy_target.rank; // 被移动的条目的新rank + + if (policy_drag.rank > policy_target.rank) { + // 这是向前移动 + direct = 1; + end_rank = policy_drag.rank - 1; + if ($app.drag.insert_before) { + new_rank = policy_target.rank; 
+ start_rank = policy_target.rank; + } + else { + new_rank = policy_target.rank + 1; + start_rank = policy_target.rank + 1; + } + } else { + // 这是向后移动 + direct = -1; + start_rank = policy_drag.rank + 1; + if ($app.drag.insert_before) { + new_rank = policy_target.rank - 1; + end_rank = policy_target.rank - 1; + } + else { + new_rank = policy_target.rank; + end_rank = policy_target.rank; + } + } + + $tp.ajax_post_json('/ops/policy/rank-reorder', { + pid: policy_drag.id, + new_rank: new_rank, + start_rank: start_rank, + end_rank: end_rank, + direct: direct + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('授权策略顺序调整成功!'); + $app.table_policy.load_data(); + } else { + $tp.notify_error('授权策略顺序调整失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,授权策略顺序调整失败!'); + } + ); +}; + +$app.check_host_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_policy.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + $app.dom.select_all_policy.prop('checked', true); + } else { + $app.dom.select_all_policy.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app.on_table_policy_render_created = function (render) { + + // render.filter_search = function (header, title, col) { + // var _ret = ['
']; + // _ret.push('
'); + // _ret.push('
' + title + '
'); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
'); + // + // return _ret.join(''); + // }; + // + // render.filter_state = function (header, title, col) { + // var _ret = ['
']; + // _ret.push('
'); + // _ret.push('
' + title + '
'); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('state'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
'); + // + // return _ret.join(''); + // }; + + render.rank = function (row_id, fields) { + return ' ' + fields.rank + '' + }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.policy_info = function (row_id, fields) { + return '' + fields.name + '' + fields.desc + '' + }; + + render.state = function (row_id, fields) { + var _style, _state; + + for (var i = 0; i < $app.obj_states.length; ++i) { + if ($app.obj_states[i].id === fields.state) { + _style = $app.obj_states[i].style; + _state = $app.obj_states[i].name; + break; + } + } + if (i === $app.obj_states.length) { + _style = 'info'; + _state = ' 未知'; + } + + return '' + _state + '' + }; + + render.make_action_btn = function (row_id, fields) { + var ret = []; + ret.push('
'); + ret.push(' 编辑'); + // ret.push(' 禁用'); + // ret.push(' 删除'); + ret.push('
'); + return ret.join(''); + }; +}; + +$app.on_table_policy_header_created = function (header) { + // $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + // CALLBACK_STACK.create() + // .add(header._table_ctrl.load_data) + // .add(header._table_ctrl.reset_filters) + // .exec(); + // }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + // header._table_ctrl.get_filter_ctrl('search').on_created(); + // header._table_ctrl.get_filter_ctrl('state').on_created(); +}; + +$app.get_selected_policy = function (tbl) { + var users = []; + var _objs = $('#' + $app.table_policy.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + // _all_checked = false; + users.push(_row_data.id); + } + }); + return users; +}; + +$app.on_btn_lock_click = function () { +}; + +$app.on_btn_unlock_click = function () { +}; + +$app.on_btn_remove_click = function () { + var items = $app.get_selected_policy($app.table_policy); + if (items.length === 0) { + $tp.notify_error('请选择要删除的主机!'); + return; + } + + var _fn_sure = function (cb_stack, cb_args) { + $tp.ajax_post_json('/asset/remove-hosts', {hosts: items}, + function (ret) { + if (ret.code === TPE_OK) { + cb_stack.add($app.check_host_all_selected); + cb_stack.add($app.table_policy.load_data); + $tp.notify_success('删除主机操作成功!'); + } else { + $tp.notify_error('删除主机操作失败:' + tp_error_msg(ret.code, ret.message)); + } + + cb_stack.exec(); + }, + function () { + $tp.notify_error('网络故障,删除主机操作失败!'); + cb_stack.exec(); + } + ); + }; + + var cb_stack = CALLBACK_STACK.create(); + $tp.dlg_confirm(cb_stack, { + msg: '

注意:删除操作不可恢复!!

删除主机将同时删除与之相关的账号,并将主机和账号从所在分组中移除,同时删除所有相关授权!

如果您希望临时禁止登录指定主机,可将其“禁用”!

您确定要移除选定的' + items.length + '个主机吗?

', + fn_yes: _fn_sure + }); + +}; + +$app.create_dlg_edit_policy = function () { + var dlg = {}; + dlg.dom_id = 'dlg-edit-policy'; + dlg.field_id = -1; + dlg.field_name = ''; + dlg.field_desc = ''; + + dlg.dom = { + dialog: $('#' + dlg.dom_id), + dlg_title: $('#' + dlg.dom_id + ' [data-field="dlg-title"]'), + edit_name: $('#edit-name'), + edit_desc: $('#edit-desc'), + btn_save: $('#btn-edit-policy-save') + }; + + dlg.init = function (cb_stack) { + dlg.dom.btn_save.click(dlg.on_save); + cb_stack.exec(); + }; + + dlg.init_fields = function (policy) { + dlg.field_id = -1; + dlg.field_os_type = -1; + + if (_.isUndefined(policy)) { + dlg.dom.dlg_title.html('创建授权策略'); + + dlg.dom.edit_name.val(''); + dlg.dom.edit_desc.val(''); + } else { + dlg.field_id = policy.id; + dlg.dom.dlg_title.html('编辑授权策略:'); + dlg.dom.edit_name.val(policy.name); + dlg.dom.edit_desc.val(policy.desc); + } + }; + + dlg.show_add = function () { + dlg.init_fields(); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.show_edit = function (row_id) { + var host = $app.table_policy.get_row(row_id); + dlg.init_fields(host); + dlg.dom.dialog.modal({backdrop: 'static'}); + }; + + dlg.check_input = function () { + dlg.field_name = dlg.dom.edit_name.val(); + dlg.field_desc = dlg.dom.edit_desc.val(); + + if (dlg.field_name.length === 0) { + dlg.dom.edit_name.focus(); + $tp.notify_error('请设定授权策略名称!'); + return false; + } + + return true; + }; + + dlg.on_save = function () { + if (!dlg.check_input()) + return; + + var action = (dlg.field_id === -1) ? '添加' : '更新'; + + // 如果id为-1表示创建,否则表示更新 + $tp.ajax_post_json('/ops/policy/update', { + id: dlg.field_id, + name: dlg.field_name, + desc: dlg.field_desc + }, + function (ret) { + if (ret.code === TPE_OK) { + $tp.notify_success('授权策略' + action + '成功!'); + $app.table_policy.load_data(); + dlg.dom.dialog.modal('hide'); + } else { + $tp.notify_error('授权策略' + action + '失败:' + tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $tp.notify_error('网络故障,授权策略' + action + '失败!'); + } + ); + }; + + return dlg; +}; diff --git a/server/www/teleport/static/js/ops/remote-list.js b/server/www/teleport/static/js/ops/remote-list.js new file mode 100644 index 0000000..ddc88af --- /dev/null +++ b/server/www/teleport/static/js/ops/remote-list.js @@ -0,0 +1,434 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_host: $('#btn-refresh-host'), + btn_add_user: $('#btn-add-host'), + chkbox_host_select_all: $('#table-host-select-all') + }; + + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 资产列表表格 + //------------------------------- + var table_host_options = { + dom_id: 'table-host', + data_source: { + type: 'ajax-post', + url: '/ops/get-remotes' + }, + column_default: {sort: false, align: 'left'}, + columns: [ + // { + // // title: '', + // title: '', + // key: 'chkbox', + // sort: false, + // width: 36, + // align: 'center', + // render: 'make_check_box', + // fields: {id: 'id'} + // }, + { + title: '主机', + key: 'host', + // sort: true, + // header_render: 'filter_search', + width: 300, + render: 'host_info', + fields: {ip: 'ip', router_ip: 'router_ip', router_port: 'router_port', h_name: 'h_name'} + }, + { + title: '远程账号', + key: 'account', + width: 100, + header_align: 'center', + cell_align: 'right', + render: 'account', + fields: 
{accs: 'accounts_', h_state: 'h_state', gh_state: 'gh_state'} + }, + { + title: '远程连接', + key: 'action', + render: 'action', + fields: {accs: 'accounts_', h_state: 'h_state', gh_state: 'gh_state'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_host_header_created, + on_render_created: $app.on_table_host_render_created, + on_cell_created: $app.on_table_host_cell_created + }; + + $app.table_host = $tp.create_table(table_host_options); + cb_stack + .add($app.table_host.load_data) + .add($app.table_host.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $tp.create_table_header_filter_search($app.table_host, { + name: 'search', + place_holder: '搜索:主机IP/名称/描述/资产编号/等等...' + }); + // $app.table_host_role_filter = $tp.create_table_filter_role($app.table_host, $app.role_list); + // 主机没有“临时锁定”状态,因此要排除掉 + // $tp.create_table_header_filter_state($app.table_host, 'state', $app.obj_states, [TP_STATE_LOCKED]); + + // 从cookie中读取用户分页限制的选择 + $tp.create_table_paging($app.table_host, 'table-host-paging', + { + per_page: Cookies.get($app.page_id('asset_host') + '_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('asset_host') + '_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_host, 'table-host-pagination'); + + //------------------------------- + // 对话框 + //------------------------------- + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_refresh_host.click(function () { + $app.table_host.load_data(); + }); + + cb_stack.exec(); +}; + +$app.on_table_host_cell_created = function (tbl, row_id, col_key, cell_obj) { + + // if (col_key === 'chkbox') { + // cell_obj.find('[data-check-box]').click(function () { + // $app.check_host_all_selected(); + // }); + // } else + if (col_key === 'action') { + // 绑定系统选择框事件 + cell_obj.find('[data-action]').click(function () { + var action = $(this).attr('data-action'); + var protocol_sub_type = $(this).attr('data-sub-protocol'); + var uni_id = $(this).attr('data-id'); + + console.log(uni_id, protocol_sub_type); + + if (action === 'rdp') { + $tp.notify_error('尚未实现!'); + } else if (action === 'ssh') { + $app.connect_remote(uni_id, TP_PROTOCOL_TYPE_SSH, protocol_sub_type); + } else if (action === 'telnet') { + $tp.notify_error('尚未实现!'); + } + }); + } +}; + +// $app.check_host_all_selected = function (cb_stack) { +// var _all_checked = true; +// var _objs = $('#' + $app.table_host.dom_id + ' tbody').find('[data-check-box]'); +// if (_objs.length === 0) { +// _all_checked = false; +// } else { +// $.each(_objs, function (i, _obj) { +// if (!$(_obj).is(':checked')) { +// _all_checked = false; +// return false; +// } +// }); +// } +// +// if (_all_checked) { +// $app.dom.chkbox_host_select_all.prop('checked', true); +// } else { +// $app.dom.chkbox_host_select_all.prop('checked', false); +// } +// +// if (cb_stack) +// cb_stack.exec(); +// }; + +$app.on_table_host_render_created = function (render) { + // render.filter_role = function (header, title, col) { + // var _ret = ['
']; + // _ret.push('
'); + // _ret.push('
' + title + '
'); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('role'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
'); + // + // return _ret.join(''); + // }; + // render.filter_os = function (header, title, col) { + // return ''; + // }; + + render.filter_state = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('state'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + render.filter_search = function (header, title, col) { + var _ret = ['
']; + _ret.push('
'); + _ret.push('
' + title + '
'); + + // 表格内嵌过滤器的DOM实体在这时生成 + var filter_ctrl = header._table_ctrl.get_filter_ctrl('search'); + _ret.push(filter_ctrl.render()); + + _ret.push('
'); + + return _ret.join(''); + }; + + // render.make_check_box = function (row_id, fields) { + // return ''; + // }; + // + render.host_info = function (row_id, fields) { + var title, sub_title; + + title = fields.h_name; + sub_title = fields.ip; + + if (title.length === 0) { + title = fields.ip; + } + + // title = fields.a_name + '@' + title; + + var desc = []; + // if (fields.desc.length > 0) { + // desc.push(fields.desc.replace(/\r/ig, "").replace(/\n/ig, "
")); + // } + if (fields.router_ip.length > 0) { + sub_title += ',由 ' + fields.router_ip + ':' + fields.router_port + ' 路由'; + } + + var ret = []; + // ret.push('
' + title + ''); + // if (desc.length > 0) { + // ret.push(''); + // } + + if (desc.length > 0) { + ret.push('
' + title + ''); + } else { + ret.push('
' + title + ''); + } + + ret.push('
' + sub_title + '
'); + return ret.join(''); + }; + + render.account = function (row_id, fields) { + var h = []; + for (var i = 0; i < fields.accs.length; ++i) { + var acc = fields.accs[i]; + h.push('
    '); + h.push('
  • ' + acc.a_name + '
  • '); + h.push('
'); + } + return h.join(''); + }; + render.action = function (row_id, fields) { + console.log(fields); + var h = []; + for (var i = 0; i < fields.accs.length; ++i) { + var acc = fields.accs[i]; + var act_btn = []; + + var disabled = ''; + if (acc.a_state !== TP_STATE_NORMAL) + disabled = '账号已禁用'; + if (disabled.length === 0 && (acc.policy_auth_type === TP_POLICY_AUTH_USER_gACC || acc.policy_auth_type === TP_POLICY_AUTH_gUSER_gACC) && acc.ga_state !== TP_STATE_NORMAL) + disabled = '账号所在组已禁用'; + if (disabled.length === 0 && fields.h_state !== TP_STATE_NORMAL) + disabled = '主机已禁用'; + if (disabled.length === 0 && (acc.policy_auth_type === TP_POLICY_AUTH_USER_gHOST || acc.policy_auth_type === TP_POLICY_AUTH_gUSER_gHOST) && fields.gh_state !== TP_STATE_NORMAL) + disabled = '主机所在组已禁用'; + + if (disabled.length > 0) { + act_btn.push('
  • '); + act_btn.push(' ' + disabled); + act_btn.push('
  • '); + } else { + if (acc.protocol_type === TP_PROTOCOL_TYPE_RDP) { + if ((acc.policy_.flag_rdp & TP_FLAG_RDP_DESKTOP) !== 0) { + act_btn.push('
  • '); + act_btn.push(''); + act_btn.push('
  • '); + } + } else if (acc.protocol_type === TP_PROTOCOL_TYPE_SSH) { + if ((acc.policy_.flag_ssh & TP_FLAG_SSH_SHELL) !== 0) { + act_btn.push('
  • '); + act_btn.push(''); + act_btn.push('
  • '); + } + + if ((acc.policy_.flag_ssh & TP_FLAG_SSH_SFTP) !== 0) { + act_btn.push('
  • '); + act_btn.push(''); + act_btn.push('
  • '); + } + } else if (acc.protocol_type === TP_PROTOCOL_TYPE_TELNET) { + act_btn.push('
  • '); + act_btn.push(''); + act_btn.push('
  • '); + } + } + + h.push('
      '); + h.push(act_btn.join('')); + h.push('
    '); + } + return h.join(''); + }; + + render.state = function (row_id, fields) { + console.log(fields); + var _prompt, _style, _state; + + if ((fields.h_state === TP_STATE_NORMAL || fields.h_state === 0) + && (fields.gh_state === TP_STATE_NORMAL || fields.gh_state === 0) + // && (fields.a_state === TP_STATE_NORMAL || fields.a_state === 0) + // && (fields.ga_state === TP_STATE_NORMAL || fields.ga_state === 0) + ) { + return '正常' + } + + var states = [ + {n: '主机', s: fields.h_state}, + {n: '主机组', s: fields.gh_state}, + // {n: '账号', s: fields.a_state}, + // {n: '账号组', s: fields.ga_state} + ]; + + for (var j = 0; j < states.length; ++j) { + if (states[j].s === TP_STATE_NORMAL) + continue; + + for (var i = 0; i < $app.obj_states.length; ++i) { + if ($app.obj_states[i].id === states[j].s) { + _style = $app.obj_states[i].style; + _state = $app.obj_states[i].name; + _prompt = states[j].n; + return '' + _prompt + '被' + _state + '' + } + } + } + + return ' 未知' + }; + + // render.make_host_action_btn = function (row_id, fields) { + // var h = []; + // h.push('
    '); + // h.push(''); + // h.push(''); + // h.push('
    '); + // + // return h.join(''); + // }; +}; + +$app.on_table_host_header_created = function (header) { + $app.dom.btn_table_host_reset_filter = $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]'); + $app.dom.btn_table_host_reset_filter.click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // TODO: 当过滤器不是默认值时,让“重置过滤器按钮”有呼吸效果,避免用户混淆 - 实验性质 + // var t1 = function(){ + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 1.0, function(){ + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 0.2, t1); + // }); + // }; + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 0.2, t1); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + header._table_ctrl.get_filter_ctrl('search').on_created(); + // header._table_ctrl.get_filter_ctrl('role').on_created(); + // header._table_ctrl.get_filter_ctrl('state').on_created(); +}; + +$app.get_selected_user = function (tbl) { + var users = []; + var _objs = $('#' + $app.table_host.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + // _all_checked = false; + users.push(_row_data.id); + } + }); + return users; +}; + +$app.connect_remote = function (uni_id, protocol_type, protocol_sub_type) { + $assist.do_teleport( + { + auth_id: uni_id, + protocol_type: protocol_type, + protocol_sub_type: protocol_sub_type + }, + function () { + // func_success + //$tp.notify_success('远程连接测试通过!'); + }, + function (code, message) { + if (code === TPE_NO_ASSIST) + $assist.alert_assist_not_found(); + else + $tp.notify_error('远程连接失败:' + tp_error_msg(code, message)); + } + ); +}; diff --git a/server/www/teleport/static/js/ops/session-list.js b/server/www/teleport/static/js/ops/session-list.js new file mode 100644 index 0000000..5033f5f --- /dev/null +++ b/server/www/teleport/static/js/ops/session-list.js @@ -0,0 +1,436 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_session: $('#btn-refresh-session'), + chkbox_session_select_all: $('#table-session-select-all'), + btn_kill_sessions: $('#btn-kill-sessions') + }; + + cb_stack.add($app.create_controls); + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 资产列表表格 + //------------------------------- + var table_session_options = { + dom_id: 'table-session', + data_source: { + type: 'ajax-post', + url: '/audit/get-records', + restrict: {'state': [TP_SESS_STAT_RUNNING, TP_SESS_STAT_STARTED]} + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + // title: '', + title: '', + key: 'chkbox', + sort: false, + width: 36, + align: 'center', + render: 'make_check_box', + fields: {id: 'id'} + }, + { + title: 'ID', + key: 'id', + fields: {id: 'id'} + }, + { + title: '用户', + key: 'user', + //sort: true, + //header_render: 'filter_search_host', + render: 'user', + fields: {user_name: 'user_name', user_surname: 'user_surname'} + }, + { + title: '来源', + key: 'client_ip', + //sort: true, + //header_render: 'filter_search_host', + //render: 'host_info', + fields: {client_ip: 'client_ip'} + }, + { + title: '远程连接', + key: 'remote', + //sort: true, + //header_render: 'filter_search_host', + render: 'remote', + fields: {account_name: 'account_name', real_host_ip: 'real_host_ip', host_ip: 'host_ip', host_port: 'host_port'} + }, + { + title: '远程协议', + key: 
'protocol_type', + align: 'center', + width: 80, + // align: 'center', + // width: 36, + //sort: true + // header_render: 'filter_os', + render: 'protocol', + fields: {protocol_type: 'protocol_type', protocol_sub_type: 'protocol_sub_type'} + }, + { + title: '开始时间', + key: 'time_begin', + //sort: true, + //sort_asc: false, + render: 'time_begin', + fields: {time_begin: 'time_begin'} + }, + { + title: '耗时', + key: 'time_cost', + render: 'time_cost', + fields: {time_begin: 'time_begin', time_end: 'time_end', state: 'state'} + }, + { + title: "状态", + key: "state", + //sort: true, + width: 90, + align: 'center', + //header_render: 'filter_host_state', + render: 'state', + fields: {state: 'state'} + }, + { + title: '', + key: 'action', + //sort: false, + //align: 'center', + width: 160, + render: 'record_action', + fields: {id: 'id', state: 'state', time_end: 'time_end', protocol_sub_type: 'protocol_sub_type'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_session_header_created, + on_render_created: $app.on_table_session_render_created, + on_cell_created: $app.on_table_session_cell_created + }; + + $app.table_session = $tp.create_table(table_session_options); + cb_stack + .add($app.table_session.load_data) + .add($app.table_session.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $app.table_session_filter_search_host = $tp.create_table_header_filter_search($app.table_session, { + name: 'search_host', + place_holder: '搜索:主机IP/名称/描述/资产编号/等等...' + }); + // 从cookie中读取用户分页限制的选择 + var _per_page = Cookies.get($app.page_id('ops_session') + '_per_page'); + $app.table_session_paging = $tp.create_table_paging($app.table_session, 'table-session-paging', + { + per_page: _per_page, + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('ops_session') + '_per_page', per_page, {expires: 365}); + } + }); + $app.table_session_pagination = $tp.create_table_pagination($app.table_session, 'table-session-pagination'); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_refresh_session.click(function () { + $app.table_session.load_data(); + }); + $app.dom.chkbox_session_select_all.click(function () { + var _objects = $('#' + $app.table_session.dom_id + ' tbody').find('[data-check-box]'); + if ($(this).is(':checked')) { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', true); + }); + } else { + $.each(_objects, function (i, _obj) { + $(_obj).prop('checked', false); + }); + } + }); + $app.dom.btn_kill_sessions.click($app.on_btn_kill_sessions_click); + + cb_stack.exec(); +}; + +$app.on_table_session_cell_created = function (tbl, row_id, col_key, cell_obj) { + if (col_key === 'chkbox') { + cell_obj.find('[data-check-box]').click(function () { + $app.check_host_all_selected(); + }); + } + // else if (col_key === 'action') { + // // 绑定系统选择框事件 + // cell_obj.find('[data-action]').click(function () { + // var action = $(this).attr('data-action'); + // if (action === 'edit') { + // $app.dlg_edit_host.show_edit(row_id); + // } else if (action === 'account') { + // $app.dlg_accounts.show(row_id); + // } + // }); + // } else if (col_key === 'ip') { + // cell_obj.find('[data-toggle="popover"]').popover({trigger: 'hover'}); + // // } else if (col_key === 'account') { + // // cell_obj.find('[data-action="add-account"]').click(function () { + // // $app.dlg_accounts.show(row_id); + // // }); + // } else if (col_key === 'account_count') { + // 
cell_obj.find('[data-action="edit-account"]').click(function () { + // $app.dlg_accounts.show(row_id); + // }); + // } +}; + +$app.check_host_all_selected = function (cb_stack) { + var _all_checked = true; + var _objs = $('#' + $app.table_session.dom_id + ' tbody').find('[data-check-box]'); + if (_objs.length === 0) { + _all_checked = false; + } else { + $.each(_objs, function (i, _obj) { + if (!$(_obj).is(':checked')) { + _all_checked = false; + return false; + } + }); + } + + if (_all_checked) { + $app.dom.chkbox_session_select_all.prop('checked', true); + } else { + $app.dom.chkbox_session_select_all.prop('checked', false); + } + + if (cb_stack) + cb_stack.exec(); +}; + +$app.on_table_session_render_created = function (render) { + // render.filter_host_state = function (header, title, col) { + // var _ret = ['
    ']; + // _ret.push('
    '); + // _ret.push('
    ' + title + '
    '); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('host_state'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
    '); + // + // return _ret.join(''); + // }; + // + // render.filter_search_host = function (header, title, col) { + // var _ret = ['
    ']; + // _ret.push('
    '); + // _ret.push('
    ' + title + '
    '); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('search_host'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
    '); + // + // return _ret.join(''); + // }; + + render.make_check_box = function (row_id, fields) { + return ''; + }; + + render.user = function (row_id, fields) { + if (_.isNull(fields.user_surname) || fields.user_surname.length === 0 || fields.user_name === fields.user_surname) { + return fields.user_name; + } else { + return fields.user_name + ' (' + fields.user_surname + ')'; + } + }; + + render.remote = function (row_id, fields) { + if (fields.real_host_ip === fields.host_ip) { + return fields.account_name + '@' + fields.real_host_ip;// + ':' + fields.host_port; + } else { + return fields.account_name + '@' + fields.real_host_ip;// + '(' + fields.host_ip + ':' + fields.host_port + ')'; + } + }; + + // fields: {protocol_type: 'protocol_type', protocol_sub_type: 'protocol_sub_type'} + render.protocol = function (row_id, fields) { + switch (fields.protocol_sub_type) { + case 100: + return 'RDP'; + case 200: + return 'SSH'; + case 201: + return 'SFTP'; + case 300: + return 'TELNET'; + default: + return '未知'; + } + }; + + render.time_begin = function (row_id, fields) { + return tp_format_datetime(tp_utc2local(fields.time_begin), 'MM-dd HH:mm:ss'); + }; + + render.time_cost = function (row_id, fields) { + if (fields.time_end === 0) { + var _style = 'info'; + if (fields.state === TP_SESS_STAT_RUNNING) + _style = 'warning'; + else if (fields.state === TP_SESS_STAT_STARTED) + _style = 'primary'; + return ' ' + tp_second2str(tp_local2utc() - fields.time_begin) + ''; + } else { + return tp_second2str(fields.time_end - fields.time_begin); + } + }; + + render.state = function (row_id, fields) { + var msg = ''; + switch (fields.state) { + case TP_SESS_STAT_RUNNING: + return '正在连接'; + case TP_SESS_STAT_STARTED: + return '使用中'; + case TP_SESS_STAT_END: + return '已结束'; + case TP_SESS_STAT_ERR_AUTH_DENIED: + msg = '认证失败'; + break; + case TP_SESS_STAT_ERR_CONNECT: + msg = '连接失败'; + break; + case TP_SESS_STAT_ERR_BAD_SSH_KEY: + msg = '私钥错误'; + break; + case TP_SESS_STAT_ERR_START_INTERNAL: + case TP_SESS_STAT_ERR_INTERNAL: + msg = '内部错误'; + break; + case TP_SESS_STAT_ERR_UNSUPPORT_PROTOCOL: + msg = '协议不支持'; + break; + case TP_SESS_STAT_ERR_BAD_PKG: + case TP_SESS_STAT_ERR_START_BAD_PKG: + msg = '数据格式错误'; + break; + case TP_SESS_STAT_ERR_RESET: + case TP_SESS_STAT_ERR_START_RESET: + msg = '核心服务重置'; + break; + case TP_SESS_STAT_ERR_IO: + case TP_SESS_STAT_ERR_START_IO: + msg = '网络通讯故障'; + break; + case TP_SESS_STAT_ERR_SESSION: + msg = '无效会话'; + break; + default: + msg = '未知状态 [' + fields.state + ']'; + } + + return '' + msg + ''; + }; + + render.record_action = function (row_id, fields) { + var ret = []; + + if (fields.state >= TP_SESS_STAT_STARTED) { + if (fields.time_end === 0) { + ret.push(' 同步 '); + } else { + ret.push(' 播放 '); + } + if (fields.protocol_sub_type !== TP_PROTOCOL_TYPE_RDP_DESKTOP) { + ret.push(' 日志 '); + } + } + + return ret.join(''); + }; +}; + +$app.on_table_session_header_created = function (header) { + // $app.dom.btn_table_host_reset_filter = $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]'); + // $app.dom.btn_table_host_reset_filter.click(function () { + // CALLBACK_STACK.create() + // .add(header._table_ctrl.load_data) + // .add(header._table_ctrl.reset_filters) + // .exec(); + // }); + + // TODO: 当过滤器不是默认值时,让“重置过滤器按钮”有呼吸效果,避免用户混淆 - 实验性质 + // var t1 = function(){ + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 1.0, function(){ + // $app.dom.btn_table_host_reset_filter.fadeTo(1000, 0.2, t1); + // }); + // }; + // 
$app.dom.btn_table_host_reset_filter.fadeTo(1000, 0.2, t1); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + //header._table_ctrl.get_filter_ctrl('search_host').on_created(); + // header._table_ctrl.get_filter_ctrl('role').on_created(); + //header._table_ctrl.get_filter_ctrl('host_state').on_created(); +}; + +$app.get_selected_session = function (tbl) { + var records = []; + var _objs = $('#' + $app.table_session.dom_id + ' tbody tr td input[data-check-box]'); + $.each(_objs, function (i, _obj) { + if ($(_obj).is(':checked')) { + var _row_data = tbl.get_row(_obj); + records.push(_row_data.id); + } + }); + return records; +}; + +$app.on_btn_kill_sessions_click = function () { + // var records = $app.get_selected_session($app.table_session); + // if (records.length === 0) { + // $tp.notify_error('请选择要删除的会话记录!'); + // return; + // } + // + // var _fn_sure = function (cb_stack, cb_args) { + // $tp.ajax_post_json('/user/remove-user', {users: users}, + // function (ret) { + // if (ret.code === TPE_OK) { + // cb_stack.add($app.check_host_all_selected); + // cb_stack.add($app.table_session.load_data); + // $tp.notify_success('删除用户账号操作成功!'); + // } else { + // $tp.notify_error('删除用户账号操作失败:' + tp_error_msg(ret.code, ret.message)); + // } + // + // cb_stack.exec(); + // }, + // function () { + // $tp.notify_error('网络故障,删除用户账号操作失败!'); + // cb_stack.exec(); + // } + // ); + // }; + // + // var cb_stack = CALLBACK_STACK.create(); + // $tp.dlg_confirm(cb_stack, { + // msg: '

    注意:删除操作不可恢复!!
    删除用户账号将同时将其从所在用户组中移除,并且删除所有分配给此用户的授权!
    如果您希望禁止某个用户登录本系统,可对其进行“禁用”操作!
    您确定要移除所有选定的 ' + user_list.length + '个 用户账号吗?
    ', + // fn_yes: _fn_sure + // }); +}; diff --git a/server/www/teleport/static/js/system/config.js b/server/www/teleport/static/js/system/config.js new file mode 100644 index 0000000..01ac1b8 --- /dev/null +++ b/server/www/teleport/static/js/system/config.js @@ -0,0 +1,215 @@ +"use strict"; + +$app.on_init = function () { + $app.dom = { + // btn_reset_oath_code: $('#btn-reset-oath-code'), + // btn_verify_oath_code: $('#btn-verify-oath-code'), + // btn_verify_oath_code_and_save: $('#btn-verify-oath-and-save'), + // btn_modify_password: $('#btn-modify-password'), + // btn_toggle_oath_download: $('#toggle-oath-download'), + // + // oath_app_download_box: $('#oath-app-download-box'), + // + // input_current_password: $('#current-password'), + // input_new_password: $('#new-password-1'), + // input_new_password_confirm: $('#new-password-2'), + // input_oath_code: $('#oath-code'), + // input_oath_code_verify: $('#oath-code-verify'), + // + // dlg_reset_oath_code: $('#dialog-reset-oath-code'), + // oath_secret_image: $('#oath-secret-qrcode'), + // tmp_oath_secret: $('#tmp-oath-secret'), + + // 邮件系统设置 + mail: { + smtp_server: $('#smtp-server-info'), + smtp_port: $('#smtp-port-info'), + smtp_ssl: $('#smtp-ssl-info'), + smtp_sender: $('#smtp-sender-info'), + btn_edit_mail_config: $('#btn-edit-mail-config'), + + dlg_edit_mail_config: $('#dlg-edit-mail-config'), + edit_smtp_server: $('#edit-smtp-server'), + edit_smtp_port: $('#edit-smtp-port'), + edit_smtp_ssl: $('#edit-smtp-ssl'), + edit_smtp_sender: $('#edit-smtp-sender'), + edit_smtp_password: $('#edit-smtp-password'), + edit_smtp_test_recipient: $('#edit-smtp-test-recipient'), + btn_send_test_mail: $('#btn-send-test-mail'), + msg_send_test_mail: $('#msg-send-test-mail'), + btn_save_mail_config: $('#btn-save-mail-config') + } + }; + +// $app.dom.tmp_oath_secret.text($app.page_options.tmp_oath_secret); + + //========================================= + // 邮件系统配置相关 + //========================================= + $app.update_mail_info = function (smtp_info) { + var not_set = '未设置'; + if (0 === smtp_info.server.length) + $app.dom.mail.smtp_server.html(not_set); + else + $app.dom.mail.smtp_server.html(smtp_info.server); + + if (-1 === smtp_info.port) + $app.dom.mail.smtp_port.html(not_set); + else + $app.dom.mail.smtp_port.html(smtp_info.port); + + if (-1 === smtp_info.ssl) + $app.dom.mail.smtp_ssl.html(not_set); + else if (0 === smtp_info.ssl) + $app.dom.mail.smtp_ssl.html('否'); + else + $app.dom.mail.smtp_ssl.html('是'); + + if (0 === smtp_info.sender.length) + $app.dom.mail.smtp_sender.html(not_set); + else + $app.dom.mail.smtp_sender.html(smtp_info.sender); + }; + + $app.update_mail_info($app.options.sys_cfg.smtp); + + $app.dom.mail.btn_edit_mail_config.click(function () { + var smtp_info = $app.options.sys_cfg.smtp; + + $app.dom.mail.edit_smtp_server.val(smtp_info.server); + + if(smtp_info.port === -1) + $app.dom.mail.edit_smtp_port.val(''); + else + $app.dom.mail.edit_smtp_port.val(smtp_info.port); + + if (-1 === smtp_info.ssl || 0 === smtp_info.ssl) + $app.dom.mail.edit_smtp_ssl.removeClass('tp-selected'); + else + $app.dom.mail.edit_smtp_ssl.removeClass('tp-selected').addClass('tp-selected'); + + $app.dom.mail.edit_smtp_sender.val(smtp_info.sender); + $app.dom.mail.edit_smtp_password.val(''); + + $app.dom.mail.dlg_edit_mail_config.modal(); + }); + // $app.dom.mail.btn_edit_mail_config.trigger('click'); + $app.dom.mail.edit_smtp_ssl.click(function () { + if ($app.dom.mail.edit_smtp_ssl.hasClass('tp-selected')) + 
$app.dom.mail.edit_smtp_ssl.removeClass('tp-selected'); + else + $app.dom.mail.edit_smtp_ssl.addClass('tp-selected'); + }); + $app.dom.mail.btn_send_test_mail.click($app._on_btn_send_test_mail); + $app.dom.mail.btn_save_mail_config.click($app._on_btn_save_mail_config); +}; + +$app._edit_mail_config_check = function (_server, _port, _sender, _password) { + if(_server.length === 0) { + $app.dom.mail.edit_smtp_server.focus(); + $tp.notify_error('请填写SMTP服务器地址!'); + return false; + } + if(_port.length === 0) { + $app.dom.mail.edit_smtp_port.focus(); + $tp.notify_error('请填写SMTP服务器端口!'); + return false; + } + if(_sender.length === 0) { + $app.dom.mail.edit_smtp_sender.focus(); + $tp.notify_error('请填写发件人邮箱!'); + return false; + } + if(_password.length === 0) { + $app.dom.mail.edit_smtp_password.focus(); + $tp.notify_error('请填写发件人邮箱密码!'); + return false; + } + + return true; +}; + +$app._on_btn_send_test_mail = function () { + var _server = $app.dom.mail.edit_smtp_server.val(); + var _port = $app.dom.mail.edit_smtp_port.val(); + var _sender = $app.dom.mail.edit_smtp_sender.val(); + var _password = $app.dom.mail.edit_smtp_password.val(); + var _recipient = $app.dom.mail.edit_smtp_test_recipient.val(); + var _ssl = ($app.dom.mail.edit_smtp_ssl.hasClass('tp-selected')) ? 1 : 0; + + if(!$app._edit_mail_config_check(_server, _port, _sender, _password)) + return; + if(_recipient.length === 0) { + $app.dom.mail.edit_smtp_test_recipient.focus(); + $tp.notify_error('请填写测试收件人邮箱!'); + return; + } + + $app.dom.mail.btn_send_test_mail.attr('disabled', 'disabled'); + + $tp.ajax_post_json('/system/send-test-mail', + { + smtp_server: _server, + smtp_port: _port, + smtp_ssl: _ssl, + smtp_sender: _sender, + smtp_password: _password, + smtp_recipient: _recipient + }, + function (ret) { + $app.dom.mail.btn_send_test_mail.removeAttr('disabled'); + if (ret.code === TPE_OK) { + $app.dom.mail.msg_send_test_mail.slideDown('fast'); + } else { + $tp.notify_error(tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $app.dom.mail.btn_send_test_mail.removeAttr('disabled'); + $tp.notify_error('网路故障,无法连接到服务器!'); + }, + 15000 + ); +}; + +$app._on_btn_save_mail_config = function () { + var _server = $app.dom.mail.edit_smtp_server.val(); + var _port = $app.dom.mail.edit_smtp_port.val(); + var _sender = $app.dom.mail.edit_smtp_sender.val(); + var _password = $app.dom.mail.edit_smtp_password.val(); + var _ssl = ($app.dom.mail.edit_smtp_ssl.hasClass('tp-selected')) ? 
1 : 0; + + if(!$app._edit_mail_config_check(_server, _port, _sender, _password)) + return; + + $app.dom.mail.btn_save_mail_config.attr('disabled', 'disabled'); + $tp.ajax_post_json('/system/save-mail-config', + { + smtp_server: _server, + smtp_port: _port, + smtp_ssl: _ssl, + smtp_sender: _sender, + smtp_password: _password + }, + function (ret) { + $app.dom.mail.btn_save_mail_config.removeAttr('disabled'); + if (ret.code === TPE_OK) { + $app.dom.mail.edit_smtp_password.val(''); + // 更新一下界面上显示的配置信息 + $app.options.sys_cfg.smtp.server = _server; + $app.options.sys_cfg.smtp.port = _port; + $app.options.sys_cfg.smtp.ssl = _ssl; + $app.options.sys_cfg.smtp.sender = _sender; + $app.update_mail_info($app.options.sys_cfg.smtp); + + $app.dom.mail.dlg_edit_mail_config.modal('hide'); + } else { + $tp.notify_error(tp_error_msg(ret.code, ret.message)); + } + }, + function () { + $app.dom.mail.btn_save_mail_config.removeAttr('disabled'); + $tp.notify_error('网路故障,无法连接到服务器!'); + } + ); +}; diff --git a/server/www/teleport/static/js/system/role.js b/server/www/teleport/static/js/system/role.js new file mode 100644 index 0000000..35625c0 --- /dev/null +++ b/server/www/teleport/static/js/system/role.js @@ -0,0 +1,207 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.last_role_id = 0; + $app.selected_role_id = 0; + $app.edit_mode = false; + + $app.dom = { + role_list: $('#role-list'), + btn_edit_role: $('#btn-edit-role'), + btn_del_role: $('#btn-delete-role'), + btn_save_role: $('#btn-save-role'), + btn_cancel_edit_role: $('#btn-cancel-edit-role'), + // btn_verify_oath_code: $('#btn-verify-oath-code'), + // btn_verify_oath_code_and_save: $('#btn-verify-oath-and-save'), + // btn_modify_password: $('#btn-modify-password'), + // btn_toggle_oath_download: $('#toggle-oath-download'), + // + // oath_app_download_box: $('#oath-app-download-box'), + // + // input_role_name: $('#input-role-name'), + // input_new_password: $('#new-password-1'), + // input_new_password_confirm: $('#new-password-2'), + // input_oath_code: $('#oath-code'), + // input_oath_code_verify: $('#oath-code-verify'), + // + // dlg_reset_oath_code: $('#dialog-reset-oath-code'), + // oath_secret_image: $('#oath-secret-qrcode'), + // tmp_oath_secret: $('#tmp-oath-secret'), + + role_info: $('#role-info'), + privilege_list: $('#privilege-list') + }; + + $app.dom.role = { + label_area: $('#label-role-name-area'), + input_area: $('#input-role-name-area'), + save_area: $('#save-area'), + label_role_name: $app.dom.role_info.find('span[data-role-name]'), + input_role_name: $('#input-role-name') + }; + + cb_stack + .add($app.create_controls) + .add($app.load_role_list); + + cb_stack.exec(); +}; + +$app.create_controls = function () { + var nodes = []; + var selected_role_id = 0; + for (var i = 0; i < $app.role_list.length; ++i) { + nodes.push('
…');
+        nodes.push($app.role_list[i].name);
+        nodes.push('…');
+    }
+    // 增加一个“创建角色”的项
+    nodes.push('…创建角色…');
+
+    $app.dom.role_list.append($(nodes.join('')));
+    $app.dom.btn_create_role = $('#btn-create-role');
+
+
+    var privileges = [
+        {
+            t: '资产', i: [
+                {n: '主机信息创建/编辑', p: TP_PRIVILEGE_ASSET_CREATE},
+                {n: '删除主机信息', p: TP_PRIVILEGE_ASSET_DELETE},
+                {n: '主机禁用/解禁', p: TP_PRIVILEGE_ASSET_LOCK},
+                {n: '主机分组管理', p: TP_PRIVILEGE_ASSET_GROUP},
+                {n: '主机账号管理', p: TP_PRIVILEGE_ACCOUNT},
+                {n: '主机账号分组管理', p: TP_PRIVILEGE_ACCOUNT_GROUP}]
+        },
+        {
+            t: '用户', i: [
+                {n: '登录WEB系统', p: TP_PRIVILEGE_LOGIN_WEB},
+                {n: '用户创建/编辑', p: TP_PRIVILEGE_USER_CREATE},
+                {n: '删除用户', p: TP_PRIVILEGE_USER_DELETE},
+                {n: '用户禁用/解禁', p: TP_PRIVILEGE_USER_LOCK},
+                {n: '用户分组管理', p: TP_PRIVILEGE_USER_GROUP}]
+        },
+        {
+            t: '运维', i: [
+                {n: '远程主机运维', p: TP_PRIVILEGE_OPS},
+                {n: '运维授权管理', p: TP_PRIVILEGE_OPS_AUZ},
+                {n: '查看在线会话', p: TP_PRIVILEGE_SESSION_VIEW},
+                {n: '阻断在线会话', p: TP_PRIVILEGE_SESSION_BLOCK}]
+        },
+        {
+            t: '审计', i: [
+                {n: '审计(查看历史会话)', p: TP_PRIVILEGE_AUDIT_OPS_HISTORY},
+                {n: '审计授权管理', p: TP_PRIVILEGE_AUDIT_AUZ}]
+        },
+        {
+            t: '系统', i: [
+                {n: '角色管理', p: TP_PRIVILEGE_SYS_ROLE},
+                {n: '系统配置与维护', p: TP_PRIVILEGE_SYS_CONFIG},
+                {n: '历史会话管理', p: TP_PRIVILEGE_SYS_OPS_HISTORY},
+                {n: '系统日志管理', p: TP_PRIVILEGE_SYS_LOG}]
+        }
+    ];
+    nodes = [];
+    $.each(privileges, function (_, ps) {
+        nodes.push('…' + ps.t + '…');
+        $.each(ps.i, function (_, p) {
+            nodes.push('…' + p.n + '…');
+        });
+        nodes.push('
    '); + }); + $app.dom.privilege_list.append($(nodes.join(''))); + + $app.show_role(selected_role_id, false); + + //=================================================== + // 绑定事件 + //=================================================== + $app.dom.role_list.find('[data-role-id]').click(function () { + var obj = $(this); + if (obj.hasClass('active')) { + return; + } + var role_id = parseInt(obj.attr('data-role-id')); + $app.show_role(role_id, false); + }); + $app.dom.privilege_list.find('[data-privilege]').click(function () { + var obj = $(this); + if (obj.hasClass('enabled')) { + obj.removeClass('enabled'); + } else { + obj.addClass('enabled'); + } + + if (!$app.edit_mode) { + $app.edit_mode = true; + $app.dom.role.save_area.slideDown(); + } + }); + + $app.dom.btn_edit_role.click(function () { + $app.show_role($app.selected_role_id, true); + }); + $app.dom.btn_cancel_edit_role.click(function () { + if ($app.selected_role_id !== 0) + $app.show_role($app.selected_role_id, false); + else + $app.show_role($app.last_role_id, false); + }); +}; + +$app.show_role = function (role_id, edit_mode) { + var edit = edit_mode || false; + var role = null; + + if (role_id === 0) { + role = {id: 0, name: '', privilege: 0}; + edit = true; + } else { + for (var i = 0; i < $app.role_list.length; ++i) { + if ($app.role_list[i].id === role_id) { + role = $app.role_list[i]; + break; + } + } + + if (_.isNull(role)) + return; + } + + $app.dom.role_list.find('[data-role-id="' + $app.selected_role_id + '"]').removeClass('active'); + $app.dom.role_list.find('[data-role-id="' + role_id + '"]').addClass('active'); + + $app.dom.role.label_role_name.text(role.name); + $app.dom.role.input_role_name.val(role.name); + if (edit) { + $app.edit_mode = true; + $app.dom.role.label_area.hide(); + $app.dom.role.input_area.show(); + $app.dom.role.input_role_name.focus(); + $app.dom.role.save_area.slideDown(); + } else { + $app.edit_mode = false; + $app.dom.role.input_area.hide(); + $app.dom.role.label_area.show(); + $app.dom.role.save_area.slideUp(); + } + + var privilege_objs = $('#role-info').find('[data-privilege]'); + $.each(privilege_objs, function (i, j) { + var obj = $(j); + var p = parseInt(obj.attr('data-privilege')); + obj.removeClass('enabled'); + if (p & role.privilege) { + obj.addClass('enabled'); + } + }); + + $app.selected_role_id = role_id; + if (role_id !== 0) + $app.last_role_id = role_id; +}; diff --git a/server/www/teleport/static/js/system/syslog.js b/server/www/teleport/static/js/system/syslog.js new file mode 100644 index 0000000..90efb48 --- /dev/null +++ b/server/www/teleport/static/js/system/syslog.js @@ -0,0 +1,225 @@ +"use strict"; + +$app.on_init = function (cb_stack) { + $app.dom = { + btn_refresh_log: $('#btn-refresh-log'), + }; + + cb_stack.add($app.create_controls); + cb_stack.exec(); +}; + +//=================================== +// 创建页面控件对象 +//=================================== +$app.create_controls = function (cb_stack) { + + //------------------------------- + // 日志表格 + //------------------------------- + var table_log_options = { + dom_id: 'table-log', + data_source: { + type: 'ajax-post', + url: '/system/get-logs' + }, + column_default: {sort: false, align: 'left'}, + columns: [ + { + title: '时间', + key: 'log_time', + sort: true, + sort_asc: false, + width: 160, + //header_render: 'filter_search_host', + render: 'log_time', + fields: {log_time: 'log_time'} + }, + // { + // title: 'ID', + // key: 'id', + // width:80, + // fields: {id: 'id'} + // }, + { + title: '用户', + key: 'user', + width: 160, 
+ //sort: true, + //header_render: 'filter_search_host', + render: 'user', + fields: {user_name: 'user_name', user_surname: 'user_surname'} + }, + { + title: '来源', + key: 'client_ip', + width: 100, + //sort: true, + //header_render: 'filter_search_host', + //render: 'host_info', + fields: {client_ip: 'client_ip'} + }, + { + title: '操作', + key: 'remote', + //sort: true, + //header_render: 'filter_search_host', + render: 'message', + fields: {code: 'code', message: 'message'} + } + ], + + // 重载回调函数 + on_header_created: $app.on_table_host_header_created, + on_render_created: $app.on_table_host_render_created + // on_cell_created: $app.on_table_host_cell_created + }; + + $app.table_log = $tp.create_table(table_log_options); + cb_stack + .add($app.table_log.load_data) + .add($app.table_log.init); + + //------------------------------- + // 用户列表相关过滤器 + //------------------------------- + $app.table_log_filter_search_host = $tp.create_table_header_filter_search($app.table_log, { + name: 'search_host', + place_holder: '搜索:主机IP/名称/描述/资产编号/等等...' + }); + // $app.table_log_role_filter = $tp.create_table_filter_role($app.table_log, $app.role_list); + //$tp.create_table_header_filter_state($app.table_log, 'state', $app.obj_states, [TP_STATE_LOCKED]); + + $app.table_log_paging = $tp.create_table_paging($app.table_log, 'table-log-paging', + { + per_page: Cookies.get($app.page_id('system_log') + '_per_page'), + on_per_page_changed: function (per_page) { + Cookies.set($app.page_id('system_log') + '_per_page', per_page, {expires: 365}); + } + }); + $tp.create_table_pagination($app.table_log, 'table-log-pagination'); + + //------------------------------- + // 页面控件事件绑定 + //------------------------------- + $app.dom.btn_refresh_log.click(function () { + $app.table_log.load_data(); + }); + + cb_stack.exec(); +}; + +// $app.on_table_host_cell_created = function (tbl, row_id, col_key, cell_obj) { +// if (col_key === 'chkbox') { +// cell_obj.find('[data-check-box]').click(function () { +// $app.check_host_all_selected(); +// }); +// } else if (col_key === 'action') { +// // 绑定系统选择框事件 +// cell_obj.find('[data-action]').click(function () { +// var action = $(this).attr('data-action'); +// if (action === 'edit') { +// $app.dlg_edit_host.show_edit(row_id); +// } else if (action === 'account') { +// $app.dlg_accounts.show(row_id); +// } +// }); +// } else if (col_key === 'ip') { +// cell_obj.find('[data-toggle="popover"]').popover({trigger: 'hover'}); +// // } else if (col_key === 'account') { +// // cell_obj.find('[data-action="add-account"]').click(function () { +// // $app.dlg_accounts.show(row_id); +// // }); +// } else if (col_key === 'account_count') { +// cell_obj.find('[data-action="edit-account"]').click(function () { +// $app.dlg_accounts.show(row_id); +// }); +// } +// }; + +$app.on_table_host_render_created = function (render) { + // render.filter_role = function (header, title, col) { + // var _ret = ['
    ']; + // _ret.push('
    '); + // _ret.push('
    ' + title + '
    '); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('role'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
    '); + // + // return _ret.join(''); + // }; + // render.filter_os = function (header, title, col) { + // return ''; + // }; + + // render.filter_host_state = function (header, title, col) { + // var _ret = ['
    ']; + // _ret.push('
    '); + // _ret.push('
    ' + title + '
    '); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('host_state'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
    '); + // + // return _ret.join(''); + // }; + + // render.filter_search_host = function (header, title, col) { + // var _ret = ['
    ']; + // _ret.push('
    '); + // _ret.push('
    ' + title + '
    '); + // + // // 表格内嵌过滤器的DOM实体在这时生成 + // var filter_ctrl = header._table_ctrl.get_filter_ctrl('search_host'); + // _ret.push(filter_ctrl.render()); + // + // _ret.push('
    '); + // + // return _ret.join(''); + // }; + + render.log_time = function (row_id, fields) { + return tp_format_datetime(tp_utc2local(fields.log_time)); + }; + + render.user = function (row, fields) { + if (_.isNull(fields.user_surname) || fields.user_surname.length === 0 || fields.user_name === fields.user_surname) { + return fields.user_name; + } else { + return fields.user_name + ' (' + fields.user_surname + ')'; + } + }; + + render.message = function (row_id, fields) { + if(fields.code === TPE_OK) + return fields.message; + + return ''+fields.message+''; + }; + + // render.os = function (row_id, fields) { + // return fields.os; + // }; + // + +}; + +$app.on_table_host_header_created = function (header) { + $('#' + header._table_ctrl.dom_id + ' a[data-reset-filter]').click(function () { + CALLBACK_STACK.create() + .add(header._table_ctrl.load_data) + .add(header._table_ctrl.reset_filters) + .exec(); + }); + + // 表格内嵌过滤器的事件绑定在这时进行(也可以延期到整个表格创建完成时进行) + // header._table_ctrl.get_filter_ctrl('search_host').on_created(); + // // header._table_ctrl.get_filter_ctrl('role').on_created(); + // header._table_ctrl.get_filter_ctrl('host_state').on_created(); +}; + diff --git a/server/www/teleport/static/js/teleport.js b/server/www/teleport/static/js/teleport.js new file mode 100644 index 0000000..ba33d5c --- /dev/null +++ b/server/www/teleport/static/js/teleport.js @@ -0,0 +1,280 @@ +"use strict"; + +// 构造一个回调函数栈,遵循先进后出的原则进行顺序调用。 +var CALLBACK_STACK = { + create: function () { + var self = {}; + + self.cb_stack = []; + + // 加入一个函数到栈上等待被调用 + self.add = function (cb_func, cb_args) { + if (!_.isFunction(cb_func)) { + console.error('need callable function.'); + } + cb_args = cb_args || {}; + self.cb_stack.push({func: cb_func, args: cb_args}); + + return self; // 支持链式调用 + }; + + // 加入一个函数到栈上等待被调用,但是该函数被调用前会等待指定时间(非阻塞式等待) + self.add_delay = function (delay_ms, cb_func, cb_args) { + // 先将要调用的函数入栈 + self.add(cb_func, cb_args); + // 然后加一个定时器来做等待 + self.add(function (cb_stack, cb_args) { + var _delay_ms = cb_args.delay_ms || 500; + setTimeout(function () { + cb_stack.exec(); + }, _delay_ms); + }, {delay_ms: delay_ms}); + + return self; // 支持链式调用 + }; + + self.exec = function (ex_args) { + if (self.cb_stack.length > 0) { + var cb = self.cb_stack.pop(); + var ex_ = ex_args || {}; + cb.func(self, cb.args, ex_); + } + }; + + self.pop = function () { + if (self.cb_stack.length === 0) { + return null; + } else { + return self.cb_stack.pop(); + } + }; + + // self.check_error = function() { + // if (self.cb_stack.length > 0) { + // console.error('callback stack have '+ self.cb_stack.length + ' function not called.'); + // } + // }; + + return self; + } +}; + +// Teleport核心JS +var $tp = { + server_host: window.location.hostname || '', + server_port: (window.location.port === "") ? 80 : parseInt(window.location.port), + + // Teleport页面应用对象,放置页面自身特有的属性和函数 + app: { + options: {}, + on_init: function (cb_stack) { + cb_stack.exec(); + } // should be overwrite. 
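        // Note: each page script overrides $app.on_init with its own initializer.
        // A minimal sketch, modeled on the page scripts added in this change
        // ('#btn-refresh' is an illustrative selector, not a real element id):
        //
        //   $app.on_init = function (cb_stack) {
        //       $app.dom = {btn_refresh: $('#btn-refresh')};  // cache this page's DOM nodes
        //       cb_stack.add($app.create_controls);           // queued callbacks run last-in-first-out
        //       cb_stack.exec();                              // start the chain
        //   };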
+ } +}; + +$tp.init = function () { + $app.obj_states = [ + {id: TP_STATE_NORMAL, name: '正常', style: 'success'}, + {id: TP_STATE_DISABLED, name: '禁用', style: 'danger'}, + {id: TP_STATE_LOCKED, name: '临时锁定', style: 'warning'} + ]; + + // $app.user_states = [ + // {id: 1, name: '正常', style: 'success'}, + // {id: 2, name: '临时锁定', style: 'warning'}, + // {id: 3, name: '禁用', style: 'danger'} + // ]; + // + // $app.acc_states = [ + // {id: 1, name: '正常', style: 'success'}, + // {id: 2, name: '临时锁定', style: 'warning'}, + // {id: 3, name: '禁用', style: 'danger'} + // ]; + // + // $app.host_states = [ + // {id: 1, name: '正常', style: 'success'}, + // {id: 2, name: '禁用', style: 'danger'} + // ]; + // + // $app.policy_states = [ + // {id: 1, name: '正常', style: 'success'}, + // {id: 2, name: '禁用', style: 'danger'} + // ]; + + $app.host_types = [ + {id: 1, name: '物理主机', style: 'success'}, + {id: 2, name: '虚拟主机', style: 'info'}, + {id: 3, name: '路由器', style: 'info'}, + {id: 4, name: '其它', style: 'default'} + ]; + + $app.host_os_type = [ + {id: 1, name: 'Windows', style: 'success'}, + {id: 2, name: 'Linux/Unix', style: 'info'} + // {id: 3, name: '其它', style: 'info'} + ]; + + var cs = CALLBACK_STACK.create(); + cs.add($tp.app.init); + + if(!_.isUndefined($tp.assist)) { + cs.add($tp.assist.init); + } + + cs.exec(); +}; + +$tp.logout = function () { + window.location.href = '/auth/logout'; +}; + +$tp.ajax_post_json = function (url, args, success, error, timeout) { + var timeout_ = timeout || 3000; + var _args = JSON.stringify(args); + + $.ajax({ + url: url, + type: 'POST', + timeout: timeout_, + data: {_xsrf: tp_get_cookie('_xsrf'), args: _args}, + dataType: 'json', + success: success, + error: error + }); +}; + +// $app 是 $tp.app 的别名,方便使用。 +var $app = $tp.app; + +$app.add_options = function (options) { + _.extend($app.options, options); +}; + +$app.init = function (cb_stack) { + cb_stack.add($app.on_init); + + if (!_.isUndefined($app.sidebar_menu)) { + cb_stack.add($app.sidebar_menu.init_active); + } + + cb_stack.exec(); +}; + +$app.active_menu = function (menu_id) { + if (_.isUndefined($app._make_sidebar_menu)) { + $app._make_sidebar_menu = function (menu_id) { + var _menu = {}; + _menu.active_menu_id = menu_id; + _menu.current_expand_menu_id = ''; + + _menu.toggle_submenu = function (id_) { + var obj = $('#sidebar_menu_' + id_); + if (obj.hasClass('expand')) { + obj.removeClass('expand'); + $('#sidebar_submenu_' + id_).slideUp(300); + } + else { + obj.addClass('expand'); + $('#sidebar_submenu_' + id_).slideDown(300); + } + + if (_menu.current_expand_menu_id !== id_) { + if (_menu.current_expand_menu_id.length > 0) { + $('#sidebar_menu_' + _menu.current_expand_menu_id).removeClass('expand'); + $('#sidebar_submenu_' + _menu.current_expand_menu_id).slideUp(300); + } + } + + _menu.current_expand_menu_id = id_; + }; + + _menu.init_active = function (cb_stack) { + if (_menu.active_menu_id.length === 1) { + $('#sidebar_menu_' + _menu.active_menu_id[0]).addClass('active'); + $('#sidebar_menu_' + _menu.active_menu_id[0] + ' a').addClass('active'); + } else if (_menu.active_menu_id.length === 2) { + $('#sidebar_menu_' + _menu.active_menu_id[0]).addClass('active expand'); + $('#sidebar_menu_' + _menu.active_menu_id[0] + ' a').addClass('selected'); + $('#sidebar_submenu_' + _menu.active_menu_id[0]).show(); + $('#sidebar_menu_' + _menu.active_menu_id[0] + '_' + _menu.active_menu_id[1] + ' a').addClass('active'); + } + _menu.current_expand_menu_id = _menu.active_menu_id[0]; + + cb_stack.exec(); + }; + + return _menu; + 
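            // For example (illustrative value): with menu_id = ['system', 'log'],
            // init_active() marks #sidebar_menu_system as active/expanded, shows
            // #sidebar_submenu_system and activates the link inside #sidebar_menu_system_log;
            // $app.page_id() then returns 'system_log', which the page scripts use
            // as a cookie-name prefix.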
}; + } + + $app.sidebar_menu = $app._make_sidebar_menu(menu_id); + + // 绑定侧边栏导航栏的退出按钮点击事件 + $('#btn-sidebar-menu-logout').click($tp.logout); + + $('#page-sidebar').mCustomScrollbar({ + axis: "y", + theme: 'minimal' + }); +}; + +$app.has_sidebar = function () { + return !_.isUndefined($app.sidebar_menu); +}; + +$app.page_id = function (default_value) { + if (!$app.has_sidebar()) + return default_value; + return $app.sidebar_menu.active_menu_id.join('_'); +}; + +$app.load_role_list = function (cb_stack) { + $tp.ajax_post_json('/user/get-role-list', {}, + function (ret) { + if (ret.code === TPE_OK) { + $app.role_list = ret.data; + } else { + console.error('无法获取角色列表:' + tp_error_msg(ret.code, ret.message)); + } + cb_stack.exec(); + }, + function () { + console.error('网络故障,无法获取角色列表!'); + cb_stack.exec(); + } + ); +}; + +$app.id2name = function(_list, _id) { + if (_.isUndefined(_list)) { + console.error('_list not loaded.'); + return undefined; + } + + for (var i = 0; i < _list.length; ++i) { + if (_list[i].id === _id) + return _list[i].name; + } + + return undefined; +}; + +$app.role_id2name = function (id) { + if (_.isUndefined($app.role_list)) { + console.error('role list not loaded, call load_role_list() first.'); + return undefined; + } + + for (var i = 0; i < $app.role_list.length; ++i) { + if ($app.role_list[i].id === id) + return $app.role_list[i].name; + } + + return undefined; +}; + +// 页面加载完成后,自动初始化核心JS功能。 +$(function () { + $tp.init(); +}); diff --git a/server/www/teleport/static/js/ui/common.js b/server/www/teleport/static/js/teleport/common.js similarity index 69% rename from server/www/teleport/static/js/ui/common.js rename to server/www/teleport/static/js/teleport/common.js index e61e86e..935e8e5 100644 --- a/server/www/teleport/static/js/ui/common.js +++ b/server/www/teleport/static/js/teleport/common.js @@ -1,8 +1,10 @@ -/*! ywl v1.0.1, (c)2015 eomsoft.net */ +//=================================================== +// basic and common functions. 
+//=================================================== + "use strict"; - -ywl.notify_error = function (message_, title_) { +$tp.notify_error = function (message_, title_) { var _title = title_ || ''; $.gritter.add({ //sticky:true, @@ -11,94 +13,26 @@ ywl.notify_error = function (message_, title_) { title: ' 错误:' + _title, text: message_ }); + console.error('错误', _title, message_); }; -ywl.notify_success = function (message_, title_) { +$tp.notify_success = function (message_, title_) { var _title = title_ || null; if (_title !== null) _title = ' ' + _title; $.gritter.add({ //sticky:true, class_name: 'gritter-success', - time: 10000, + time: 5000, title: _title, text: message_ }); }; - -function get_host_group_by_id(gid) { - var _all = {id: 0, group_name: '全部'}; - return _all; -} - -function get_user_info_by_id(user_id) { - var _all = {id: 0, nickname: '未知'}; - return _all; -} - -function get_event_code_by_id(e_id) { - var _all = {id: 0, e_desc: '未知'}; - var ret = ywl.assist.get_cache_by_id(CACHE_TYPE_EVENT_CODE, e_id); - - if (ret == null) - return _all; - else - return ret; -} -function get_current_system_group() { - return get_system_group_by_id(0); -} - -function get_system_group_by_id(gid) { - var _all = {id: 0, name: '全部'}; - var ret = null; - - $.each(system_group, function (i, group) { - if (group.id == gid) { - ret = group; - return false; - } - }); - - if (ret == null) - return _all; - else - return ret; -} - - -//function get_command_name_by_id(cmd_id) { -// return ywl.assist.get_cache_by_id(CACHE_TYPE_COMMAND, cmd_id); -//} - -//function notify_error(message_, title_) { -// var _title = title_ || ''; -// $.gritter.add({ -// sticky: true, -// class_name: 'gritter-error', -// time: 10000, -// title: ' 错误:' + _title, -// text: message_ -// }); -//} -//function notify_success(message_, title_) { -// var _title = title_ || null; -// if (_title !== null) -// _title = ' ' + _title; -// $.gritter.add({ -// //sticky:true, -// class_name: 'gritter-success', -// time: 10000, -// title: _title, -// text: message_ -// }); -//} - // 切换一个dom节点显示与否 -ywl.toggle_display = function (selector) { +$tp.toggle_display = function (selector) { var obj = $(selector); - if (typeof(obj) == 'undefined') + if (_.isUndefined(obj)) return; if (obj.is(':hidden')) { @@ -108,24 +42,73 @@ ywl.toggle_display = function (selector) { } }; +$tp.disable_dom = function (dom_selector, message) { + // 计算被禁用的DOM对象的位置和大小 + var obj = $(dom_selector); + var pad_left = parseInt(obj.css("padding-left"), 10); + var pad_right = parseInt(obj.css("padding-right"), 10); + var pad_top = parseInt(obj.css("padding-top"), 10); + var pad_bottom = parseInt(obj.css("padding-bottom"), 10); + var w = obj.width() + pad_left + pad_right; + var h = obj.height() + pad_top + pad_bottom; + + // var html = '
    '; + var html = []; + html.push('
    '); + var has_message = false; + if (!_.isUndefined(message) && !_.isNull(message) && message.length > 0) { + html.push('
    ' + message + '
    '); + has_message = true; + } + + $('body').append($(html.join(''))); + + $('#tp-dom-disable-overlay').css({ + 'left': obj.offset().left, 'top': obj.offset().top, + 'width': w, 'height': h + } + ); + + if (has_message) { + var obj_msg = $('#tp-dom-disable-message'); + var _pad_left = parseInt(obj_msg.css("padding-left"), 10); + var _pad_right = parseInt(obj_msg.css("padding-right"), 10); + var _pad_top = parseInt(obj_msg.css("padding-top"), 10); + var _pad_bottom = parseInt(obj_msg.css("padding-bottom"), 10); + var _w = obj_msg.width() + _pad_left + _pad_right; + var _h = obj_msg.height() + _pad_top + _pad_bottom; + + console.log(_w, _h); + + obj_msg.css({ + 'left': obj.offset().left + (w-_w)/2, 'top': obj.offset().top + (h-_h)*2/7 + // 'width': w, 'height': h + } + ); + + } + +}; + //====================================================== // Dialog-box for confirm operation. //====================================================== -ywl.dlg_confirm = function (cb_stack, cb_args) { +$tp.dlg_confirm = function (cb_stack, cb_args) { var self = {}; self._cb_stack = cb_stack; - self._title = cb_args.title || '操作确认:'; + self._title = cb_args.title || ' 操作确认'; self._msg = cb_args.msg || ''; self._btn_yes = cb_args.btn_yes || '确定'; self._btn_no = cb_args.btn_no || '取消'; self._fn_yes = cb_args.fn_yes || null; self._fn_no = cb_args.fn_no || null; self._dlg_id = _.uniqueId('dlg-confirm-'); - self._cb_args = cb_args.cb_args || {}; + self._cb_args = cb_args || {}; + self.dom = {}; self._make_message_box = function () { var _html = [ - '