Add ldap module for Python on macOS.

pull/130/head
Apex Liu 2018-10-29 03:11:08 +08:00
parent 626659e940
commit f775cdb4f6
141 changed files with 37000 additions and 0 deletions


@@ -0,0 +1,146 @@
"""
"""
# Created on 2013.05.15
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from types import GeneratorType
# authentication
ANONYMOUS = 'ANONYMOUS'
SIMPLE = 'SIMPLE'
SASL = 'SASL'
NTLM = 'NTLM'
# SASL MECHANISMS
EXTERNAL = 'EXTERNAL'
DIGEST_MD5 = 'DIGEST-MD5'
KERBEROS = GSSAPI = 'GSSAPI'
PLAIN = 'PLAIN'
AUTO_BIND_DEFAULT = 'DEFAULT' # binds connection when using "with" context manager
AUTO_BIND_NONE = 'NONE' # same as False
AUTO_BIND_NO_TLS = 'NO_TLS' # same as True
AUTO_BIND_TLS_BEFORE_BIND = 'TLS_BEFORE_BIND'
AUTO_BIND_TLS_AFTER_BIND = 'TLS_AFTER_BIND'
# server IP dual stack mode
IP_SYSTEM_DEFAULT = 'IP_SYSTEM_DEFAULT'
IP_V4_ONLY = 'IP_V4_ONLY'
IP_V6_ONLY = 'IP_V6_ONLY'
IP_V4_PREFERRED = 'IP_V4_PREFERRED'
IP_V6_PREFERRED = 'IP_V6_PREFERRED'
# search scope
BASE = 'BASE'
LEVEL = 'LEVEL'
SUBTREE = 'SUBTREE'
# search alias
DEREF_NEVER = 'NEVER'
DEREF_SEARCH = 'SEARCH'
DEREF_BASE = 'FINDING_BASE'
DEREF_ALWAYS = 'ALWAYS'
# search attributes
ALL_ATTRIBUTES = '*'
NO_ATTRIBUTES = '1.1' # as per RFC 4511
ALL_OPERATIONAL_ATTRIBUTES = '+' # as per RFC 3673
# modify type
MODIFY_ADD = 'MODIFY_ADD'
MODIFY_DELETE = 'MODIFY_DELETE'
MODIFY_REPLACE = 'MODIFY_REPLACE'
MODIFY_INCREMENT = 'MODIFY_INCREMENT'
# client strategies
SYNC = 'SYNC'
ASYNC = 'ASYNC'
LDIF = 'LDIF'
RESTARTABLE = 'RESTARTABLE'
REUSABLE = 'REUSABLE'
MOCK_SYNC = 'MOCK_SYNC'
MOCK_ASYNC = 'MOCK_ASYNC'
ASYNC_STREAM = 'ASYNC_STREAM'
# get rootDSE info
NONE = 'NO_INFO'
DSA = 'DSA'
SCHEMA = 'SCHEMA'
ALL = 'ALL'
OFFLINE_EDIR_8_8_8 = 'EDIR_8_8_8'
OFFLINE_AD_2012_R2 = 'AD_2012_R2'
OFFLINE_SLAPD_2_4 = 'SLAPD_2_4'
OFFLINE_DS389_1_3_3 = 'DS389_1_3_3'
# server pooling
FIRST = 'FIRST'
ROUND_ROBIN = 'ROUND_ROBIN'
RANDOM = 'RANDOM'
# Hashed password
HASHED_NONE = 'PLAIN'
HASHED_SHA = 'SHA'
HASHED_SHA256 = 'SHA256'
HASHED_SHA384 = 'SHA384'
HASHED_SHA512 = 'SHA512'
HASHED_MD5 = 'MD5'
HASHED_SALTED_SHA = 'SALTED_SHA'
HASHED_SALTED_SHA256 = 'SALTED_SHA256'
HASHED_SALTED_SHA384 = 'SALTED_SHA384'
HASHED_SALTED_SHA512 = 'SALTED_SHA512'
HASHED_SALTED_MD5 = 'SALTED_MD5'
if str is not bytes: # Python 3
NUMERIC_TYPES = (int, float)
INTEGER_TYPES = (int, )
else:
NUMERIC_TYPES = (int, long, float)
INTEGER_TYPES = (int, long)
# types for string and sequence
if str is not bytes: # Python 3
STRING_TYPES = (str, )
SEQUENCE_TYPES = (set, list, tuple, GeneratorType, type(dict().keys())) # dict.keys() is an iterable view object in Python 3
else: # Python 2
try:
from future.types.newstr import newstr
except ImportError:
pass
STRING_TYPES = (str, unicode)
SEQUENCE_TYPES = (set, list, tuple, GeneratorType)
# centralized imports # must be at the end of the __init__.py file
from .version import __author__, __version__, __email__, __description__, __status__, __license__, __url__
from .utils.config import get_config_parameter, set_config_parameter
from .core.server import Server
from .core.connection import Connection
from .core.tls import Tls
from .core.pooling import ServerPool
from .abstract.objectDef import ObjectDef
from .abstract.attrDef import AttrDef
from .abstract.attribute import Attribute, WritableAttribute, OperationalAttribute
from .abstract.entry import Entry, WritableEntry
from .abstract.cursor import Reader, Writer
from .protocol.rfc4512 import DsaInfo, SchemaInfo
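A minimal usage sketch of the names exported above (the host, bind DN and password are hypothetical placeholders, not values taken from this commit):

from ldap3 import Server, Connection, ALL, SUBTREE

server = Server('ldap://ldap.example.com', get_info=ALL)        # hypothetical host
conn = Connection(server, user='cn=admin,dc=example,dc=com',    # hypothetical credentials
                  password='secret', auto_bind=True)
conn.search('dc=example,dc=com', '(objectClass=person)',
            search_scope=SUBTREE, attributes=['cn', 'mail'])
for entry in conn.entries:
    print(entry.entry_dn)
conn.unbind()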


@@ -0,0 +1,50 @@
"""
"""
# Created on 2016.08.31
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
STATUS_INIT = 'Initialized' # The entry object is initialized
STATUS_VIRTUAL = 'Virtual' # The entry is a new writable entry, still empty
STATUS_MANDATORY_MISSING = 'Missing mandatory attributes' # The entry has some mandatory attributes missing
STATUS_READ = 'Read' # The entry has been read
STATUS_WRITABLE = 'Writable' # The entry has been made writable, still no changes
STATUS_PENDING_CHANGES = 'Pending changes' # The entry has some changes to commit, mandatory attributes are present
STATUS_COMMITTED = 'Committed' # The entry changes have been committed
STATUS_READY_FOR_DELETION = 'Ready for deletion' # The entry is set to be deleted
STATUS_READY_FOR_MOVING = 'Ready for moving' # The entry is set to be moved in the DIT
STATUS_READY_FOR_RENAMING = 'Ready for renaming' # The entry is set to be renamed
STATUS_DELETED = 'Deleted' # The entry has been deleted
STATUSES = [STATUS_INIT,
STATUS_VIRTUAL,
STATUS_MANDATORY_MISSING,
STATUS_READ,
STATUS_WRITABLE,
STATUS_PENDING_CHANGES,
STATUS_COMMITTED,
STATUS_READY_FOR_DELETION,
STATUS_READY_FOR_MOVING,
STATUS_READY_FOR_RENAMING,
STATUS_DELETED]
INITIAL_STATUSES = [STATUS_READ, STATUS_WRITABLE, STATUS_VIRTUAL]
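These constants are reported by the entry_status property of entries. A brief sketch of the expected transitions, assuming entry is a read-only entry returned by a previous Reader search (the attribute and value are illustrative):

print(entry.entry_status)          # 'Read'
writable = entry.entry_writable()  # switch to a writable entry
print(writable.entry_status)       # 'Writable'
writable.sn += 'Smith'             # stage a change
print(writable.entry_status)       # 'Pending changes'
writable.entry_commit_changes()
print(writable.entry_status)       # 'Committed'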


@@ -0,0 +1,121 @@
"""
"""
# Created on 2014.01.11
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from os import linesep
from .. import SEQUENCE_TYPES
from ..core.exceptions import LDAPKeyError
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
class AttrDef(object):
"""Hold the definition of an attribute
:param name: the real attribute name
:type name: string
:param key: the friendly name to use in queries and when accessing the attribute, defaults to the real attribute name
:type key: string
:param validate: called to check if a value is valid; the callable is called with the value as its only parameter
:type validate: callable
:param pre_query: called to transform values used in the simplified query before the search is performed
:type pre_query: callable
:param post_query: called to transform values returned by search
:type post_query: callable
:param default: value returned when the attribute is absent (defaults to NotImplemented to allow use of None as default)
:type default: string, integer
:param dereference_dn: reference to an ObjectDef instance. When the attribute value contains a dn it will be searched and substituted in the entry
:type dereference_dn: ObjectDef
:param description: custom attribute description
:type description: string
:param mandatory: specify if attribute is defined as mandatory in LDAP schema
:type mandatory: boolean
"""
def __init__(self, name, key=None, validate=None, pre_query=None, post_query=None, default=NotImplemented, dereference_dn=None, description=None, mandatory=False, single_value=None, alias=None):
self.name = name
self.key = ''.join(key.split()) if key else name # key set to name if not present
self.validate = validate
self.pre_query = pre_query
self.post_query = post_query
self.default = default
self.dereference_dn = dereference_dn
self.description = description
self.mandatory = mandatory
self.single_value = single_value
self.oid_info = None
if not alias:
self.other_names = None
elif isinstance(alias, SEQUENCE_TYPES): # multiple aliases
self.other_names = set(alias)
else: # single alias
self.other_names = set([alias]) # python 2 compatibility
if log_enabled(BASIC):
log(BASIC, 'instantiated AttrDef: <%r>', self)
def __repr__(self):
r = 'ATTR: ' + (', '.join([self.key] + list(self.other_names)) if self.other_names else self.key)
r += '' if self.name == self.key else ' [' + self.name + ']'
r += '' if self.default is NotImplemented else ' - default: ' + str(self.default)
r += '' if self.mandatory is None else ' - mandatory: ' + str(self.mandatory)
r += '' if self.single_value is None else ' - single_value: ' + str(self.single_value)
r += '' if not self.dereference_dn else ' - dereference_dn: ' + str(self.dereference_dn)
r += '' if not self.description else ' - description: ' + str(self.description)
if self.oid_info:
for line in str(self.oid_info).split(linesep):
r += linesep + ' ' + line
return r
def __str__(self):
return self.__repr__()
def __eq__(self, other):
if isinstance(other, AttrDef):
return self.key == other.key
return False
def __lt__(self, other):
if isinstance(other, AttrDef):
return self.key < other.key
return False
def __hash__(self):
if self.key:
return hash(self.key)
else:
return id(self) # unique for each instance
def __setattr__(self, key, value):
if hasattr(self, 'key') and key == 'key': # key cannot be changed because it is used for __hash__
error_message = 'key \'%s\' already set' % key
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPKeyError(error_message)
else:
object.__setattr__(self, key, value)
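A short sketch of how AttrDef is typically combined with an ObjectDef in the abstraction layer (the object class, friendly keys and validator below are illustrative assumptions):

from ldap3 import ObjectDef, AttrDef

person = ObjectDef('inetOrgPerson')                    # built offline, without schema lookup
person += AttrDef('cn', key='CommonName')              # friendly key usable in simplified queries
person += AttrDef('departmentNumber', key='Department',
                  validate=lambda value: str(value).isdigit())  # reject non-numeric values
print(person.CommonName.name)                          # -> 'cn'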


@@ -0,0 +1,285 @@
"""
"""
# Created on 2014.01.06
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from os import linesep
from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES
from ..core.exceptions import LDAPCursorError
from ..utils.repr import to_stdout_encoding
from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
# noinspection PyUnresolvedReferences
class Attribute(object):
"""Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry
Attribute object is read only
- values: contain the processed attribute values
- raw_values': contain the unprocessed attribute values
"""
def __init__(self, attr_def, entry, cursor):
self.key = attr_def.key
self.definition = attr_def
self.values = []
self.raw_values = []
self.response = None
self.entry = entry
self.cursor = cursor
other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None
self.other_names = set(other_names) if other_names else None # self.other_names is None if there are no short names, else is a set of secondary names
def __repr__(self):
if len(self.values) == 1:
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
elif len(self.values) > 1:
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
filler = ' ' * (len(self.key) + 6)
for value in self.values[1:]:
r += linesep + filler + to_stdout_encoding(value)
else:
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('<no value>')
return r
def __str__(self):
if len(self.values) == 1:
return to_stdout_encoding(self.values[0])
else:
return to_stdout_encoding(self.values)
def __len__(self):
return len(self.values)
def __iter__(self):
return self.values.__iter__()
def __getitem__(self, item):
return self.values[item]
def __eq__(self, other):
try:
if self.value == other:
return True
except Exception:
return False
def __ne__(self, other):
return not self == other
@property
def value(self):
"""
:return: The single value or a list of values of the attribute.
"""
if not self.values:
return None
return self.values[0] if len(self.values) == 1 else self.values
class OperationalAttribute(Attribute):
"""Operational attribute/values object. Include the search result of an
operational attribute in an entry
OperationalAttribute object is read only
- values: contains the processed attribute values
- raw_values: contains the unprocessed attribute values
It may not have an AttrDef
"""
def __repr__(self):
if len(self.values) == 1:
r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
elif len(self.values) > 1:
r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
filler = ' ' * (len(self.key) + 6)
for value in sorted(self.values[1:]):
r += linesep + filler + to_stdout_encoding(value)
else:
r = ''
return r
class WritableAttribute(Attribute):
def __repr__(self):
filler = ' ' * (len(self.key) + 6)
if len(self.values) == 1:
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
elif len(self.values) > 1:
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
for value in self.values[1:]:
r += linesep + filler + to_stdout_encoding(value)
else:
r = to_stdout_encoding(self.key) + to_stdout_encoding(': <Virtual>')
if self.definition.name in self.entry._changes:
r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name])
return r
def __iadd__(self, other):
self.add(other)
return Ellipsis # hack to avoid calling set() in entry __setattr__
def __isub__(self, other):
self.delete(other)
return Ellipsis # hack to avoid calling set_value in entry __setattr__
def _update_changes(self, changes, remove_old=False):
# checks for friendly key in AttrDef and uses the real attribute name
if self.definition and self.definition.name:
key = self.definition.name
else:
key = self.key
if key not in self.entry._changes or remove_old: # remove old changes (for removing attribute)
self.entry._changes[key] = []
self.entry._changes[key].append(changes)
if log_enabled(PROTOCOL):
log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn)
self.entry._state.set_status(STATUS_PENDING_CHANGES)
def add(self, values):
if log_enabled(PROTOCOL):
log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
# new value for attribute to commit with a MODIFY_ADD
if self.entry._state._initial_status == STATUS_VIRTUAL:
error_message = 'cannot add an attribute value in a new entry'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
error_message = self.entry.entry_status + ' - cannot add attributes'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if values is None:
error_message = 'value to add cannot be None'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if values is not None:
validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values
if validated is False:
error_message = 'value \'%s\' not valid for attribute \'%s\'' % (values, self.key)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
elif validated is not True: # a valid LDAP value equivalent to the actual values
values = validated
self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values]))
def set(self, values):
# new value for attribute to commit with a MODIFY_REPLACE, old values are deleted
if log_enabled(PROTOCOL):
log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
error_message = self.entry.entry_status + ' - cannot set attributes'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if values is None:
error_message = 'new value cannot be None'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values
if validated is False:
error_message = 'value \'%s\' not valid for attribute \'%s\'' % (values, self.key)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
elif validated is not True: # a valid LDAP value equivalent to the actual values
values = validated
self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]), remove_old=True)
def delete(self, values):
# value for attribute to delete in commit with a MODIFY_DELETE
if log_enabled(PROTOCOL):
log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
if self.entry._state._initial_status == STATUS_VIRTUAL:
error_message = 'cannot delete an attribute value in a new entry'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
error_message = self.entry.entry_status + ' - cannot delete attributes'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if values is None:
error_message = 'value to delete cannot be None'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if not isinstance(values, SEQUENCE_TYPES):
values = [values]
for single_value in values:
if single_value not in self.values:
error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values))
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self._update_changes((MODIFY_DELETE, values))
def remove(self):
if log_enabled(PROTOCOL):
log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
if self.entry._state._initial_status == STATUS_VIRTUAL:
error_message = 'cannot remove an attribute in a new entry'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
error_message = self.entry.entry_status + ' - cannot remove attributes'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self._update_changes((MODIFY_REPLACE, []), True)
def discard(self):
if log_enabled(PROTOCOL):
log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
del self.entry._changes[self.key]
if not self.entry._changes:
self.entry._state.set_status(self.entry._state._initial_status)
@property
def virtual(self):
return False if len(self.values) else True
@property
def changes(self):
if self.key in self.entry._changes:
return self.entry._changes[self.key]
return None
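A minimal sketch of how the WritableAttribute methods above stage changes on an entry obtained from a Writer cursor (writer_cursor, the attribute names and the values are hypothetical):

entry = writer_cursor.entries[0]              # a WritableEntry from a previous search
entry.telephoneNumber.add('+1 555 0100')      # staged as MODIFY_ADD
entry.mail.set('user@example.com')            # staged as MODIFY_REPLACE, old values dropped
entry.description.delete('obsolete text')     # staged as MODIFY_DELETE
print(entry.entry_changes)                    # pending modifications keyed by attribute name
entry.entry_commit_changes()                  # sends the LDAP modify operation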


@@ -0,0 +1,904 @@
"""
"""
# Created on 2014.01.06
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from collections import namedtuple
from copy import deepcopy
from datetime import datetime
from os import linesep
from time import sleep
from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE
from .. import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
from ..abstract import STATUS_PENDING_CHANGES
from .attribute import Attribute, OperationalAttribute, WritableAttribute
from .attrDef import AttrDef
from .objectDef import ObjectDef
from .entry import Entry, WritableEntry
from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError
from ..core.results import RESULT_SUCCESS
from ..utils.ciDict import CaseInsensitiveWithAliasDict
from ..utils.dn import safe_dn, safe_rdn
from ..utils.conv import to_raw
from ..utils.config import get_config_parameter
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION, CLASS_AUXILIARY
Operation = namedtuple('Operation', ('request', 'result', 'response'))
def _ret_search_value(value):
return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value
def _create_query_dict(query_text):
"""
Create a dictionary with query key:value definitions
query_text is a comma delimited key:value sequence
"""
query_dict = dict()
if query_text:
for arg_value_str in query_text.split(','):
if ':' in arg_value_str:
arg_value_list = arg_value_str.split(':')
query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip()
return query_dict
class Cursor(object):
# entry_class and attribute_class define the type of entry and attribute used by the cursor
# entry_initial_status defines the initial status of an entry
# entry_class = Entry, must be defined in subclasses
# attribute_class = Attribute, must be defined in subclasses
# entry_initial_status = STATUS, must be defined in subclasses
def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
self.connection = connection
self.get_operational_attributes = get_operational_attributes
if connection._deferred_bind or connection._deferred_open: # probably a lazy connection, tries to bind
connection._fire_deferred()
if isinstance(object_def, (STRING_TYPES, SEQUENCE_TYPES)):
object_def = ObjectDef(object_def, connection.server.schema, auxiliary_class=auxiliary_class)
self.definition = object_def
if attributes: # checks if requested attributes are defined in ObjectDef
not_defined_attributes = []
if isinstance(attributes, STRING_TYPES):
attributes = [attributes]
for attribute in attributes:
if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def:
not_defined_attributes.append(attribute)
if not_defined_attributes:
error_message = 'Attributes \'%s\' not in definition' % ', '.join(not_defined_attributes)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition])
self.controls = controls
self.execution_time = None
self.entries = []
self.schema = self.connection.server.schema
self._do_not_reset = False # used for refreshing an entry in refresh_entry() without removing all entries from the Cursor
self._operation_history = list() # a list storing all the requests, results and responses for the last cursor operation
def __repr__(self):
r = 'CURSOR : ' + self.__class__.__name__ + linesep
r += 'CONN : ' + str(self.connection) + linesep
r += 'DEFS : ' + ', '.join(self.definition._object_class)
if self.definition._auxiliary_class:
r += ' [AUX: ' + ', '.join(self.definition._auxiliary_class) + ']'
r += linesep
# for attr_def in sorted(self.definition):
# r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', '
# if r[-2] == ',':
# r = r[:-2]
# r += ']' + linesep
if hasattr(self, 'attributes'):
r += 'ATTRS : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep
if isinstance(self, Reader):
if hasattr(self, 'base'):
r += 'BASE : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep
if hasattr(self, '_query') and self._query:
r += 'QUERY : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
if hasattr(self, 'validated_query') and self.validated_query:
r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
if hasattr(self, 'query_filter') and self.query_filter:
r += 'FILTER : ' + repr(self.query_filter) + linesep
if hasattr(self, 'execution_time') and self.execution_time:
r += 'ENTRIES: ' + str(len(self.entries))
r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep
if self.failed:
r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']'
return r
def __str__(self):
return self.__repr__()
def __iter__(self):
return self.entries.__iter__()
def __getitem__(self, item):
"""Return indexed item, if index is not found then try to sequentially search in DN of entries.
If only one entry is found return it else raise a KeyError exception. The exception message
includes the number of entries that matches, if less than 10 entries match then show the DNs
in the exception message.
"""
try:
return self.entries[item]
except TypeError:
pass
if isinstance(item, STRING_TYPES):
found = self.match_dn(item)
if len(found) == 1:
return found[0]
elif len(found) > 1:
error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']'))
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise KeyError(error_message)
error_message = 'no entry found'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise KeyError(error_message)
def __len__(self):
return len(self.entries)
if str is not bytes: # Python 3
def __bool__(self): # needed to make the cursor appear truthy in "if cursor:" even if there are no entries
return True
else: # Python 2
def __nonzero__(self):
return True
def _get_attributes(self, response, attr_defs, entry):
"""Assign the result of the LDAP query to the Entry object dictionary.
If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute.
Returns the default value for missing attributes.
If the 'dereference_dn' in AttrDef is an ObjectDef then the attribute values are treated as distinguished names and the referenced entries are retrieved and stored in the attribute value.
"""
conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
attributes = CaseInsensitiveWithAliasDict()
used_attribute_names = set()
for attr in attr_defs:
attr_def = attr_defs[attr]
attribute_name = None
for attr_name in response['attributes']:
if attr_def.name.lower() == attr_name.lower():
attribute_name = attr_name
break
if attribute_name or attr_def.default is not NotImplemented: # attribute value found in result or default value present - NotImplemented allows use of None as default
attribute = self.attribute_class(attr_def, entry, self)
attribute.response = response
attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None
if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list():
attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name])
else:
if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()):
attribute.values = response['attributes'][attribute_name]
else:
attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default]
if not isinstance(attribute.values, list): # force attribute values to list (if attribute is single-valued)
attribute.values = [attribute.values]
if attr_def.dereference_dn: # try to get object referenced in value
if attribute.values:
temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls)
temp_values = []
for element in attribute.values:
if entry.entry_dn != element:
temp_values.append(temp_reader.search_object(element))
else:
error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPObjectDereferenceError(error_message)
del temp_reader # remove the temporary Reader
attribute.values = temp_values
attributes[attribute.key] = attribute
if attribute.other_names:
attributes.set_alias(attribute.key, attribute.other_names)
if attr_def.other_names:
attributes.set_alias(attribute.key, attr_def.other_names)
used_attribute_names.add(attribute_name)
if self.attributes:
used_attribute_names.update(self.attributes)
for attribute_name in response['attributes']:
if attribute_name not in used_attribute_names:
operational_attribute = False
# check if the type is an operational attribute
if attribute_name in self.schema.attribute_types:
if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]:
operational_attribute = True
else:
operational_attribute = True
if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def:
error_message = 'attribute \'%s\' not in object class \'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self)
attribute.raw_values = response['raw_attributes'][attribute_name]
attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]]
if (conf_operational_attribute_prefix + attribute_name) not in attributes:
attributes[conf_operational_attribute_prefix + attribute_name] = attribute
return attributes
def match_dn(self, dn):
"""Return entries with text in DN"""
matched = []
for entry in self.entries:
if dn.lower() in entry.entry_dn.lower():
matched.append(entry)
return matched
def match(self, attributes, value):
"""Return entries with text in one of the specified attributes"""
matched = []
if not isinstance(attributes, SEQUENCE_TYPES):
attributes = [attributes]
for entry in self.entries:
found = False
for attribute in attributes:
if attribute in entry:
for attr_value in entry[attribute].values:
if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower():
found = True
elif value == attr_value:
found = True
if found:
matched.append(entry)
break
if found:
break
# checks raw values, tries to convert value to byte
raw_value = to_raw(value)
if isinstance(raw_value, (bytes, bytearray)):
for attr_value in entry[attribute].raw_values:
if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower():
found = True
elif raw_value == attr_value:
found = True
if found:
matched.append(entry)
break
if found:
break
return matched
def _create_entry(self, response):
if response['type'] != 'searchResEntry':
return None
entry = self.entry_class(response['dn'], self) # define an Entry (writable or readonly), as specified in the cursor definition
entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry)
entry._state.entry_raw_attributes = deepcopy(response['raw_attributes'])
entry._state.response = response
entry._state.read_time = datetime.now()
entry._state.set_status(self.entry_initial_status)
for attr in entry: # returns the whole attribute object
entry.__dict__[attr.key] = attr
return entry
def _execute_query(self, query_scope, attributes):
if not self.connection:
error_message = 'no connection established'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
old_query_filter = None
if query_scope == BASE: # requesting a single object so an always-valid filter is set
if hasattr(self, 'query_filter'): # only Reader has a query filter
old_query_filter = self.query_filter
self.query_filter = '(objectclass=*)'
else:
self._create_query_filter()
if log_enabled(PROTOCOL):
log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self)
with self.connection:
result = self.connection.search(search_base=self.base,
search_filter=self.query_filter,
search_scope=query_scope,
dereference_aliases=self.dereference_aliases,
attributes=attributes if attributes else list(self.attributes),
get_operational_attributes=self.get_operational_attributes,
controls=self.controls)
if not self.connection.strategy.sync:
response, result, request = self.connection.get_response(result, get_request=True)
else:
response = self.connection.response
result = self.connection.result
request = self.connection.request
self._store_operation_in_history(request, result, response)
if self._do_not_reset: # trick to not remove entries when using _refresh()
return self._create_entry(response[0])
self.entries = []
for r in response:
entry = self._create_entry(r)
if entry is not None:
self.entries.append(entry)
if 'objectClass' in entry:
for object_class in entry.objectClass:
if self.schema.object_classes[object_class].kind == CLASS_AUXILIARY and object_class not in self.definition._auxiliary_class:
# add auxiliary class to object definition
self.definition._auxiliary_class.append(object_class)
self.definition._populate_attr_defs(object_class)
self.execution_time = datetime.now()
if old_query_filter: # requesting a single object so an always-valid filter is set
self.query_filter = old_query_filter
def remove(self, entry):
if log_enabled(PROTOCOL):
log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self)
self.entries.remove(entry)
def _reset_history(self):
self._operation_history = list()
def _store_operation_in_history(self, request, result, response):
self._operation_history.append(Operation(request, result, response))
@property
def operations(self):
return self._operation_history
@property
def errors(self):
return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS]
@property
def failed(self):
if hasattr(self, '_operation_history'):
return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history])
class Reader(Cursor):
"""Reader object to perform searches:
:param connection: the LDAP connection object to use
:type connection: LDAPConnection
:param object_def: the ObjectDef of the LDAP object returned
:type object_def: ObjectDef
:param query: the simplified query (will be transformed into an LDAP filter)
:type query: str
:param base: starting base of the search
:type base: str
:param components_in_and: specify if assertions in the query must all be satisfied or not (AND/OR)
:type components_in_and: bool
:param sub_tree: specify if the search must be performed at Single Level (False) or Whole SubTree (True)
:type sub_tree: bool
:param get_operational_attributes: specify if operational attributes are returned or not
:type get_operational_attributes: bool
:param controls: controls to be used in search
:type controls: tuple
"""
entry_class = Entry # entries are read_only
attribute_class = Attribute # attributes are read_only
entry_initial_status = STATUS_READ
def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
self._components_in_and = components_in_and
self.sub_tree = sub_tree
self._query = query
self.base = base
self.dereference_aliases = DEREF_ALWAYS
self.validated_query = None
self._query_dict = dict()
self._validated_query_dict = dict()
self.query_filter = None
self.reset()
if log_enabled(BASIC):
log(BASIC, 'instantiated Reader Cursor: <%r>', self)
@property
def query(self):
return self._query
@query.setter
def query(self, value):
self._query = value
self.reset()
@property
def components_in_and(self):
return self._components_in_and
@components_in_and.setter
def components_in_and(self, value):
self._components_in_and = value
self.reset()
def clear(self):
"""Clear the Reader search parameters
"""
self.dereference_aliases = DEREF_ALWAYS
self._reset_history()
def reset(self):
"""Clear all the Reader parameters
"""
self.clear()
self.validated_query = None
self._query_dict = dict()
self._validated_query_dict = dict()
self.execution_time = None
self.query_filter = None
self.entries = []
self._create_query_filter()
def _validate_query(self):
"""Processes the text query and verifies that the requested friendly names are in the Reader dictionary
If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised
"""
if not self._query_dict:
self._query_dict = _create_query_dict(self._query)
query = ''
for d in sorted(self._query_dict):
attr = d[1:] if d[0] in '&|' else d
for attr_def in self.definition:
if ''.join(attr.split()).lower() == attr_def.key.lower():
attr = attr_def.key
break
if attr in self.definition:
vals = sorted(self._query_dict[d].split(';'))
query += (d[0] + attr if d[0] in '&|' else attr) + ': '
for val in vals:
val = val.strip()
val_not = True if val[0] == '!' else False
val_search_operator = '=' # default
if val_not:
if val[1:].lstrip()[0] not in '=<>~':
value = val[1:].lstrip()
else:
val_search_operator = val[1:].lstrip()[0]
value = val[1:].lstrip()[1:]
else:
if val[0] not in '=<>~':
value = val.lstrip()
else:
val_search_operator = val[0]
value = val[1:].lstrip()
if self.definition[attr].validate:
validated = self.definition[attr].validate(value) # returns True, False or a value to substitute to the actual values
if validated is False:
error_message = 'validation failed for attribute %s and value %s' % (d, val)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
elif validated is not True: # a valid LDAP value equivalent to the actual values
value = validated
if val_not:
query += '!' + val_search_operator + str(value)
else:
query += val_search_operator + str(value)
query += ';'
query = query[:-1] + ', '
else:
error_message = 'attribute \'%s\' not in definition' % attr
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self.validated_query = query[:-2]
self._validated_query_dict = _create_query_dict(self.validated_query)
def _create_query_filter(self):
"""Converts the query dictionary to the filter text"""
self.query_filter = ''
if self.definition._object_class:
self.query_filter += '(&'
if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1:
self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')'
elif isinstance(self.definition._object_class, SEQUENCE_TYPES):
self.query_filter += '(&'
for object_class in self.definition._object_class:
self.query_filter += '(objectClass=' + object_class + ')'
self.query_filter += ')'
else:
error_message = 'object class must be a string or a list'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if self._query and self._query.startswith('(') and self._query.endswith(')'): # query is already an LDAP filter
if 'objectclass' not in self._query.lower():
self.query_filter += self._query + ')' # if objectclass not in filter adds from definition
else:
self.query_filter = self._query
return
elif self._query: # if a simplified filter is present
if not self.components_in_and:
self.query_filter += '(|'
elif not self.definition._object_class:
self.query_filter += '(&'
self._validate_query()
attr_counter = 0
for attr in sorted(self._validated_query_dict):
attr_counter += 1
multi = True if ';' in self._validated_query_dict[attr] else False
vals = sorted(self._validated_query_dict[attr].split(';'))
attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr]
if attr_def.pre_query:
modvals = []
for val in vals:
modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:]))
vals = modvals
if multi:
if attr[0] in '&|':
self.query_filter += '(' + attr[0]
else:
self.query_filter += '(|'
for val in vals:
if val[0] == '!':
self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))'
else:
self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')'
if multi:
self.query_filter += ')'
if not self.components_in_and:
self.query_filter += '))'
else:
self.query_filter += ')'
if not self.definition._object_class and attr_counter == 1: # removes unneeded starting filter
self.query_filter = self.query_filter[2: -1]
if self.query_filter == '(|)' or self.query_filter == '(&)': # removes empty filter
self.query_filter = ''
else: # no query, remove unneeded leading (&
self.query_filter = self.query_filter[2:]
def search(self, attributes=None):
"""Perform the LDAP search
:return: Entries found in search
"""
self.clear()
query_scope = SUBTREE if self.sub_tree else LEVEL
if log_enabled(PROTOCOL):
log(PROTOCOL, 'performing search in <%s>', self)
self._execute_query(query_scope, attributes)
return self.entries
def search_object(self, entry_dn=None, attributes=None): # base must be a single dn
"""Perform the LDAP search operation SINGLE_OBJECT scope
:return: Entry found in search
"""
if log_enabled(PROTOCOL):
log(PROTOCOL, 'performing object search in <%s>', self)
self.clear()
if entry_dn:
old_base = self.base
self.base = entry_dn
self._execute_query(BASE, attributes)
self.base = old_base
else:
self._execute_query(BASE, attributes)
return self.entries[0] if len(self.entries) > 0 else None
def search_level(self, attributes=None):
"""Perform the LDAP search operation with SINGLE_LEVEL scope
:return: Entries found in search
"""
if log_enabled(PROTOCOL):
log(PROTOCOL, 'performing single level search in <%s>', self)
self.clear()
self._execute_query(LEVEL, attributes)
return self.entries
def search_subtree(self, attributes=None):
"""Perform the LDAP search operation WHOLE_SUBTREE scope
:return: Entries found in search
"""
if log_enabled(PROTOCOL):
log(PROTOCOL, 'performing whole subtree search in <%s>', self)
self.clear()
self._execute_query(SUBTREE, attributes)
return self.entries
def _entries_generator(self, responses):
for response in responses:
yield self._create_entry(response)
def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None):
"""Perform a paged search, can be called as an Iterator
:param attributes: optional attributes to search
:param paged_size: number of entries returned in each search
:type paged_size: int
:param paged_criticality: specify if the server must refuse the search when it is not capable of paging searches
:type paged_criticality: bool
:param generator: if True the paged searches are executed while generating the entries,
if False all the paged searches are executed before returning the list of entries
:type generator: bool
:return: Entries found in search
"""
if log_enabled(PROTOCOL):
log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size))
if not self.connection:
error_message = 'no connection established'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self.clear()
self._create_query_filter()
self.entries = []
self.execution_time = datetime.now()
response = self.connection.extend.standard.paged_search(search_base=self.base,
search_filter=self.query_filter,
search_scope=SUBTREE if self.sub_tree else LEVEL,
dereference_aliases=self.dereference_aliases,
attributes=attributes if attributes else self.attributes,
get_operational_attributes=self.get_operational_attributes,
controls=self.controls,
paged_size=paged_size,
paged_criticality=paged_criticality,
generator=generator)
if generator:
return self._entries_generator(response)
else:
return list(self._entries_generator(response))
class Writer(Cursor):
entry_class = WritableEntry
attribute_class = WritableAttribute
entry_initial_status = STATUS_WRITABLE
@staticmethod
def from_cursor(cursor, connection=None, object_def=None, custom_validator=None):
if connection is None:
connection = cursor.connection
if object_def is None:
object_def = cursor.definition
writer = Writer(connection, object_def, attributes=cursor.attributes)
for entry in cursor.entries:
if isinstance(cursor, Reader):
entry.entry_writable(object_def, writer, custom_validator=custom_validator)
elif isinstance(cursor, Writer):
pass
else:
error_message = 'unknown cursor type %s' % str(type(cursor))
if log_enabled(ERROR):
log(ERROR, '%s', error_message)
raise LDAPCursorError(error_message)
writer.execution_time = cursor.execution_time
if log_enabled(BASIC):
log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor)
return writer
@staticmethod
def from_response(connection, object_def, response=None):
if response is None:
if not connection.strategy.sync:
error_message = 'with asynchronous strategies response must be specified'
if log_enabled(ERROR):
log(ERROR, '%s', error_message)
raise LDAPCursorError(error_message)
elif connection.response:
response = connection.response
else:
error_message = 'response not present'
if log_enabled(ERROR):
log(ERROR, '%s', error_message)
raise LDAPCursorError(error_message)
writer = Writer(connection, object_def)
for resp in response:
if resp['type'] == 'searchResEntry':
entry = writer._create_entry(resp)
writer.entries.append(entry)
if log_enabled(BASIC):
log(BASIC, 'instantiated Writer Cursor <%r> from response', writer)
return writer
def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
self.dereference_aliases = DEREF_NEVER
if log_enabled(BASIC):
log(BASIC, 'instantiated Writer Cursor: <%r>', self)
def commit(self, refresh=True):
if log_enabled(PROTOCOL):
log(PROTOCOL, 'committed changes for <%s>', self)
self._reset_history()
successful = True
for entry in self.entries:
if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False):
successful = False
self.execution_time = datetime.now()
return successful
def discard(self):
if log_enabled(PROTOCOL):
log(PROTOCOL, 'discarded changes for <%s>', self)
for entry in self.entries:
entry.entry_discard_changes()
def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None): # base must be a single dn
"""Performs the LDAP search operation SINGLE_OBJECT scope
:return: Entry found in search
"""
if log_enabled(PROTOCOL):
log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self)
if not self.connection:
error_message = 'no connection established'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
response = []
with self.connection:
counter = 0
while counter < tries:
result = self.connection.search(search_base=entry_dn,
search_filter='(objectclass=*)',
search_scope=BASE,
dereference_aliases=DEREF_NEVER,
attributes=attributes if attributes else self.attributes,
get_operational_attributes=self.get_operational_attributes,
controls=controls)
if not self.connection.strategy.sync:
response, result, request = self.connection.get_response(result, get_request=True)
else:
response = self.connection.response
result = self.connection.result
request = self.connection.request
if result['result'] in [RESULT_SUCCESS]:
break
sleep(seconds)
counter += 1
self._store_operation_in_history(request, result, response)
if len(response) == 1:
return self._create_entry(response[0])
elif len(response) == 0:
return None
error_message = 'more than 1 entry returned for a single object search'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
def new(self, dn):
if log_enabled(BASIC):
log(BASIC, 'creating new entry <%s> for <%s>', dn, self)
dn = safe_dn(dn)
for entry in self.entries: # checks if dn is already used in a cursor entry
if entry.entry_dn == dn:
error_message = 'dn already present in cursor'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
rdns = safe_rdn(dn, decompose=True)
entry = self.entry_class(dn, self) # defines a new empty Entry
for attr in entry.entry_mandatory_attributes: # defines all mandatory attributes as virtual
entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self)
entry.__dict__[attr] = entry._state.attributes[attr]
entry.objectclass.set(self.definition._object_class)
for rdn in rdns: # adds virtual attributes from rdns in entry name (there can be more than one with the + syntax)
if rdn[0] in entry._state.definition._attributes:
rdn_name = entry._state.definition._attributes[rdn[0]].name # normalize case folding
if rdn_name not in entry._state.attributes:
entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self)
entry.__dict__[rdn_name] = entry._state.attributes[rdn_name]
entry.__dict__[rdn_name].set(rdn[1])
else:
error_message = 'rdn type \'%s\' not in object class definition' % rdn[0]
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
entry._state.set_status(STATUS_VIRTUAL) # set initial status
entry._state.set_status(STATUS_PENDING_CHANGES) # tries to change status to PENDING_CHANGES. If mandatory attributes are missing status is reverted to MANDATORY_MISSING
self.entries.append(entry)
return entry
def refresh_entry(self, entry, tries=4, seconds=2):
conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
self._do_not_reset = True
attr_list = []
if log_enabled(PROTOCOL):
log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self)
for attr in entry._state.attributes: # check friendly attribute name in AttrDef, do not check operational attributes
if attr.lower().startswith(conf_operational_attribute_prefix.lower()):
continue
if entry._state.definition[attr].name:
attr_list.append(entry._state.definition[attr].name)
else:
attr_list.append(entry._state.definition[attr].key)
temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds) # if any attribute is added, it is added only to the entry, not to the definition
self._do_not_reset = False
if temp_entry:
temp_entry._state.origin = entry._state.origin
entry.__dict__.clear()
entry.__dict__['_state'] = temp_entry._state
for attr in entry._state.attributes: # returns the attribute key
entry.__dict__[attr] = entry._state.attributes[attr]
for attr in entry.entry_attributes: # if any attribute of the class was deleted, make it virtual
if attr not in entry._state.attributes and attr in entry.entry_definition._attributes:
entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self)
entry.__dict__[attr] = entry._state.attributes[attr]
entry._state.set_status(entry._state._initial_status)
return True
return False
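A compact end-to-end sketch of the Reader and Writer cursors defined above (server address, base DN and values are hypothetical placeholders):

from ldap3 import Server, Connection, ObjectDef, Reader, Writer

conn = Connection(Server('ldap://ldap.example.com'), auto_bind=True)  # hypothetical server
person = ObjectDef('inetOrgPerson', conn)                 # attribute definitions read from the schema
r = Reader(conn, person, 'ou=people,dc=example,dc=com')   # hypothetical search base
r.search()                                                # read-only entries in r.entries
w = Writer.from_cursor(r)                                 # same entries, now writable
w[0].sn += 'Smith'                                        # stage a change on the first entry
w.commit()                                                # commit pending changes for all entries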


@@ -0,0 +1,671 @@
"""
"""
# Created on 2016.08.19
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import json
try:
from collections import OrderedDict
except ImportError:
from ..utils.ordDict import OrderedDict # for Python 2.6
from os import linesep
from .. import STRING_TYPES, SEQUENCE_TYPES, MODIFY_ADD, MODIFY_REPLACE
from .attribute import WritableAttribute
from .objectDef import ObjectDef
from .attrDef import AttrDef
from ..core.exceptions import LDAPKeyError, LDAPCursorError
from ..utils.conv import check_json_dict, format_json, prepare_for_stream
from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
from ..utils.dn import safe_dn, safe_rdn, to_dn
from ..utils.repr import to_stdout_encoding
from ..utils.ciDict import CaseInsensitiveWithAliasDict
from ..utils.config import get_config_parameter
from . import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\
STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES
from ..core.results import RESULT_SUCCESS
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
class EntryState(object):
"""Contains data on the status of the entry. Does not pollute the Entry __dict__.
"""
def __init__(self, dn, cursor):
self.dn = dn
self._initial_status = None
self._to = None # used for move and rename
self.status = STATUS_INIT
self.attributes = CaseInsensitiveWithAliasDict()
self.raw_attributes = CaseInsensitiveWithAliasDict()
self.response = None
self.cursor = cursor
self.origin = None # reference to the original read-only entry (set when made writable). Needed to update attributes in the read-only entry when modified (only if both refer to the same server)
self.read_time = None
self.changes = OrderedDict() # includes changes to commit in a writable entry
if cursor.definition:
self.definition = cursor.definition
else:
self.definition = None
def __repr__(self):
if self.__dict__ and self.dn is not None:
r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '<never>') + linesep
r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep
r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '<None>') + linesep
r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep
r += 'response: ' + ('present' if self.response else '<None>') + linesep
r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '<None>') + linesep
return r
else:
return object.__repr__(self)
def __str__(self):
return self.__repr__()
def set_status(self, status):
conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')]
if status not in STATUSES:
error_message = 'invalid entry status ' + str(status)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if status in INITIAL_STATUSES:
self._initial_status = status
self.status = status
if status == STATUS_DELETED:
self._initial_status = STATUS_VIRTUAL
if status == STATUS_COMMITTED:
self._initial_status = STATUS_WRITABLE
if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL): # checks if all mandatory attributes are present in new entries
for attr in self.definition._attributes:
if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def:
if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes:
self.status = STATUS_MANDATORY_MISSING
break
class EntryBase(object):
"""The Entry object contains a single LDAP entry.
Attributes can be accessed either by sequence, by assignment
or as dictionary keys. Keys are not case sensitive.
The Entry object is read only
- The DN is retrieved by entry_dn
- The cursor reference is in entry_cursor
- Raw attribute values are retrieved with the entry_raw_attributes property and the entry_raw_attribute() method
"""
def __init__(self, dn, cursor):
self.__dict__['_state'] = EntryState(dn, cursor)
def __repr__(self):
if self.__dict__ and self.entry_dn is not None:
r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '<never>') + linesep
if self._state.attributes:
for attr in sorted(self._state.attributes):
if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes):
r += ' ' + repr(self._state.attributes[attr]) + linesep
return r
else:
return object.__repr__(self)
def __str__(self):
return self.__repr__()
def __iter__(self):
for attribute in self._state.attributes:
yield self._state.attributes[attribute]
# raise StopIteration # deprecated in PEP 479
return
def __contains__(self, item):
try:
self.__getitem__(item)
return True
except LDAPKeyError:
return False
def __getattr__(self, item):
if isinstance(item, STRING_TYPES):
if item == '_state':
return self.__dict__['_state']
item = ''.join(item.split()).lower()
attr_found = None
for attr in self._state.attributes.keys():
if item == attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.aliases():
if item == attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.keys():
if item + ';binary' == attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.aliases():
if item + ';binary' == attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.keys():
if item + ';range' in attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.aliases():
if item + ';range' in attr.lower():
attr_found = attr
break
if not attr_found:
error_message = 'attribute \'%s\' not found' % item
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
return self._state.attributes[attr_found]
error_message = 'attribute name must be a string'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
def __setattr__(self, item, value):
if item in self._state.attributes:
error_message = 'attribute \'%s\' is read only' % item
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
else:
error_message = 'entry is read only, cannot add \'%s\'' % item
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
def __getitem__(self, item):
if isinstance(item, STRING_TYPES):
item = ''.join(item.split()).lower()
attr_found = None
for attr in self._state.attributes.keys():
if item == attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.aliases():
if item == attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.keys():
if item + ';binary' == attr.lower():
attr_found = attr
break
if not attr_found:
for attr in self._state.attributes.aliases():
if item + ';binary' == attr.lower():
attr_found = attr
break
if not attr_found:
error_message = 'key \'%s\' not found' % item
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPKeyError(error_message)
return self._state.attributes[attr_found]
error_message = 'key must be a string'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPKeyError(error_message)
def __eq__(self, other):
if isinstance(other, EntryBase):
return self.entry_dn == other.entry_dn
return False
def __lt__(self, other):
if isinstance(other, EntryBase):
return self.entry_dn <= other.entry_dn
return False
@property
def entry_dn(self):
return self._state.dn
@property
def entry_cursor(self):
return self._state.cursor
@property
def entry_status(self):
return self._state.status
@property
def entry_definition(self):
return self._state.definition
@property
def entry_raw_attributes(self):
return self._state.raw_attributes
def entry_raw_attribute(self, name):
"""
:param name: name of the attribute
:return: raw (unencoded) value of the attribute, None if attribute is not found
"""
return self._state.raw_attributes[name] if name in self._state.raw_attributes else None
@property
def entry_mandatory_attributes(self):
return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory]
@property
def entry_attributes(self):
return list(self._state.attributes.keys())
@property
def entry_attributes_as_dict(self):
return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items())
@property
def entry_read_time(self):
return self._state.read_time
@property
def _changes(self):
return self._state.changes
def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True):
json_entry = dict()
json_entry['dn'] = self.entry_dn
if checked_attributes:
if not include_empty:
# needed for python 2.6 compatibility
json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key])
else:
json_entry['attributes'] = self.entry_attributes_as_dict
if raw:
if not include_empty:
# needed for python 2.6 compatibility
json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key])
else:
json_entry['raw'] = dict(self.entry_raw_attributes)
if str is bytes: # Python 2
check_json_dict(json_entry)
json_output = json.dumps(json_entry,
ensure_ascii=True,
sort_keys=sort,
indent=indent,
check_circular=True,
default=format_json,
separators=(',', ': '))
if stream:
stream.write(json_output)
return json_output
def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None):
ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order)
ldif_lines = add_ldif_header(ldif_lines)
line_separator = line_separator or linesep
ldif_output = line_separator.join(ldif_lines)
if stream:
if stream.tell() == 0:
header = add_ldif_header(['-'])[0]
stream.write(prepare_for_stream(header + line_separator + line_separator))
stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
return ldif_output
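# A minimal usage sketch of the read-only entry interface described in the
# EntryBase docstring: case-insensitive access by attribute name, by item key
# and through the entry_* properties and export helpers. Host, credentials and
# DNs below are hypothetical placeholders, not part of the ldap3 sources.
def _entry_read_sketch():
    from ldap3 import Server, Connection, ALL

    server = Server('ldap.example.com', get_info=ALL)
    conn = Connection(server, 'cn=admin,dc=example,dc=com', 'secret', auto_bind=True)
    conn.search('dc=example,dc=com', '(objectClass=person)', attributes=['cn', 'sn', 'mail'])
    entry = conn.entries[0]                  # a read-only Entry built by the search
    print(entry.entry_dn)                    # DN of the entry
    print(entry.CN)                          # attribute access is case insensitive
    print(entry['sn'])                       # dictionary-style access
    # a lookup also matches ';binary' and ';range' variants, see __getattr__ above
    print(entry.entry_attributes_as_dict)    # plain dict of attribute names and value lists
    print(entry.entry_to_json(raw=True))     # JSON export with checked and raw values
    return entry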
class Entry(EntryBase):
"""The Entry object contains a single LDAP entry.
Attributes can be accessed either by sequence, by assignment
or as dictionary keys. Keys are not case sensitive.
The Entry object is read only
- The DN is retrieved by entry_dn
- The cursor reference is in entry_cursor
- Raw attribute values are retrieved with the entry_raw_attributes property and
the entry_raw_attribute() method
"""
def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None):
if not self.entry_cursor.schema:
error_message = 'schema must be available to make an entry writable'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
# returns a new WritableEntry and its Writer cursor
if object_def is None:
if self.entry_cursor.definition._object_class:
object_def = self.entry_definition._object_class
auxiliary_class = self.entry_definition._auxiliary_class + (auxiliary_class if isinstance(auxiliary_class, SEQUENCE_TYPES) else [])
elif 'objectclass' in self:
object_def = self.objectclass.values
if not object_def:
error_message = 'object class must be specified to make an entry writable'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
if not isinstance(object_def, ObjectDef):
object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator, auxiliary_class)
if attributes:
if isinstance(attributes, STRING_TYPES):
attributes = [attributes]
if isinstance(attributes, SEQUENCE_TYPES):
for attribute in attributes:
if attribute not in object_def._attributes:
error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
else:
attributes = []
if not writer_cursor:
from .cursor import Writer # local import to avoid circular reference in import at startup
writable_cursor = Writer(self.entry_cursor.connection, object_def)
else:
writable_cursor = writer_cursor
if attributes: # force reading of attributes
writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes)
else:
writable_entry = writable_cursor._create_entry(self._state.response)
writable_cursor.entries.append(writable_entry)
writable_entry._state.read_time = self.entry_read_time
writable_entry._state.origin = self # reference to the original read-only entry
# checks original entry for custom definitions in AttrDefs
for attr in writable_entry._state.origin.entry_definition._attributes:
original_attr = writable_entry._state.origin.entry_definition._attributes[attr]
if attr != original_attr.name and attr not in writable_entry._state.attributes:
old_attr_def = writable_entry.entry_definition._attributes[original_attr.name]
new_attr_def = AttrDef(original_attr.name,
key=attr,
validate=original_attr.validate,
pre_query=original_attr.pre_query,
post_query=original_attr.post_query,
default=original_attr.default,
dereference_dn=original_attr.dereference_dn,
description=original_attr.description,
mandatory=old_attr_def.mandatory, # keeps value read from schema
single_value=old_attr_def.single_value, # keeps value read from schema
alias=original_attr.other_names)
object_def = writable_entry.entry_definition
object_def -= old_attr_def
object_def += new_attr_def
# updates attribute name in entry attributes
new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor)
if original_attr.name in writable_entry._state.attributes:
new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names
new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values
new_attr.values = writable_entry._state.attributes[original_attr.name].values
new_attr.response = writable_entry._state.attributes[original_attr.name].response
writable_entry._state.attributes[attr] = new_attr
# writable_entry._state.attributes.set_alias(attr, new_attr.other_names)
del writable_entry._state.attributes[original_attr.name]
writable_entry._state.set_status(STATUS_WRITABLE)
return writable_entry
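# A minimal sketch of entry_writable() as implemented above: it derives an
# ObjectDef from the read cursor (or from the objectClass attribute), creates a
# Writer cursor and returns a WritableEntry linked back to this read-only entry.
# The extra attribute name is a hypothetical example, assuming it exists in the
# server schema for the entry's object class.
def _entry_writable_sketch(entry):
    writable = entry.entry_writable(attributes=['telephoneNumber'])  # forces a refresh including the extra attribute
    print(writable.entry_status)   # STATUS_WRITABLE
    return writable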
class WritableEntry(EntryBase):
def __setitem__(self, key, value):
if value is not Ellipsis: # hack for using implicit operators in writable attributes
self.__setattr__(key, value)
def __setattr__(self, item, value):
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
if item == '_state' and isinstance(value, EntryState):
self.__dict__['_state'] = value
return
if value is not Ellipsis: # hack for using implicit operators in writable attributes
# checks if using an alias
if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def:
if item not in self._state.attributes: # setting value to an attribute still without values
new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor)
self._state.attributes[str(item)] = new_attribute # force item to a string for key in attributes dict
self._state.attributes[item].set(value) # try to add to new_values
else:
error_message = 'attribute \'%s\' not defined' % item
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
def __getattr__(self, item):
if isinstance(item, STRING_TYPES):
if item == '_state':
return self.__dict__['_state']
item = ''.join(item.split()).lower()
for attr in self._state.attributes.keys():
if item == attr.lower():
return self._state.attributes[attr]
for attr in self._state.attributes.aliases():
if item == attr.lower():
return self._state.attributes[attr]
if item in self.entry_definition._attributes: # item is a new attribute to commit, creates the AttrDef and adds it to the attributes to retrieve
self._state.attributes[item] = WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor)
self.entry_cursor.attributes.add(item)
return self._state.attributes[item]
error_message = 'attribute \'%s\' not defined' % item
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
else:
error_message = 'attribute name must be a string'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
@property
def entry_virtual_attributes(self):
return [attr for attr in self.entry_attributes if self[attr].virtual]
def entry_commit_changes(self, refresh=True, controls=None, clear_history=True):
if clear_history:
self.entry_cursor._reset_history()
if self.entry_status == STATUS_READY_FOR_DELETION:
result = self.entry_cursor.connection.delete(self.entry_dn, controls)
if not self.entry_cursor.connection.strategy.sync:
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
else:
response = self.entry_cursor.connection.response
result = self.entry_cursor.connection.result
request = self.entry_cursor.connection.request
self.entry_cursor._store_operation_in_history(request, result, response)
if result['result'] == RESULT_SUCCESS:
dn = self.entry_dn
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # deletes original read-only Entry
cursor = self._state.origin.entry_cursor
self._state.origin.__dict__.clear()
self._state.origin.__dict__['_state'] = EntryState(dn, cursor)
self._state.origin._state.set_status(STATUS_DELETED)
cursor = self.entry_cursor
self.__dict__.clear()
self._state = EntryState(dn, cursor)
self._state.set_status(STATUS_DELETED)
return True
return False
elif self.entry_status == STATUS_READY_FOR_MOVING:
result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to)
if not self.entry_cursor.connection.strategy.sync:
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
else:
response = self.entry_cursor.connection.response
result = self.entry_cursor.connection.result
request = self.entry_cursor.connection.request
self.entry_cursor._store_operation_in_history(request, result, response)
if result['result'] == RESULT_SUCCESS:
self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to)
if refresh:
if self.entry_refresh():
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin
self._state.origin._state.dn = self.entry_dn
self._state.set_status(STATUS_COMMITTED)
self._state._to = None
return True
return False
elif self.entry_status == STATUS_READY_FOR_RENAMING:
rdn = '+'.join(safe_rdn(self._state._to))
result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn)
if not self.entry_cursor.connection.strategy.sync:
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
else:
response = self.entry_cursor.connection.response
result = self.entry_cursor.connection.result
request = self.entry_cursor.connection.request
self.entry_cursor._store_operation_in_history(request, result, response)
if result['result'] == RESULT_SUCCESS:
self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:])
if refresh:
if self.entry_refresh():
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin
self._state.origin._state.dn = self.entry_dn
self._state.set_status(STATUS_COMMITTED)
self._state._to = None
return True
return False
elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]:
missing_attributes = []
for attr in self.entry_mandatory_attributes:
if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes:
missing_attributes.append('\'' + attr + '\'')
error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn)
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
elif self.entry_status == STATUS_PENDING_CHANGES:
if self._changes:
if self.entry_definition._auxiliary_class: # checks if an attribute is from an auxiliary class and adds it to the objectClass attribute if not present
for attr in self._changes:
# checks schema to see if attribute is defined in one of the already present object classes
attr_classes = self.entry_cursor.schema.attribute_types[attr].mandatory_in + self.entry_cursor.schema.attribute_types[attr].optional_in
for object_class in self.objectclass:
if object_class in attr_classes:
break
else: # executed only if the attribute class is not present in the objectClass attribute
# checks if attribute is defined in one of the possible auxiliary classes
for aux_class in self.entry_definition._auxiliary_class:
if aux_class in attr_classes:
if self._state._initial_status == STATUS_VIRTUAL: # entry is new, there must be a pending objectClass MODIFY_REPLACE
self._changes['objectClass'][0][1].append(aux_class)
else:
self.objectclass += aux_class
if self._state._initial_status == STATUS_VIRTUAL:
new_attributes = dict()
for attr in self._changes:
new_attributes[attr] = self._changes[attr][0][1]
result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls)
else:
result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls)
if not self.entry_cursor.connection.strategy.sync: # asynchronous request
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
else:
response = self.entry_cursor.connection.response
result = self.entry_cursor.connection.result
request = self.entry_cursor.connection.request
self.entry_cursor._store_operation_in_history(request, result, response)
if result['result'] == RESULT_SUCCESS:
if refresh:
if self.entry_refresh():
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # updates original read-only entry if present
for attr in self: # adds AttrDefs from writable entry to origin entry definition if some is missing
if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes:
self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key]) # adds AttrDef from writable entry to original entry if missing
temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response)
self._state.origin.__dict__.clear()
self._state.origin.__dict__['_state'] = temp_entry._state
for attr in self: # returns the whole attribute object
if not attr.virtual:
self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key]
self._state.origin._state.read_time = self.entry_read_time
else:
self.entry_discard_changes() # if not refreshed remove committed changes
self._state.set_status(STATUS_COMMITTED)
return True
return False
def entry_discard_changes(self):
self._changes.clear()
self._state.set_status(self._state._initial_status)
def entry_delete(self):
if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]:
error_message = 'cannot delete entry, invalid status: ' + self.entry_status
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self._state.set_status(STATUS_READY_FOR_DELETION)
def entry_refresh(self, tries=4, seconds=2):
"""
Refreshes the entry from the LDAP Server
"""
if self.entry_cursor.connection:
if self.entry_cursor.refresh_entry(self, tries, seconds):
return True
return False
def entry_move(self, destination_dn):
if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]:
error_message = 'cannot move entry, invalid status: ' + self.entry_status
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self._state._to = safe_dn(destination_dn)
self._state.set_status(STATUS_READY_FOR_MOVING)
def entry_rename(self, new_name):
if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]:
error_message = 'cannot rename entry, invalid status: ' + self.entry_status
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPCursorError(error_message)
self._state._to = new_name
self._state.set_status(STATUS_READY_FOR_RENAMING)
@property
def entry_changes(self):
return self._changes
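# A minimal sketch of the WritableEntry lifecycle defined above: assignments and
# the += operator only queue changes in _changes; entry_commit_changes() sends
# them to the server, while move, rename and delete set a pending status that is
# also applied by entry_commit_changes(). Attribute names and DNs below are
# hypothetical placeholders.
def _writable_entry_sketch(writable):
    writable.sn = 'Smith'                                   # queued as a replace operation
    writable.mail += 'john.smith@example.com'               # queued as an add operation
    print(writable.entry_changes)                           # pending changes, nothing sent yet
    if writable.entry_commit_changes():                     # performs the LDAP modify and refreshes
        print(writable.entry_status)                        # back to a committed state

    writable.entry_rename('cn=John Smith')                  # sets STATUS_READY_FOR_RENAMING
    writable.entry_commit_changes()                         # performs the modify DN

    writable.entry_move('ou=retired,dc=example,dc=com')     # sets STATUS_READY_FOR_MOVING
    writable.entry_commit_changes()

    writable.entry_delete()                                 # sets STATUS_READY_FOR_DELETION
    writable.entry_commit_changes()                         # performs the delete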

View File

@ -0,0 +1,270 @@
"""
"""
# Created on 2014.02.02
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from os import linesep
from .attrDef import AttrDef
from ..core.exceptions import LDAPKeyError, LDAPObjectError, LDAPAttributeError, LDAPSchemaError
from .. import STRING_TYPES, SEQUENCE_TYPES, Server, Connection
from ..protocol.rfc4512 import SchemaInfo, constant_to_class_kind
from ..protocol.formatters.standard import find_attribute_validator
from ..utils.ciDict import CaseInsensitiveWithAliasDict
from ..utils.config import get_config_parameter
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
class ObjectDef(object):
"""Represent an object in the LDAP server. AttrDefs are stored in a dictionary; the key is the friendly name defined in AttrDef.
AttrDefs can be added and removed using the += and -= operators
ObjectDef can be accessed either as a sequence and a dictionary. When accessed the whole AttrDef instance is returned
"""
def __init__(self, object_class=None, schema=None, custom_validator=None, auxiliary_class=None):
if object_class is None:
object_class = []
if not isinstance(object_class, SEQUENCE_TYPES):
object_class = [object_class]
if auxiliary_class is None:
auxiliary_class = []
if not isinstance(auxiliary_class, SEQUENCE_TYPES):
auxiliary_class = [auxiliary_class]
self.__dict__['_attributes'] = CaseInsensitiveWithAliasDict()
self.__dict__['_custom_validator'] = custom_validator
self.__dict__['_oid_info'] = []
if isinstance(schema, Connection) and (schema._deferred_bind or schema._deferred_open): # probably a lazy connection, tries to bind
schema._fire_deferred()
if schema is not None:
if isinstance(schema, Server):
schema = schema.schema
elif isinstance(schema, Connection):
schema = schema.server.schema
elif isinstance(schema, SchemaInfo):
pass
elif schema:
error_message = 'unable to read schema'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPSchemaError(error_message)
if schema is None:
error_message = 'schema not present'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPSchemaError(error_message)
self.__dict__['_schema'] = schema
if self._schema:
object_class = [schema.object_classes[name].name[0] for name in object_class] # uses object class names capitalized as in schema
auxiliary_class = [schema.object_classes[name].name[0] for name in auxiliary_class]
for object_name in object_class:
if object_name:
self._populate_attr_defs(object_name)
for object_name in auxiliary_class:
if object_name:
self._populate_attr_defs(object_name)
self.__dict__['_object_class'] = object_class
self.__dict__['_auxiliary_class'] = auxiliary_class
if log_enabled(BASIC):
log(BASIC, 'instantiated ObjectDef: <%r>', self)
def _populate_attr_defs(self, object_name):
if object_name in self._schema.object_classes:
object_schema = self._schema.object_classes[object_name]
self.__dict__['_oid_info'].append(object_name + " (" + constant_to_class_kind(object_schema.kind) + ") " + str(object_schema.oid))
if object_schema.superior:
for sup in object_schema.superior:
self._populate_attr_defs(sup)
for attribute_name in object_schema.must_contain:
self.add_from_schema(attribute_name, True)
for attribute_name in object_schema.may_contain:
if attribute_name not in self._attributes: # the attribute could already be defined as "mandatory" in a superclass
self.add_from_schema(attribute_name, False)
else:
error_message = 'object class \'%s\' not defined in schema' % object_name
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPObjectError(error_message)
def __repr__(self):
if self._object_class:
r = 'OBJ : ' + ', '.join(self._object_class) + linesep
else:
r = 'OBJ : <None>' + linesep
if self._auxiliary_class:
r += 'AUX : ' + ', '.join(self._auxiliary_class) + linesep
else:
r += 'AUX : <None>' + linesep
r += 'OID: ' + ', '.join([oid for oid in self._oid_info]) + linesep
r += 'MUST: ' + ', '.join(sorted([attr for attr in self._attributes if self._attributes[attr].mandatory])) + linesep
r += 'MAY : ' + ', '.join(sorted([attr for attr in self._attributes if not self._attributes[attr].mandatory])) + linesep
return r
def __str__(self):
return self.__repr__()
def __getitem__(self, item):
return self.__getattr__(item)
def __getattr__(self, item):
item = ''.join(item.split()).lower()
if '_attributes' in self.__dict__:
try:
return self._attributes[item]
except KeyError:
error_message = 'key \'%s\' not present' % item
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPKeyError(error_message)
else:
error_message = 'internal _attributes property not defined'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPKeyError(error_message)
def __setattr__(self, key, value):
error_message = 'object \'%s\' is read only' % key
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPObjectError(error_message)
def __iadd__(self, other):
self.add_attribute(other)
return self
def __isub__(self, other):
if isinstance(other, AttrDef):
self.remove_attribute(other.key)
elif isinstance(other, STRING_TYPES):
self.remove_attribute(other)
return self
def __iter__(self):
for attribute in self._attributes:
yield self._attributes[attribute]
def __len__(self):
return len(self._attributes)
if str is not bytes: # Python 3
def __bool__(self): # needed to make the ObjectDef appear as existing in a boolean test even if it has no attributes
return True
else: # Python 2
def __nonzero__(self):
return True
def __contains__(self, item):
try:
self.__getitem__(item)
except KeyError:
return False
return True
def add_from_schema(self, attribute_name, mandatory=False):
attr_def = AttrDef(attribute_name)
attr_def.validate = find_attribute_validator(self._schema, attribute_name, self._custom_validator)
attr_def.mandatory = mandatory # in schema mandatory is specified in the object class, not in the attribute class
if self._schema and self._schema.attribute_types and attribute_name in self._schema.attribute_types:
attr_def.single_value = self._schema.attribute_types[attribute_name].single_value
attr_def.oid_info = self._schema.attribute_types[attribute_name]
self.add_attribute(attr_def)
def add_attribute(self, definition=None):
"""Add an AttrDef to the ObjectDef. Can be called with the += operator.
:param definition: the AttrDef object to add; can also be a string containing the name of the attribute to add, or a list of both
"""
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
if isinstance(definition, STRING_TYPES):
self.add_from_schema(definition)
elif isinstance(definition, AttrDef):
if definition.key.lower() not in conf_attributes_excluded_from_object_def:
if definition.key not in self._attributes:
self._attributes[definition.key] = definition
if definition.name and definition.name != definition.key:
self._attributes.set_alias(definition.key, definition.name)
other_names = [name for name in definition.oid_info.name if definition.key.lower() != name.lower()] if definition.oid_info else None
if other_names:
self._attributes.set_alias(definition.key, other_names)
if not definition.validate:
validator = find_attribute_validator(self._schema, definition.key, self._custom_validator)
self._attributes[definition.key].validate = validator
elif isinstance(definition, SEQUENCE_TYPES):
for element in definition:
self.add_attribute(element)
else:
error_message = 'unable to add element to object definition'
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPObjectError(error_message)
def remove_attribute(self, item):
"""Remove an AttrDef from the ObjectDef. Can be called with the -= operator.
:param item: the AttrDef to remove; can also be a string containing the name of the attribute to remove
"""
key = None
if isinstance(item, STRING_TYPES):
key = ''.join(item.split()).lower()
elif isinstance(item, AttrDef):
key = item.key.lower()
if key:
for attr in self._attributes:
if key == attr.lower():
del self._attributes[attr]
break
else:
error_message = 'key \'%s\' not present' % key
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPKeyError(error_message)
else:
error_message = 'key type must be str or AttrDef not ' + str(type(item))
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', error_message, self)
raise LDAPAttributeError(error_message)
def clear_attributes(self):
"""Empty the ObjectDef attribute list
"""
self.__dict__['_object_class'] = None
self.__dict__['_auxiliary_class'] = None
self.__dict__['_attributes'] = CaseInsensitiveWithAliasDict()
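# A minimal sketch of the ObjectDef interface described in the class docstring:
# attribute definitions are read from the server schema and can be added or
# removed with the += and -= operators and read back by name, by key or by
# iteration. The object class and attribute names are hypothetical examples,
# assuming they exist in the connected server's schema.
def _object_def_sketch(connection):
    from ldap3 import ObjectDef, AttrDef

    person = ObjectDef('inetOrgPerson', connection)   # MUST/MAY attributes populated from the schema
    person += 'employeeNumber'                        # add an attribute by name
    person += AttrDef('givenName', key='first')       # add an AttrDef with a friendly key
    person -= 'first'                                 # remove it again by key
    print(person.cn)                                  # AttrDef instance, lookup is case insensitive
    print('mail' in person)                           # membership test through __contains__
    for attr_def in person:                           # iterates over the AttrDef instances
        print(attr_def.key, attr_def.mandatory)
    return person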

File diff suppressed because it is too large

View File

@ -0,0 +1,599 @@
"""
"""
# Created on 2014.05.14
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from os import sep
from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \
RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \
RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \
RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \
RESULT_CONSTRAINT_VIOLATION, \
RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \
RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \
RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \
RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \
RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \
RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \
RESULT_NOT_ALLOWED_ON_NON_LEAF, \
RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \
RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \
RESULT_INAPPROPRIATE_AUTHENTICATION
import socket
# LDAPException hierarchy
class LDAPException(Exception):
pass
class LDAPOperationResult(LDAPException):
def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None):
if cls is LDAPOperationResult and result and result in exception_table:
exc = super(LDAPOperationResult, exception_table[result]).__new__(
exception_table[result]) # create an exception of the required result error
exc.result = result
exc.description = description
exc.dn = dn
exc.message = message
exc.type = response_type
exc.response = response
else:
exc = super(LDAPOperationResult, cls).__new__(cls)
return exc
def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None):
self.result = result
self.description = description
self.dn = dn
self.message = message
self.type = response_type
self.response = response
def __str__(self):
s = [self.__class__.__name__,
str(self.result) if self.result else None,
self.description if self.description else None,
self.dn if self.dn else None,
self.message if self.message else None,
self.type if self.type else None,
self.response if self.response else None]
return ' - '.join([str(item) for item in s if item is not None])
def __repr__(self):
return self.__str__()
class LDAPOperationsErrorResult(LDAPOperationResult):
pass
class LDAPProtocolErrorResult(LDAPOperationResult):
pass
class LDAPTimeLimitExceededResult(LDAPOperationResult):
pass
class LDAPSizeLimitExceededResult(LDAPOperationResult):
pass
class LDAPAuthMethodNotSupportedResult(LDAPOperationResult):
pass
class LDAPStrongerAuthRequiredResult(LDAPOperationResult):
pass
class LDAPReferralResult(LDAPOperationResult):
pass
class LDAPAdminLimitExceededResult(LDAPOperationResult):
pass
class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult):
pass
class LDAPConfidentialityRequiredResult(LDAPOperationResult):
pass
class LDAPSASLBindInProgressResult(LDAPOperationResult):
pass
class LDAPNoSuchAttributeResult(LDAPOperationResult):
pass
class LDAPUndefinedAttributeTypeResult(LDAPOperationResult):
pass
class LDAPInappropriateMatchingResult(LDAPOperationResult):
pass
class LDAPConstraintViolationResult(LDAPOperationResult):
pass
class LDAPAttributeOrValueExistsResult(LDAPOperationResult):
pass
class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult):
pass
class LDAPNoSuchObjectResult(LDAPOperationResult):
pass
class LDAPAliasProblemResult(LDAPOperationResult):
pass
class LDAPInvalidDNSyntaxResult(LDAPOperationResult):
pass
class LDAPAliasDereferencingProblemResult(LDAPOperationResult):
pass
class LDAPInappropriateAuthenticationResult(LDAPOperationResult):
pass
class LDAPInvalidCredentialsResult(LDAPOperationResult):
pass
class LDAPInsufficientAccessRightsResult(LDAPOperationResult):
pass
class LDAPBusyResult(LDAPOperationResult):
pass
class LDAPUnavailableResult(LDAPOperationResult):
pass
class LDAPUnwillingToPerformResult(LDAPOperationResult):
pass
class LDAPLoopDetectedResult(LDAPOperationResult):
pass
class LDAPNamingViolationResult(LDAPOperationResult):
pass
class LDAPObjectClassViolationResult(LDAPOperationResult):
pass
class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult):
pass
class LDAPNotAllowedOnRDNResult(LDAPOperationResult):
pass
class LDAPEntryAlreadyExistsResult(LDAPOperationResult):
pass
class LDAPObjectClassModsProhibitedResult(LDAPOperationResult):
pass
class LDAPAffectMultipleDSASResult(LDAPOperationResult):
pass
class LDAPOtherResult(LDAPOperationResult):
pass
class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult):
pass
class LDAPLCUPSecurityViolationResult(LDAPOperationResult):
pass
class LDAPLCUPInvalidDataResult(LDAPOperationResult):
pass
class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult):
pass
class LDAPLCUPReloadRequiredResult(LDAPOperationResult):
pass
class LDAPCanceledResult(LDAPOperationResult):
pass
class LDAPNoSuchOperationResult(LDAPOperationResult):
pass
class LDAPTooLateResult(LDAPOperationResult):
pass
class LDAPCannotCancelResult(LDAPOperationResult):
pass
class LDAPAssertionFailedResult(LDAPOperationResult):
pass
class LDAPAuthorizationDeniedResult(LDAPOperationResult):
pass
class LDAPESyncRefreshRequiredResult(LDAPOperationResult):
pass
exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult,
RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult,
RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult,
RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult,
RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult,
RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult,
RESULT_REFERRAL: LDAPReferralResult,
RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult,
RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult,
RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult,
RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult,
RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult,
RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult,
RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult,
RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult,
RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult,
RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult,
RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult,
RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult,
RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult,
RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult,
RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult,
RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult,
RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult,
RESULT_BUSY: LDAPBusyResult,
RESULT_UNAVAILABLE: LDAPUnavailableResult,
RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult,
RESULT_LOOP_DETECTED: LDAPLoopDetectedResult,
RESULT_NAMING_VIOLATION: LDAPNamingViolationResult,
RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult,
RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult,
RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult,
RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult,
RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult,
RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult,
RESULT_OTHER: LDAPOtherResult,
RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult,
RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult,
RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult,
RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult,
RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult,
RESULT_CANCELED: LDAPCanceledResult,
RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult,
RESULT_TOO_LATE: LDAPTooLateResult,
RESULT_CANNOT_CANCEL: LDAPCannotCancelResult,
RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult,
RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult,
RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult}
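# A small sketch of the dispatch implemented in LDAPOperationResult.__new__ and
# exception_table above: building the base class with a known result code yields
# an instance of the mapped subclass. This is also what a Connection opened with
# raise_exceptions=True raises on a failed operation. The DN below is a
# hypothetical placeholder.
def _exception_dispatch_sketch():
    exc = LDAPOperationResult(result=RESULT_NO_SUCH_OBJECT,
                              description='noSuchObject',
                              dn='cn=missing,dc=example,dc=com',
                              message='entry not found')
    assert isinstance(exc, LDAPNoSuchObjectResult)   # result code 32 maps to this subclass
    print(exc)                                       # class name, code, description, dn and message joined with ' - '
    return exc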
class LDAPExceptionError(LDAPException):
pass
# configuration exceptions
class LDAPConfigurationError(LDAPExceptionError):
pass
class LDAPUnknownStrategyError(LDAPConfigurationError):
pass
class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError):
pass
class LDAPSSLConfigurationError(LDAPConfigurationError):
pass
class LDAPDefinitionError(LDAPConfigurationError):
pass
class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError):
pass
class LDAPConfigurationParameterError(LDAPConfigurationError):
pass
# abstract layer exceptions
class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError):
pass
class LDAPObjectError(LDAPExceptionError, ValueError):
pass
class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError):
pass
class LDAPCursorError(LDAPExceptionError):
pass
class LDAPObjectDereferenceError(LDAPExceptionError):
pass
# security exceptions
class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError):
pass
class LDAPInvalidTlsSpecificationError(LDAPExceptionError):
pass
class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError):
pass
# connection exceptions
class LDAPBindError(LDAPExceptionError):
pass
class LDAPInvalidServerError(LDAPExceptionError):
pass
class LDAPSASLMechanismNotSupportedError(LDAPExceptionError):
pass
class LDAPConnectionIsReadOnlyError(LDAPExceptionError):
pass
class LDAPChangeError(LDAPExceptionError, ValueError):
pass
class LDAPServerPoolError(LDAPExceptionError):
pass
class LDAPServerPoolExhaustedError(LDAPExceptionError):
pass
class LDAPInvalidPortError(LDAPExceptionError):
pass
class LDAPStartTLSError(LDAPExceptionError):
pass
class LDAPCertificateError(LDAPExceptionError):
pass
class LDAPUserNameNotAllowedError(LDAPExceptionError):
pass
class LDAPUserNameIsMandatoryError(LDAPExceptionError):
pass
class LDAPPasswordIsMandatoryError(LDAPExceptionError):
pass
class LDAPInvalidFilterError(LDAPExceptionError):
pass
class LDAPInvalidScopeError(LDAPExceptionError, ValueError):
pass
class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError):
pass
class LDAPInvalidValueError(LDAPExceptionError, ValueError):
pass
class LDAPControlError(LDAPExceptionError, ValueError):
pass
class LDAPExtensionError(LDAPExceptionError, ValueError):
pass
class LDAPLDIFError(LDAPExceptionError):
pass
class LDAPSchemaError(LDAPExceptionError):
pass
class LDAPSASLPrepError(LDAPExceptionError):
pass
class LDAPSASLBindInProgressError(LDAPExceptionError):
pass
class LDAPMetricsError(LDAPExceptionError):
pass
class LDAPObjectClassError(LDAPExceptionError):
pass
class LDAPInvalidDnError(LDAPExceptionError):
pass
class LDAPResponseTimeoutError(LDAPExceptionError):
pass
class LDAPTransactionError(LDAPExceptionError):
pass
# communication exceptions
class LDAPCommunicationError(LDAPExceptionError):
pass
class LDAPSocketOpenError(LDAPCommunicationError):
pass
class LDAPSocketCloseError(LDAPCommunicationError):
pass
class LDAPSocketReceiveError(LDAPCommunicationError, socket.error):
pass
class LDAPSocketSendError(LDAPCommunicationError, socket.error):
pass
class LDAPSessionTerminatedByServerError(LDAPCommunicationError):
pass
class LDAPUnknownResponseError(LDAPCommunicationError):
pass
class LDAPUnknownRequestError(LDAPCommunicationError):
pass
class LDAPReferralError(LDAPCommunicationError):
pass
# pooling exceptions
class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError):
pass
class LDAPConnectionPoolNotStartedError(LDAPExceptionError):
pass
# restartable strategy
class LDAPMaximumRetriesError(LDAPExceptionError):
def __str__(self):
s = []
if self.args:
if isinstance(self.args, tuple):
if len(self.args) > 0:
s.append('LDAPMaximumRetriesError: ' + str(self.args[0]))
if len(self.args) > 1:
s.append('Exception history:')
prev_exc = ''
for i, exc in enumerate(self.args[1]): # args[1] contains exception history
# if str(exc[1]) != prev_exc:
# s.append((str(i).rjust(5) + ' ' + str(exc[0]) + ': ' + str(exc[1]) + ' - ' + str(exc[2])))
# prev_exc = str(exc[1])
if str(exc) != prev_exc:
s.append((str(i).rjust(5) + ' ' + str(type(exc)) + ': ' + str(exc)))
prev_exc = str(exc)
if len(self.args) > 2:
s.append('Maximum number of retries reached: ' + str(self.args[2]))
else:
s = [LDAPExceptionError.__str__(self)]
return sep.join(s)
# exception factories
def communication_exception_factory(exc_to_raise, exc):
"""
Generates a new exception class of the requested type (subclass of LDAPCommunicationError) merged with the exception raised by the interpreter
"""
if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]:
return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
else:
raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
def start_tls_exception_factory(exc_to_raise, exc):
"""
Generates a new exception class of the requested type merged with the exception raised by the interpreter
"""
if exc_to_raise.__name__ == 'LDAPStartTLSError':
return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
else:
raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
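# A small sketch of communication_exception_factory() defined above: it builds,
# at raise time, a class that inherits both from an ldap3 communication
# exception and from the original low-level exception, so callers can catch
# either type.
def _exception_factory_sketch():
    original = socket.timeout('timed out')   # the low-level error caught by the strategy code
    merged_cls = communication_exception_factory(LDAPSocketOpenError, original)
    merged = merged_cls('unable to open socket: timed out')
    assert isinstance(merged, LDAPSocketOpenError)
    assert isinstance(merged, socket.timeout)   # still catchable as the original exception type
    return merged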

View File

@ -0,0 +1,306 @@
"""
"""
# Created on 2014.03.14
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime, MINYEAR
from os import linesep
from random import randint
from time import sleep
from .. import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError
from .server import Server
from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM]
class ServerPoolState(object):
def __init__(self, server_pool):
self.servers = [] # each element is a list: [server, last_checked_time, available]
self.strategy = server_pool.strategy
self.server_pool = server_pool
self.last_used_server = 0
self.refresh()
self.initialize_time = datetime.now()
if log_enabled(BASIC):
log(BASIC, 'instantiated ServerPoolState: <%r>', self)
def __str__(self):
s = 'servers: ' + linesep
if self.servers:
for server in self.servers:
s += str(server[0]) + linesep
else:
s += 'None' + linesep
s += 'Pool strategy: ' + str(self.strategy) + linesep
s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.servers[self.last_used_server][0]))
return s
def refresh(self):
self.servers = []
for server in self.server_pool.servers:
self.servers.append([server, datetime(MINYEAR, 1, 1), True]) # server, smallest date ever, supposed available
self.last_used_server = randint(0, len(self.servers) - 1)
def get_current_server(self):
return self.servers[self.last_used_server][0]
def get_server(self):
if self.servers:
if self.server_pool.strategy == FIRST:
if self.server_pool.active:
# returns the first active server
self.last_used_server = self.find_active_server(starting=0)
else:
# always returns the first server - no pooling
self.last_used_server = 0
elif self.server_pool.strategy == ROUND_ROBIN:
if self.server_pool.active:
# returns the next active server in a circular range
self.last_used_server = self.find_active_server(self.last_used_server + 1)
else:
# returns the next server in a circular range
self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.servers) else 0
elif self.server_pool.strategy == RANDOM:
if self.server_pool.active:
self.last_used_server = self.find_active_random_server()
else:
# returns a random server in the pool
self.last_used_server = randint(0, len(self.servers) - 1)
else:
if log_enabled(ERROR):
log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy)
raise LDAPUnknownStrategyError('unknown server pooling strategy')
if log_enabled(BASIC):
log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server)
return self.servers[self.last_used_server][0]
else:
if log_enabled(ERROR):
log(ERROR, 'no servers in Server Pool <%s>', self)
raise LDAPServerPoolError('no servers in server pool')
def find_active_random_server(self):
counter = self.server_pool.active # can be True for "forever" or the number of cycles to try
while counter:
if log_enabled(NETWORK):
log(NETWORK, 'entering loop for finding active server in pool <%s>', self)
temp_list = self.servers[:] # copy
while temp_list:
# pops a random server from a temp list and checks its
# availability, if not available tries another one
server = temp_list.pop(randint(0, len(temp_list) - 1))
if not server[2]: # server is offline
if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server[1]).seconds < self.server_pool.exhaust: # keeps server offline
if log_enabled(NETWORK):
log(NETWORK, 'server <%s> excluded from checking because it is offline', server[0])
continue
if log_enabled(NETWORK):
log(NETWORK, 'server <%s> reinserted in pool', server[0])
server[1] = datetime.now()
if log_enabled(NETWORK):
log(NETWORK, 'checking server <%s> for availability', server[0])
if server[0].check_availability():
# returns a random active server in the pool
server[2] = True
return self.servers.index(server)
else:
server[2] = False
if not isinstance(self.server_pool.active, bool):
counter -= 1
if log_enabled(ERROR):
log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self)
raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries')
def find_active_server(self, starting):
conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT')
counter = self.server_pool.active # can be True for "forever" or the number of cycles to try
if starting >= len(self.servers):
starting = 0
while counter:
if log_enabled(NETWORK):
log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self)
index = -1
pool_size = len(self.servers)
while index < pool_size - 1:
index += 1
offset = index + starting if index + starting < pool_size else index + starting - pool_size
if not self.servers[offset][2]: # server is offline
if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - self.servers[offset][1]).seconds < self.server_pool.exhaust: # keeps server offline
if log_enabled(NETWORK):
if isinstance(self.server_pool.exhaust, bool):
log(NETWORK, 'server <%s> excluded from checking because it is offline', self.servers[offset][0])
else:
log(NETWORK, 'server <%s> excluded from checking because it is offline for %d seconds', self.servers[offset][0], (self.server_pool.exhaust - (datetime.now() - self.servers[offset][1]).seconds))
continue
if log_enabled(NETWORK):
log(NETWORK, 'server <%s> reinserted in pool', self.servers[offset][0])
self.servers[offset][1] = datetime.now()
if log_enabled(NETWORK):
log(NETWORK, 'checking server <%s> for availability', self.servers[offset][0])
if self.servers[offset][0].check_availability():
self.servers[offset][2] = True
return offset
else:
self.servers[offset][2] = False # sets server offline
if not isinstance(self.server_pool.active, bool):
counter -= 1
if log_enabled(NETWORK):
log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout)
sleep(conf_pool_timeout)
if log_enabled(ERROR):
log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self)
raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries')
def __len__(self):
return len(self.servers)
class ServerPool(object):
def __init__(self,
servers=None,
pool_strategy=ROUND_ROBIN,
active=True,
exhaust=False):
if pool_strategy not in POOLING_STRATEGIES:
if log_enabled(ERROR):
log(ERROR, 'unknown pooling strategy <%s>', pool_strategy)
raise LDAPUnknownStrategyError('unknown pooling strategy')
if exhaust and not active:
if log_enabled(ERROR):
log(ERROR, 'cannot instantiate pool with exhaust and not active')
raise LDAPServerPoolError('pools can be exhausted only when checking for active servers')
self.servers = []
self.pool_states = dict()
self.active = active
self.exhaust = exhaust
if isinstance(servers, SEQUENCE_TYPES + (Server, )):
self.add(servers)
elif isinstance(servers, STRING_TYPES):
self.add(Server(servers))
self.strategy = pool_strategy
if log_enabled(BASIC):
log(BASIC, 'instantiated ServerPool: <%r>', self)
def __str__(self):
s = 'servers: ' + linesep
if self.servers:
for server in self.servers:
s += str(server) + linesep
else:
s += 'None' + linesep
s += 'Pool strategy: ' + str(self.strategy)
s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False')
s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False')
return s
def __repr__(self):
r = 'ServerPool(servers='
if self.servers:
r += '['
for server in self.servers:
r += server.__repr__() + ', '
r = r[:-2] + ']'
else:
r += 'None'
r += ', pool_strategy={0.strategy!r}'.format(self)
r += ', active={0.active!r}'.format(self)
r += ', exhaust={0.exhaust!r}'.format(self)
r += ')'
return r
def __len__(self):
return len(self.servers)
def __getitem__(self, item):
return self.servers[item]
def __iter__(self):
return self.servers.__iter__()
def add(self, servers):
if isinstance(servers, Server):
if servers not in self.servers:
self.servers.append(servers)
elif isinstance(servers, STRING_TYPES):
self.servers.append(Server(servers))
elif isinstance(servers, SEQUENCE_TYPES):
for server in servers:
if isinstance(server, Server):
self.servers.append(server)
elif isinstance(server, STRING_TYPES):
self.servers.append(Server(server))
else:
if log_enabled(ERROR):
log(ERROR, 'element must be a server in Server Pool <%s>', self)
raise LDAPServerPoolError('server in ServerPool must be a Server')
else:
if log_enabled(ERROR):
log(ERROR, 'server must be a Server or a list of Servers when adding to Server Pool <%s>', self)
raise LDAPServerPoolError('server must be a Server or a list of Servers')
for connection in self.pool_states:
# notifies connections using this pool to refresh
self.pool_states[connection].refresh()
def remove(self, server):
if server in self.servers:
self.servers.remove(server)
else:
if log_enabled(ERROR):
log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self)
raise LDAPServerPoolError('server not in server pool')
for connection in self.pool_states:
# notifies connections using this pool to refresh
self.pool_states[connection].refresh()
def initialize(self, connection):
pool_state = ServerPoolState(self)
# registers pool_state in ServerPool object
self.pool_states[connection] = pool_state
def get_server(self, connection):
if connection in self.pool_states:
return self.pool_states[connection].get_server()
else:
if log_enabled(ERROR):
log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
raise LDAPServerPoolError('connection not in ServerPoolState')
def get_current_server(self, connection):
if connection in self.pool_states:
return self.pool_states[connection].get_current_server()
else:
if log_enabled(ERROR):
log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
raise LDAPServerPoolError('connection not in ServerPoolState')
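# Illustrative usage sketch (not part of the original ldap3 module): how a ServerPool with the
# ROUND_ROBIN strategy might be wired to a Connection. Host names, bind DN and password are
# placeholders; Server and ROUND_ROBIN are already available in this module and Connection is
# assumed to be importable from the top-level ldap3 package.
def _pool_usage_sketch():
    from ldap3 import Connection
    pool = ServerPool([Server('ldap01.example.com'), Server('ldap02.example.com')],
                      pool_strategy=ROUND_ROBIN,
                      active=True,   # keep checking candidates until an available server is found
                      exhaust=True)  # raise LDAPServerPoolExhaustedError when no server is available
    conn = Connection(pool, user='cn=admin,dc=example,dc=com', password='password', auto_bind=True)
    conn.search('dc=example,dc=com', '(objectClass=person)', attributes=['cn'])
    return conn.entries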

@ -0,0 +1,137 @@
"""
"""
# Created on 2016.08.31
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
# result codes
RESULT_SUCCESS = 0
RESULT_OPERATIONS_ERROR = 1
RESULT_PROTOCOL_ERROR = 2
RESULT_TIME_LIMIT_EXCEEDED = 3
RESULT_SIZE_LIMIT_EXCEEDED = 4
RESULT_COMPARE_FALSE = 5
RESULT_COMPARE_TRUE = 6
RESULT_AUTH_METHOD_NOT_SUPPORTED = 7
RESULT_STRONGER_AUTH_REQUIRED = 8
RESULT_RESERVED = 9
RESULT_REFERRAL = 10
RESULT_ADMIN_LIMIT_EXCEEDED = 11
RESULT_UNAVAILABLE_CRITICAL_EXTENSION = 12
RESULT_CONFIDENTIALITY_REQUIRED = 13
RESULT_SASL_BIND_IN_PROGRESS = 14
RESULT_NO_SUCH_ATTRIBUTE = 16
RESULT_UNDEFINED_ATTRIBUTE_TYPE = 17
RESULT_INAPPROPRIATE_MATCHING = 18
RESULT_CONSTRAINT_VIOLATION = 19
RESULT_ATTRIBUTE_OR_VALUE_EXISTS = 20
RESULT_INVALID_ATTRIBUTE_SYNTAX = 21
RESULT_NO_SUCH_OBJECT = 32
RESULT_ALIAS_PROBLEM = 33
RESULT_INVALID_DN_SYNTAX = 34
RESULT_ALIAS_DEREFERENCING_PROBLEM = 36
RESULT_INAPPROPRIATE_AUTHENTICATION = 48
RESULT_INVALID_CREDENTIALS = 49
RESULT_INSUFFICIENT_ACCESS_RIGHTS = 50
RESULT_BUSY = 51
RESULT_UNAVAILABLE = 52
RESULT_UNWILLING_TO_PERFORM = 53
RESULT_LOOP_DETECTED = 54
RESULT_NAMING_VIOLATION = 64
RESULT_OBJECT_CLASS_VIOLATION = 65
RESULT_NOT_ALLOWED_ON_NON_LEAF = 66
RESULT_NOT_ALLOWED_ON_RDN = 67
RESULT_ENTRY_ALREADY_EXISTS = 68
RESULT_OBJECT_CLASS_MODS_PROHIBITED = 69
RESULT_AFFECT_MULTIPLE_DSAS = 71
RESULT_OTHER = 80
RESULT_LCUP_RESOURCES_EXHAUSTED = 113
RESULT_LCUP_SECURITY_VIOLATION = 114
RESULT_LCUP_INVALID_DATA = 115
RESULT_LCUP_UNSUPPORTED_SCHEME = 116
RESULT_LCUP_RELOAD_REQUIRED = 117
RESULT_CANCELED = 118
RESULT_NO_SUCH_OPERATION = 119
RESULT_TOO_LATE = 120
RESULT_CANNOT_CANCEL = 121
RESULT_ASSERTION_FAILED = 122
RESULT_AUTHORIZATION_DENIED = 123
RESULT_E_SYNC_REFRESH_REQUIRED = 4096
RESULT_CODES = {
RESULT_SUCCESS: 'success',
RESULT_OPERATIONS_ERROR: 'operationsError',
RESULT_PROTOCOL_ERROR: 'protocolError',
RESULT_TIME_LIMIT_EXCEEDED: 'timeLimitExceeded',
RESULT_SIZE_LIMIT_EXCEEDED: 'sizeLimitExceeded',
RESULT_COMPARE_FALSE: 'compareFalse',
RESULT_COMPARE_TRUE: 'compareTrue',
RESULT_AUTH_METHOD_NOT_SUPPORTED: 'authMethodNotSupported',
RESULT_RESERVED: 'reserved',
RESULT_STRONGER_AUTH_REQUIRED: 'strongerAuthRequired',
RESULT_REFERRAL: 'referral',
RESULT_ADMIN_LIMIT_EXCEEDED: 'adminLimitExceeded',
RESULT_UNAVAILABLE_CRITICAL_EXTENSION: 'unavailableCriticalExtension',
RESULT_CONFIDENTIALITY_REQUIRED: 'confidentialityRequired',
RESULT_SASL_BIND_IN_PROGRESS: 'saslBindInProgress',
RESULT_NO_SUCH_ATTRIBUTE: 'noSuchAttribute',
RESULT_UNDEFINED_ATTRIBUTE_TYPE: 'undefinedAttributeType',
RESULT_INAPPROPRIATE_MATCHING: 'inappropriateMatching',
RESULT_CONSTRAINT_VIOLATION: 'constraintViolation',
RESULT_ATTRIBUTE_OR_VALUE_EXISTS: 'attributeOrValueExists',
RESULT_INVALID_ATTRIBUTE_SYNTAX: 'invalidAttributeSyntax',
RESULT_NO_SUCH_OBJECT: 'noSuchObject',
RESULT_ALIAS_PROBLEM: 'aliasProblem',
RESULT_INVALID_DN_SYNTAX: 'invalidDNSyntax',
RESULT_ALIAS_DEREFERENCING_PROBLEM: 'aliasDereferencingProblem',
RESULT_INAPPROPRIATE_AUTHENTICATION: 'inappropriateAuthentication',
RESULT_INVALID_CREDENTIALS: 'invalidCredentials',
RESULT_INSUFFICIENT_ACCESS_RIGHTS: 'insufficientAccessRights',
RESULT_BUSY: 'busy',
RESULT_UNAVAILABLE: 'unavailable',
RESULT_UNWILLING_TO_PERFORM: 'unwillingToPerform',
RESULT_LOOP_DETECTED: 'loopDetected',
RESULT_NAMING_VIOLATION: 'namingViolation',
RESULT_OBJECT_CLASS_VIOLATION: 'objectClassViolation',
RESULT_NOT_ALLOWED_ON_NON_LEAF: 'notAllowedOnNonLeaf',
RESULT_NOT_ALLOWED_ON_RDN: 'notAllowedOnRDN',
RESULT_ENTRY_ALREADY_EXISTS: 'entryAlreadyExists',
RESULT_OBJECT_CLASS_MODS_PROHIBITED: 'objectClassModsProhibited',
RESULT_AFFECT_MULTIPLE_DSAS: 'affectMultipleDSAs',
RESULT_OTHER: 'other',
RESULT_LCUP_RESOURCES_EXHAUSTED: 'lcupResourcesExhausted',
RESULT_LCUP_SECURITY_VIOLATION: 'lcupSecurityViolation',
RESULT_LCUP_INVALID_DATA: 'lcupInvalidData',
RESULT_LCUP_UNSUPPORTED_SCHEME: 'lcupUnsupportedScheme',
RESULT_LCUP_RELOAD_REQUIRED: 'lcupReloadRequired',
RESULT_CANCELED: 'canceled',
RESULT_NO_SUCH_OPERATION: 'noSuchOperation',
RESULT_TOO_LATE: 'tooLate',
RESULT_CANNOT_CANCEL: 'cannotCancel',
RESULT_ASSERTION_FAILED: 'assertionFailed',
RESULT_AUTHORIZATION_DENIED: 'authorizationDenied',
RESULT_E_SYNC_REFRESH_REQUIRED: 'e-syncRefreshRequired'
}
# result codes for which no exception is raised when the connection is in raise_exceptions mode
DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS]
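# Illustrative sketch (not part of the original ldap3 module): mapping a numeric result code to its
# symbolic name and checking whether a connection opened with raise_exceptions=True would raise for it.
def _describe_result_code(code):
    name = RESULT_CODES.get(code, 'unknown result code')
    would_raise = code not in DO_NOT_RAISE_EXCEPTIONS
    return name, would_raise
# _describe_result_code(RESULT_NO_SUCH_OBJECT) -> ('noSuchObject', True)
# _describe_result_code(RESULT_COMPARE_TRUE) -> ('compareTrue', False)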

@ -0,0 +1,572 @@
"""
"""
# Created on 2014.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import socket
from threading import Lock
from datetime import datetime, MINYEAR
from .. import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES
from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError
from ..protocol.formatters.standard import format_attribute_values
from ..protocol.rfc4511 import LDAP_MAX_INT
from ..protocol.rfc4512 import SchemaInfo, DsaInfo
from .tls import Tls
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL
from ..utils.conv import to_unicode
try:
from urllib.parse import unquote # Python 3
except ImportError:
from urllib import unquote # Python 2
try: # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme)
# noinspection PyUnresolvedReferences
from socket import AF_UNIX
unix_socket_available = True
except ImportError:
unix_socket_available = False
class Server(object):
"""
LDAP Server definition class
Allowed_referral_hosts can be None (default), or a list of tuples of
allowed servers ip address or names to contact while redirecting
search to referrals.
The second element of the tuple is a boolean to indicate if
authentication to that server is allowed; if False only anonymous
bind will be used.
Per RFC 4516. Use [('*', False)] to allow any host with anonymous
bind, use [('*', True)] to allow any host with same authentication of
Server.
"""
_message_counter = 0
_message_id_lock = Lock() # global lock for message_id shared by all Server objects
def __init__(self,
host,
port=None,
use_ssl=False,
allowed_referral_hosts=None,
get_info=SCHEMA,
tls=None,
formatter=None,
connect_timeout=None,
mode=IP_V6_PREFERRED,
validator=None):
self.ipc = False
url_given = False
host = host.strip()
if host.lower().startswith('ldap://'):
self.host = host[7:]
use_ssl = False
url_given = True
elif host.lower().startswith('ldaps://'):
self.host = host[8:]
use_ssl = True
url_given = True
elif host.lower().startswith('ldapi://') and unix_socket_available:
self.ipc = True
use_ssl = False
url_given = True
elif host.lower().startswith('ldapi://') and not unix_socket_available:
            raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets not present')
else:
self.host = host
if self.ipc:
if str is bytes: # Python 2
self.host = unquote(host[7:]).decode('utf-8')
else: # Python 3
self.host = unquote(host[7:]) # encoding defaults to utf-8 in python3
self.port = None
elif ':' in self.host and self.host.count(':') == 1:
hostname, _, hostport = self.host.partition(':')
try:
port = int(hostport) or port
except ValueError:
if log_enabled(ERROR):
log(ERROR, 'port <%s> must be an integer', port)
raise LDAPInvalidPortError('port must be an integer')
self.host = hostname
elif url_given and self.host.startswith('['):
hostname, sep, hostport = self.host[1:].partition(']')
if sep != ']' or not self._is_ipv6(hostname):
if log_enabled(ERROR):
log(ERROR, 'invalid IPv6 server address for <%s>', self.host)
raise LDAPInvalidServerError()
if len(hostport):
if not hostport.startswith(':'):
if log_enabled(ERROR):
log(ERROR, 'invalid URL in server name for <%s>', self.host)
raise LDAPInvalidServerError('invalid URL in server name')
if not hostport[1:].isdecimal():
if log_enabled(ERROR):
log(ERROR, 'port must be an integer for <%s>', self.host)
raise LDAPInvalidPortError('port must be an integer')
port = int(hostport[1:])
self.host = hostname
elif not url_given and self._is_ipv6(self.host):
pass
elif self.host.count(':') > 1:
if log_enabled(ERROR):
log(ERROR, 'invalid server address for <%s>', self.host)
raise LDAPInvalidServerError()
if not self.ipc:
            self.host = self.host.rstrip('/')
if not use_ssl and not port:
port = 389
elif use_ssl and not port:
port = 636
if isinstance(port, int):
            if 0 <= port <= 65535:
self.port = port
else:
if log_enabled(ERROR):
log(ERROR, 'port <%s> must be in range from 0 to 65535', port)
                raise LDAPInvalidPortError('port must be in range from 0 to 65535')
else:
if log_enabled(ERROR):
log(ERROR, 'port <%s> must be an integer', port)
raise LDAPInvalidPortError('port must be an integer')
if allowed_referral_hosts is None: # defaults to any server with authentication
allowed_referral_hosts = [('*', True)]
if isinstance(allowed_referral_hosts, SEQUENCE_TYPES):
self.allowed_referral_hosts = []
for referral_host in allowed_referral_hosts:
if isinstance(referral_host, tuple):
if isinstance(referral_host[1], bool):
self.allowed_referral_hosts.append(referral_host)
elif isinstance(allowed_referral_hosts, tuple):
if isinstance(allowed_referral_hosts[1], bool):
self.allowed_referral_hosts = [allowed_referral_hosts]
else:
self.allowed_referral_hosts = []
self.ssl = True if use_ssl else False
if tls and not isinstance(tls, Tls):
if log_enabled(ERROR):
log(ERROR, 'invalid tls specification: <%s>', tls)
raise LDAPInvalidTlsSpecificationError('invalid Tls object')
self.tls = Tls() if self.ssl and not tls else tls
if not self.ipc:
if self._is_ipv6(self.host):
self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port)
else:
self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port)
else:
self.name = host
self.get_info = get_info
self._dsa_info = None
self._schema_info = None
self.dit_lock = Lock()
self.custom_formatter = formatter
self.custom_validator = validator
self._address_info = [] # property self.address_info resolved at open time (or when check_availability is called)
self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date ever
self.current_address = None
self.connect_timeout = connect_timeout
self.mode = mode
self.get_info_from_server(None) # load offline schema if needed
if log_enabled(BASIC):
log(BASIC, 'instantiated Server: <%r>', self)
@staticmethod
def _is_ipv6(host):
try:
socket.inet_pton(socket.AF_INET6, host)
except (socket.error, AttributeError, ValueError):
return False
return True
def __str__(self):
if self.host:
s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '')
else:
s = object.__str__(self)
return s
def __repr__(self):
r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self)
r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self)
r += '' if self.tls is None else ', tls={0.tls!r}'.format(self)
r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self)
r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self)
r += '' if not self.mode else ', mode={0.mode!r}'.format(self)
r += ')'
return r
@property
def address_info(self):
conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME')
if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval:
# converts addresses tuple to list and adds a 6th parameter for availability (None = not checked, True = available, False=not available) and a 7th parameter for the checking time
addresses = None
try:
if self.ipc:
addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)]
else:
addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
except (socket.gaierror, AttributeError):
pass
if not addresses: # if addresses not found or raised an exception (for example for bad flags) tries again without flags
try:
addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP)
except socket.gaierror:
pass
if addresses:
self._address_info = [list(address) + [None, None] for address in addresses]
self._address_info_resolved_time = datetime.now()
else:
self._address_info = []
self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date
if log_enabled(BASIC):
for address in self._address_info:
log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2])
return self._address_info
def update_availability(self, address, available):
cont = 0
while cont < len(self._address_info):
if self.address_info[cont] == address:
self._address_info[cont][5] = True if available else False
self._address_info[cont][6] = datetime.now()
break
cont += 1
def reset_availability(self):
for address in self._address_info:
address[5] = None
address[6] = None
def check_availability(self):
"""
Tries to open, connect and close a socket to specified address
        and port to check availability. Timeout in seconds is specified in CHECK_AVAILABILITY_TIMEOUT if not specified in
the Server object
"""
conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT')
available = False
self.reset_availability()
for address in self.candidate_addresses():
available = True
try:
temp_socket = socket.socket(*address[:3])
if self.connect_timeout:
temp_socket.settimeout(self.connect_timeout)
else:
temp_socket.settimeout(conf_availability_timeout) # set timeout for checking availability to default
try:
temp_socket.connect(address[4])
except socket.error:
available = False
finally:
try:
temp_socket.shutdown(socket.SHUT_RDWR)
except socket.error:
available = False
finally:
temp_socket.close()
except socket.gaierror:
available = False
if available:
if log_enabled(BASIC):
log(BASIC, 'server <%s> available at <%r>', self, address)
self.update_availability(address, True)
break # if an available address is found exits immediately
else:
self.update_availability(address, False)
if log_enabled(ERROR):
log(ERROR, 'server <%s> not available at <%r>', self, address)
return available
@staticmethod
def next_message_id():
"""
LDAP messageId is unique for all connections to same server
"""
with Server._message_id_lock:
Server._message_counter += 1
if Server._message_counter >= LDAP_MAX_INT:
Server._message_counter = 1
if log_enabled(PROTOCOL):
log(PROTOCOL, 'new message id <%d> generated', Server._message_counter)
return Server._message_counter
def _get_dsa_info(self, connection):
"""
Retrieve DSE operational attribute as per RFC4512 (5.1).
"""
if connection.strategy.no_real_dsa: # do not try for mock strategies
return
if not connection.strategy.pooled: # in pooled strategies get_dsa_info is performed by the worker threads
result = connection.search(search_base='',
search_filter='(objectClass=*)',
search_scope=BASE,
attributes=['altServer', # requests specific dsa info attributes
'namingContexts',
'supportedControl',
'supportedExtension',
'supportedFeatures',
'supportedCapabilities',
'supportedLdapVersion',
'supportedSASLMechanisms',
'vendorName',
'vendorVersion',
'subschemaSubentry',
'*',
'+'], # requests all remaining attributes (other),
get_operational_attributes=True)
with self.dit_lock:
if isinstance(result, bool): # sync request
self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info
elif result: # asynchronous request, must check if attributes in response
results, _ = connection.get_response(result)
if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes'])
if log_enabled(BASIC):
log(BASIC, 'DSA info read for <%s> via <%s>', self, connection)
def _get_schema_info(self, connection, entry=''):
"""
Retrieve schema from subschemaSubentry DSE attribute, per RFC
4512 (4.4 and 5.1); entry = '' means DSE.
"""
if connection.strategy.no_real_dsa: # do not try for mock strategies
return
schema_entry = None
if self._dsa_info and entry == '': # subschemaSubentry already present in dsaInfo
if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES):
schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None
else:
schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None
else:
result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True)
if isinstance(result, bool): # sync request
if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']:
if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0:
schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0]
else: # asynchronous request, must check if subschemaSubentry in attributes
results, _ = connection.get_response(result)
if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']:
if len(results[0]['raw_attributes']['subschemaSubentry']) > 0:
schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0]
if schema_entry and not connection.strategy.pooled: # in pooled strategies get_schema_info is performed by the worker threads
if isinstance(schema_entry, bytes) and str is not bytes: # Python 3
schema_entry = to_unicode(schema_entry, from_server=True)
result = connection.search(schema_entry,
search_filter='(objectClass=subschema)',
search_scope=BASE,
attributes=['objectClasses', # requests specific subschema attributes
'attributeTypes',
'ldapSyntaxes',
'matchingRules',
'matchingRuleUse',
'dITContentRules',
'dITStructureRules',
'nameForms',
'createTimestamp',
'modifyTimestamp',
'*'], # requests all remaining attributes (other)
get_operational_attributes=True
)
with self.dit_lock:
self._schema_info = None
if result:
if isinstance(result, bool): # sync request
self._schema_info = SchemaInfo(schema_entry, connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None
else: # asynchronous request, must check if attributes in response
results, result = connection.get_response(result)
if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes'])
if self._schema_info and not self._schema_info.is_valid(): # flaky servers can return an empty schema, checks if it is so and set schema to None
self._schema_info = None
if self._schema_info: # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info
for attribute in self._schema_info.other:
self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter)
if self._dsa_info: # try to apply formatter to the "other" dict with dsa info raw values
for attribute in self._dsa_info.other:
self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter)
if log_enabled(BASIC):
log(BASIC, 'schema read for <%s> via <%s>', self, connection)
def get_info_from_server(self, connection):
"""
reads info from DSE and from subschema
"""
if connection and not connection.closed:
if self.get_info in [DSA, ALL]:
self._get_dsa_info(connection)
if self.get_info in [SCHEMA, ALL]:
self._get_schema_info(connection)
elif self.get_info == OFFLINE_EDIR_8_8_8:
from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info
self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema))
self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info))
elif self.get_info == OFFLINE_AD_2012_R2:
from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info
self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema))
self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info))
elif self.get_info == OFFLINE_SLAPD_2_4:
from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info
self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema))
self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info))
elif self.get_info == OFFLINE_DS389_1_3_3:
from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info
self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema))
self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info))
def attach_dsa_info(self, dsa_info=None):
if isinstance(dsa_info, DsaInfo):
self._dsa_info = dsa_info
if log_enabled(BASIC):
log(BASIC, 'attached DSA info to Server <%s>', self)
def attach_schema_info(self, dsa_schema=None):
if isinstance(dsa_schema, SchemaInfo):
self._schema_info = dsa_schema
if log_enabled(BASIC):
log(BASIC, 'attached schema info to Server <%s>', self)
@property
def info(self):
return self._dsa_info
@property
def schema(self):
return self._schema_info
@staticmethod
def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None):
"""
Define a dummy server with preloaded schema and info
:param host: host name
:param dsa_info: DsaInfo preloaded object or a json formatted string or a file name
:param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name
:param port: dummy port
:param use_ssl: use_ssl
        :param formatter: custom formatter
        :param validator: custom validator
:return: Server object
"""
if isinstance(host, SEQUENCE_TYPES):
dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) # for ServerPool object
else:
dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL)
if isinstance(dsa_info, DsaInfo):
dummy._dsa_info = dsa_info
elif isinstance(dsa_info, STRING_TYPES):
try:
dummy._dsa_info = DsaInfo.from_json(dsa_info) # tries to use dsa_info as a json configuration string
except Exception:
dummy._dsa_info = DsaInfo.from_file(dsa_info) # tries to use dsa_info as a file name
if not dummy.info:
if log_enabled(ERROR):
log(ERROR, 'invalid DSA info for %s', host)
raise LDAPDefinitionError('invalid dsa info')
if isinstance(dsa_schema, SchemaInfo):
dummy._schema_info = dsa_schema
elif isinstance(dsa_schema, STRING_TYPES):
try:
dummy._schema_info = SchemaInfo.from_json(dsa_schema)
except Exception:
dummy._schema_info = SchemaInfo.from_file(dsa_schema)
if not dummy.schema:
if log_enabled(ERROR):
log(ERROR, 'invalid schema info for %s', host)
raise LDAPDefinitionError('invalid schema info')
if log_enabled(BASIC):
log(BASIC, 'created server <%s> from definition', dummy)
return dummy
def candidate_addresses(self):
conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT')
if self.ipc:
candidates = self.address_info
if log_enabled(BASIC):
log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name)
else:
# checks reset availability timeout
for address in self.address_info:
if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout):
address[5] = None
address[6] = None
# selects server address based on server mode and availability (in address[5])
addresses = self.address_info[:] # copy to avoid refreshing while searching candidates
candidates = []
if addresses:
if self.mode == IP_SYSTEM_DEFAULT:
candidates.append(addresses[0])
elif self.mode == IP_V4_ONLY:
candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
elif self.mode == IP_V6_ONLY:
candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
elif self.mode == IP_V4_PREFERRED:
candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
elif self.mode == IP_V6_PREFERRED:
candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
else:
if log_enabled(ERROR):
log(ERROR, 'invalid server mode for <%s>', self)
raise LDAPInvalidServerError('invalid server mode')
if log_enabled(BASIC):
for candidate in candidates:
log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode)
return candidates
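# Illustrative usage sketch (not part of the original ldap3 module): defining a Server, probing it and
# reading the DSA/schema info collected at bind time. Host name and credentials are placeholders,
# and Connection is assumed to come from the top-level ldap3 package.
def _server_usage_sketch():
    from ldap3 import Connection
    server = Server('ldap.example.com', port=389, get_info=ALL, connect_timeout=5)
    if not server.check_availability():  # opens and closes a throw-away socket per candidate address
        return None
    conn = Connection(server, user='cn=admin,dc=example,dc=com', password='password', auto_bind=True)
    # get_info_from_server() has run during the bind, so the lazy properties are now populated
    return server.info, server.schema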

@ -0,0 +1,56 @@
"""
"""
# Created on 2015.01.07
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from datetime import timedelta, tzinfo
# from python standard library docs
class OffsetTzInfo(tzinfo):
"""Fixed offset in minutes east from UTC"""
def __init__(self, offset, name):
self.offset = offset
self.name = name
self._offset = timedelta(minutes=offset)
def __str__(self):
return self.name
def __repr__(self):
return 'OffsetTzInfo(offset={0.offset!r}, name={0.name!r})'.format(self)
def utcoffset(self, dt):
return self._offset
def tzname(self, dt):
return self.name
# noinspection PyMethodMayBeStatic
def dst(self, dt):
return timedelta(0)
def __getinitargs__(self): # for pickling/unpickling
return self.offset, self.name
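# Illustrative sketch (not part of the original ldap3 module): attaching a fixed-offset timezone to a
# naive datetime, e.g. when decoding an LDAP generalized-time value that carries a +0200 style offset.
def _tzinfo_sketch():
    from datetime import datetime
    plus_two = OffsetTzInfo(offset=120, name='UTC+02:00')
    return datetime(2018, 10, 29, 3, 11, 8, tzinfo=plus_two).isoformat()  # '2018-10-29T03:11:08+02:00'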

@ -0,0 +1,321 @@
"""
"""
# Created on 2013.08.05
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory
from .. import SEQUENCE_TYPES
from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
try:
# noinspection PyUnresolvedReferences
import ssl
except ImportError:
if log_enabled(ERROR):
log(ERROR, 'SSL not supported in this Python interpreter')
raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter')
try:
from ssl import match_hostname, CertificateError # backport for python2 missing ssl functionalities
except ImportError:
from ..utils.tls_backport import CertificateError
from ..utils.tls_backport import match_hostname
if log_enabled(BASIC):
log(BASIC, 'using tls_backport')
try: # try to use SSLContext
# noinspection PyUnresolvedReferences
from ssl import create_default_context, Purpose # defined in Python 3.4 and Python 2.7.9
use_ssl_context = True
except ImportError:
use_ssl_context = False
if log_enabled(BASIC):
log(BASIC, 'SSLContext unavailable')
from os import path
# noinspection PyProtectedMember
class Tls(object):
"""
tls/ssl configuration for Server object
Starting from python 2.7.9 and python 3.4 uses the SSLContext object
that tries to read the CAs defined at system level
ca_certs_path and ca_certs_data are valid only when using SSLContext
local_private_key_password is valid only when using SSLContext
sni is the server name for Server Name Indication (when available)
"""
def __init__(self,
local_private_key_file=None,
local_certificate_file=None,
validate=ssl.CERT_NONE,
version=None,
ca_certs_file=None,
valid_names=None,
ca_certs_path=None,
ca_certs_data=None,
local_private_key_password=None,
ciphers=None,
sni=None):
if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]:
self.validate = validate
elif validate:
if log_enabled(ERROR):
log(ERROR, 'invalid validate parameter <%s>', validate)
raise LDAPSSLConfigurationError('invalid validate parameter')
if ca_certs_file and path.exists(ca_certs_file):
self.ca_certs_file = ca_certs_file
elif ca_certs_file:
if log_enabled(ERROR):
log(ERROR, 'invalid CA public key file <%s>', ca_certs_file)
raise LDAPSSLConfigurationError('invalid CA public key file')
else:
self.ca_certs_file = None
if ca_certs_path and use_ssl_context and path.exists(ca_certs_path):
self.ca_certs_path = ca_certs_path
elif ca_certs_path and not use_ssl_context:
if log_enabled(ERROR):
log(ERROR, 'cannot use CA public keys path, SSLContext not available')
raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available')
elif ca_certs_path:
if log_enabled(ERROR):
log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path)
raise LDAPSSLConfigurationError('invalid CA public keys path')
else:
self.ca_certs_path = None
if ca_certs_data and use_ssl_context:
self.ca_certs_data = ca_certs_data
elif ca_certs_data:
if log_enabled(ERROR):
log(ERROR, 'cannot use CA data, SSLContext not available')
raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available')
else:
self.ca_certs_data = None
if local_private_key_password and use_ssl_context:
self.private_key_password = local_private_key_password
elif local_private_key_password:
if log_enabled(ERROR):
log(ERROR, 'cannot use local private key password, SSLContext not available')
raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available')
else:
self.private_key_password = None
self.version = version
self.private_key_file = local_private_key_file
self.certificate_file = local_certificate_file
self.valid_names = valid_names
self.ciphers = ciphers
self.sni = sni
if log_enabled(BASIC):
            log(BASIC, 'instantiated Tls: <%r>', self)
def __str__(self):
s = [
'protocol: ' + str(self.version),
'client private key: ' + ('present ' if self.private_key_file else 'not present'),
'client certificate: ' + ('present ' if self.certificate_file else 'not present'),
'private key password: ' + ('present ' if self.private_key_password else 'not present'),
'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'),
'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'),
'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'),
'verify mode: ' + str(self.validate),
'valid names: ' + str(self.valid_names),
'ciphers: ' + str(self.ciphers),
'sni: ' + str(self.sni)
]
return ' - '.join(s)
def __repr__(self):
r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self)
r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self)
r += '' if self.validate is None else ', validate={0.validate!r}'.format(self)
r += '' if self.version is None else ', version={0.version!r}'.format(self)
r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self)
r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self)
r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self)
r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self)
r += '' if self.sni is None else ', sni={0.sni!r}'.format(self)
r = 'Tls(' + r[2:] + ')'
return r
def wrap_socket(self, connection, do_handshake=False):
"""
Adds TLS to the connection socket
"""
if use_ssl_context:
if self.version is None: # uses the default ssl context for reasonable security
ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH,
cafile=self.ca_certs_file,
capath=self.ca_certs_path,
cadata=self.ca_certs_data)
            else: # code from create_default_context in the Python standard library 3.5.1, creates an ssl context with the specified protocol version
ssl_context = ssl.SSLContext(self.version)
if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data:
ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data)
elif self.validate != ssl.CERT_NONE:
ssl_context.load_default_certs(Purpose.SERVER_AUTH)
if self.certificate_file:
ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password)
ssl_context.check_hostname = False
ssl_context.verify_mode = self.validate
if self.ciphers:
try:
ssl_context.set_ciphers(self.ciphers)
except ssl.SSLError:
pass
if self.sni:
wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni)
else:
wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake)
if log_enabled(NETWORK):
log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection)
else:
if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'):
self.version = ssl.PROTOCOL_SSLv23
if self.ciphers:
try:
wrapped_socket = ssl.wrap_socket(connection.socket,
keyfile=self.private_key_file,
certfile=self.certificate_file,
server_side=False,
cert_reqs=self.validate,
ssl_version=self.version,
ca_certs=self.ca_certs_file,
do_handshake_on_connect=do_handshake,
ciphers=self.ciphers)
except ssl.SSLError:
raise
                except TypeError: # in python2.6 no ciphers argument is present, fall back to self.ciphers=None
self.ciphers = None
if not self.ciphers:
wrapped_socket = ssl.wrap_socket(connection.socket,
keyfile=self.private_key_file,
certfile=self.certificate_file,
server_side=False,
cert_reqs=self.validate,
ssl_version=self.version,
ca_certs=self.ca_certs_file,
do_handshake_on_connect=do_handshake)
if log_enabled(NETWORK):
log(NETWORK, 'socket wrapped with SSL for <%s>', connection)
if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL):
check_hostname(wrapped_socket, connection.server.host, self.valid_names)
connection.socket = wrapped_socket
return
def start_tls(self, connection):
if connection.server.ssl: # ssl already established at server level
return False
if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress:
# Per RFC 4513 (3.1.1)
if log_enabled(ERROR):
                log(ERROR, "can't start tls because operations are in progress for <%s>", connection)
return False
connection.starting_tls = True
if log_enabled(BASIC):
log(BASIC, 'starting tls for <%s>', connection)
if not connection.strategy.sync:
            connection._awaiting_for_async_start_tls = True # some flaky servers (OpenLDAP) don't return the extended response name in the response
result = connection.extended('1.3.6.1.4.1.1466.20037')
if not connection.strategy.sync:
# asynchronous - _start_tls must be executed by the strategy
response = connection.get_response(result)
if response != (None, None):
if log_enabled(BASIC):
log(BASIC, 'tls started for <%s>', connection)
return True
else:
if log_enabled(BASIC):
log(BASIC, 'tls not started for <%s>', connection)
return False
else:
if connection.result['description'] not in ['success']:
# startTLS failed
connection.last_error = 'startTLS failed - ' + str(connection.result['description'])
if log_enabled(ERROR):
log(ERROR, '%s for <%s>', connection.last_error, connection)
raise LDAPStartTLSError(connection.last_error)
if log_enabled(BASIC):
log(BASIC, 'tls started for <%s>', connection)
return self._start_tls(connection)
def _start_tls(self, connection):
try:
self.wrap_socket(connection, do_handshake=True)
except Exception as e:
connection.last_error = 'wrap socket error: ' + str(e)
if log_enabled(ERROR):
log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection)
raise start_tls_exception_factory(LDAPStartTLSError, e)(connection.last_error)
finally:
connection.starting_tls = False
if connection.usage:
connection._usage.wrapped_sockets += 1
connection.tls_started = True
return True
def check_hostname(sock, server_name, additional_names):
server_certificate = sock.getpeercert()
if log_enabled(NETWORK):
log(NETWORK, 'certificate found for %s: %s', sock, server_certificate)
if additional_names:
host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names])
else:
host_names = [server_name]
for host_name in host_names:
if not host_name:
continue
elif host_name == '*':
if log_enabled(NETWORK):
log(NETWORK, 'certificate matches * wildcard')
return # valid
try:
match_hostname(server_certificate, host_name) # raise CertificateError if certificate doesn't match server name
if log_enabled(NETWORK):
log(NETWORK, 'certificate matches host name <%s>', host_name)
return # valid
except CertificateError as e:
if log_enabled(NETWORK):
log(NETWORK, str(e))
if log_enabled(ERROR):
log(ERROR, "hostname doesn't match certificate")
raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names)))
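# Illustrative usage sketch (not part of the original ldap3 module): combining Tls with Server and
# Connection for the startTLS extended operation (OID 1.3.6.1.4.1.1466.20037). The CA file path,
# host name and credentials are placeholders; Server and Connection are assumed to come from the
# top-level ldap3 package.
def _tls_usage_sketch():
    from ldap3 import Server, Connection
    tls_configuration = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file='/etc/ssl/certs/ca.pem')
    server = Server('ldap.example.com', use_ssl=False, tls=tls_configuration)
    conn = Connection(server, user='cn=admin,dc=example,dc=com', password='password', auto_bind=True)
    conn.start_tls()  # upgrades the cleartext socket via Tls.start_tls()/wrap_socket() above
    return conn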

@ -0,0 +1,229 @@
"""
"""
# Created on 2014.03.15
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime, timedelta
from os import linesep
from .exceptions import LDAPMetricsError
from ..utils.log import log, log_enabled, ERROR, BASIC
class ConnectionUsage(object):
"""
Collect statistics on connection usage
"""
def reset(self):
self.open_sockets = 0
self.closed_sockets = 0
self.wrapped_sockets = 0
self.bytes_transmitted = 0
self.bytes_received = 0
self.messages_transmitted = 0
self.messages_received = 0
self.operations = 0
self.abandon_operations = 0
self.add_operations = 0
self.bind_operations = 0
self.compare_operations = 0
self.delete_operations = 0
self.extended_operations = 0
self.modify_operations = 0
self.modify_dn_operations = 0
self.search_operations = 0
self.unbind_operations = 0
self.referrals_received = 0
self.referrals_followed = 0
self.referrals_connections = 0
self.restartable_failures = 0
self.restartable_successes = 0
self.servers_from_pool = 0
if log_enabled(BASIC):
log(BASIC, 'reset usage metrics')
def __init__(self):
self.initial_connection_start_time = None
self.open_socket_start_time = None
self.connection_stop_time = None
self.last_transmitted_time = None
self.last_received_time = None
self.open_sockets = 0
self.closed_sockets = 0
self.wrapped_sockets = 0
self.bytes_transmitted = 0
self.bytes_received = 0
self.messages_transmitted = 0
self.messages_received = 0
self.operations = 0
self.abandon_operations = 0
self.add_operations = 0
self.bind_operations = 0
self.compare_operations = 0
self.delete_operations = 0
self.extended_operations = 0
self.modify_operations = 0
self.modify_dn_operations = 0
self.search_operations = 0
self.unbind_operations = 0
self.referrals_received = 0
self.referrals_followed = 0
self.referrals_connections = 0
self.restartable_failures = 0
self.restartable_successes = 0
self.servers_from_pool = 0
if log_enabled(BASIC):
log(BASIC, 'instantiated Usage object')
def __repr__(self):
r = 'Connection Usage:' + linesep
r += ' Time: [elapsed: ' + str(self.elapsed_time) + ']' + linesep
r += ' Initial start time: ' + (str(self.initial_connection_start_time.isoformat()) if self.initial_connection_start_time else '') + linesep
r += ' Open socket time: ' + (str(self.open_socket_start_time.isoformat()) if self.open_socket_start_time else '') + linesep
r += ' Last transmitted time: ' + (str(self.last_transmitted_time.isoformat()) if self.last_transmitted_time else '') + linesep
r += ' Last received time: ' + (str(self.last_received_time.isoformat()) if self.last_received_time else '') + linesep
r += ' Close socket time: ' + (str(self.connection_stop_time.isoformat()) if self.connection_stop_time else '') + linesep
r += ' Server:' + linesep
r += ' Servers from pool: ' + str(self.servers_from_pool) + linesep
r += ' Sockets open: ' + str(self.open_sockets) + linesep
r += ' Sockets closed: ' + str(self.closed_sockets) + linesep
r += ' Sockets wrapped: ' + str(self.wrapped_sockets) + linesep
r += ' Bytes: ' + str(self.bytes_transmitted + self.bytes_received) + linesep
r += ' Transmitted: ' + str(self.bytes_transmitted) + linesep
r += ' Received: ' + str(self.bytes_received) + linesep
r += ' Messages: ' + str(self.messages_transmitted + self.messages_received) + linesep
r += ' Transmitted: ' + str(self.messages_transmitted) + linesep
r += ' Received: ' + str(self.messages_received) + linesep
r += ' Operations: ' + str(self.operations) + linesep
r += ' Abandon: ' + str(self.abandon_operations) + linesep
r += ' Bind: ' + str(self.bind_operations) + linesep
r += ' Add: ' + str(self.add_operations) + linesep
r += ' Compare: ' + str(self.compare_operations) + linesep
r += ' Delete: ' + str(self.delete_operations) + linesep
r += ' Extended: ' + str(self.extended_operations) + linesep
r += ' Modify: ' + str(self.modify_operations) + linesep
r += ' ModifyDn: ' + str(self.modify_dn_operations) + linesep
r += ' Search: ' + str(self.search_operations) + linesep
r += ' Unbind: ' + str(self.unbind_operations) + linesep
r += ' Referrals: ' + linesep
r += ' Received: ' + str(self.referrals_received) + linesep
r += ' Followed: ' + str(self.referrals_followed) + linesep
r += ' Connections: ' + str(self.referrals_connections) + linesep
r += ' Restartable tries: ' + str(self.restartable_failures + self.restartable_successes) + linesep
r += ' Failed restarts: ' + str(self.restartable_failures) + linesep
r += ' Successful restarts: ' + str(self.restartable_successes) + linesep
return r
def __str__(self):
return self.__repr__()
def __iadd__(self, other):
if not isinstance(other, ConnectionUsage):
raise LDAPMetricsError('unable to add to ConnectionUsage')
self.open_sockets += other.open_sockets
self.closed_sockets += other.closed_sockets
self.wrapped_sockets += other.wrapped_sockets
self.bytes_transmitted += other.bytes_transmitted
self.bytes_received += other.bytes_received
self.messages_transmitted += other.messages_transmitted
self.messages_received += other.messages_received
self.operations += other.operations
self.abandon_operations += other.abandon_operations
self.add_operations += other.add_operations
self.bind_operations += other.bind_operations
self.compare_operations += other.compare_operations
self.delete_operations += other.delete_operations
self.extended_operations += other.extended_operations
self.modify_operations += other.modify_operations
self.modify_dn_operations += other.modify_dn_operations
self.search_operations += other.search_operations
self.unbind_operations += other.unbind_operations
self.referrals_received += other.referrals_received
self.referrals_followed += other.referrals_followed
self.referrals_connections += other.referrals_connections
self.restartable_failures += other.restartable_failures
self.restartable_successes += other.restartable_successes
self.servers_from_pool += other.servers_from_pool
return self
def update_transmitted_message(self, message, length):
self.last_transmitted_time = datetime.now()
self.bytes_transmitted += length
self.operations += 1
self.messages_transmitted += 1
if message['type'] == 'abandonRequest':
self.abandon_operations += 1
elif message['type'] == 'addRequest':
self.add_operations += 1
elif message['type'] == 'bindRequest':
self.bind_operations += 1
elif message['type'] == 'compareRequest':
self.compare_operations += 1
elif message['type'] == 'delRequest':
self.delete_operations += 1
elif message['type'] == 'extendedReq':
self.extended_operations += 1
elif message['type'] == 'modifyRequest':
self.modify_operations += 1
elif message['type'] == 'modDNRequest':
self.modify_dn_operations += 1
elif message['type'] == 'searchRequest':
self.search_operations += 1
elif message['type'] == 'unbindRequest':
self.unbind_operations += 1
else:
if log_enabled(ERROR):
log(ERROR, 'unable to collect usage for unknown message type <%s>', message['type'])
raise LDAPMetricsError('unable to collect usage for unknown message type')
def update_received_message(self, length):
self.last_received_time = datetime.now()
self.bytes_received += length
self.messages_received += 1
def start(self, reset=True):
if reset:
self.reset()
self.open_socket_start_time = datetime.now()
self.connection_stop_time = None
if not self.initial_connection_start_time:
self.initial_connection_start_time = self.open_socket_start_time
if log_enabled(BASIC):
log(BASIC, 'start collecting usage metrics')
def stop(self):
if self.open_socket_start_time:
self.connection_stop_time = datetime.now()
if log_enabled(BASIC):
log(BASIC, 'stop collecting usage metrics')
@property
def elapsed_time(self):
if self.connection_stop_time:
return self.connection_stop_time - self.open_socket_start_time
else:
return (datetime.now() - self.open_socket_start_time) if self.open_socket_start_time else timedelta(0)
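# Illustrative usage sketch (not part of the original ldap3 module): ConnectionUsage is populated
# automatically when a Connection is created with collect_usage=True. Host name and credentials are
# placeholders; Server and Connection are assumed to come from the top-level ldap3 package.
def _usage_metrics_sketch():
    from ldap3 import Server, Connection
    conn = Connection(Server('ldap.example.com'),
                      user='cn=admin,dc=example,dc=com', password='password',
                      auto_bind=True, collect_usage=True)
    conn.search('dc=example,dc=com', '(objectClass=*)', attributes=['cn'])
    conn.unbind()
    return str(conn.usage)  # the multi-line report built by ConnectionUsage.__repr__ above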

@ -0,0 +1,289 @@
"""
"""
# Created on 2014.04.28
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from os import linesep
from .. import SUBTREE, DEREF_ALWAYS, ALL_ATTRIBUTES, DEREF_NEVER
from .microsoft.dirSync import DirSync
from .microsoft.modifyPassword import ad_modify_password
from .microsoft.unlockAccount import ad_unlock_account
from .microsoft.addMembersToGroups import ad_add_members_to_groups
from .microsoft.removeMembersFromGroups import ad_remove_members_from_groups
from .novell.partition_entry_count import PartitionEntryCount
from .novell.replicaInfo import ReplicaInfo
from .novell.listReplicas import ListReplicas
from .novell.getBindDn import GetBindDn
from .novell.nmasGetUniversalPassword import NmasGetUniversalPassword
from .novell.nmasSetUniversalPassword import NmasSetUniversalPassword
from .novell.startTransaction import StartTransaction
from .novell.endTransaction import EndTransaction
from .novell.addMembersToGroups import edir_add_members_to_groups
from .novell.removeMembersFromGroups import edir_remove_members_from_groups
from .novell.checkGroupsMemberships import edir_check_groups_memberships
from .standard.whoAmI import WhoAmI
from .standard.modifyPassword import ModifyPassword
from .standard.PagedSearch import paged_search_generator, paged_search_accumulator
from .standard.PersistentSearch import PersistentSearch
class ExtendedOperationContainer(object):
def __init__(self, connection):
self._connection = connection
def __repr__(self):
return linesep.join([' ' + element for element in dir(self) if element[0] != '_'])
def __str__(self):
return self.__repr__()
class StandardExtendedOperations(ExtendedOperationContainer):
def who_am_i(self, controls=None):
return WhoAmI(self._connection,
controls).send()
def modify_password(self,
user=None,
old_password=None,
new_password=None,
hash_algorithm=None,
salt=None,
controls=None):
return ModifyPassword(self._connection,
user,
old_password,
new_password,
hash_algorithm,
salt,
controls).send()
def paged_search(self,
search_base,
search_filter,
search_scope=SUBTREE,
dereference_aliases=DEREF_ALWAYS,
attributes=None,
size_limit=0,
time_limit=0,
types_only=False,
get_operational_attributes=False,
controls=None,
paged_size=100,
paged_criticality=False,
generator=True):
if generator:
return paged_search_generator(self._connection,
search_base,
search_filter,
search_scope,
dereference_aliases,
attributes,
size_limit,
time_limit,
types_only,
get_operational_attributes,
controls,
paged_size,
paged_criticality)
else:
return paged_search_accumulator(self._connection,
search_base,
search_filter,
search_scope,
dereference_aliases,
attributes,
size_limit,
time_limit,
types_only,
get_operational_attributes,
controls,
paged_size,
paged_criticality)
def persistent_search(self,
search_base='',
search_filter='(objectclass=*)',
search_scope=SUBTREE,
dereference_aliases=DEREF_NEVER,
attributes=ALL_ATTRIBUTES,
size_limit=0,
time_limit=0,
controls=None,
changes_only=True,
show_additions=True,
show_deletions=True,
show_modifications=True,
show_dn_modifications=True,
notifications=True,
streaming=True,
callback=None
):
events_type = 0
if show_additions:
events_type += 1
if show_deletions:
events_type += 2
if show_modifications:
events_type += 4
if show_dn_modifications:
events_type += 8
if callback:
streaming = False
return PersistentSearch(self._connection,
search_base,
search_filter,
search_scope,
dereference_aliases,
attributes,
size_limit,
time_limit,
controls,
changes_only,
events_type,
notifications,
streaming,
callback)
class NovellExtendedOperations(ExtendedOperationContainer):
def get_bind_dn(self, controls=None):
return GetBindDn(self._connection,
controls).send()
def get_universal_password(self, user, controls=None):
return NmasGetUniversalPassword(self._connection,
user,
controls).send()
def set_universal_password(self, user, new_password=None, controls=None):
return NmasSetUniversalPassword(self._connection,
user,
new_password,
controls).send()
def list_replicas(self, server_dn, controls=None):
return ListReplicas(self._connection,
server_dn,
controls).send()
def partition_entry_count(self, partition_dn, controls=None):
return PartitionEntryCount(self._connection,
partition_dn,
controls).send()
def replica_info(self, server_dn, partition_dn, controls=None):
return ReplicaInfo(self._connection,
server_dn,
partition_dn,
controls).send()
def start_transaction(self, controls=None):
return StartTransaction(self._connection,
controls).send()
def end_transaction(self, commit=True, controls=None): # attach the groupingControl to commit, None to abort transaction
return EndTransaction(self._connection,
commit,
controls).send()
def add_members_to_groups(self, members, groups, fix=True, transaction=True):
return edir_add_members_to_groups(self._connection,
members_dn=members,
groups_dn=groups,
fix=fix,
transaction=transaction)
def remove_members_from_groups(self, members, groups, fix=True, transaction=True):
return edir_remove_members_from_groups(self._connection,
members_dn=members,
groups_dn=groups,
fix=fix,
transaction=transaction)
def check_groups_memberships(self, members, groups, fix=False, transaction=True):
return edir_check_groups_memberships(self._connection,
members_dn=members,
groups_dn=groups,
fix=fix,
transaction=transaction)
class MicrosoftExtendedOperations(ExtendedOperationContainer):
def dir_sync(self,
sync_base,
sync_filter='(objectclass=*)',
attributes=ALL_ATTRIBUTES,
cookie=None,
object_security=False,
ancestors_first=True,
public_data_only=False,
incremental_values=True,
max_length=2147483647,
hex_guid=False):
return DirSync(self._connection,
sync_base=sync_base,
sync_filter=sync_filter,
attributes=attributes,
cookie=cookie,
object_security=object_security,
ancestors_first=ancestors_first,
public_data_only=public_data_only,
incremental_values=incremental_values,
max_length=max_length,
hex_guid=hex_guid)
def modify_password(self, user, new_password, old_password=None, controls=None):
return ad_modify_password(self._connection,
user,
new_password,
old_password,
controls)
def unlock_account(self, user):
return ad_unlock_account(self._connection,
user)
def add_members_to_groups(self, members, groups, fix=True):
return ad_add_members_to_groups(self._connection,
members_dn=members,
groups_dn=groups,
fix=fix)
def remove_members_from_groups(self, members, groups, fix=True):
return ad_remove_members_from_groups(self._connection,
members_dn=members,
groups_dn=groups,
fix=fix)
class ExtendedOperationsRoot(ExtendedOperationContainer):
def __init__(self, connection):
ExtendedOperationContainer.__init__(self, connection) # calls super
self.standard = StandardExtendedOperations(self._connection)
self.novell = NovellExtendedOperations(self._connection)
self.microsoft = MicrosoftExtendedOperations(self._connection)
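# Illustrative usage sketch (not part of the original ldap3 module): the containers above are exposed
# through Connection.extend. Host name, base DN and credentials are placeholders; Server and
# Connection are assumed to come from the top-level ldap3 package.
def _extend_usage_sketch():
    from ldap3 import Server, Connection
    conn = Connection(Server('ldap.example.com'),
                      user='cn=admin,dc=example,dc=com', password='password', auto_bind=True)
    authz_id = conn.extend.standard.who_am_i()
    people = conn.extend.standard.paged_search('dc=example,dc=com',
                                               '(objectClass=person)',
                                               attributes=['cn'],
                                               paged_size=50,
                                               generator=False)  # accumulate results instead of yielding
    return authz_id, people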

@ -0,0 +1,81 @@
"""
"""
# Created on 2016.12.26
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...core.exceptions import LDAPInvalidDnError
from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER
def ad_add_members_to_groups(connection,
members_dn,
groups_dn,
fix=True):
"""
:param connection: a bound Connection object
:param members_dn: the list of members to add to groups
:param groups_dn: the list of groups where members are to be added
:param fix: checks for group existence and already assigned members
:return: a boolean where True means that the operation was successful and False means an error has happened
Establishes users-groups relations following the Active Directory rules: users are added to the member attribute of groups.
Raises LDAPInvalidDnError if members or groups are not found in the DIT.
"""
if not isinstance(members_dn, SEQUENCE_TYPES):
members_dn = [members_dn]
if not isinstance(groups_dn, SEQUENCE_TYPES):
groups_dn = [groups_dn]
error = False
for group in groups_dn:
if fix: # checks for existence of group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success':
raise LDAPInvalidDnError(group + ' not found')
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
existing_members = [element.lower() for element in existing_members]
else:
existing_members = []
changes = dict()
member_to_add = [element for element in members_dn if element.lower() not in existing_members]
if member_to_add:
changes['member'] = (MODIFY_ADD, member_to_add)
if changes:
result = connection.modify(group, changes)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
if result['description'] != 'success':
error = True
break
return not error # returns True if no error is raised in the LDAP operations
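# Usage sketch (assumed server, credentials and DNs): the helper is normally invoked through the Microsoft
# container rather than called directly; it returns True when every modify operation succeeded.
from ldap3 import Server, Connection
conn = Connection(Server('ldap://dc.example.com'), 'cn=admin,dc=example,dc=com', 'Secret123', auto_bind=True)
ok = conn.extend.microsoft.add_members_to_groups('cn=user1,ou=people,dc=example,dc=com',
                                                 'cn=staff,ou=groups,dc=example,dc=com')
print(ok)  # True if the member is now (or already was) listed in the group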

View File

@ -0,0 +1,91 @@
"""
"""
# Created on 2015.10.21
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...core.exceptions import LDAPExtensionError
from ...protocol.microsoft import dir_sync_control, extended_dn_control, show_deleted_control
from ... import SUBTREE, DEREF_NEVER
from ...utils.dn import safe_dn
class DirSync(object):
def __init__(self,
connection,
sync_base,
sync_filter,
attributes,
cookie,
object_security,
ancestors_first,
public_data_only,
incremental_values,
max_length,
hex_guid
):
self.connection = connection
if self.connection.check_names and sync_base:
self.base = safe_dn(sync_base)
else:
self.base = sync_base
self.filter = sync_filter
self.attributes = attributes
self.cookie = cookie
self.object_security = object_security
self.ancestors_first = ancestors_first
self.public_data_only = public_data_only
self.incremental_values = incremental_values
self.max_length = max_length
self.hex_guid = hex_guid
self.more_results = True
def loop(self):
result = self.connection.search(search_base=self.base,
search_filter=self.filter,
search_scope=SUBTREE,
attributes=self.attributes,
dereference_aliases=DEREF_NEVER,
controls=[dir_sync_control(criticality=True,
object_security=self.object_security,
ancestors_first=self.ancestors_first,
public_data_only=self.public_data_only,
incremental_values=self.incremental_values,
max_length=self.max_length, cookie=self.cookie),
extended_dn_control(criticality=False, hex_format=self.hex_guid),
show_deleted_control(criticality=False)]
)
if not self.connection.strategy.sync:
response, result = self.connection.get_response(result)
else:
response = self.connection.response
result = self.connection.result
if result['description'] == 'success' and 'controls' in result and '1.2.840.113556.1.4.841' in result['controls']:
self.more_results = result['controls']['1.2.840.113556.1.4.841']['value']['more_results']
self.cookie = result['controls']['1.2.840.113556.1.4.841']['value']['cookie']
return response
elif 'controls' in result:
raise LDAPExtensionError('Missing DirSync control in response from server')
else:
raise LDAPExtensionError('error %r in DirSync' % result)
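# Usage sketch (assumed server, credentials and base DN): dir_sync() returns this DirSync object; loop()
# is called repeatedly and more_results reports whether the server still has changes for the current cookie.
from ldap3 import Server, Connection
conn = Connection(Server('ldap://dc.example.com'), 'cn=admin,dc=example,dc=com', 'Secret123', auto_bind=True)
sync = conn.extend.microsoft.dir_sync('dc=example,dc=com', attributes=['cn', 'objectGUID'])
while sync.more_results:
    for entry in sync.loop():  # each entry is an object changed since the last cookie
        print(entry)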

View File

@ -0,0 +1,72 @@
"""
"""
# Created on 2015.11.27
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ... import MODIFY_REPLACE, MODIFY_DELETE, MODIFY_ADD
from ...utils.log import log, log_enabled, PROTOCOL
from ...core.results import RESULT_SUCCESS
from ...utils.dn import safe_dn
from ...utils.conv import to_unicode
def ad_modify_password(connection, user_dn, new_password, old_password, controls=None):
# old_password must be None to reset the password when bound with sufficient privileges
if connection.check_names:
user_dn = safe_dn(user_dn)
if str is bytes: # python2, converts to unicode
new_password = to_unicode(new_password)
if old_password:
old_password = to_unicode(old_password)
encoded_new_password = ('"%s"' % new_password).encode('utf-16-le')
if old_password: # normal users must specify old and new password
encoded_old_password = ('"%s"' % old_password).encode('utf-16-le')
result = connection.modify(user_dn,
{'unicodePwd': [(MODIFY_DELETE, [encoded_old_password]),
(MODIFY_ADD, [encoded_new_password])]},
controls)
else: # admin users can reset password without sending the old one
result = connection.modify(user_dn,
{'unicodePwd': [(MODIFY_REPLACE, [encoded_new_password])]},
controls)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
# change successful, returns True
if result['result'] == RESULT_SUCCESS:
return True
# change was not successful, raises exception if raise_exception = True in connection or returns the operation result, error code is in result['result']
if connection.raise_exceptions:
from ...core.exceptions import LDAPOperationResult
if log_enabled(PROTOCOL):
log(PROTOCOL, 'operation result <%s> for <%s>', result, connection)
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
return False
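# Usage sketch (assumed server, credentials and DN): with old_password left as None a sufficiently
# privileged bind resets the password (MODIFY_REPLACE); supplying the old password performs a
# user-initiated change. Active Directory normally accepts unicodePwd changes only over SSL/TLS.
from ldap3 import Server, Connection
conn = Connection(Server('dc.example.com', use_ssl=True), 'cn=admin,dc=example,dc=com', 'Secret123', auto_bind=True)
done = conn.extend.microsoft.modify_password('cn=user1,ou=people,dc=example,dc=com', 'NewPass!234')
print(done)  # True when the server returned success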

View File

@ -0,0 +1,93 @@
"""
"""
# Created on 2016.12.26
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...core.exceptions import LDAPInvalidDnError
from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER
from ...utils.dn import safe_dn
def ad_remove_members_from_groups(connection,
members_dn,
groups_dn,
fix):
"""
:param connection: a bound Connection object
:param members_dn: the list of members to remove from groups
:param groups_dn: the list of groups where members are to be removed
:param fix: checks for group existence and existing members
:return: a boolean where True means that the operation was successful and False means an error has happened
Removes users-groups relations following the Active Directory rules: users are removed from the groups' member attribute
"""
if not isinstance(members_dn, SEQUENCE_TYPES):
members_dn = [members_dn]
if not isinstance(groups_dn, SEQUENCE_TYPES):
groups_dn = [groups_dn]
if connection.check_names: # builds new lists with sanitized dn
safe_members_dn = []
safe_groups_dn = []
for member_dn in members_dn:
safe_members_dn.append(safe_dn(member_dn))
for group_dn in groups_dn:
safe_groups_dn.append(safe_dn(group_dn))
members_dn = safe_members_dn
groups_dn = safe_groups_dn
error = False
for group in groups_dn:
if fix: # checks for existence of group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success':
raise LDAPInvalidDnError(group + ' not found')
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
else:
existing_members = members_dn
existing_members = [element.lower() for element in existing_members]
changes = dict()
member_to_remove = [element for element in members_dn if element.lower() in existing_members]
if member_to_remove:
changes['member'] = (MODIFY_DELETE, member_to_remove)
if changes:
result = connection.modify(group, changes)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
if result['description'] != 'success':
error = True
break
return not error
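# Usage sketch (assumed server, credentials and DNs): mirror image of the add helper; with fix=True only
# values actually present in the group's member attribute are deleted.
from ldap3 import Server, Connection
conn = Connection(Server('ldap://dc.example.com'), 'cn=admin,dc=example,dc=com', 'Secret123', auto_bind=True)
ok = conn.extend.microsoft.remove_members_from_groups('cn=user1,ou=people,dc=example,dc=com',
                                                      'cn=staff,ou=groups,dc=example,dc=com')
print(ok)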

View File

@ -0,0 +1,56 @@
"""
"""
# Created on 2016.11.01
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ... import MODIFY_REPLACE
from ...utils.log import log, log_enabled, PROTOCOL
from ...core.results import RESULT_SUCCESS
from ...utils.dn import safe_dn
def ad_unlock_account(connection, user_dn, controls=None):
if connection.check_names:
user_dn = safe_dn(user_dn)
result = connection.modify(user_dn,
{'lockoutTime': [(MODIFY_REPLACE, ['0'])]},
controls)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
# change successful, returns True
if result['result'] == RESULT_SUCCESS:
return True
# change was not successful, raises exception if raise_exception = True in connection or returns the operation result, error code is in result['result']
if connection.raise_exceptions:
from ...core.exceptions import LDAPOperationResult
if log_enabled(PROTOCOL):
log(PROTOCOL, 'operation result <%s> for <%s>', result, connection)
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
return result
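# Usage sketch (assumed server, credentials and DN): clearing lockoutTime to 0 unlocks the account; the
# call returns True on success, otherwise the raw operation result (or raises when raise_exceptions is set).
from ldap3 import Server, Connection
conn = Connection(Server('ldap://dc.example.com'), 'cn=admin,dc=example,dc=com', 'Secret123', auto_bind=True)
print(conn.extend.microsoft.unlock_account('cn=user1,ou=people,dc=example,dc=com'))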

View File

@ -0,0 +1,153 @@
"""
"""
# Created on 2016.04.16
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...core.exceptions import LDAPInvalidDnError
from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER
from ...utils.dn import safe_dn
def edir_add_members_to_groups(connection,
members_dn,
groups_dn,
fix,
transaction):
"""
:param connection: a bound Connection object
:param members_dn: the list of members to add to groups
:param groups_dn: the list of groups where members are to be added
:param fix: checks for inconsistencies in the users-groups relation and fixes them
:param transaction: activates an LDAP transaction
:return: a boolean where True means that the operation was successful and False means an error has happened
Establishes users-groups relations following the eDirectory rules: groups are added to securityEquals and groupMembership
attributes in the member object while members are added to member and equivalentToMe attributes in the group object.
Raises LDAPInvalidDnError if members or groups are not found in the DIT.
"""
if not isinstance(members_dn, SEQUENCE_TYPES):
members_dn = [members_dn]
if not isinstance(groups_dn, SEQUENCE_TYPES):
groups_dn = [groups_dn]
transaction_control = None
error = False
if connection.check_names: # builds new lists with sanitized dn
safe_members_dn = []
safe_groups_dn = []
for member_dn in members_dn:
safe_members_dn.append(safe_dn(member_dn))
for group_dn in groups_dn:
safe_groups_dn.append(safe_dn(group_dn))
members_dn = safe_members_dn
groups_dn = safe_groups_dn
if transaction:
transaction_control = connection.extend.novell.start_transaction()
if not error:
for member in members_dn:
if fix: # checks for existence of member and for already assigned groups
result = connection.search(member, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['securityEquals', 'groupMembership'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success':
raise LDAPInvalidDnError(member + ' not found')
existing_security_equals = response[0]['attributes']['securityEquals'] if 'securityEquals' in response[0]['attributes'] else []
existing_group_membership = response[0]['attributes']['groupMembership'] if 'groupMembership' in response[0]['attributes'] else []
existing_security_equals = [element.lower() for element in existing_security_equals]
existing_group_membership = [element.lower() for element in existing_group_membership]
else:
existing_security_equals = []
existing_group_membership = []
changes = dict()
security_equals_to_add = [element for element in groups_dn if element.lower() not in existing_security_equals]
group_membership_to_add = [element for element in groups_dn if element.lower() not in existing_group_membership]
if security_equals_to_add:
changes['securityEquals'] = (MODIFY_ADD, security_equals_to_add)
if group_membership_to_add:
changes['groupMembership'] = (MODIFY_ADD, group_membership_to_add)
if changes:
result = connection.modify(member, changes, controls=[transaction_control] if transaction else None)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
if result['description'] != 'success':
error = True
break
if not error:
for group in groups_dn:
if fix: # checks for existence of group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member', 'equivalentToMe'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success':
raise LDAPInvalidDnError(group + ' not found')
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
existing_equivalent_to_me = response[0]['attributes']['equivalentToMe'] if 'equivalentToMe' in response[0]['attributes'] else []
existing_members = [element.lower() for element in existing_members]
existing_equivalent_to_me = [element.lower() for element in existing_equivalent_to_me]
else:
existing_members = []
existing_equivalent_to_me = []
changes = dict()
member_to_add = [element for element in members_dn if element.lower() not in existing_members]
equivalent_to_me_to_add = [element for element in members_dn if element.lower() not in existing_equivalent_to_me]
if member_to_add:
changes['member'] = (MODIFY_ADD, member_to_add)
if equivalent_to_me_to_add:
changes['equivalentToMe'] = (MODIFY_ADD, equivalent_to_me_to_add)
if changes:
result = connection.modify(group, changes, controls=[transaction_control] if transaction else None)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
if result['description'] != 'success':
error = True
break
if transaction:
if error: # aborts transaction in case of error in the modify operations
result = connection.extend.novell.end_transaction(commit=False, controls=[transaction_control])
else:
result = connection.extend.novell.end_transaction(commit=True, controls=[transaction_control])
if result['description'] != 'success':
error = True
return not error # returns True if no error is raised in the LDAP operations
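# Usage sketch (assumed server, credentials and DNs): the eDirectory variant updates both sides of the
# relation (securityEquals/groupMembership on the member, member/equivalentToMe on the group) and can wrap
# all the modify operations in a single LDAP transaction through the Novell grouping control.
from ldap3 import Server, Connection
conn = Connection(Server('ldap://edir.example.com'), 'cn=admin,o=example', 'Secret123', auto_bind=True)
ok = conn.extend.novell.add_members_to_groups('cn=user1,ou=people,o=example',
                                              'cn=staff,ou=groups,o=example',
                                              fix=True, transaction=True)
print(ok)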

View File

@ -0,0 +1,172 @@
"""
"""
# Created on 2016.05.14
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .addMembersToGroups import edir_add_members_to_groups
from ...core.exceptions import LDAPInvalidDnError
from ... import SEQUENCE_TYPES, BASE, DEREF_NEVER
from ...utils.dn import safe_dn
def _check_members_have_memberships(connection,
members_dn,
groups_dn):
"""
:param connection: a bound Connection object
:param members_dn: the list of members whose group memberships are to be checked
:param groups_dn: the list of groups to check the members against
:return: two booleans. The first, when True, means that all members have membership in all groups; the second, when True, means that
there are inconsistencies in the securityEquals attribute
Checks the users' group memberships.
Raises LDAPInvalidDnError if a member is not found in the DIT.
"""
if not isinstance(members_dn, SEQUENCE_TYPES):
members_dn = [members_dn]
if not isinstance(groups_dn, SEQUENCE_TYPES):
groups_dn = [groups_dn]
partial = False # True when a member has groupMembership but doesn't have securityEquals
for member in members_dn:
result = connection.search(member, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['groupMembership', 'securityEquals'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success': # member not found in DIT
raise LDAPInvalidDnError(member + ' not found')
existing_security_equals = response[0]['attributes']['securityEquals'] if 'securityEquals' in response[0]['attributes'] else []
existing_group_membership = response[0]['attributes']['groupMembership'] if 'groupMembership' in response[0]['attributes'] else []
existing_security_equals = [element.lower() for element in existing_security_equals]
existing_group_membership = [element.lower() for element in existing_group_membership]
for group in groups_dn:
if group.lower() not in existing_group_membership:
return False, False
if group.lower() not in existing_security_equals:
partial = True
return True, partial
def _check_groups_contain_members(connection,
groups_dn,
members_dn):
"""
:param connection: a bound Connection object
:param members_dn: the list of members to check the groups against
:param groups_dn: the list of groups whose member lists are to be checked
:return: two booleans. The first, when True, means that every group contains all the members; the second, when True, means that
there are inconsistencies in the equivalentToMe attribute
Checks if groups have the members in their 'member' attribute.
Raises LDAPInvalidDnError if a group is not found in the DIT.
"""
if not isinstance(groups_dn, SEQUENCE_TYPES):
groups_dn = [groups_dn]
if not isinstance(members_dn, SEQUENCE_TYPES):
members_dn = [members_dn]
partial = False # True when a group has member but doesn't have equivalentToMe
for group in groups_dn:
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member', 'equivalentToMe'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success':
raise LDAPInvalidDnError(group + ' not found')
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
existing_equivalent_to_me = response[0]['attributes']['equivalentToMe'] if 'equivalentToMe' in response[0]['attributes'] else []
existing_members = [element.lower() for element in existing_members]
existing_equivalent_to_me = [element.lower() for element in existing_equivalent_to_me]
for member in members_dn:
if member.lower() not in existing_members:
return False, False
if member.lower() not in existing_equivalent_to_me:
partial = True
return True, partial
def edir_check_groups_memberships(connection,
members_dn,
groups_dn,
fix,
transaction):
"""
:param connection: a bound Connection object
:param members_dn: the list of members to check
:param groups_dn: the list of groups to check
:param fix: checks for inconsistencies in the users-groups relation and fixes them
:param transaction: activates an LDAP transaction when fixing
:return: a boolean where True means that the operation was successful and False means an error has happened
Checks and fixes users-groups relations following the eDirectory rules: groups are checked against 'groupMembership'
attribute in the member object while members are checked against 'member' attribute in the group object.
Raises LDAPInvalidDnError if members or groups are not found in the DIT.
"""
if not isinstance(groups_dn, SEQUENCE_TYPES):
groups_dn = [groups_dn]
if not isinstance(members_dn, SEQUENCE_TYPES):
members_dn = [members_dn]
if connection.check_names: # builds new lists with sanitized dn
safe_members_dn = []
safe_groups_dn = []
for member_dn in members_dn:
safe_members_dn.append(safe_dn(member_dn))
for group_dn in groups_dn:
safe_groups_dn.append(safe_dn(group_dn))
members_dn = safe_members_dn
groups_dn = safe_groups_dn
try:
members_have_memberships, partial_member_security = _check_members_have_memberships(connection, members_dn, groups_dn)
groups_contain_members, partial_group_security = _check_groups_contain_members(connection, groups_dn, members_dn)
except LDAPInvalidDnError:
return False
if not members_have_memberships and not groups_contain_members:
return False
if fix: # fix any inconsistencies
if (members_have_memberships and not groups_contain_members) \
or (groups_contain_members and not members_have_memberships) \
or partial_group_security \
or partial_member_security:
for member in members_dn:
for group in groups_dn:
edir_add_members_to_groups(connection, member, group, True, transaction)
return True
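# Usage sketch (assumed server, credentials and DNs): with fix=False the call only reports whether the
# four membership attributes are consistent; with fix=True partial relations are repaired through
# edir_add_members_to_groups above.
from ldap3 import Server, Connection
conn = Connection(Server('ldap://edir.example.com'), 'cn=admin,o=example', 'Secret123', auto_bind=True)
print(conn.extend.novell.check_groups_memberships('cn=user1,ou=people,o=example',
                                                  'cn=staff,ou=groups,o=example',
                                                  fix=False))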

View File

@ -0,0 +1,58 @@
"""
"""
# Created on 2016.04.14
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...extend.operation import ExtendedOperation
from ...protocol.novell import EndGroupTypeRequestValue, EndGroupTypeResponseValue, Sequence
from ...utils.asn1 import decoder
class EndTransaction(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.27.103.2'
self.response_name = '2.16.840.1.113719.1.27.103.2'
self.request_value = EndGroupTypeRequestValue()
self.asn1_spec = EndGroupTypeResponseValue()
def __init__(self, connection, commit=True, controls=None):
if controls and len(controls) == 1:
group_cookie = decoder.decode(controls[0][2], asn1Spec=Sequence())[0][0] # get the cookie from the built groupingControl
else:
group_cookie = None
controls = None
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
if group_cookie:
self.request_value['endGroupCookie'] = group_cookie # transactionGroupingType
if not commit:
self.request_value['endGroupValue'] = '' # an empty endGroupValue means abort transaction
def populate_result(self):
try:
self.result['value'] = self.decoded_response['endGroupValue']
except TypeError:
self.result['value'] = None
def set_response(self):
self.response_value = self.result
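# Usage sketch (assumed server, credentials and DN): start_transaction() returns the groupingControl built
# in set_response() of StartTransaction; the same control is attached to the grouped operations and then
# handed back to end_transaction(), where commit=False aborts the whole group.
from ldap3 import Server, Connection, MODIFY_REPLACE
conn = Connection(Server('ldap://edir.example.com'), 'cn=admin,o=example', 'Secret123', auto_bind=True)
txn = conn.extend.novell.start_transaction()
conn.modify('cn=user1,ou=people,o=example', {'description': [(MODIFY_REPLACE, ['grouped change'])]}, controls=[txn])
print(conn.extend.novell.end_transaction(commit=True, controls=[txn]))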

View File

@ -0,0 +1,41 @@
"""
"""
# Created on 2014.04.30
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...protocol.novell import Identity
from ...extend.operation import ExtendedOperation
class GetBindDn(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.27.100.31'
self.response_name = '2.16.840.1.113719.1.27.100.32'
self.response_attribute = 'identity'
self.asn1_spec = Identity()
def populate_result(self):
try:
self.result['identity'] = str(self.decoded_response) if self.decoded_response else None
except TypeError:
self.result['identity'] = None

View File

@ -0,0 +1,50 @@
"""
"""
# Created on 2014.07.03
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...extend.operation import ExtendedOperation
from ...protocol.novell import ReplicaList
from ...protocol.rfc4511 import LDAPDN
from ...utils.dn import safe_dn
class ListReplicas(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.27.100.19'
self.response_name = '2.16.840.1.113719.1.27.100.20'
self.request_value = LDAPDN()
self.asn1_spec = ReplicaList()
self.response_attribute = 'replicas'
def __init__(self, connection, server_dn, controls=None):
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
if connection.check_names:
server_dn = safe_dn(server_dn)
self.request_value = LDAPDN(server_dn)
def populate_result(self):
try:
self.result['replicas'] = str(self.decoded_response['replicaList']) if self.decoded_response['replicaList'] else None
except TypeError:
self.result['replicas'] = None

View File

@ -0,0 +1,56 @@
"""
"""
# Created on 2014.07.03
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...extend.operation import ExtendedOperation
from ...protocol.novell import NmasGetUniversalPasswordRequestValue, NmasGetUniversalPasswordResponseValue, NMAS_LDAP_EXT_VERSION
from ...utils.dn import safe_dn
class NmasGetUniversalPassword(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.39.42.100.13'
self.response_name = '2.16.840.1.113719.1.39.42.100.14'
self.request_value = NmasGetUniversalPasswordRequestValue()
self.asn1_spec = NmasGetUniversalPasswordResponseValue()
self.response_attribute = 'password'
def __init__(self, connection, user, controls=None):
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
if connection.check_names:
user = safe_dn(user)
self.request_value['nmasver'] = NMAS_LDAP_EXT_VERSION
self.request_value['reqdn'] = user
def populate_result(self):
if self.decoded_response:
self.result['nmasver'] = int(self.decoded_response['nmasver'])
self.result['error'] = int(self.decoded_response['err'])
try:
self.result['password'] = str(self.decoded_response['passwd']) if self.decoded_response['passwd'].hasValue() else None
except TypeError:
self.result['password'] = None

View File

@ -0,0 +1,52 @@
"""
"""
# Created on 2014.07.03
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...extend.operation import ExtendedOperation
from ...protocol.novell import NmasSetUniversalPasswordRequestValue, NmasSetUniversalPasswordResponseValue, NMAS_LDAP_EXT_VERSION
from ...utils.dn import safe_dn
class NmasSetUniversalPassword(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.39.42.100.11'
self.response_name = '2.16.840.1.113719.1.39.42.100.12'
self.request_value = NmasSetUniversalPasswordRequestValue()
self.asn1_spec = NmasSetUniversalPasswordResponseValue()
self.response_attribute = 'password'
def __init__(self, connection, user, new_password, controls=None):
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
if connection.check_names and user:
user = safe_dn(user)
self.request_value['nmasver'] = NMAS_LDAP_EXT_VERSION
if user:
self.request_value['reqdn'] = user
if new_password:
self.request_value['new_passwd'] = new_password
def populate_result(self):
self.result['nmasver'] = int(self.decoded_response['nmasver'])
self.result['error'] = int(self.decoded_response['err'])

View File

@ -0,0 +1,57 @@
"""
"""
# Created on 2014.08.05
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1.type.univ import Integer
from ...core.exceptions import LDAPExtensionError
from ..operation import ExtendedOperation
from ...protocol.rfc4511 import LDAPDN
from ...utils.asn1 import decoder
from ...utils.dn import safe_dn
class PartitionEntryCount(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.27.100.13'
self.response_name = '2.16.840.1.113719.1.27.100.14'
self.request_value = LDAPDN()
self.response_attribute = 'entry_count'
def __init__(self, connection, partition_dn, controls=None):
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
if connection.check_names:
partition_dn = safe_dn(partition_dn)
self.request_value = LDAPDN(partition_dn)
def populate_result(self):
substrate = self.decoded_response
try:
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['entry_count'] = int(decoded)
except Exception:
raise LDAPExtensionError('unable to decode substrate')
if substrate:
raise LDAPExtensionError('unknown substrate remaining')

View File

@ -0,0 +1,156 @@
"""
"""
# Created on 2016.04.17
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...core.exceptions import LDAPInvalidDnError
from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER
from ...utils.dn import safe_dn
def edir_remove_members_from_groups(connection,
members_dn,
groups_dn,
fix,
transaction):
"""
:param connection: a bound Connection object
:param members_dn: the list of members to remove from groups
:param groups_dn: the list of groups where members are to be removed
:param fix: checks for inconsistencies in the users-groups relation and fixes them
:param transaction: activates an LDAP transaction
:return: a boolean where True means that the operation was successful and False means an error has happened
Removes users-groups relations following the eDirectory rules: groups are removed from securityEquals and groupMembership
attributes in the member object while members are removed from member and equivalentToMe attributes in the group object.
Raises LDAPInvalidDnError if members or groups are not found in the DIT.
"""
if not isinstance(members_dn, SEQUENCE_TYPES):
members_dn = [members_dn]
if not isinstance(groups_dn, SEQUENCE_TYPES):
groups_dn = [groups_dn]
if connection.check_names: # builds new lists with sanitized dn
safe_members_dn = []
safe_groups_dn = []
for member_dn in members_dn:
safe_members_dn.append(safe_dn(member_dn))
for group_dn in groups_dn:
safe_groups_dn.append(safe_dn(group_dn))
members_dn = safe_members_dn
groups_dn = safe_groups_dn
transaction_control = None
error = False
if transaction:
transaction_control = connection.extend.novell.start_transaction()
if not error:
for member in members_dn:
if fix: # checks for existence of member and for already assigned groups
result = connection.search(member, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['securityEquals', 'groupMembership'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success':
raise LDAPInvalidDnError(member + ' not found')
existing_security_equals = response[0]['attributes']['securityEquals'] if 'securityEquals' in response[0]['attributes'] else []
existing_group_membership = response[0]['attributes']['groupMembership'] if 'groupMembership' in response[0]['attributes'] else []
else:
existing_security_equals = groups_dn
existing_group_membership = groups_dn
existing_security_equals = [element.lower() for element in existing_security_equals]
existing_group_membership = [element.lower() for element in existing_group_membership]
changes = dict()
security_equals_to_remove = [element for element in groups_dn if element.lower() in existing_security_equals]
group_membership_to_remove = [element for element in groups_dn if element.lower() in existing_group_membership]
if security_equals_to_remove:
changes['securityEquals'] = (MODIFY_DELETE, security_equals_to_remove)
if group_membership_to_remove:
changes['groupMembership'] = (MODIFY_DELETE, group_membership_to_remove)
if changes:
result = connection.modify(member, changes, controls=[transaction_control] if transaction else None)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
if result['description'] != 'success':
error = True
break
if not error:
for group in groups_dn:
if fix: # checks for existence of group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member', 'equivalentToMe'])
if not connection.strategy.sync:
response, result = connection.get_response(result)
else:
response, result = connection.response, connection.result
if not result['description'] == 'success':
raise LDAPInvalidDnError(group + ' not found')
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
existing_equivalent_to_me = response[0]['attributes']['equivalentToMe'] if 'equivalentToMe' in response[0]['attributes'] else []
else:
existing_members = members_dn
existing_equivalent_to_me = members_dn
existing_members = [element.lower() for element in existing_members]
existing_equivalent_to_me = [element.lower() for element in existing_equivalent_to_me]
changes = dict()
member_to_remove = [element for element in members_dn if element.lower() in existing_members]
equivalent_to_me_to_remove = [element for element in members_dn if element.lower() in existing_equivalent_to_me]
if member_to_remove:
changes['member'] = (MODIFY_DELETE, member_to_remove)
if equivalent_to_me_to_remove:
changes['equivalentToMe'] = (MODIFY_DELETE, equivalent_to_me_to_remove)
if changes:
result = connection.modify(group, changes, controls=[transaction_control] if transaction else None)
if not connection.strategy.sync:
_, result = connection.get_response(result)
else:
result = connection.result
if result['description'] != 'success':
error = True
break
if transaction:
if error: # aborts transaction in case of error in the modify operations
result = connection.extend.novell.end_transaction(commit=False, controls=[transaction_control])
else:
result = connection.extend.novell.end_transaction(commit=True, controls=[transaction_control])
if result['description'] != 'success':
error = True
return not error # returns True if no error is raised in the LDAP operations
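# Usage sketch (assumed server, credentials and DNs): removal counterpart of the eDirectory add helper,
# again optionally grouped into a single LDAP transaction.
from ldap3 import Server, Connection
conn = Connection(Server('ldap://edir.example.com'), 'cn=admin,o=example', 'Secret123', auto_bind=True)
ok = conn.extend.novell.remove_members_from_groups('cn=user1,ou=people,o=example',
                                                   'cn=staff,ou=groups,o=example',
                                                   fix=True, transaction=True)
print(ok)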

View File

@ -0,0 +1,79 @@
"""
"""
# Created on 2014.08.07
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
from pyasn1.type.univ import Integer
from ...core.exceptions import LDAPExtensionError
from ...protocol.novell import LDAPDN, ReplicaInfoRequestValue
from ..operation import ExtendedOperation
from ...utils.asn1 import decoder
from ...utils.dn import safe_dn
class ReplicaInfo(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.27.100.17'
self.response_name = '2.16.840.1.113719.1.27.100.18'
# self.asn1_spec = ReplicaInfoResponseValue()
self.request_value = ReplicaInfoRequestValue()
self.response_attribute = 'partition_dn'
def __init__(self, connection, server_dn, partition_dn, controls=None):
if connection.check_names:
if server_dn:
server_dn = safe_dn(server_dn)
if partition_dn:
partition_dn = safe_dn(partition_dn)
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
self.request_value['server_dn'] = server_dn
self.request_value['partition_dn'] = partition_dn
def populate_result(self):
substrate = self.decoded_response
try:
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['partition_id'] = int(decoded)
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['replica_state'] = int(decoded)
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['modification_time'] = datetime.utcfromtimestamp(int(decoded))
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['purge_time'] = datetime.utcfromtimestamp(int(decoded))
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['local_partition_id'] = int(decoded)
decoded, substrate = decoder.decode(substrate, asn1Spec=LDAPDN())
self.result['partition_dn'] = str(decoded)
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['replica_type'] = int(decoded)
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer())
self.result['flags'] = int(decoded)
except Exception:
raise LDAPExtensionError('unable to decode substrate')
if substrate:
raise LDAPExtensionError('unknown substrate remaining')

View File

@ -0,0 +1,56 @@
"""
"""
# Created on 2016.04.14
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...extend.operation import ExtendedOperation
from ...protocol.novell import CreateGroupTypeRequestValue, CreateGroupTypeResponseValue, GroupingControlValue
from ...protocol.controls import build_control
class StartTransaction(ExtendedOperation):
def config(self):
self.request_name = '2.16.840.1.113719.1.27.103.1'
self.response_name = '2.16.840.1.113719.1.27.103.1'
self.request_value = CreateGroupTypeRequestValue()
self.asn1_spec = CreateGroupTypeResponseValue()
def __init__(self, connection, controls=None):
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
self.request_value['createGroupType'] = '2.16.840.1.113719.1.27.103.7' # transactionGroupingType
def populate_result(self):
self.result['cookie'] = int(self.decoded_response['createGroupCookie'])
try:
self.result['value'] = self.decoded_response['createGroupValue']
except TypeError:
self.result['value'] = None
def set_response(self):
try:
grouping_cookie_value = GroupingControlValue()
grouping_cookie_value['groupingCookie'] = self.result['cookie']
self.response_value = build_control('2.16.840.1.113719.1.27.103.7', True, grouping_cookie_value, encode_control_value=True) # groupingControl
except TypeError:
self.response_value = None

View File

@ -0,0 +1,91 @@
"""
"""
# Created on 2014.07.04
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ..core.results import RESULT_SUCCESS
from ..core.exceptions import LDAPExtensionError
from ..utils.asn1 import decoder
class ExtendedOperation(object):
def __init__(self, connection, controls=None):
self.connection = connection
self.decoded_response = None
self.result = None
self.asn1_spec = None # if None the response_value is returned without encoding
self.request_name = None
self.response_name = None
self.request_value = None
self.response_value = None
self.response_attribute = None
self.controls = controls
self.config()
def send(self):
if self.connection.check_names and self.connection.server.info is not None and self.connection.server.info.supported_extensions is not None: # checks if extension is supported
for request_name in self.connection.server.info.supported_extensions:
if request_name[0] == self.request_name:
break
else:
raise LDAPExtensionError('extension not in DSA list of supported extensions')
resp = self.connection.extended(self.request_name, self.request_value, self.controls)
if not self.connection.strategy.sync:
_, self.result = self.connection.get_response(resp)
else:
self.result = self.connection.result
self.decode_response()
self.populate_result()
self.set_response()
return self.response_value
def populate_result(self):
pass
def decode_response(self):
if not self.result:
return None
if self.result['result'] not in [RESULT_SUCCESS]:
if self.connection.raise_exceptions:
raise LDAPExtensionError('extended operation error: ' + self.result['description'] + ' - ' + self.result['message'])
else:
return None
if not self.response_name or self.result['responseName'] == self.response_name:
if self.result['responseValue']:
if self.asn1_spec is not None:
decoded, unprocessed = decoder.decode(self.result['responseValue'], asn1Spec=self.asn1_spec)
if unprocessed:
raise LDAPExtensionError('error decoding extended response value')
self.decoded_response = decoded
else:
self.decoded_response = self.result['responseValue']
else:
raise LDAPExtensionError('invalid response name received')
def set_response(self):
self.response_value = self.result[self.response_attribute] if self.result and self.response_attribute in self.result else None
self.connection.response = self.response_value
def config(self):
pass
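# Subclassing sketch (illustrative, not part of the module): a minimal operation modelled on the RFC 4532
# "Who am I?" request. config() fixes the OID and the result attribute, send() runs the exchange through
# the machinery above, and populate_result() copies the raw responseValue that decode_response() left in
# decoded_response (no asn1_spec is set, so the value is not ASN.1-decoded).
class WhoAmISketch(ExtendedOperation):
    def config(self):
        self.request_name = '1.3.6.1.4.1.4203.1.11.3'  # RFC 4532 "Who am I?" OID
        self.response_attribute = 'authz_id'

    def populate_result(self):
        self.result['authz_id'] = self.decoded_response

# hypothetical usage, assuming a bound connection named conn:
# print(WhoAmISketch(conn).send())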

View File

@ -0,0 +1,125 @@
"""
"""
# Created on 2014.07.08
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ... import SUBTREE, DEREF_ALWAYS
from ...utils.dn import safe_dn
from ...core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_SIZE_LIMIT_EXCEEDED
from ...core.exceptions import LDAPOperationResult
from ...utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED
def paged_search_generator(connection,
search_base,
search_filter,
search_scope=SUBTREE,
dereference_aliases=DEREF_ALWAYS,
attributes=None,
size_limit=0,
time_limit=0,
types_only=False,
get_operational_attributes=False,
controls=None,
paged_size=100,
paged_criticality=False):
if connection.check_names and search_base:
search_base = safe_dn(search_base)
responses = []
cookie = True # performs search at least one time
while cookie:
result = connection.search(search_base,
search_filter,
search_scope,
dereference_aliases,
attributes,
size_limit,
time_limit,
types_only,
get_operational_attributes,
controls,
paged_size,
paged_criticality,
None if cookie is True else cookie)
if not isinstance(result, bool):
response, result = connection.get_response(result)
else:
response = connection.response
result = connection.result
responses.extend(response)
try:
cookie = result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
except KeyError:
cookie = None
if result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
if log_enabled(PROTOCOL):
log(PROTOCOL, 'paged search operation result <%s> for <%s>', result, connection)
if result['result'] == RESULT_SIZE_LIMIT_EXCEEDED:
while responses:
yield responses.pop()
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
while responses:
yield responses.pop()
connection.response = None
def paged_search_accumulator(connection,
search_base,
search_filter,
search_scope=SUBTREE,
dereference_aliases=DEREF_ALWAYS,
attributes=None,
size_limit=0,
time_limit=0,
types_only=False,
get_operational_attributes=False,
controls=None,
paged_size=100,
paged_criticality=False):
if connection.check_names and search_base:
search_base = safe_dn(search_base)
responses = []
for response in paged_search_generator(connection,
search_base,
search_filter,
search_scope,
dereference_aliases,
attributes,
size_limit,
time_limit,
types_only,
get_operational_attributes,
controls,
paged_size,
paged_criticality):
responses.append(response)
connection.response = responses
return responses
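# Usage sketch (assumed server, credentials and base DN): the generator re-issues the search with the
# cookie taken from the 1.2.840.113556.1.4.319 control until the server reports no further pages, so
# entries can be consumed lazily even when the result set exceeds the server size limit.
from ldap3 import Server, Connection
conn = Connection(Server('ldap://dc.example.com'), 'cn=admin,dc=example,dc=com', 'Secret123', auto_bind=True)
for entry in paged_search_generator(conn, 'dc=example,dc=com', '(objectClass=person)',
                                    attributes=['cn'], paged_size=500):
    print(entry)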

View File

@ -0,0 +1,121 @@
"""
"""
# Created on 2016.07.08
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
try:
from queue import Empty
except ImportError: # Python 2
# noinspection PyUnresolvedReferences
from Queue import Empty
from ...core.exceptions import LDAPExtensionError
from ...protocol.persistentSearch import persistent_search_control
from ... import SEQUENCE_TYPES
from ...utils.dn import safe_dn
class PersistentSearch(object):
def __init__(self,
connection,
search_base,
search_filter,
search_scope,
dereference_aliases,
attributes,
size_limit,
time_limit,
controls,
changes_only,
events_type,
notifications,
streaming,
callback
):
if connection.strategy.sync:
raise LDAPExtensionError('Persistent Search needs an asynchronous streaming connection')
if connection.check_names and search_base:
search_base = safe_dn(search_base)
self.connection = connection
self.changes_only = changes_only
self.notifications = notifications
self.message_id = None
self.base = search_base
self.filter = search_filter
self.scope = search_scope
self.dereference_aliases = dereference_aliases
self.attributes = attributes
self.size_limit = size_limit
self.time_limit = time_limit
self.connection.strategy.streaming = streaming
if callback and callable(callback):
self.connection.strategy.callback = callback
elif callback:
raise LDAPExtensionError('callback is not callable')
if not isinstance(controls, SEQUENCE_TYPES):
self.controls = []
else:
self.controls = controls
self.controls.append(persistent_search_control(events_type, changes_only, notifications))
self.start()
def start(self):
if self.message_id: # persistent search already started
return
if not self.connection.bound:
self.connection.bind()
with self.connection.strategy.async_lock:
self.message_id = self.connection.search(search_base=self.base,
search_filter=self.filter,
search_scope=self.scope,
dereference_aliases=self.dereference_aliases,
attributes=self.attributes,
size_limit=self.size_limit,
time_limit=self.time_limit,
controls=self.controls)
self.connection.strategy.persistent_search_message_id = self.message_id
def stop(self):
self.connection.abandon(self.message_id)
self.connection.unbind()
if self.message_id in self.connection.strategy._responses:
del self.connection.strategy._responses[self.message_id]
if hasattr(self.connection.strategy, '_requests') and self.message_id in self.connection.strategy._requests: # the asynchronous strategy keeps a dict of requests that could be returned by get_response()
del self.connection.strategy._requests[self.message_id]
self.connection.strategy.persistent_search_message_id = None
self.message_id = None
def next(self):
if not self.connection.strategy.streaming and not self.connection.strategy.callback:
try:
return self.connection.strategy.events.get_nowait()
except Empty:
return None
raise LDAPExtensionError('Persistent search is not accumulating events in queue')
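# Usage sketch (assumptions: a reachable server, and that the standard container defined elsewhere in the
# package exposes this class as persistent_search with roughly these parameters): persistent search
# requires an asynchronous streaming strategy; when neither streaming nor a callback is active, events are
# read back with next().
from ldap3 import Server, Connection, ASYNC_STREAM, SUBTREE
conn = Connection(Server('ldap://dc.example.com'), 'cn=admin,dc=example,dc=com', 'Secret123',
                  client_strategy=ASYNC_STREAM, auto_bind=True)
monitor = conn.extend.standard.persistent_search('dc=example,dc=com', '(objectClass=person)', SUBTREE,
                                                 streaming=False)
print(monitor.next())  # None until the server pushes an entry change notification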

View File

@ -0,0 +1,72 @@
"""
"""
# Created on 2014.04.30
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ... import HASHED_NONE
from ...extend.operation import ExtendedOperation
from ...protocol.rfc3062 import PasswdModifyRequestValue, PasswdModifyResponseValue
from ...utils.hashed import hashed
from ...protocol.sasl.sasl import validate_simple_password
from ...utils.dn import safe_dn
from ...core.results import RESULT_SUCCESS
# implements RFC3062
class ModifyPassword(ExtendedOperation):
def config(self):
self.request_name = '1.3.6.1.4.1.4203.1.11.1'
self.request_value = PasswdModifyRequestValue()
self.asn1_spec = PasswdModifyResponseValue()
self.response_attribute = 'new_password'
def __init__(self, connection, user=None, old_password=None, new_password=None, hash_algorithm=None, salt=None, controls=None):
ExtendedOperation.__init__(self, connection, controls) # calls super __init__()
if user:
if connection.check_names:
user = safe_dn(user)
self.request_value['userIdentity'] = user
if old_password:
if not isinstance(old_password, bytes): # bytes are returned raw, as per RFC (4.2)
old_password = validate_simple_password(old_password, True)
self.request_value['oldPasswd'] = old_password
if new_password:
if not isinstance(new_password, bytes): # bytes are returned raw, as per RFC (4.2)
new_password = validate_simple_password(new_password, True)
if hash_algorithm is None or hash_algorithm == HASHED_NONE:
self.request_value['newPasswd'] = new_password
else:
self.request_value['newPasswd'] = hashed(hash_algorithm, new_password, salt)
def populate_result(self):
try:
self.result[self.response_attribute] = str(self.decoded_response['genPasswd'])
except TypeError: # optional field can be absent, so returns True if operation is successful else False
if self.result['result'] == RESULT_SUCCESS:
self.result[self.response_attribute] = True
else: # change was not successful, raises exception if raise_exception = True in connection or returns the operation result, error code is in result['result']
self.result[self.response_attribute] = False
                if self.connection.raise_exceptions:
from ...core.exceptions import LDAPOperationResult
raise LDAPOperationResult(result=self.result['result'], description=self.result['description'], dn=self.result['dn'], message=self.result['message'], response_type=self.result['type'])
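# Illustrative usage (sketch): with an open, bound Connection `conn`, and assuming the
# ExtendedOperation base class (not shown here) provides its send() helper that performs
# the request and fills in .result:
#
#   op = ModifyPassword(conn, user='uid=jdoe,ou=people,dc=example,dc=com',
#                       old_password='old-secret', new_password='new-secret')
#   op.send()
#   op.result['new_password']  # True on success, or the server-generated password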

View File

@ -0,0 +1,42 @@
"""
"""
# Created on 2014.04.30
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
# implements RFC4532
from pyasn1.type.univ import NoValue
from ...extend.operation import ExtendedOperation
from ...utils.conv import to_unicode
from ...protocol.rfc4511 import OctetString
class WhoAmI(ExtendedOperation):
def config(self):
self.request_name = '1.3.6.1.4.1.4203.1.11.3'
self.response_attribute = 'authzid'
def populate_result(self):
try:
self.result['authzid'] = to_unicode(self.decoded_response) if self.decoded_response else None
except TypeError:
self.result['authzid'] = self.decoded_response if self.decoded_response else None
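# Illustrative usage (sketch): with a bound Connection `conn`, and assuming the
# ExtendedOperation base class provides its send() helper:
#
#   who = WhoAmI(conn)
#   who.send()
#   who.result['authzid']  # e.g. 'dn:uid=jdoe,ou=people,dc=example,dc=com'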

View File

@ -0,0 +1,36 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ..protocol.rfc4511 import AbandonRequest, MessageID
def abandon_operation(msg_id):
# AbandonRequest ::= [APPLICATION 16] MessageID
request = AbandonRequest(MessageID(msg_id))
return request
def abandon_request_to_dict(request):
return {'messageId': str(request)}

View File

@ -0,0 +1,72 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .. import SEQUENCE_TYPES
from ..protocol.rfc4511 import AddRequest, LDAPDN, AttributeList, Attribute, AttributeDescription, ResultCode, Vals
from ..protocol.convert import referrals_to_list, attributes_to_dict, validate_attribute_value, prepare_for_sending
def add_operation(dn,
attributes,
auto_encode,
schema=None,
validator=None,
check_names=False):
# AddRequest ::= [APPLICATION 8] SEQUENCE {
# entry LDAPDN,
# attributes AttributeList }
#
# attributes is a dictionary in the form 'attribute': ['val1', 'val2', 'valN']
attribute_list = AttributeList()
for pos, attribute in enumerate(attributes):
attribute_list[pos] = Attribute()
attribute_list[pos]['type'] = AttributeDescription(attribute)
        vals = Vals()  # changed from ValsAtLeast1() to allow empty member values in groups
if isinstance(attributes[attribute], SEQUENCE_TYPES):
for index, value in enumerate(attributes[attribute]):
vals.setComponentByPosition(index, prepare_for_sending(validate_attribute_value(schema, attribute, value, auto_encode, validator, check_names)))
else:
vals.setComponentByPosition(0, prepare_for_sending(validate_attribute_value(schema, attribute, attributes[attribute], auto_encode, validator, check_names)))
attribute_list[pos]['vals'] = vals
request = AddRequest()
request['entry'] = LDAPDN(dn)
request['attributes'] = attribute_list
return request
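# Illustrative call (sketch): the attributes argument is the plain dictionary described
# above; single values and sequences are both accepted.
#
#   request = add_operation('cn=Test User,ou=people,dc=example,dc=com',
#                           {'objectClass': ['inetOrgPerson', 'person', 'top'],
#                            'cn': 'Test User',
#                            'sn': 'User'},
#                           auto_encode=False)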
def add_request_to_dict(request):
return {'entry': str(request['entry']),
'attributes': attributes_to_dict(request['attributes'])}
def add_response_to_dict(response):
return {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'dn': str(response['matchedDN']),
'message': str(response['diagnosticMessage']),
'referrals': referrals_to_list(response['referral'])}

View File

@ -0,0 +1,160 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .. import SIMPLE, ANONYMOUS, SASL, STRING_TYPES
from ..core.results import RESULT_CODES
from ..core.exceptions import LDAPUserNameIsMandatoryError, LDAPPasswordIsMandatoryError, LDAPUnknownAuthenticationMethodError, LDAPUserNameNotAllowedError
from ..protocol.sasl.sasl import validate_simple_password
from ..protocol.rfc4511 import Version, AuthenticationChoice, Simple, BindRequest, ResultCode, SaslCredentials, BindResponse, \
LDAPDN, LDAPString, Referral, ServerSaslCreds, SicilyPackageDiscovery, SicilyNegotiate, SicilyResponse
from ..protocol.convert import authentication_choice_to_dict, referrals_to_list
from ..utils.conv import to_unicode, to_raw
# noinspection PyUnresolvedReferences
def bind_operation(version,
authentication,
name='',
password=None,
sasl_mechanism=None,
sasl_credentials=None,
auto_encode=False):
# BindRequest ::= [APPLICATION 0] SEQUENCE {
# version INTEGER (1 .. 127),
# name LDAPDN,
# authentication AuthenticationChoice }
request = BindRequest()
request['version'] = Version(version)
if name is None:
name = ''
if isinstance(name, STRING_TYPES):
request['name'] = to_unicode(name) if auto_encode else name
if authentication == SIMPLE:
if not name:
raise LDAPUserNameIsMandatoryError('user name is mandatory in simple bind')
if password:
request['authentication'] = AuthenticationChoice().setComponentByName('simple', Simple(validate_simple_password(password)))
else:
raise LDAPPasswordIsMandatoryError('password is mandatory in simple bind')
elif authentication == SASL:
sasl_creds = SaslCredentials()
sasl_creds['mechanism'] = sasl_mechanism
if sasl_credentials is not None:
sasl_creds['credentials'] = sasl_credentials
# else:
# sasl_creds['credentials'] = None
request['authentication'] = AuthenticationChoice().setComponentByName('sasl', sasl_creds)
elif authentication == ANONYMOUS:
if name:
raise LDAPUserNameNotAllowedError('user name not allowed in anonymous bind')
request['name'] = ''
request['authentication'] = AuthenticationChoice().setComponentByName('simple', Simple(''))
elif authentication == 'SICILY_PACKAGE_DISCOVERY': # https://msdn.microsoft.com/en-us/library/cc223501.aspx
request['name'] = ''
request['authentication'] = AuthenticationChoice().setComponentByName('sicilyPackageDiscovery', SicilyPackageDiscovery(''))
elif authentication == 'SICILY_NEGOTIATE_NTLM': # https://msdn.microsoft.com/en-us/library/cc223501.aspx
request['name'] = 'NTLM'
request['authentication'] = AuthenticationChoice().setComponentByName('sicilyNegotiate', SicilyNegotiate(name.create_negotiate_message())) # ntlm client in self.name
elif authentication == 'SICILY_RESPONSE_NTLM': # https://msdn.microsoft.com/en-us/library/cc223501.aspx
name.parse_challenge_message(password) # server_creds returned by server in password
server_creds = name.create_authenticate_message()
if server_creds:
request['name'] = ''
request['authentication'] = AuthenticationChoice().setComponentByName('sicilyResponse', SicilyResponse(server_creds))
else:
request = None
else:
raise LDAPUnknownAuthenticationMethodError('unknown authentication method')
return request
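# Illustrative call (sketch): building a version 3 simple bind request; SIMPLE is the
# authentication constant imported above, and bind_request_to_dict() below turns the
# request back into a plain dictionary.
#
#   request = bind_operation(3, SIMPLE, 'cn=admin,dc=example,dc=com', 'secret')
#   bind_request_to_dict(request)
#   # -> {'version': 3, 'name': 'cn=admin,dc=example,dc=com', 'authentication': {...}}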
def bind_request_to_dict(request):
return {'version': int(request['version']),
'name': str(request['name']),
'authentication': authentication_choice_to_dict(request['authentication'])}
def bind_response_operation(result_code,
matched_dn='',
diagnostic_message='',
referral=None,
server_sasl_credentials=None):
# BindResponse ::= [APPLICATION 1] SEQUENCE {
# COMPONENTS OF LDAPResult,
# serverSaslCreds [7] OCTET STRING OPTIONAL }
response = BindResponse()
response['resultCode'] = ResultCode(result_code)
response['matchedDN'] = LDAPDN(matched_dn)
response['diagnosticMessage'] = LDAPString(diagnostic_message)
if referral:
response['referral'] = Referral(referral)
if server_sasl_credentials:
response['serverSaslCreds'] = ServerSaslCreds(server_sasl_credentials)
return response
def bind_response_to_dict(response):
return {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'dn': str(response['matchedDN']),
'message': str(response['diagnosticMessage']),
'referrals': referrals_to_list(response['referral']),
'saslCreds': bytes(response['serverSaslCreds']) if response['serverSaslCreds'] is not None and response['serverSaslCreds'].hasValue() else None}
def sicily_bind_response_to_dict(response):
return {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'server_creds': bytes(response['matchedDN']),
'error_message': str(response['diagnosticMessage'])}
def bind_response_to_dict_fast(response):
response_dict = dict()
response_dict['result'] = int(response[0][3]) # resultCode
response_dict['description'] = RESULT_CODES[response_dict['result']]
response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN
response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage
response_dict['referrals'] = None # referrals
response_dict['saslCreds'] = None # saslCreds
for r in response[3:]:
if r[2] == 3: # referrals
response_dict['referrals'] = referrals_to_list(r[3]) # referrals
else:
response_dict['saslCreds'] = bytes(r[3]) # saslCreds
return response_dict
def sicily_bind_response_to_dict_fast(response):
response_dict = dict()
response_dict['result'] = int(response[0][3]) # resultCode
response_dict['description'] = RESULT_CODES[response_dict['result']]
response_dict['server_creds'] = bytes(response[1][3]) # server_creds
response_dict['error_message'] = to_unicode(response[2][3], from_server=True) # error_message
return response_dict

View File

@ -0,0 +1,64 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ..protocol.convert import validate_attribute_value, prepare_for_sending
from ..protocol.rfc4511 import CompareRequest, AttributeValueAssertion, AttributeDescription, LDAPDN, AssertionValue, ResultCode
from ..operation.search import ava_to_dict
from ..operation.bind import referrals_to_list
def compare_operation(dn,
attribute,
value,
auto_encode,
schema=None,
validator=None,
check_names=False):
# CompareRequest ::= [APPLICATION 14] SEQUENCE {
# entry LDAPDN,
# ava AttributeValueAssertion }
ava = AttributeValueAssertion()
ava['attributeDesc'] = AttributeDescription(attribute)
ava['assertionValue'] = AssertionValue(prepare_for_sending(validate_attribute_value(schema, attribute, value, auto_encode, validator, check_names=check_names)))
request = CompareRequest()
request['entry'] = LDAPDN(dn)
request['ava'] = ava
return request
def compare_request_to_dict(request):
ava = ava_to_dict(request['ava'])
return {'entry': str(request['entry']),
'attribute': ava['attribute'],
'value': ava['value']}
def compare_response_to_dict(response):
return {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
            'dn': str(response['matchedDN']),
            'message': str(response['diagnosticMessage']),
'referrals': referrals_to_list(response['referral'])}

View File

@ -0,0 +1,46 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ..protocol.rfc4511 import DelRequest, LDAPDN, ResultCode
from ..operation.bind import referrals_to_list
def delete_operation(dn):
# DelRequest ::= [APPLICATION 10] LDAPDN
request = DelRequest(LDAPDN(dn))
return request
def delete_request_to_dict(request):
return {'entry': str(request)}
def delete_response_to_dict(response):
return {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'dn': str(response['matchedDN']),
'message': str(response['diagnosticMessage']),
'referrals': referrals_to_list(response['referral'])}

View File

@ -0,0 +1,109 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1.type.univ import OctetString
from pyasn1.type.base import Asn1Item
from ..core.results import RESULT_CODES
from ..protocol.rfc4511 import ExtendedRequest, RequestName, ResultCode, RequestValue
from ..protocol.convert import referrals_to_list
from ..utils.asn1 import encode
from ..utils.conv import to_unicode
# ExtendedRequest ::= [APPLICATION 23] SEQUENCE {
# requestName [0] LDAPOID,
# requestValue [1] OCTET STRING OPTIONAL }
def extended_operation(request_name,
request_value=None,
no_encode=None):
request = ExtendedRequest()
request['requestName'] = RequestName(request_name)
if request_value and isinstance(request_value, Asn1Item):
request['requestValue'] = RequestValue(encode(request_value))
    elif str is not bytes and isinstance(request_value, (bytes, bytearray)):  # in Python 3, bytes values are passed as-is without further encoding
request['requestValue'] = request_value
elif request_value and no_encode: # doesn't encode the value
request['requestValue'] = request_value
elif request_value: # tries to encode as a octet string
request['requestValue'] = RequestValue(encode(OctetString(str(request_value))))
# elif request_value is not None:
# raise LDAPExtensionError('unable to encode value for extended operation')
return request
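# Illustrative call (sketch): the Who Am I extended operation (RFC 4532) carries no
# request value, so only the OID is needed; the branches above are exercised when an
# ASN.1 object or raw bytes are supplied.
#
#   request = extended_operation('1.3.6.1.4.1.4203.1.11.3')
#   extended_request_to_dict(request)
#   # -> {'name': '1.3.6.1.4.1.4203.1.11.3', 'value': None}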
def extended_request_to_dict(request):
# return {'name': str(request['requestName']), 'value': bytes(request['requestValue']) if request['requestValue'] else None}
return {'name': str(request['requestName']), 'value': bytes(request['requestValue']) if 'requestValue' in request and request['requestValue'] is not None and request['requestValue'].hasValue() else None}
def extended_response_to_dict(response):
return {'result': int(response['resultCode']),
'dn': str(response['matchedDN']),
'message': str(response['diagnosticMessage']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'referrals': referrals_to_list(response['referral']),
'responseName': str(response['responseName']) if response['responseName'] is not None and response['responseName'].hasValue() else str(),
'responseValue': bytes(response['responseValue']) if response['responseValue'] is not None and response['responseValue'].hasValue() else bytes()}
def intermediate_response_to_dict(response):
return {'responseName': str(response['responseName']),
'responseValue': bytes(response['responseValue']) if response['responseValue'] else bytes()}
def extended_response_to_dict_fast(response):
response_dict = dict()
response_dict['result'] = int(response[0][3]) # resultCode
response_dict['description'] = RESULT_CODES[response_dict['result']]
response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN
response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage
response_dict['referrals'] = None # referrals
    response_dict['responseName'] = None  # responseName
response_dict['responseValue'] = None # responseValue
for r in response[3:]:
if r[2] == 3: # referrals
response_dict['referrals'] = referrals_to_list(r[3]) # referrals
elif r[2] == 10: # responseName
response_dict['responseName'] = to_unicode(r[3], from_server=True)
response_dict['responseValue'] = b'' # responseValue could be empty
else: # responseValue (11)
response_dict['responseValue'] = bytes(r[3])
return response_dict
def intermediate_response_to_dict_fast(response):
response_dict = dict()
for r in response:
if r[2] == 0: # responseName
response_dict['responseName'] = to_unicode(r[3], from_server=True)
else: # responseValue (1)
response_dict['responseValue'] = bytes(r[3])
return response_dict

View File

@ -0,0 +1,96 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .. import SEQUENCE_TYPES, MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT
from ..protocol.rfc4511 import ModifyRequest, LDAPDN, Changes, Change, Operation, PartialAttribute, AttributeDescription, Vals, ResultCode
from ..operation.bind import referrals_to_list
from ..protocol.convert import changes_to_list, validate_attribute_value, prepare_for_sending
# ModifyRequest ::= [APPLICATION 6] SEQUENCE {
# object LDAPDN,
# changes SEQUENCE OF change SEQUENCE {
# operation ENUMERATED {
# add (0),
# delete (1),
# replace (2),
# ... },
# modification PartialAttribute } }
change_table = {MODIFY_ADD: 0, # accepts actual values too
MODIFY_DELETE: 1,
MODIFY_REPLACE: 2,
MODIFY_INCREMENT: 3,
0: 0,
1: 1,
2: 2,
3: 3}
def modify_operation(dn,
changes,
auto_encode,
schema=None,
validator=None,
check_names=False):
# changes is a dictionary in the form {'attribute': [(operation, [val1, ...]), ...], ...}
# operation is 0 (add), 1 (delete), 2 (replace), 3 (increment)
# increment as per RFC4525
change_list = Changes()
pos = 0
for attribute in changes:
for change_operation in changes[attribute]:
partial_attribute = PartialAttribute()
partial_attribute['type'] = AttributeDescription(attribute)
partial_attribute['vals'] = Vals()
if isinstance(change_operation[1], SEQUENCE_TYPES):
for index, value in enumerate(change_operation[1]):
partial_attribute['vals'].setComponentByPosition(index, prepare_for_sending(validate_attribute_value(schema, attribute, value, auto_encode, validator, check_names=check_names)))
else:
partial_attribute['vals'].setComponentByPosition(0, prepare_for_sending(validate_attribute_value(schema, attribute, change_operation[1], auto_encode, validator, check_names=check_names)))
change = Change()
change['operation'] = Operation(change_table[change_operation[0]])
change['modification'] = partial_attribute
change_list[pos] = change
pos += 1
request = ModifyRequest()
request['object'] = LDAPDN(dn)
request['changes'] = change_list
return request
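# Illustrative call (sketch): the changes argument is the dictionary described above,
# keyed by attribute name with a list of (operation, values) tuples; the MODIFY_*
# constants map to the numeric operations through change_table.
#
#   request = modify_operation('cn=Test User,ou=people,dc=example,dc=com',
#                              {'mail': [(MODIFY_REPLACE, ['jdoe@example.com'])],
#                               'description': [(MODIFY_ADD, ['first entry'])]},
#                              auto_encode=False)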
def modify_request_to_dict(request):
return {'entry': str(request['object']),
'changes': changes_to_list(request['changes'])}
def modify_response_to_dict(response):
return {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'message': str(response['diagnosticMessage']),
'dn': str(response['matchedDN']),
'referrals': referrals_to_list(response['referral'])}

View File

@ -0,0 +1,62 @@
"""
"""
# Created on 2013.05.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ..protocol.rfc4511 import ModifyDNRequest, LDAPDN, RelativeLDAPDN, DeleteOldRDN, NewSuperior, ResultCode
from ..operation.bind import referrals_to_list
# ModifyDNRequest ::= [APPLICATION 12] SEQUENCE {
# entry LDAPDN,
# newrdn RelativeLDAPDN,
# deleteoldrdn BOOLEAN,
# newSuperior [0] LDAPDN OPTIONAL }
def modify_dn_operation(dn,
new_relative_dn,
delete_old_rdn=True,
new_superior=None):
request = ModifyDNRequest()
request['entry'] = LDAPDN(dn)
request['newrdn'] = RelativeLDAPDN(new_relative_dn)
request['deleteoldrdn'] = DeleteOldRDN(delete_old_rdn)
if new_superior:
request['newSuperior'] = NewSuperior(new_superior)
return request
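# Illustrative call (sketch): renaming an entry and moving it under a new parent in a
# single operation, per the ASN.1 definition above.
#
#   request = modify_dn_operation('cn=Test User,ou=people,dc=example,dc=com',
#                                 'cn=Renamed User',
#                                 delete_old_rdn=True,
#                                 new_superior='ou=admins,dc=example,dc=com')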
def modify_dn_request_to_dict(request):
return {'entry': str(request['entry']),
'newRdn': str(request['newrdn']),
'deleteOldRdn': bool(request['deleteoldrdn']),
'newSuperior': str(request['newSuperior']) if request['newSuperior'] is not None and request['newSuperior'].hasValue() else None}
def modify_dn_response_to_dict(response):
return {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'dn': str(response['matchedDN']),
'referrals': referrals_to_list(response['referral']),
'message': str(response['diagnosticMessage'])}

View File

@ -0,0 +1,576 @@
"""
"""
# Created on 2013.06.02
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from string import whitespace
from os import linesep
from .. import DEREF_NEVER, BASE, LEVEL, SUBTREE, DEREF_SEARCH, DEREF_BASE, DEREF_ALWAYS, NO_ATTRIBUTES, SEQUENCE_TYPES, get_config_parameter, STRING_TYPES
from ..core.exceptions import LDAPInvalidFilterError, LDAPAttributeError, LDAPInvalidScopeError, LDAPInvalidDereferenceAliasesError
from ..utils.ciDict import CaseInsensitiveDict
from ..protocol.rfc4511 import SearchRequest, LDAPDN, Scope, DerefAliases, Integer0ToMax, TypesOnly, \
AttributeSelection, Selector, EqualityMatch, AttributeDescription, AssertionValue, Filter, \
Not, And, Or, ApproxMatch, GreaterOrEqual, LessOrEqual, ExtensibleMatch, Present, SubstringFilter, \
Substrings, Final, Initial, Any, ResultCode, Substring, MatchingRule, Type, MatchValue, DnAttributes
from ..operation.bind import referrals_to_list
from ..protocol.convert import ava_to_dict, attributes_to_list, search_refs_to_list, validate_assertion_value, prepare_filter_for_sending, search_refs_to_list_fast
from ..protocol.formatters.standard import format_attribute_values
from ..utils.conv import to_unicode, to_raw
ROOT = 0
AND = 1
OR = 2
NOT = 3
MATCH_APPROX = 4
MATCH_GREATER_OR_EQUAL = 5
MATCH_LESS_OR_EQUAL = 6
MATCH_EXTENSIBLE = 7
MATCH_PRESENT = 8
MATCH_SUBSTRING = 9
MATCH_EQUAL = 10
SEARCH_OPEN = 20
SEARCH_OPEN_OR_CLOSE = 21
SEARCH_MATCH_OR_CLOSE = 22
SEARCH_MATCH_OR_CONTROL = 23
class FilterNode(object):
def __init__(self, tag=None, assertion=None):
self.tag = tag
self.parent = None
self.assertion = assertion
self.elements = []
def append(self, filter_node):
filter_node.parent = self
self.elements.append(filter_node)
return filter_node
def __str__(self, pos=0):
        return self.__repr__(pos)
def __repr__(self, pos=0):
node_tags = ['ROOT', 'AND', 'OR', 'NOT', 'MATCH_APPROX', 'MATCH_GREATER_OR_EQUAL', 'MATCH_LESS_OR_EQUAL', 'MATCH_EXTENSIBLE', 'MATCH_PRESENT', 'MATCH_SUBSTRING', 'MATCH_EQUAL']
representation = ' ' * pos + 'tag: ' + node_tags[self.tag] + ' - assertion: ' + str(self.assertion)
if self.elements:
representation += ' - elements: ' + str(len(self.elements))
for element in self.elements:
representation += linesep + ' ' * pos + element.__repr__(pos + 2)
return representation
def evaluate_match(match, schema, auto_escape, auto_encode, validator, check_names):
left_part, equal_sign, right_part = match.strip().partition('=')
if not equal_sign:
raise LDAPInvalidFilterError('invalid matching assertion')
if left_part.endswith('~'): # approximate match '~='
tag = MATCH_APPROX
left_part = left_part[:-1].strip()
right_part = right_part.strip()
assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
elif left_part.endswith('>'): # greater or equal match '>='
tag = MATCH_GREATER_OR_EQUAL
left_part = left_part[:-1].strip()
right_part = right_part.strip()
assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
elif left_part.endswith('<'): # less or equal match '<='
tag = MATCH_LESS_OR_EQUAL
left_part = left_part[:-1].strip()
right_part = right_part.strip()
assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
elif left_part.endswith(':'): # extensible match ':='
tag = MATCH_EXTENSIBLE
left_part = left_part[:-1].strip()
right_part = right_part.strip()
extended_filter_list = left_part.split(':')
matching_rule = False
dn_attributes = False
attribute_name = False
if extended_filter_list[0] == '': # extensible filter format [:dn]:matchingRule:=assertionValue
if len(extended_filter_list) == 2 and extended_filter_list[1].lower().strip() != 'dn':
matching_rule = extended_filter_list[1]
elif len(extended_filter_list) == 3 and extended_filter_list[1].lower().strip() == 'dn':
dn_attributes = True
matching_rule = extended_filter_list[2]
else:
raise LDAPInvalidFilterError('invalid extensible filter')
elif len(extended_filter_list) <= 3: # extensible filter format attr[:dn][:matchingRule]:=assertionValue
if len(extended_filter_list) == 1:
attribute_name = extended_filter_list[0]
elif len(extended_filter_list) == 2:
attribute_name = extended_filter_list[0]
if extended_filter_list[1].lower().strip() == 'dn':
dn_attributes = True
else:
matching_rule = extended_filter_list[1]
elif len(extended_filter_list) == 3 and extended_filter_list[1].lower().strip() == 'dn':
attribute_name = extended_filter_list[0]
dn_attributes = True
matching_rule = extended_filter_list[2]
else:
raise LDAPInvalidFilterError('invalid extensible filter')
if not attribute_name and not matching_rule:
raise LDAPInvalidFilterError('invalid extensible filter')
attribute_name = attribute_name.strip() if attribute_name else False
matching_rule = matching_rule.strip() if matching_rule else False
assertion = {'attr': attribute_name, 'value': validate_assertion_value(schema, attribute_name, right_part, auto_escape, auto_encode, validator, check_names), 'matchingRule': matching_rule, 'dnAttributes': dn_attributes}
elif right_part == '*': # attribute present match '=*'
tag = MATCH_PRESENT
left_part = left_part.strip()
assertion = {'attr': left_part}
elif '*' in right_part: # substring match '=initial*substring*substring*final'
tag = MATCH_SUBSTRING
left_part = left_part.strip()
right_part = right_part.strip()
substrings = right_part.split('*')
initial = validate_assertion_value(schema, left_part, substrings[0], auto_escape, auto_encode, validator, check_names) if substrings[0] else None
final = validate_assertion_value(schema, left_part, substrings[-1], auto_escape, auto_encode, validator, check_names) if substrings[-1] else None
any_string = [validate_assertion_value(schema, left_part, substring, auto_escape, auto_encode, validator, check_names) for substring in substrings[1:-1] if substring]
#assertion = {'attr': left_part, 'initial': initial, 'any': any_string, 'final': final}
assertion = {'attr': left_part}
if initial:
assertion['initial'] = initial
if any_string:
assertion['any'] = any_string
if final:
assertion['final'] = final
else: # equality match '='
tag = MATCH_EQUAL
left_part = left_part.strip()
right_part = right_part.strip()
assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
return FilterNode(tag, assertion)
def parse_filter(search_filter, schema, auto_escape, auto_encode, validator, check_names):
if str is not bytes and isinstance(search_filter, bytes): # python 3 with byte filter
search_filter = to_unicode(search_filter)
search_filter = search_filter.strip()
if search_filter and search_filter.count('(') == search_filter.count(')') and search_filter.startswith('(') and search_filter.endswith(')'):
state = SEARCH_OPEN_OR_CLOSE
root = FilterNode(ROOT)
current_node = root
start_pos = None
skip_white_space = True
just_closed = False
for pos, c in enumerate(search_filter):
if skip_white_space and c in whitespace:
continue
elif (state == SEARCH_OPEN or state == SEARCH_OPEN_OR_CLOSE) and c == '(':
state = SEARCH_MATCH_OR_CONTROL
just_closed = False
elif state == SEARCH_MATCH_OR_CONTROL and c in '&!|':
if c == '&':
current_node = current_node.append(FilterNode(AND))
elif c == '|':
current_node = current_node.append(FilterNode(OR))
elif c == '!':
current_node = current_node.append(FilterNode(NOT))
state = SEARCH_OPEN
elif (state == SEARCH_MATCH_OR_CLOSE or state == SEARCH_OPEN_OR_CLOSE) and c == ')':
if just_closed:
current_node = current_node.parent
else:
just_closed = True
skip_white_space = True
end_pos = pos
if start_pos:
if current_node.tag == NOT and len(current_node.elements) > 0:
raise LDAPInvalidFilterError('NOT (!) clause in filter cannot be multiple')
current_node.append(evaluate_match(search_filter[start_pos:end_pos], schema, auto_escape, auto_encode, validator, check_names))
start_pos = None
state = SEARCH_OPEN_OR_CLOSE
elif (state == SEARCH_MATCH_OR_CLOSE or state == SEARCH_MATCH_OR_CONTROL) and c not in '()':
skip_white_space = False
if not start_pos:
start_pos = pos
state = SEARCH_MATCH_OR_CLOSE
else:
raise LDAPInvalidFilterError('malformed filter')
if len(root.elements) != 1:
raise LDAPInvalidFilterError('missing boolean operator in filter')
return root
else:
raise LDAPInvalidFilterError('invalid filter')
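# Illustrative behaviour (sketch): for a composite filter the parser returns a ROOT node
# whose single child is the outer boolean operator, e.g. with no schema checking:
#
#   root = parse_filter('(&(objectClass=person)(sn=Smith*))', None, True, False, None, False)
#   # root.elements[0].tag          -> AND
#   # root.elements[0].elements[0]  -> MATCH_EQUAL node for objectClass=person
#   # root.elements[0].elements[1]  -> MATCH_SUBSTRING node for sn=Smith*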
def compile_filter(filter_node):
"""Builds ASN1 structure for filter, converts from filter LDAP escaping to bytes"""
compiled_filter = Filter()
if filter_node.tag == AND:
boolean_filter = And()
pos = 0
for element in filter_node.elements:
boolean_filter[pos] = compile_filter(element)
pos += 1
compiled_filter['and'] = boolean_filter
elif filter_node.tag == OR:
boolean_filter = Or()
pos = 0
for element in filter_node.elements:
boolean_filter[pos] = compile_filter(element)
pos += 1
compiled_filter['or'] = boolean_filter
elif filter_node.tag == NOT:
boolean_filter = Not()
boolean_filter['innerNotFilter'] = compile_filter(filter_node.elements[0])
compiled_filter.setComponentByName('notFilter', boolean_filter, verifyConstraints=False) # do not verify constraints because of hack for recursive filters in rfc4511
elif filter_node.tag == MATCH_APPROX:
matching_filter = ApproxMatch()
matching_filter['attributeDesc'] = AttributeDescription(filter_node.assertion['attr'])
matching_filter['assertionValue'] = AssertionValue(prepare_filter_for_sending(filter_node.assertion['value']))
compiled_filter['approxMatch'] = matching_filter
elif filter_node.tag == MATCH_GREATER_OR_EQUAL:
matching_filter = GreaterOrEqual()
matching_filter['attributeDesc'] = AttributeDescription(filter_node.assertion['attr'])
matching_filter['assertionValue'] = AssertionValue(prepare_filter_for_sending(filter_node.assertion['value']))
compiled_filter['greaterOrEqual'] = matching_filter
elif filter_node.tag == MATCH_LESS_OR_EQUAL:
matching_filter = LessOrEqual()
matching_filter['attributeDesc'] = AttributeDescription(filter_node.assertion['attr'])
matching_filter['assertionValue'] = AssertionValue(prepare_filter_for_sending(filter_node.assertion['value']))
compiled_filter['lessOrEqual'] = matching_filter
elif filter_node.tag == MATCH_EXTENSIBLE:
matching_filter = ExtensibleMatch()
if filter_node.assertion['matchingRule']:
matching_filter['matchingRule'] = MatchingRule(filter_node.assertion['matchingRule'])
if filter_node.assertion['attr']:
matching_filter['type'] = Type(filter_node.assertion['attr'])
matching_filter['matchValue'] = MatchValue(prepare_filter_for_sending(filter_node.assertion['value']))
matching_filter['dnAttributes'] = DnAttributes(filter_node.assertion['dnAttributes'])
compiled_filter['extensibleMatch'] = matching_filter
elif filter_node.tag == MATCH_PRESENT:
matching_filter = Present(AttributeDescription(filter_node.assertion['attr']))
compiled_filter['present'] = matching_filter
elif filter_node.tag == MATCH_SUBSTRING:
matching_filter = SubstringFilter()
matching_filter['type'] = AttributeDescription(filter_node.assertion['attr'])
substrings = Substrings()
pos = 0
if 'initial' in filter_node.assertion and filter_node.assertion['initial']:
substrings[pos] = Substring().setComponentByName('initial', Initial(prepare_filter_for_sending(filter_node.assertion['initial'])))
pos += 1
if 'any' in filter_node.assertion and filter_node.assertion['any']:
for substring in filter_node.assertion['any']:
substrings[pos] = Substring().setComponentByName('any', Any(prepare_filter_for_sending(substring)))
pos += 1
if 'final' in filter_node.assertion and filter_node.assertion['final']:
substrings[pos] = Substring().setComponentByName('final', Final(prepare_filter_for_sending(filter_node.assertion['final'])))
matching_filter['substrings'] = substrings
compiled_filter['substringFilter'] = matching_filter
elif filter_node.tag == MATCH_EQUAL:
matching_filter = EqualityMatch()
matching_filter['attributeDesc'] = AttributeDescription(filter_node.assertion['attr'])
matching_filter['assertionValue'] = AssertionValue(prepare_filter_for_sending(filter_node.assertion['value']))
compiled_filter.setComponentByName('equalityMatch', matching_filter)
else:
raise LDAPInvalidFilterError('unknown filter node tag')
return compiled_filter
def build_attribute_selection(attribute_list, schema):
conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
attribute_selection = AttributeSelection()
for index, attribute in enumerate(attribute_list):
if schema and schema.attribute_types:
if ';' in attribute: # exclude tags from validation
if not attribute[0:attribute.index(';')] in schema.attribute_types and attribute.lower() not in conf_attributes_excluded_from_check:
raise LDAPAttributeError('invalid attribute type in attribute list: ' + attribute)
else:
if attribute not in schema.attribute_types and attribute.lower() not in conf_attributes_excluded_from_check:
raise LDAPAttributeError('invalid attribute type in attribute list: ' + attribute)
attribute_selection[index] = Selector(attribute)
return attribute_selection
def search_operation(search_base,
search_filter,
search_scope,
dereference_aliases,
attributes,
size_limit,
time_limit,
types_only,
auto_escape,
auto_encode,
schema=None,
validator=None,
check_names=False):
# SearchRequest ::= [APPLICATION 3] SEQUENCE {
# baseObject LDAPDN,
# scope ENUMERATED {
# baseObject (0),
# singleLevel (1),
# wholeSubtree (2),
# ... },
# derefAliases ENUMERATED {
# neverDerefAliases (0),
# derefInSearching (1),
# derefFindingBaseObj (2),
# derefAlways (3) },
# sizeLimit INTEGER (0 .. maxInt),
# timeLimit INTEGER (0 .. maxInt),
# typesOnly BOOLEAN,
# filter Filter,
# attributes AttributeSelection }
request = SearchRequest()
request['baseObject'] = LDAPDN(search_base)
if search_scope == BASE or search_scope == 0:
request['scope'] = Scope('baseObject')
elif search_scope == LEVEL or search_scope == 1:
request['scope'] = Scope('singleLevel')
elif search_scope == SUBTREE or search_scope == 2:
request['scope'] = Scope('wholeSubtree')
else:
raise LDAPInvalidScopeError('invalid scope type')
if dereference_aliases == DEREF_NEVER or dereference_aliases == 0:
request['derefAliases'] = DerefAliases('neverDerefAliases')
elif dereference_aliases == DEREF_SEARCH or dereference_aliases == 1:
request['derefAliases'] = DerefAliases('derefInSearching')
elif dereference_aliases == DEREF_BASE or dereference_aliases == 2:
request['derefAliases'] = DerefAliases('derefFindingBaseObj')
elif dereference_aliases == DEREF_ALWAYS or dereference_aliases == 3:
request['derefAliases'] = DerefAliases('derefAlways')
else:
raise LDAPInvalidDereferenceAliasesError('invalid dereference aliases type')
request['sizeLimit'] = Integer0ToMax(size_limit)
request['timeLimit'] = Integer0ToMax(time_limit)
request['typesOnly'] = TypesOnly(True) if types_only else TypesOnly(False)
request['filter'] = compile_filter(parse_filter(search_filter, schema, auto_escape, auto_encode, validator, check_names).elements[0]) # parse the searchFilter string and compile it starting from the root node
if not isinstance(attributes, SEQUENCE_TYPES):
attributes = [NO_ATTRIBUTES]
request['attributes'] = build_attribute_selection(attributes, schema)
return request
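# Illustrative call (sketch): building a subtree search request with the constants
# imported at the top of the module; search_request_to_dict() further down converts it
# back to a plain dictionary.
#
#   request = search_operation('dc=example,dc=com', '(mail=*@example.com)', SUBTREE,
#                              DEREF_NEVER, ['cn', 'mail'], 0, 0, False,
#                              auto_escape=True, auto_encode=False)
#   search_request_to_dict(request)['filter']  # -> '(mail=*@example.com)'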
def decode_vals(vals):
return [str(val) for val in vals if val] if vals else None
def decode_vals_fast(vals):
try:
return [to_unicode(val[3], from_server=True) for val in vals if val] if vals else None
except UnicodeDecodeError:
return [val[3] for val in vals if val] if vals else None
def attributes_to_dict(attribute_list):
conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
for attribute in attribute_list:
attributes[str(attribute['type'])] = decode_vals(attribute['vals'])
return attributes
def attributes_to_dict_fast(attribute_list):
conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
for attribute in attribute_list:
attributes[to_unicode(attribute[3][0][3], from_server=True)] = decode_vals_fast(attribute[3][1][3])
return attributes
def decode_raw_vals(vals):
return [bytes(val) for val in vals] if vals else None
def decode_raw_vals_fast(vals):
return [bytes(val[3]) for val in vals] if vals else None
def raw_attributes_to_dict(attribute_list):
conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
for attribute in attribute_list:
attributes[str(attribute['type'])] = decode_raw_vals(attribute['vals'])
return attributes
def raw_attributes_to_dict_fast(attribute_list):
conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
for attribute in attribute_list:
attributes[to_unicode(attribute[3][0][3], from_server=True)] = decode_raw_vals_fast(attribute[3][1][3])
return attributes
def checked_attributes_to_dict(attribute_list, schema=None, custom_formatter=None):
conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
checked_attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
for attribute in attribute_list:
name = str(attribute['type'])
checked_attributes[name] = format_attribute_values(schema, name, decode_raw_vals(attribute['vals']) or [], custom_formatter)
return checked_attributes
def checked_attributes_to_dict_fast(attribute_list, schema=None, custom_formatter=None):
conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
checked_attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
for attribute in attribute_list:
name = to_unicode(attribute[3][0][3], from_server=True)
checked_attributes[name] = format_attribute_values(schema, name, decode_raw_vals_fast(attribute[3][1][3]) or [], custom_formatter)
return checked_attributes
def matching_rule_assertion_to_string(matching_rule_assertion):
return str(matching_rule_assertion)
def filter_to_string(filter_object):
filter_type = filter_object.getName()
filter_string = '('
if filter_type == 'and':
filter_string += '&'
for f in filter_object['and']:
filter_string += filter_to_string(f)
elif filter_type == 'or':
filter_string += '|'
for f in filter_object['or']:
filter_string += filter_to_string(f)
elif filter_type == 'notFilter':
filter_string += '!' + filter_to_string(filter_object['notFilter']['innerNotFilter'])
elif filter_type == 'equalityMatch':
ava = ava_to_dict(filter_object['equalityMatch'])
filter_string += ava['attribute'] + '=' + ava['value']
elif filter_type == 'substringFilter':
attribute = filter_object['substringFilter']['type']
filter_string += str(attribute) + '='
for substring in filter_object['substringFilter']['substrings']:
component = substring.getName()
if substring[component] is not None and substring[component].hasValue():
if component == 'initial':
filter_string += str(substring['initial']) + '*'
elif component == 'any':
filter_string += str(substring['any']) if filter_string.endswith('*') else '*' + str(substring['any'])
filter_string += '*'
elif component == 'final':
filter_string += '*' + str(substring['final'])
elif filter_type == 'greaterOrEqual':
ava = ava_to_dict(filter_object['greaterOrEqual'])
filter_string += ava['attribute'] + '>=' + ava['value']
elif filter_type == 'lessOrEqual':
ava = ava_to_dict(filter_object['lessOrEqual'])
filter_string += ava['attribute'] + '<=' + ava['value']
elif filter_type == 'present':
filter_string += str(filter_object['present']) + '=*'
elif filter_type == 'approxMatch':
ava = ava_to_dict(filter_object['approxMatch'])
filter_string += ava['attribute'] + '~=' + ava['value']
elif filter_type == 'extensibleMatch':
filter_string += matching_rule_assertion_to_string(filter_object['extensibleMatch'])
else:
raise LDAPInvalidFilterError('error converting filter to string')
filter_string += ')'
if str is bytes: # Python2, forces conversion to Unicode
filter_string = to_unicode(filter_string)
return filter_string
def search_request_to_dict(request):
return {'base': str(request['baseObject']),
'scope': int(request['scope']),
'dereferenceAlias': int(request['derefAliases']),
'sizeLimit': int(request['sizeLimit']),
'timeLimit': int(request['timeLimit']),
'typesOnly': bool(request['typesOnly']),
'filter': filter_to_string(request['filter']),
'attributes': attributes_to_list(request['attributes'])}
def search_result_entry_response_to_dict(response, schema, custom_formatter, check_names):
entry = dict()
# entry['dn'] = str(response['object'])
if response['object']:
entry['raw_dn'] = to_raw(response['object'])
if isinstance(response['object'], STRING_TYPES): # mock strategies return string not a PyAsn1 object
entry['dn'] = to_unicode(response['object'])
else:
entry['dn'] = to_unicode(bytes(response['object']), from_server=True)
else:
entry['raw_dn'] = b''
entry['dn'] = ''
entry['raw_attributes'] = raw_attributes_to_dict(response['attributes'])
if check_names:
entry['attributes'] = checked_attributes_to_dict(response['attributes'], schema, custom_formatter)
else:
entry['attributes'] = attributes_to_dict(response['attributes'])
return entry
def search_result_done_response_to_dict(response):
result = {'result': int(response['resultCode']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'message': str(response['diagnosticMessage']),
'dn': str(response['matchedDN']),
'referrals': referrals_to_list(response['referral'])}
if 'controls' in response: # used for returning controls in Mock strategies
result['controls'] = dict()
for control in response['controls']:
result['controls'][control[0]] = control[1]
return result
def search_result_reference_response_to_dict(response):
return {'uri': search_refs_to_list(response)}
def search_result_entry_response_to_dict_fast(response, schema, custom_formatter, check_names):
entry_dict = dict()
entry_dict['raw_dn'] = response[0][3]
entry_dict['dn'] = to_unicode(response[0][3], from_server=True)
entry_dict['raw_attributes'] = raw_attributes_to_dict_fast(response[1][3]) # attributes
if check_names:
entry_dict['attributes'] = checked_attributes_to_dict_fast(response[1][3], schema, custom_formatter) # attributes
else:
entry_dict['attributes'] = attributes_to_dict_fast(response[1][3]) # attributes
return entry_dict
def search_result_reference_response_to_dict_fast(response):
return {'uri': search_refs_to_list_fast([r[3] for r in response])}

View File

@ -0,0 +1,32 @@
"""
"""
# Created on 2013.09.03
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ..protocol.rfc4511 import UnbindRequest
def unbind_operation():
# UnbindRequest ::= [APPLICATION 2] NULL
request = UnbindRequest()
return request

View File

@ -0,0 +1,40 @@
"""
"""
# Created on 2015.10.20
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .rfc4511 import Control, Criticality, LDAPOID
from ..utils.asn1 import encode
def build_control(oid, criticality, value, encode_control_value=True):
control = Control()
control.setComponentByName('controlType', LDAPOID(oid))
control.setComponentByName('criticality', Criticality(criticality))
if value is not None:
if encode_control_value:
control.setComponentByName('controlValue', encode(value))
else:
control.setComponentByName('controlValue', value)
return control

View File

@ -0,0 +1,206 @@
"""
"""
# Created on 2013.07.24
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1.error import PyAsn1Error
from .. import SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
from ..core.exceptions import LDAPControlError, LDAPAttributeError, LDAPObjectClassError, LDAPInvalidValueError
from ..protocol.rfc4511 import Controls, Control
from ..utils.conv import to_raw, to_unicode, escape_filter_chars, is_filter_escaped
from ..protocol.formatters.standard import find_attribute_validator
def attribute_to_dict(attribute):
try:
return {'type': str(attribute['type']), 'values': [str(val) for val in attribute['vals']]}
except PyAsn1Error: # invalid encoding, return bytes value
return {'type': str(attribute['type']), 'values': [bytes(val) for val in attribute['vals']]}
def attributes_to_dict(attributes):
attributes_dict = dict()
for attribute in attributes:
attribute_dict = attribute_to_dict(attribute)
attributes_dict[attribute_dict['type']] = attribute_dict['values']
return attributes_dict
def referrals_to_list(referrals):
return [str(referral) for referral in referrals if referral] if referrals else None
def search_refs_to_list(search_refs):
return [str(search_ref) for search_ref in search_refs if search_ref] if search_refs else None
def search_refs_to_list_fast(search_refs):
return [to_unicode(search_ref) for search_ref in search_refs if search_ref] if search_refs else None
def sasl_to_dict(sasl):
return {'mechanism': str(sasl['mechanism']), 'credentials': bytes(sasl['credentials']) if sasl['credentials'] is not None and sasl['credentials'].hasValue() else None}
def authentication_choice_to_dict(authentication_choice):
return {'simple': str(authentication_choice['simple']) if authentication_choice.getName() == 'simple' else None, 'sasl': sasl_to_dict(authentication_choice['sasl']) if authentication_choice.getName() == 'sasl' else None}
def partial_attribute_to_dict(modification):
try:
return {'type': str(modification['type']), 'value': [str(value) for value in modification['vals']]}
except PyAsn1Error: # invalid encoding, return bytes value
return {'type': str(modification['type']), 'value': [bytes(value) for value in modification['vals']]}
def change_to_dict(change):
return {'operation': int(change['operation']), 'attribute': partial_attribute_to_dict(change['modification'])}
def changes_to_list(changes):
return [change_to_dict(change) for change in changes]
def attributes_to_list(attributes):
return [str(attribute) for attribute in attributes]
def ava_to_dict(ava):
try:
return {'attribute': str(ava['attributeDesc']), 'value': escape_filter_chars(str(ava['assertionValue']))}
except Exception: # invalid encoding, return bytes value
try:
return {'attribute': str(ava['attributeDesc']), 'value': escape_filter_chars(bytes(ava['assertionValue']))}
except Exception:
return {'attribute': str(ava['attributeDesc']), 'value': bytes(ava['assertionValue'])}
def substring_to_dict(substring):
return {'initial': substring['initial'] if substring['initial'] else '', 'any': [middle for middle in substring['any']] if substring['any'] else '', 'final': substring['final'] if substring['final'] else ''}
def prepare_changes_for_request(changes):
prepared = dict()
for change in changes:
attribute_name = change['attribute']['type']
if attribute_name not in prepared:
prepared[attribute_name] = []
prepared[attribute_name].append((change['operation'], change['attribute']['value']))
return prepared
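# A minimal sketch of the expected input and output shapes (the _example_* name is
# illustrative only); 2 is the LDAP 'replace' operation code from RFC 4511.
_example_changes = [{'operation': 2, 'attribute': {'type': 'sn', 'value': ['Smith']}}]
assert prepare_changes_for_request(_example_changes) == {'sn': [(2, ['Smith'])]}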
def build_controls_list(controls):
"""controls is a sequence of Control() or sequences
each sequence must have 3 elements: the control OID, the criticality, the value
criticality must be a boolean
"""
if not controls:
return None
if not isinstance(controls, SEQUENCE_TYPES):
raise LDAPControlError('controls must be a sequence')
built_controls = Controls()
for idx, control in enumerate(controls):
if isinstance(control, Control):
built_controls.setComponentByPosition(idx, control)
elif len(control) == 3 and isinstance(control[1], bool):
built_control = Control()
built_control['controlType'] = control[0]
built_control['criticality'] = control[1]
if control[2] is not None:
built_control['controlValue'] = control[2]
built_controls.setComponentByPosition(idx, built_control)
else:
raise LDAPControlError('control must be a sequence of 3 elements: controlType, criticality (boolean) and controlValue (None if not provided)')
return built_controls
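# A minimal usage sketch (illustrative names only): controls can be passed either as
# Control objects or as (oid, criticality, value) triples; here the paged results control
# OID is used as an example, with no value attached.
_example_controls = build_controls_list([('1.2.840.113556.1.4.319', False, None)])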
def validate_assertion_value(schema, name, value, auto_escape, auto_encode, validator, check_names):
value = to_unicode(value)
if auto_escape:
if '\\' in value and not is_filter_escaped(value):
value = escape_filter_chars(value)
value = validate_attribute_value(schema, name, value, auto_encode, validator=validator, check_names=check_names)
return value
def validate_attribute_value(schema, name, value, auto_encode, validator=None, check_names=False):
conf_classes_excluded_from_check = [v.lower() for v in get_config_parameter('CLASSES_EXCLUDED_FROM_CHECK')]
conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
conf_utf8_syntaxes = get_config_parameter('UTF8_ENCODED_SYNTAXES')
conf_utf8_types = [v.lower() for v in get_config_parameter('UTF8_ENCODED_TYPES')]
if schema and schema.attribute_types:
if ';' in name:
name = name.split(';')[0]
if check_names and schema.object_classes and name.lower() == 'objectclass':
if to_unicode(value).lower() not in conf_classes_excluded_from_check and to_unicode(value) not in schema.object_classes:
raise LDAPObjectClassError('invalid class in objectClass attribute: ' + str(value))
elif check_names and name not in schema.attribute_types and name.lower() not in conf_attributes_excluded_from_check:
raise LDAPAttributeError('invalid attribute ' + name)
else: # try standard validators
validator = find_attribute_validator(schema, name, validator)
validated = validator(value)
if validated is False:
try: # checks whether the value is a bytes value erroneously converted to a string (such as "b'1234'"), a common case in Python 3 when the encoding is not specified
if value[0:2] == "b'" and value[-1] == "'":
value = to_raw(value[2:-1])
validated = validator(value)
except Exception:
raise LDAPInvalidValueError('value \'%s\' not valid for attribute \'%s\'' % (value, name))
if validated is False:
raise LDAPInvalidValueError('value \'%s\' not valid for attribute \'%s\'' % (value, name))
elif validated is not True: # a valid LDAP value equivalent to the actual value
value = validated
# converts to utf-8 for well known Unicode LDAP syntaxes
if auto_encode and ((name in schema.attribute_types and schema.attribute_types[name].syntax in conf_utf8_syntaxes) or name.lower() in conf_utf8_types):
value = to_unicode(value) # tries to convert from local encoding to Unicode
return to_raw(value)
def prepare_filter_for_sending(raw_string):
i = 0
ints = []
raw_string = to_raw(raw_string)
while i < len(raw_string):
if (raw_string[i] == 92 or raw_string[i] == '\\') and i < len(raw_string) - 2: # 92 is backslash
try:
ints.append(int(raw_string[i + 1: i + 3], 16))
i += 2
except ValueError: # not an LDAP escape sequence, the backslash is sent as is
ints.append(92) # adds backslash
else:
if str is not bytes: # Python 3
ints.append(raw_string[i])
else: # Python 2
ints.append(ord(raw_string[i]))
i += 1
if str is not bytes: # Python 3
return bytes(ints)
else: # Python 2
return ''.join(chr(x) for x in ints)
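# A minimal sketch of the escaping behaviour: the RFC 4515 escape '\2a' (a literal '*')
# is converted back to its raw byte before the filter is sent on the wire.
assert prepare_filter_for_sending('cn=\\2a') == b'cn=*'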
def prepare_for_sending(raw_string):
return to_raw(raw_string) if isinstance(raw_string, STRING_TYPES) else raw_string

View File

@ -0,0 +1,407 @@
"""
"""
# Created on 2014.10.28
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import re
from binascii import hexlify
from uuid import UUID
from datetime import datetime, timedelta
from ...utils.conv import to_unicode
from ...core.timezone import OffsetTzInfo
def format_unicode(raw_value):
try:
if str is not bytes: # Python 3
return str(raw_value, 'utf-8', errors='strict')
else: # Python 2
return unicode(raw_value, 'utf-8', errors='strict')
except (TypeError, UnicodeDecodeError):
pass
return raw_value
def format_integer(raw_value):
try:
return int(raw_value)
except (TypeError, ValueError): # expected exceptions
pass
except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
pass
return raw_value
def format_binary(raw_value):
try:
return bytes(raw_value)
except TypeError: # expected exceptions
pass
except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
pass
return raw_value
def format_uuid(raw_value):
try:
return str(UUID(bytes=raw_value))
except (TypeError, ValueError):
return format_unicode(raw_value)
except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
pass
return raw_value
def format_uuid_le(raw_value):
try:
return '{' + str(UUID(bytes_le=raw_value)) + '}'
except (TypeError, ValueError):
return format_unicode(raw_value)
except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
pass
return raw_value
def format_boolean(raw_value):
if raw_value in [b'TRUE', b'true', b'True']:
return True
if raw_value in [b'FALSE', b'false', b'False']:
return False
return raw_value
def format_ad_timestamp(raw_value):
"""
Active Directory stores date/time values as the number of 100-nanosecond intervals
that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
"""
if raw_value == b'9223372036854775807': # max value to be stored in a 64 bit signed int
return datetime.max # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999)
try:
timestamp = int(raw_value)
if timestamp < 0: # ad timestamp cannot be negative
return raw_value
except Exception:
return raw_value
try:
return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600, tz=OffsetTzInfo(0, 'UTC')) # forces true division in python 2
except (OSError, OverflowError, ValueError): # on Windows fromtimestamp() does not accept timestamps before the epoch
try:
unix_epoch = datetime.fromtimestamp(0, tz=OffsetTzInfo(0, 'UTC'))
diff_seconds = timedelta(seconds=timestamp/10000000.0 - 11644473600)
return unix_epoch + diff_seconds
except Exception:
pass
except Exception:
pass
return raw_value
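# A minimal sketch of the conversion (illustrative value): 116444736000000000 is exactly
# 11644473600 seconds, the 1601-to-1970 offset, expressed in 100-nanosecond units, so it
# formats as the Unix epoch.
_example_epoch = format_ad_timestamp(b'116444736000000000')
assert _example_epoch.isoformat() == '1970-01-01T00:00:00+00:00'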
try: # uses regular expressions and the timezone class (Python 3.2 and later)
from datetime import timezone
time_format = re.compile(
r'''
^
(?P<Year>[0-9]{4})
(?P<Month>0[1-9]|1[0-2])
(?P<Day>0[1-9]|[12][0-9]|3[01])
(?P<Hour>[01][0-9]|2[0-3])
(?:
(?P<Minute>[0-5][0-9])
(?P<Second>[0-5][0-9]|60)?
)?
(?:
[.,]
(?P<Fraction>[0-9]+)
)?
(?:
Z
|
(?:
(?P<Offset>[+-])
(?P<OffHour>[01][0-9]|2[0-3])
(?P<OffMinute>[0-5][0-9])?
)
)
$
''',
re.VERBOSE
)
def format_time(raw_value):
try:
match = time_format.fullmatch(to_unicode(raw_value))
if match is None:
return raw_value
matches = match.groupdict()
offset = timedelta(
hours=int(matches['OffHour'] or 0),
minutes=int(matches['OffMinute'] or 0)
)
if matches['Offset'] == '-':
offset *= -1
# Python does not support leap seconds in datetime (!)
if matches['Second'] == '60':
matches['Second'] = '59'
# According to the RFC, the fraction may be applied to the hour or minute value (!)
fraction = float('0.' + (matches['Fraction'] or '0'))
if matches['Minute'] is None:
fraction *= 60
minute = int(fraction)
fraction -= minute
else:
minute = int(matches['Minute'])
if matches['Second'] is None:
fraction *= 60
second = int(fraction)
fraction -= second
else:
second = int(matches['Second'])
microseconds = int(fraction * 1000000)
return datetime(
int(matches['Year']),
int(matches['Month']),
int(matches['Day']),
int(matches['Hour']),
minute,
second,
microseconds,
timezone(offset),
)
except Exception: # exceptions should be investigated, anyway the formatter returns the raw_value
pass
return raw_value
except ImportError:
def format_time(raw_value):
"""
From RFC4517:
A value of the Generalized Time syntax is a character string
representing a date and time. The LDAP-specific encoding of a value
of this syntax is a restriction of the format defined in [ISO8601],
and is described by the following ABNF:
GeneralizedTime = century year month day hour
[ minute [ second / leap-second ] ]
[ fraction ]
g-time-zone
century = 2(%x30-39) ; "00" to "99"
year = 2(%x30-39) ; "00" to "99"
month = ( %x30 %x31-39 ) ; "01" (January) to "09"
/ ( %x31 %x30-32 ) ; "10" to "12"
day = ( %x30 %x31-39 ) ; "01" to "09"
/ ( %x31-32 %x30-39 ) ; "10" to "29"
/ ( %x33 %x30-31 ) ; "30" to "31"
hour = ( %x30-31 %x30-39 ) / ( %x32 %x30-33 ) ; "00" to "23"
minute = %x30-35 %x30-39 ; "00" to "59"
second = ( %x30-35 %x30-39 ) ; "00" to "59"
leap-second = ( %x36 %x30 ) ; "60"
fraction = ( DOT / COMMA ) 1*(%x30-39)
g-time-zone = %x5A ; "Z"
/ g-differential
g-differential = ( MINUS / PLUS ) hour [ minute ]
MINUS = %x2D ; minus sign ("-")
"""
if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or (b'Z' in raw_value and not raw_value.endswith(b'Z')): # first ten characters are mandatory and must be numeric or timezone or fraction
return raw_value
# sets position for fixed values
year = int(raw_value[0: 4])
month = int(raw_value[4: 6])
day = int(raw_value[6: 8])
hour = int(raw_value[8: 10])
minute = 0
second = 0
microsecond = 0
remain = raw_value[10:]
if remain and remain.endswith(b'Z'): # uppercase 'Z'
sep = b'Z'
elif b'+' in remain: # timezone can be specified with +hh[mm] or -hh[mm]
sep = b'+'
elif b'-' in remain:
sep = b'-'
else: # timezone not specified
return raw_value
time, _, offset = remain.partition(sep)
if time and (b'.' in time or b',' in time):
# fraction time
if time[0] in b',.':
minute = 6 * int(time[1] if str is bytes else chr(time[1])) # Python 2 / Python 3
elif time[2] in b',.':
minute = int(raw_value[10: 12])
second = 6 * int(time[3] if str is bytes else chr(time[3])) # Python 2 / Python 3
elif time[4] in b',.':
minute = int(raw_value[10: 12])
second = int(raw_value[12: 14])
microsecond = 100000 * int(time[5] if str is bytes else chr(time[5])) # Python 2 / Python 3
elif len(time) == 2: # mmZ format
minute = int(raw_value[10: 12])
elif len(time) == 0: # Z format
pass
elif len(time) == 4: # mmssZ
minute = int(raw_value[10: 12])
second = int(raw_value[12: 14])
else:
return raw_value
if sep == b'Z': # UTC
timezone = OffsetTzInfo(0, 'UTC')
else: # build timezone
try:
if len(offset) == 2:
timezone_hour = int(offset[:2])
timezone_minute = 0
elif len(offset) == 4:
timezone_hour = int(offset[:2])
timezone_minute = int(offset[2:4])
else: # malformed timezone
raise ValueError
except ValueError:
return raw_value
if timezone_hour > 23 or timezone_minute > 59: # invalid timezone
return raw_value
if str is not bytes: # Python 3
timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), 'UTC' + str(sep + offset, encoding='utf-8'))
else: # Python 2
timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), unicode('UTC' + sep + offset, encoding='utf-8'))
try:
return datetime(year=year,
month=month,
day=day,
hour=hour,
minute=minute,
second=second,
microsecond=microsecond,
tzinfo=timezone)
except (TypeError, ValueError):
pass
return raw_value
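# A minimal sketch (illustrative value): whichever of the two implementations above is
# selected, a Generalized Time value with the 'Z' designator is parsed into an aware
# datetime in UTC.
_example_gt = format_time(b'20181029031108Z')
assert _example_gt.isoformat() == '2018-10-29T03:11:08+00:00'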
def format_time_with_0_year(raw_value):
try:
if raw_value.startswith(b'0000'):
return raw_value
except Exception:
try:
if raw_value.startswith('0000'):
return raw_value
except Exception:
pass
return format_time(raw_value)
def format_sid(raw_value):
"""
SID= "S-1-" IdentifierAuthority 1*SubAuthority
IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
; If the identifier authority is < 2^32, the
; identifier authority is represented as a decimal
; number
; If the identifier authority is >= 2^32,
; the identifier authority is represented in
; hexadecimal
IdentifierAuthorityDec = 1*10DIGIT
; IdentifierAuthorityDec, top level authority of a
; security identifier is represented as a decimal number
IdentifierAuthorityHex = "0x" 12HEXDIG
; IdentifierAuthorityHex, the top-level authority of a
; security identifier is represented as a hexadecimal number
SubAuthority= "-" 1*10DIGIT
; Sub-Authority is always represented as a decimal number
; No leading "0" characters are allowed when IdentifierAuthority
; or SubAuthority is represented as a decimal number
; All hexadecimal digits must be output in string format,
; pre-pended by "0x"
Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
"""
try:
if raw_value.startswith(b'S-1-'):
return raw_value
except Exception:
try:
if raw_value.startswith('S-1-'):
return raw_value
except Exception:
pass
try:
if str is not bytes: # Python 3
revision = int(raw_value[0])
sub_authority_count = int(raw_value[1])
identifier_authority = int.from_bytes(raw_value[2:8], byteorder='big')
if identifier_authority >= 4294967296: # 2 ^ 32
identifier_authority = hex(identifier_authority)
sub_authority = ''
i = 0
while i < sub_authority_count:
sub_authority += '-' + str(int.from_bytes(raw_value[8 + (i * 4): 12 + (i * 4)], byteorder='little')) # little endian
i += 1
else: # Python 2
revision = int(ord(raw_value[0]))
sub_authority_count = int(ord(raw_value[1]))
identifier_authority = int(hexlify(raw_value[2:8]), 16)
if identifier_authority >= 4294967296: # 2 ^ 32
identifier_authority = hex(identifier_authority)
sub_authority = ''
i = 0
while i < sub_authority_count:
sub_authority += '-' + str(int(hexlify(raw_value[11 + (i * 4): 7 + (i * 4): -1]), 16)) # little endian
i += 1
return 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority
except Exception: # any exception should be investigated, anyway the formatter returns the raw_value
pass
return raw_value
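# A minimal sketch (illustrative value): the well-known "Everyone" SID in binary form,
# revision 1, one sub-authority, identifier authority 1, sub-authority 0.
assert format_sid(b'\x01\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00') == 'S-1-1-0'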

View File

@ -0,0 +1,232 @@
"""
"""
# Created on 2014.10.28
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ... import SEQUENCE_TYPES
from .formatters import format_ad_timestamp, format_binary, format_boolean,\
format_integer, format_sid, format_time, format_unicode, format_uuid, format_uuid_le, format_time_with_0_year
from .validators import validate_integer, validate_time, always_valid,\
validate_generic_single_value, validate_boolean, validate_ad_timestamp, validate_sid,\
validate_uuid_le, validate_uuid, validate_zero_and_minus_one_and_positive_int, validate_guid, validate_time_with_0_year
# for each syntax a format function and an input validation function can be specified
standard_formatter = {
'1.2.840.113556.1.4.903': (format_binary, None), # Object (DN-binary) - Microsoft
'1.2.840.113556.1.4.904': (format_unicode, None), # Object (DN-string) - Microsoft
'1.2.840.113556.1.4.905': (format_unicode, None), # String (Teletex) - Microsoft
'1.2.840.113556.1.4.906': (format_integer, validate_integer), # Large integer - Microsoft
'1.2.840.113556.1.4.907': (format_binary, None), # String (NT-sec-desc) - Microsoft
'1.2.840.113556.1.4.1221': (format_binary, None), # Object (OR-name) - Microsoft
'1.2.840.113556.1.4.1362': (format_unicode, None), # String (Case) - Microsoft
'1.3.6.1.4.1.1466.115.121.1.1': (format_binary, None), # ACI item [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.2': (format_binary, None), # Access point [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.3': (format_unicode, None), # Attribute type description
'1.3.6.1.4.1.1466.115.121.1.4': (format_binary, None), # Audio [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.5': (format_binary, None), # Binary [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.6': (format_unicode, None), # Bit String
'1.3.6.1.4.1.1466.115.121.1.7': (format_boolean, validate_boolean), # Boolean
'1.3.6.1.4.1.1466.115.121.1.8': (format_binary, None), # Certificate [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.9': (format_binary, None), # Certificate List [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.10': (format_binary, None), # Certificate Pair [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.11': (format_unicode, None), # Country String
'1.3.6.1.4.1.1466.115.121.1.12': (format_unicode, None), # Distinguished name (DN)
'1.3.6.1.4.1.1466.115.121.1.13': (format_binary, None), # Data Quality Syntax [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.14': (format_unicode, None), # Delivery method
'1.3.6.1.4.1.1466.115.121.1.15': (format_unicode, None), # Directory string
'1.3.6.1.4.1.1466.115.121.1.16': (format_unicode, None), # DIT Content Rule Description
'1.3.6.1.4.1.1466.115.121.1.17': (format_unicode, None), # DIT Structure Rule Description
'1.3.6.1.4.1.1466.115.121.1.18': (format_binary, None), # DL Submit Permission [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.19': (format_binary, None), # DSA Quality Syntax [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.20': (format_binary, None), # DSE Type [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.21': (format_binary, None), # Enhanced Guide
'1.3.6.1.4.1.1466.115.121.1.22': (format_unicode, None), # Facsimile Telephone Number
'1.3.6.1.4.1.1466.115.121.1.23': (format_binary, None), # Fax
'1.3.6.1.4.1.1466.115.121.1.24': (format_time, validate_time), # Generalized time
'1.3.6.1.4.1.1466.115.121.1.25': (format_binary, None), # Guide [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.26': (format_unicode, None), # IA5 string
'1.3.6.1.4.1.1466.115.121.1.27': (format_integer, validate_integer), # Integer
'1.3.6.1.4.1.1466.115.121.1.28': (format_binary, None), # JPEG
'1.3.6.1.4.1.1466.115.121.1.29': (format_binary, None), # Master and Shadow Access Points [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.30': (format_unicode, None), # Matching rule description
'1.3.6.1.4.1.1466.115.121.1.31': (format_unicode, None), # Matching rule use description
'1.3.6.1.4.1.1466.115.121.1.32': (format_unicode, None), # Mail Preference [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.33': (format_unicode, None), # MHS OR Address [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.34': (format_unicode, None), # Name and optional UID
'1.3.6.1.4.1.1466.115.121.1.35': (format_unicode, None), # Name form description
'1.3.6.1.4.1.1466.115.121.1.36': (format_unicode, None), # Numeric string
'1.3.6.1.4.1.1466.115.121.1.37': (format_unicode, None), # Object class description
'1.3.6.1.4.1.1466.115.121.1.38': (format_unicode, None), # OID
'1.3.6.1.4.1.1466.115.121.1.39': (format_unicode, None), # Other mailbox
'1.3.6.1.4.1.1466.115.121.1.40': (format_binary, None), # Octet string
'1.3.6.1.4.1.1466.115.121.1.41': (format_unicode, None), # Postal address
'1.3.6.1.4.1.1466.115.121.1.42': (format_binary, None), # Protocol Information [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.43': (format_binary, None), # Presentation Address [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.44': (format_unicode, None), # Printable string
'1.3.6.1.4.1.1466.115.121.1.45': (format_binary, None), # Subtree specification [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.46': (format_binary, None), # Supplier Information [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.47': (format_binary, None), # Supplier Or Consumer [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.48': (format_binary, None), # Supplier And Consumer [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.49': (format_binary, None), # Supported Algorithm [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.50': (format_unicode, None), # Telephone number
'1.3.6.1.4.1.1466.115.121.1.51': (format_unicode, None), # Teletex terminal identifier
'1.3.6.1.4.1.1466.115.121.1.52': (format_unicode, None), # Teletex number
'1.3.6.1.4.1.1466.115.121.1.53': (format_time, validate_time), # Utc time (deprecated)
'1.3.6.1.4.1.1466.115.121.1.54': (format_unicode, None), # LDAP syntax description
'1.3.6.1.4.1.1466.115.121.1.55': (format_binary, None), # Modify rights [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.56': (format_binary, None), # LDAP Schema Definition [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.57': (format_unicode, None), # LDAP Schema Description [OBSOLETE]
'1.3.6.1.4.1.1466.115.121.1.58': (format_unicode, None), # Substring assertion
'1.3.6.1.1.16.1': (format_uuid, validate_uuid), # UUID
'1.3.6.1.1.16.4': (format_uuid, validate_uuid), # entryUUID (RFC 4530)
'2.16.840.1.113719.1.1.4.1.501': (format_uuid, validate_guid), # GUID (Novell)
'2.16.840.1.113719.1.1.5.1.0': (format_binary, None), # Unknown (Novell)
'2.16.840.1.113719.1.1.5.1.6': (format_unicode, None), # Case Ignore List (Novell)
'2.16.840.1.113719.1.1.5.1.12': (format_binary, None), # Tagged Data (Novell)
'2.16.840.1.113719.1.1.5.1.13': (format_binary, None), # Octet List (Novell)
'2.16.840.1.113719.1.1.5.1.14': (format_unicode, None), # Tagged String (Novell)
'2.16.840.1.113719.1.1.5.1.15': (format_unicode, None), # Tagged Name And String (Novell)
'2.16.840.1.113719.1.1.5.1.16': (format_binary, None), # NDS Replica Pointer (Novell)
'2.16.840.1.113719.1.1.5.1.17': (format_unicode, None), # NDS ACL (Novell)
'2.16.840.1.113719.1.1.5.1.19': (format_time, validate_time), # NDS Timestamp (Novell)
'2.16.840.1.113719.1.1.5.1.22': (format_integer, validate_integer), # Counter (Novell)
'2.16.840.1.113719.1.1.5.1.23': (format_unicode, None), # Tagged Name (Novell)
'2.16.840.1.113719.1.1.5.1.25': (format_unicode, None), # Typed Name (Novell)
'supportedldapversion': (format_integer, None), # supportedLdapVersion (Microsoft)
'octetstring': (format_binary, validate_uuid_le), # octet string (Microsoft)
'1.2.840.113556.1.4.2': (format_uuid_le, validate_uuid_le), # object guid (Microsoft)
'1.2.840.113556.1.4.13': (format_ad_timestamp, validate_ad_timestamp), # builtinCreationTime (Microsoft)
'1.2.840.113556.1.4.26': (format_ad_timestamp, validate_ad_timestamp), # creationTime (Microsoft)
'1.2.840.113556.1.4.49': (format_ad_timestamp, validate_ad_timestamp), # badPasswordTime (Microsoft)
'1.2.840.113556.1.4.51': (format_ad_timestamp, validate_ad_timestamp), # lastLogoff (Microsoft)
'1.2.840.113556.1.4.52': (format_ad_timestamp, validate_ad_timestamp), # lastLogon (Microsoft)
'1.2.840.113556.1.4.96': (format_ad_timestamp, validate_zero_and_minus_one_and_positive_int), # pwdLastSet (Microsoft, can only be set to 0 or -1)
'1.2.840.113556.1.4.146': (format_sid, validate_sid), # objectSid (Microsoft)
'1.2.840.113556.1.4.159': (format_ad_timestamp, validate_ad_timestamp), # accountExpires (Microsoft)
'1.2.840.113556.1.4.662': (format_ad_timestamp, validate_ad_timestamp), # lockoutTime (Microsoft)
'1.2.840.113556.1.4.1696': (format_ad_timestamp, validate_ad_timestamp), # lastLogonTimestamp (Microsoft)
'1.3.6.1.4.1.42.2.27.8.1.17': (format_time_with_0_year, validate_time_with_0_year) # pwdAccountLockedTime (Novell)
}
def find_attribute_helpers(attr_type, name, custom_formatter):
"""
Tries to format the value following the OID info and the format_helper specification.
The lookup is done by attribute name, then by attribute oid (from schema), then by the attribute names in oid_info, then by attribute syntax
Precedence is:
1. attribute name
2. attribute oid (from schema)
3. attribute names (from oid_info)
4. attribute syntax (from schema)
Custom formatters can be defined in the Server object and have precedence over the standard formatters
If no formatter is found the raw_value is returned as bytes.
Attributes defined as SINGLE_VALUE in the schema are returned as a single object, otherwise as a list of objects
Formatter functions can return any kind of object
Returns a tuple (formatter, validator)
"""
formatter = None
if custom_formatter and isinstance(custom_formatter, dict): # if custom formatters are defined they have precedence over the standard formatters
if name in custom_formatter: # search for attribute name, as returned by the search operation
formatter = custom_formatter[name]
if not formatter and attr_type and attr_type.oid in custom_formatter: # search for attribute oid as returned by schema
formatter = custom_formatter[attr_type.oid]
if not formatter and attr_type and attr_type.oid_info:
if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info
for attr_name in attr_type.oid_info[2]:
if attr_name in custom_formatter:
formatter = custom_formatter[attr_name]
break
elif attr_type.oid_info[2] in custom_formatter: # search for name defined in oid_info
formatter = custom_formatter[attr_type.oid_info[2]]
if not formatter and attr_type and attr_type.syntax in custom_formatter: # search for syntax defined in schema
formatter = custom_formatter[attr_type.syntax]
if not formatter and name in standard_formatter: # search for attribute name, as returned by the search operation
formatter = standard_formatter[name]
if not formatter and attr_type and attr_type.oid in standard_formatter: # search for attribute oid as returned by schema
formatter = standard_formatter[attr_type.oid]
if not formatter and attr_type and attr_type.oid_info:
if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info
for attr_name in attr_type.oid_info[2]:
if attr_name in standard_formatter:
formatter = standard_formatter[attr_name]
break
elif attr_type.oid_info[2] in standard_formatter: # search for name defined in oid_info
formatter = standard_formatter[attr_type.oid_info[2]]
if not formatter and attr_type and attr_type.syntax in standard_formatter: # search for syntax defined in schema
formatter = standard_formatter[attr_type.syntax]
if formatter is None:
return None, None
return formatter
def format_attribute_values(schema, name, values, custom_formatter):
if not values: # the RFCs state that attributes must always have values, yet some flaky servers return empty values too
return []
if not isinstance(values, SEQUENCE_TYPES):
values = [values]
if schema and schema.attribute_types and name in schema.attribute_types:
attr_type = schema.attribute_types[name]
else:
attr_type = None
attribute_helpers = find_attribute_helpers(attr_type, name, custom_formatter)
if not isinstance(attribute_helpers, tuple): # custom formatter
formatter = attribute_helpers
else:
formatter = format_unicode if not attribute_helpers[0] else attribute_helpers[0]
formatted_values = [formatter(raw_value) for raw_value in values] # executes formatter
if formatted_values:
return formatted_values[0] if (attr_type and attr_type.single_value) else formatted_values
else: # the RFCs state that attributes must always have values, but AD returns empty values in DirSync
return []
def find_attribute_validator(schema, name, custom_validator):
if schema and schema.attribute_types and name in schema.attribute_types:
attr_type = schema.attribute_types[name]
else:
attr_type = None
attribute_helpers = find_attribute_helpers(attr_type, name, custom_validator)
if not isinstance(attribute_helpers, tuple): # custom validator
validator = attribute_helpers
else:
if not attribute_helpers[1]:
if attr_type and attr_type.single_value:
validator = validate_generic_single_value # validate only single value
else:
validator = always_valid # unknown syntax, accepts single and multi value
else:
validator = attribute_helpers[1]
return validator
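# A minimal sketch of the precedence rules (the _example_* names and the lambda are
# illustrative only): a custom formatter keyed by attribute name wins over the standard
# formatters and is applied even without schema information.
_example_custom = {'cn': lambda raw: raw.decode('utf-8').upper()}
assert format_attribute_values(None, 'cn', [b'admin'], _example_custom) == ['ADMIN']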

View File

@ -0,0 +1,461 @@
"""
"""
# Created on 2016.08.09
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from binascii import a2b_hex
from datetime import datetime
from calendar import timegm
from uuid import UUID
from struct import pack
from ... import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, INTEGER_TYPES
from .formatters import format_time, format_ad_timestamp
from ...utils.conv import to_raw, to_unicode, ldap_escape_to_bytes
# Validators return True if the value is valid, False if the value is not valid,
# or a value different from True and False that is a valid value to substitute for the input value
def check_type(input_value, value_type):
if isinstance(input_value, value_type):
return True
if isinstance(input_value, SEQUENCE_TYPES):
for value in input_value:
if not isinstance(value, value_type):
return False
return True
return False
# noinspection PyUnusedLocal
def always_valid(input_value):
return True
def validate_generic_single_value(input_value):
if not isinstance(input_value, SEQUENCE_TYPES):
return True
try: # the object might not have a __len__ method
if len(input_value) == 1:
return True
except Exception:
pass
return False
def validate_zero_and_minus_one_and_positive_int(input_value):
"""Accept -1 only (used by pwdLastSet in AD)
"""
if not isinstance(input_value, SEQUENCE_TYPES):
if isinstance(input_value, NUMERIC_TYPES) or isinstance(input_value, STRING_TYPES):
return True if int(input_value) >= -1 else False
return False
else:
if len(input_value) == 1 and (isinstance(input_value[0], NUMERIC_TYPES) or isinstance(input_value[0], STRING_TYPES)):
return True if int(input_value[0]) >= -1 else False
return False
def validate_integer(input_value):
if check_type(input_value, (float, bool)):
return False
if check_type(input_value, INTEGER_TYPES):
return True
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = [] # builds a list of valid int values
from decimal import Decimal, InvalidOperation
for element in input_value:
try: # tries to convert any type to int; an invalid conversion raises TypeError or ValueError. Double-checked with the Decimal type: if both are valid and equal, the int() value is used
value = to_unicode(element) if isinstance(element, bytes) else element
decimal_value = Decimal(value)
int_value = int(value)
if decimal_value == int_value:
valid_values.append(int_value)
else:
return False
except (ValueError, TypeError, InvalidOperation):
return False
if sequence:
return valid_values
else:
return valid_values[0]
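# A minimal sketch of the contract (illustrative values): string and bytes values that
# represent integers are converted, non-integral numbers are rejected.
assert validate_integer('1000') == 1000
assert validate_integer([b'1', b'2']) == [1, 2]
assert validate_integer('10.5') is False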
def validate_bytes(input_value):
return check_type(input_value, bytes)
def validate_boolean(input_value):
# it can be a real bool or the string TRUE or FALSE; only a single value is allowed
if validate_generic_single_value(input_value): # valid only if a single value or a sequence with a single element
if isinstance(input_value, SEQUENCE_TYPES):
input_value = input_value[0]
if isinstance(input_value, bool):
if input_value:
return 'TRUE'
else:
return 'FALSE'
if str is not bytes and isinstance(input_value, bytes): # Python 3: try to convert bytes to string
input_value = to_unicode(input_value)
if isinstance(input_value, STRING_TYPES):
if input_value.lower() == 'true':
return 'TRUE'
elif input_value.lower() == 'false':
return 'FALSE'
return False
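# A minimal sketch of the contract (illustrative values): booleans and their string forms
# are normalised to the LDAP literals, while multi-valued input is rejected.
assert validate_boolean(True) == 'TRUE'
assert validate_boolean('false') == 'FALSE'
assert validate_boolean(['TRUE', 'TRUE']) is False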
def validate_time_with_0_year(input_value):
# validates Generalized Time but also accepts a 0000 year
# if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = []
changed = False
for element in input_value:
if str is not bytes and isinstance(element, bytes): # Python 3: try to convert bytes to string
element = to_unicode(element)
if isinstance(element, STRING_TYPES): # checks whether it is already a Generalized Time string
if element.startswith('0000') or isinstance(format_time(to_raw(element)), datetime): # valid Generalized Time string
valid_values.append(element)
else:
return False
elif isinstance(element, datetime):
changed = True
if element.tzinfo: # a datetime with a timezone
valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
else: # datetime without timezone, assumed local and adjusted to UTC
offset = datetime.now() - datetime.utcnow()
valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
else:
return False
if changed:
if sequence:
return valid_values
else:
return valid_values[0]
else:
return True
def validate_time(input_value):
# if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = []
changed = False
for element in input_value:
if str is not bytes and isinstance(element, bytes): # Python 3: try to convert bytes to string
element = to_unicode(element)
if isinstance(element, STRING_TYPES): # checks whether it is already a Generalized Time string
if isinstance(format_time(to_raw(element)), datetime): # valid Generalized Time string
valid_values.append(element)
else:
return False
elif isinstance(element, datetime):
changed = True
if element.tzinfo: # a datetime with a timezone
valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
else: # datetime without timezone, assumed local and adjusted to UTC
offset = datetime.now() - datetime.utcnow()
valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
else:
return False
if changed:
if sequence:
return valid_values
else:
return valid_values[0]
else:
return True
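# A minimal sketch of the contract (illustrative values, Python 3 only because of the
# timezone class): a valid Generalized Time string passes through unchanged (True means
# "use the value as is"), a timezone-aware datetime is converted to Generalized Time.
from datetime import timezone as _example_tz
assert validate_time('20181029031108Z') is True
assert validate_time(datetime(2018, 10, 29, 3, 11, 8, tzinfo=_example_tz.utc)) == '20181029031108+0000'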
def validate_ad_timestamp(input_value):
"""
Active Directory stores date/time values as the number of 100-nanosecond intervals
that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
"""
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = []
changed = False
for element in input_value:
if str is not bytes and isinstance(element, bytes): # Python 3: try to convert bytes to string
element = to_unicode(element)
if isinstance(element, NUMERIC_TYPES):
if 0 <= element <= 9223372036854775807: # min and max for the AD timestamp starting from 12:00 AM January 1, 1601
valid_values.append(element)
else:
return False
elif isinstance(element, STRING_TYPES): # checks whether it is already an AD timestamp
if isinstance(format_ad_timestamp(to_raw(element)), datetime): # valid AD timestamp
valid_values.append(element)
else:
return False
elif isinstance(element, datetime):
changed = True
if element.tzinfo: # a datetime with a timezone
valid_values.append(to_raw((timegm(element.utctimetuple()) + 11644473600) * 10000000, encoding='ascii'))
else: # datetime without timezone, assumed local and adjusted to UTC
offset = datetime.now() - datetime.utcnow()
valid_values.append(to_raw((timegm((element - offset).timetuple()) + 11644473600) * 10000000, encoding='ascii'))
else:
return False
if changed:
if sequence:
return valid_values
else:
return valid_values[0]
else:
return True
def validate_guid(input_value):
"""
object guid in uuid format (Novell eDirectory)
"""
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = []
changed = False
for element in input_value:
if isinstance(element, STRING_TYPES):
try:
valid_values.append(UUID(element).bytes)
changed = True
except ValueError: # try if the value is an escaped byte sequence
try:
valid_values.append(UUID(element.replace('\\', '')).bytes)
changed = True
continue
except ValueError:
if str is not bytes: # python 3
pass
else:
valid_values.append(element)
continue
return False
elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid
valid_values.append(element)
else:
return False
if changed:
if sequence:
return valid_values
else:
return valid_values[0]
else:
return True
def validate_uuid(input_value):
"""
object entryUUID in uuid format
"""
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = []
changed = False
for element in input_value:
if isinstance(element, STRING_TYPES):
try:
valid_values.append(str(UUID(element)))
changed = True
except ValueError: # try if the value is an escaped byte sequence
try:
valid_values.append(str(UUID(element.replace('\\', ''))))
changed = True
continue
except ValueError:
if str is not bytes: # python 3
pass
else:
valid_values.append(element)
continue
return False
elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid
valid_values.append(element)
else:
return False
if changed:
if sequence:
return valid_values
else:
return valid_values[0]
else:
return True
def validate_uuid_le(input_value):
"""
Active Directory stores objectGUID in uuid_le format, follows RFC4122 and MS-DTYP:
"{07039e68-4373-264d-a0a7-07039e684373}": string representation big endian, converted to little endian (with or without brace curles)
"689e030773434d26a7a007039e684373": packet representation, already in little endian
"\68\9e\03\07\73\43\4d\26\a7\a0\07\03\9e\68\43\73": bytes representation, already in little endian
byte sequence: already in little endian
"""
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = []
changed = False
for element in input_value:
if isinstance(element, STRING_TYPES):
if element[0] == '{' and element[-1] == '}':
valid_values.append(UUID(hex=element).bytes_le) # string representation, value in big endian, converts to little endian
changed = True
elif '-' in element:
valid_values.append(UUID(hex=element).bytes_le) # string representation, value in big endian, converts to little endian
changed = True
elif '\\' in element:
valid_values.append(UUID(bytes_le=ldap_escape_to_bytes(element)).bytes_le) # byte representation, value in little endian
changed = True
elif '-' not in element: # value in little endian
valid_values.append(UUID(bytes_le=a2b_hex(element)).bytes_le) # packet representation, value in little endian, converts to little endian
changed = True
elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid uuid
valid_values.append(element) # value is untouched, must be in little endian
else:
return False
if changed:
if sequence:
return valid_values
else:
return valid_values[0]
else:
return True
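# A minimal sketch (illustrative value taken from the docstring above): the bracketed
# big-endian string form converts to the 16-byte little-endian value used by AD.
_example_le = validate_uuid_le('{07039e68-4373-264d-a0a7-07039e684373}')
assert _example_le == a2b_hex('689e030773434d26a7a007039e684373')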
def validate_sid(input_value):
"""
SID= "S-1-" IdentifierAuthority 1*SubAuthority
IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
; If the identifier authority is < 2^32, the
; identifier authority is represented as a decimal
; number
; If the identifier authority is >= 2^32,
; the identifier authority is represented in
; hexadecimal
IdentifierAuthorityDec = 1*10DIGIT
; IdentifierAuthorityDec, top level authority of a
; security identifier is represented as a decimal number
IdentifierAuthorityHex = "0x" 12HEXDIG
; IdentifierAuthorityHex, the top-level authority of a
; security identifier is represented as a hexadecimal number
SubAuthority= "-" 1*10DIGIT
; Sub-Authority is always represented as a decimal number
; No leading "0" characters are allowed when IdentifierAuthority
; or SubAuthority is represented as a decimal number
; All hexadecimal digits must be output in string format,
; pre-pended by "0x"
Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
If you have a SID like S-a-b-c-d-e-f-g-...
Then the bytes are
a (revision)
N (number of dashes minus two)
bbbbbb (six bytes of "b" treated as a 48-bit number in big-endian format)
cccc (four bytes of "c" treated as a 32-bit number in little-endian format)
dddd (four bytes of "d" treated as a 32-bit number in little-endian format)
eeee (four bytes of "e" treated as a 32-bit number in little-endian format)
ffff (four bytes of "f" treated as a 32-bit number in little-endian format)
"""
if not isinstance(input_value, SEQUENCE_TYPES):
sequence = False
input_value = [input_value]
else:
sequence = True # indicates if a sequence must be returned
valid_values = []
changed = False
for element in input_value:
if isinstance(element, STRING_TYPES):
if element.startswith('S-'):
parts = element.split('-')
sid_bytes = pack('<q', int(parts[1]))[0:1] # revision number
sid_bytes += pack('<q', len(parts[3:]))[0:1] # number of sub authorities
if len(parts[2]) <= 10:
sid_bytes += pack('>q', int(parts[2]))[2:] # authority (in dec)
else:
sid_bytes += pack('>q', int(parts[2], 16))[2:] # authority (in hex)
for sub_auth in parts[3:]:
sid_bytes += pack('<q', int(sub_auth))[0:4] # sub-authorities
valid_values.append(sid_bytes)
changed = True
if changed:
if sequence:
return valid_values
else:
return valid_values[0]
else:
return True
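# A minimal sketch (illustrative value): the well-known "Everyone" SID string is packed
# into its binary form: revision, sub-authority count, authority, sub-authorities.
assert validate_sid('S-1-1-0') == b'\x01\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00'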

View File

@ -0,0 +1,139 @@
"""
"""
# Created on 2015.03.27
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import ctypes
from pyasn1.type.namedtype import NamedTypes, NamedType
from pyasn1.type.tag import Tag, tagClassApplication, tagFormatConstructed
from pyasn1.type.univ import Sequence, OctetString, Integer
from .rfc4511 import ResultCode, LDAPString
from .controls import build_control
class SicilyBindResponse(Sequence):
# SicilyBindResponse ::= [APPLICATION 1] SEQUENCE {
#
# resultCode ENUMERATED {
# success (0),
# protocolError (2),
# adminLimitExceeded (11),
# inappropriateAuthentication (48),
# invalidCredentials (49),
# busy (51),
# unavailable (52),
# unwillingToPerform (53),
# other (80) },
#
# serverCreds OCTET STRING,
# errorMessage LDAPString }
# BindResponse ::= [APPLICATION 1] SEQUENCE {
# COMPONENTS OF LDAPResult,
# serverSaslCreds [7] OCTET STRING OPTIONAL }
tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassApplication, tagFormatConstructed, 1))
componentType = NamedTypes(NamedType('resultCode', ResultCode()),
NamedType('serverCreds', OctetString()),
NamedType('errorMessage', LDAPString())
)
class DirSyncControlRequestValue(Sequence):
# DirSyncRequestValue ::= SEQUENCE {
# Flags integer
# MaxBytes integer
# Cookie OCTET STRING }
componentType = NamedTypes(NamedType('Flags', Integer()),
NamedType('MaxBytes', Integer()),
NamedType('Cookie', OctetString())
)
class DirSyncControlResponseValue(Sequence):
# DirSyncResponseValue ::= SEQUENCE {
# MoreResults INTEGER
# unused INTEGER
# CookieServer OCTET STRING
# }
componentType = NamedTypes(NamedType('MoreResults', Integer()),
NamedType('unused', Integer()),
NamedType('CookieServer', OctetString())
)
class SdFlags(Sequence):
# SDFlagsRequestValue ::= SEQUENCE {
# Flags INTEGER
# }
componentType = NamedTypes(NamedType('Flags', Integer())
)
class ExtendedDN(Sequence):
# A flag value of 0 specifies that the GUID and SID values are returned as hexadecimal strings
# A flag value of 1 returns the GUID and SID values in standard string format
componentType = NamedTypes(NamedType('option', Integer())
)
def dir_sync_control(criticality, object_security, ancestors_first, public_data_only, incremental_values, max_length, cookie):
control_value = DirSyncControlRequestValue()
flags = 0x0
if object_security:
flags |= 0x00000001
if ancestors_first:
flags |= 0x00000800
if public_data_only:
flags |= 0x00002000
if incremental_values:
flags |= 0x80000000
# converts flags to a signed 32-bit value (AD expects a 4-byte unsigned integer, but the ASN.1 Integer type is signed,
# so the BER encoder would otherwise emit a 5-byte signed integer)
flags = ctypes.c_long(flags & 0xFFFFFFFF).value
control_value.setComponentByName('Flags', flags)
control_value.setComponentByName('MaxBytes', max_length)
if cookie:
control_value.setComponentByName('Cookie', cookie)
else:
control_value.setComponentByName('Cookie', OctetString(''))
return build_control('1.2.840.113556.1.4.841', criticality, control_value)
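# A minimal usage sketch (the _example_* name and parameter values are illustrative only):
# a DirSync control for a first synchronisation pass, asking for object security and
# incremental values, limiting each response to 1 MiB; the empty cookie starts a new sequence.
_example_dirsync = dir_sync_control(criticality=True, object_security=True, ancestors_first=False, public_data_only=False, incremental_values=True, max_length=1048576, cookie=None)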
def extended_dn_control(criticality=False, hex_format=False):
control_value = ExtendedDN()
control_value.setComponentByName('option', Integer(not hex_format))
return build_control('1.2.840.113556.1.4.529', criticality, control_value)
def show_deleted_control(criticality=False):
return build_control('1.2.840.113556.1.4.417', criticality, value=None)
def security_descriptor_control(criticality=False, sdflags=0x0F):
sdcontrol = SdFlags()
sdcontrol.setComponentByName('Flags', sdflags)
return [build_control('1.2.840.113556.1.4.801', criticality, sdcontrol)]

View File

@ -0,0 +1,141 @@
"""
"""
# Created on 2014.06.27
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1.type.univ import OctetString, Integer, Sequence, SequenceOf
from pyasn1.type.namedtype import NamedType, NamedTypes, OptionalNamedType
from pyasn1.type.tag import Tag, tagFormatSimple, tagClassUniversal, TagSet
NMAS_LDAP_EXT_VERSION = 1
class Identity(OctetString):
encoding = 'utf-8'
class LDAPDN(OctetString):
tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassUniversal, tagFormatSimple, 4))
encoding = 'utf-8'
class Password(OctetString):
tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassUniversal, tagFormatSimple, 4))
encoding = 'utf-8'
class LDAPOID(OctetString):
tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassUniversal, tagFormatSimple, 4))
encoding = 'utf-8'
class GroupCookie(Integer):
tagSet = Integer.tagSet.tagImplicitly(Tag(tagClassUniversal, tagFormatSimple, 2))
class NmasVer(Integer):
tagSet = Integer.tagSet.tagImplicitly(Tag(tagClassUniversal, tagFormatSimple, 2))
class Error(Integer):
tagSet = Integer.tagSet.tagImplicitly(Tag(tagClassUniversal, tagFormatSimple, 2))
class NmasGetUniversalPasswordRequestValue(Sequence):
componentType = NamedTypes(NamedType('nmasver', NmasVer()),
NamedType('reqdn', Identity())
)
class NmasGetUniversalPasswordResponseValue(Sequence):
componentType = NamedTypes(NamedType('nmasver', NmasVer()),
NamedType('err', Error()),
OptionalNamedType('passwd', Password())
)
class NmasSetUniversalPasswordRequestValue(Sequence):
componentType = NamedTypes(NamedType('nmasver', NmasVer()),
NamedType('reqdn', Identity()),
NamedType('new_passwd', Password())
)
class NmasSetUniversalPasswordResponseValue(Sequence):
componentType = NamedTypes(NamedType('nmasver', NmasVer()),
NamedType('err', Error())
)
class ReplicaList(SequenceOf):
componentType = OctetString()
class ReplicaInfoRequestValue(Sequence):
tagSet = TagSet()
componentType = NamedTypes(NamedType('server_dn', LDAPDN()),
NamedType('partition_dn', LDAPDN())
)
class ReplicaInfoResponseValue(Sequence):
# tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatConstructed, 3))
tagSet = TagSet()
componentType = NamedTypes(NamedType('partition_id', Integer()),
NamedType('replica_state', Integer()),
NamedType('modification_time', Integer()),
NamedType('purge_time', Integer()),
NamedType('local_partition_id', Integer()),
NamedType('partition_dn', LDAPDN()),
NamedType('replica_type', Integer()),
NamedType('flags', Integer())
)
class CreateGroupTypeRequestValue(Sequence):
componentType = NamedTypes(NamedType('createGroupType', LDAPOID()),
OptionalNamedType('createGroupValue', OctetString())
)
class CreateGroupTypeResponseValue(Sequence):
componentType = NamedTypes(NamedType('createGroupCookie', GroupCookie()),
OptionalNamedType('createGroupValue', OctetString())
)
class EndGroupTypeRequestValue(Sequence):
componentType = NamedTypes(NamedType('endGroupCookie', GroupCookie()),
OptionalNamedType('endGroupValue', OctetString())
)
class EndGroupTypeResponseValue(Sequence):
componentType = NamedTypes(OptionalNamedType('endGroupValue', OctetString())
)
class GroupingControlValue(Sequence):
componentType = NamedTypes(NamedType('groupingCookie', GroupCookie()),
OptionalNamedType('groupValue', OctetString())
)

File diff suppressed because it is too large

View File

@ -0,0 +1,85 @@
"""
"""
# Created on 2016.07.09
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1.type.namedtype import NamedTypes, NamedType, OptionalNamedType
from pyasn1.type.namedval import NamedValues
from pyasn1.type.univ import Sequence, Integer, Boolean, Enumerated
from .rfc4511 import LDAPDN
from .controls import build_control
class PersistentSearchControl(Sequence):
# PersistentSearch ::= SEQUENCE {
# changeTypes INTEGER,
# changesOnly BOOLEAN,
# returnECs BOOLEAN
# }
componentType = NamedTypes(NamedType('changeTypes', Integer()),
NamedType('changesOnly', Boolean()),
NamedType('returnECs', Boolean())
)
class ChangeType(Enumerated):
# changeType ENUMERATED {
# add (1),
# delete (2),
# modify (4),
# modDN (8)
# }
namedValues = NamedValues(('add', 1),
('delete', 2),
('modify', 4),
('modDN', 8))
class EntryChangeNotificationControl(Sequence):
# EntryChangeNotification ::= SEQUENCE {
# changeType ENUMERATED {
# add (1),
# delete (2),
# modify (4),
# modDN (8)
# },
# previousDN LDAPDN OPTIONAL, -- modifyDN ops. only
# changeNumber INTEGER OPTIONAL -- if supported
# }
# tagSet = TagSet()
# tagSet = Sequence.tagSet.tagImplicitly(Tag(tagClassUniversal, tagFormatConstructed, 16))
componentType = NamedTypes(NamedType('changeType', ChangeType()),
OptionalNamedType('previousDN', LDAPDN()),
OptionalNamedType('changeNumber', Integer())
)
def persistent_search_control(change_types, changes_only=True, return_ecs=True, criticality=False):
control_value = PersistentSearchControl()
control_value.setComponentByName('changeTypes', Integer(change_types))
control_value.setComponentByName('changesOnly', Boolean(changes_only))
control_value.setComponentByName('returnECs', Boolean(return_ecs))
return build_control('2.16.840.1.113730.3.4.3', criticality, control_value)
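# A minimal usage sketch (illustrative values): listen for all four change types
# (add | delete | modify | modDN = 1 + 2 + 4 + 8 = 15) and ask the server to attach
# Entry Change Notification controls to the returned entries.
_example_psearch = persistent_search_control(change_types=15, changes_only=True, return_ecs=True)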

View File

@ -0,0 +1,70 @@
"""
"""
# Created on 2013.10.15
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1.type.univ import OctetString, Integer, Sequence
from pyasn1.type.namedtype import NamedTypes, NamedType
from pyasn1.type.constraint import ValueRangeConstraint
from .controls import build_control
# constants
# maxInt INTEGER ::= 2147483647 -- (2^^31 - 1) --
MAXINT = Integer(2147483647)
# constraints
rangeInt0ToMaxConstraint = ValueRangeConstraint(0, MAXINT)
class Integer0ToMax(Integer):
subtypeSpec = Integer.subtypeSpec + rangeInt0ToMaxConstraint
class Size(Integer0ToMax):
# Size INTEGER (0..maxInt)
pass
class Cookie(OctetString):
# cookie OCTET STRING
pass
class RealSearchControlValue(Sequence):
# realSearchControlValue ::= SEQUENCE {
# size INTEGER (0..maxInt),
# -- requested page size from client
# -- result set size estimate from server
# cookie OCTET STRING
# }
componentType = NamedTypes(NamedType('size', Size()),
NamedType('cookie', Cookie()))
def paged_search_control(criticality=False, size=10, cookie=None):
control_value = RealSearchControlValue()
control_value.setComponentByName('size', Size(size))
control_value.setComponentByName('cookie', Cookie(cookie if cookie else ''))
return build_control('1.2.840.113556.1.4.319', criticality, control_value)
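# usage sketch (illustrative; the server URL, base DN and credentials are assumptions):
# Connection.search builds this control itself when paged_size is given; the cookie
# returned by the server is fed back through paged_cookie to request the next page.
from ldap3 import Server, Connection, SUBTREE
conn = Connection(Server('ldap://localhost'), user='cn=admin,dc=example,dc=com', password='secret', auto_bind=True)
conn.search('dc=example,dc=com', '(objectClass=person)', SUBTREE, attributes=['cn'], paged_size=5)
cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
conn.search('dc=example,dc=com', '(objectClass=person)', SUBTREE, attributes=['cn'], paged_size=5, paged_cookie=cookie)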

View File

@ -0,0 +1,283 @@
"""
"""
# Created on 2013.12.08
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from base64 import b64encode
from datetime import datetime
from .. import STRING_TYPES
from ..core.exceptions import LDAPLDIFError, LDAPExtensionError
from ..protocol.persistentSearch import EntryChangeNotificationControl
from ..utils.asn1 import decoder
# LDIF converter RFC 2849 compliant
LDIF_LINE_LENGTH = 78
def safe_ldif_string(bytes_value):
if not bytes_value:
return True
# check SAFE-INIT-CHAR: < 127, not NUL, LF, CR, SPACE, COLON, LESS-THAN
if bytes_value[0] > 127 or bytes_value[0] in [0, 10, 13, 32, 58, 60]:
return False
# check SAFE-CHAR: < 127 not NUL, LF, CR
if 0 in bytes_value or 10 in bytes_value or 13 in bytes_value:
return False
# check last char for SPACE
if bytes_value[-1] == 32:
return False
for byte in bytes_value:
if byte > 127:
return False
return True
def _convert_to_ldif(descriptor, value, base64):
if not value:
value = ''
if isinstance(value, STRING_TYPES):
value = bytearray(value, encoding='utf-8')
if base64 or not safe_ldif_string(value):
try:
encoded = b64encode(value)
except TypeError:
encoded = b64encode(str(value)) # patch for Python 2.6
if not isinstance(encoded, str): # b64encode returns bytes in Python 3 and str in Python 2
encoded = str(encoded, encoding='ascii') # Python 3
line = descriptor + ':: ' + encoded
else:
if str is not bytes: # Python 3
value = str(value, encoding='ascii')
else: # Python 2
value = str(value)
line = descriptor + ': ' + value
return line
def add_controls(controls, all_base64):
lines = []
if controls:
for control in controls:
line = 'control: ' + control[0]
line += ' ' + ('true' if control[1] else 'false')
if control[2]:
lines.append(_convert_to_ldif(line, control[2], all_base64))
return lines
def add_attributes(attributes, all_base64):
lines = []
oc_attr = None
# objectclass first, even if this is not specified in the RFC
for attr in attributes:
if attr.lower() == 'objectclass':
for val in attributes[attr]:
lines.append(_convert_to_ldif(attr, val, all_base64))
oc_attr = attr
break
# remaining attributes
for attr in attributes:
if attr != oc_attr:
for val in attributes[attr]:
lines.append(_convert_to_ldif(attr, val, all_base64))
return lines
def sort_ldif_lines(lines, sort_order):
# sort lines as per custom sort_order
# sort order is a list of descriptors, lines will be sorted following the same sequence
return sorted(lines, key=lambda x: ldif_sort(x, sort_order)) if sort_order else lines
def search_response_to_ldif(entries, all_base64, sort_order=None):
lines = []
for entry in entries:
if 'dn' in entry:
lines.append(_convert_to_ldif('dn', entry['dn'], all_base64))
lines.extend(add_attributes(entry['raw_attributes'], all_base64))
else:
raise LDAPLDIFError('unable to convert to LDIF-CONTENT - missing DN')
if sort_order:
lines = sort_ldif_lines(lines, sort_order)
lines.append('')
if lines:
lines.append('# total number of entries: ' + str(len(entries)))
return lines
def add_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
lines.extend(add_controls(entry['controls'], all_base64))
lines.append('changetype: add')
lines.extend(add_attributes(entry['attributes'], all_base64))
if sort_order:
lines = sort_ldif_lines(lines, sort_order)
else:
raise LDAPLDIFError('unable to convert to LDIF-CHANGE-ADD - missing DN ')
return lines
def delete_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
lines.extend(add_controls(entry['controls'], all_base64))  # add_controls returns a list of lines
lines.append('changetype: delete')
if sort_order:
lines = sort_ldif_lines(lines, sort_order)
else:
raise LDAPLDIFError('unable to convert to LDIF-CHANGE-DELETE - missing DN ')
return lines
def modify_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
lines.extend(add_controls(entry['controls'], all_base64))
lines.append('changetype: modify')
if 'changes' in entry:
for change in entry['changes']:
lines.append(['add', 'delete', 'replace', 'increment'][change['operation']] + ': ' + change['attribute']['type'])
for value in change['attribute']['value']:
lines.append(_convert_to_ldif(change['attribute']['type'], value, all_base64))
lines.append('-')
if sort_order:
lines = sort_ldif_lines(lines, sort_order)
return lines
def modify_dn_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
lines.extend(add_controls(entry['controls'], all_base64))
lines.append('changetype: modrdn' if 'newSuperior' in entry and entry['newSuperior'] else 'changetype: moddn')
lines.append(_convert_to_ldif('newrdn', entry['newRdn'], all_base64))
lines.append('deleteoldrdn: ' + ('1' if entry['deleteOldRdn'] else '0'))
if 'newSuperior' in entry and entry['newSuperior']:
lines.append(_convert_to_ldif('newsuperior', entry['newSuperior'], all_base64))
if sort_order:
lines = sort_ldif_lines(lines, sort_order)
else:
raise LDAPLDIFError('unable to convert to LDIF-CHANGE-MODDN - missing DN ')
return lines
def operation_to_ldif(operation_type, entries, all_base64=False, sort_order=None):
if operation_type == 'searchResponse':
lines = search_response_to_ldif(entries, all_base64, sort_order)
elif operation_type == 'addRequest':
lines = add_request_to_ldif(entries, all_base64, sort_order)
elif operation_type == 'delRequest':
lines = delete_request_to_ldif(entries, all_base64, sort_order)
elif operation_type == 'modifyRequest':
lines = modify_request_to_ldif(entries, all_base64, sort_order)
elif operation_type == 'modDNRequest':
lines = modify_dn_request_to_ldif(entries, all_base64, sort_order)
else:
lines = []
ldif_record = []
# check max line length and split as per note 2 of RFC 2849
for line in lines:
if line:
ldif_record.append(line[0:LDIF_LINE_LENGTH])
ldif_record.extend([' ' + line[i: i + LDIF_LINE_LENGTH - 1] for i in range(LDIF_LINE_LENGTH, len(line), LDIF_LINE_LENGTH - 1)] if len(line) > LDIF_LINE_LENGTH else [])
else:
ldif_record.append('')
return ldif_record
def add_ldif_header(ldif_lines):
if ldif_lines:
ldif_lines.insert(0, 'version: 1')
return ldif_lines
def ldif_sort(line, sort_order):
for i, descriptor in enumerate(sort_order):
if line and line.startswith(descriptor):
return i
return len(sort_order) + 1
def decode_persistent_search_control(change):
if 'controls' in change and '2.16.840.1.113730.3.4.7' in change['controls']:
decoded = dict()
decoded_control, unprocessed = decoder.decode(change['controls']['2.16.840.1.113730.3.4.7']['value'], asn1Spec=EntryChangeNotificationControl())
if unprocessed:
raise LDAPExtensionError('unprocessed value in EntryChangeNotificationControl')
if decoded_control['changeType'] == 1: # add
decoded['changeType'] = 'add'
elif decoded_control['changeType'] == 2: # delete
decoded['changeType'] = 'delete'
elif decoded_control['changeType'] == 4: # modify
decoded['changeType'] = 'modify'
elif decoded_control['changeType'] == 8: # modify_dn
decoded['changeType'] = 'modify dn'
else:
raise LDAPExtensionError('unknown Persistent Search changeType ' + str(decoded_control['changeType']))
decoded['changeNumber'] = decoded_control['changeNumber'] if 'changeNumber' in decoded_control else None
decoded['previousDN'] = decoded_control['previousDN'] if 'previousDN' in decoded_control else None
return decoded
return None
def persistent_search_response_to_ldif(change):
ldif_lines = ['# ' + datetime.now().isoformat()]
control = decode_persistent_search_control(change)
if control:
if control['changeNumber']:
ldif_lines.append('# change number: ' + str(control['changeNumber']))
ldif_lines.append(control['changeType'])
if control['previousDN']:
ldif_lines.append('# previous dn: ' + str(control['previousDN']))
ldif_lines += operation_to_ldif('searchResponse', [change])
return ldif_lines[:-1] # removes "total number of entries"
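# conversion sketch (illustrative): a minimal fake search entry, in the dict shape
# expected by search_response_to_ldif above ('dn' plus 'raw_attributes'), rendered as
# RFC 2849 LDIF lines; the non-ASCII 'sn' value comes out base64-encoded ('sn:: ...').
fake_entry = {'dn': 'cn=test,dc=example,dc=com',
'raw_attributes': {'objectClass': [b'person'], 'cn': [b'test'], 'sn': [b'J\xc3\xbcrgen']}}
print('\n'.join(add_ldif_header(operation_to_ldif('searchResponse', [fake_entry]))))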

View File

@ -0,0 +1,91 @@
"""
"""
# Created on 2014.04.28
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1.type.univ import OctetString, Sequence
from pyasn1.type.namedtype import NamedTypes, OptionalNamedType
from pyasn1.type.tag import Tag, tagClassContext, tagFormatSimple
# Modify password extended operation
# passwdModifyOID OBJECT IDENTIFIER ::= 1.3.6.1.4.1.4203.1.11.1
# PasswdModifyRequestValue ::= SEQUENCE {
# userIdentity [0] OCTET STRING OPTIONAL
# oldPasswd [1] OCTET STRING OPTIONAL
# newPasswd [2] OCTET STRING OPTIONAL }
#
# PasswdModifyResponseValue ::= SEQUENCE {
# genPasswd [0] OCTET STRING OPTIONAL }
class UserIdentity(OctetString):
"""
userIdentity [0] OCTET STRING OPTIONAL
"""
tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0))
encoding = 'utf-8'
class OldPasswd(OctetString):
"""
oldPasswd [1] OCTET STRING OPTIONAL
"""
tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 1))
encoding = 'utf-8'
class NewPasswd(OctetString):
"""
newPasswd [2] OCTET STRING OPTIONAL
"""
tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 2))
encoding = 'utf-8'
class GenPasswd(OctetString):
"""
genPasswd [0] OCTET STRING OPTIONAL
"""
tagSet = OctetString.tagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0))
encoding = 'utf-8'
class PasswdModifyRequestValue(Sequence):
"""
PasswdModifyRequestValue ::= SEQUENCE {
userIdentity [0] OCTET STRING OPTIONAL
oldPasswd [1] OCTET STRING OPTIONAL
newPasswd [2] OCTET STRING OPTIONAL }
"""
componentType = NamedTypes(OptionalNamedType('userIdentity', UserIdentity()),
OptionalNamedType('oldPasswd', OldPasswd()),
OptionalNamedType('newPasswd', NewPasswd()))
class PasswdModifyResponseValue(Sequence):
"""
PasswdModifyResponseValue ::= SEQUENCE {
genPasswd [0] OCTET STRING OPTIONAL }
"""
componentType = NamedTypes(OptionalNamedType('genPasswd', GenPasswd()))
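# encoding sketch (illustrative; the DN and passwords are assumptions): populate the
# request value and BER-encode it, producing the requestValue carried by the
# 1.3.6.1.4.1.4203.1.11.1 (password modify) extended operation.
from pyasn1.codec.ber import encoder
req = PasswdModifyRequestValue()
req.setComponentByName('userIdentity', UserIdentity('uid=user1,ou=people,dc=example,dc=com'))
req.setComponentByName('oldPasswd', OldPasswd('old-secret'))
req.setComponentByName('newPasswd', NewPasswd('new-secret'))
encoded_request_value = encoder.encode(req)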

File diff suppressed because it is too large

View File

@ -0,0 +1,846 @@
"""
"""
# Created on 2013.09.11
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from os import linesep
import re
import json
from .oid import CLASS_ABSTRACT, CLASS_STRUCTURAL, CLASS_AUXILIARY, ATTRIBUTE_USER_APPLICATION, \
ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION
from .. import SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
from ..utils.conv import escape_bytes, json_hook, check_json_dict, format_json, to_unicode
from ..utils.ciDict import CaseInsensitiveDict
from ..protocol.formatters.standard import format_attribute_values
from .oid import Oids, decode_oids, decode_syntax, oid_to_string
from ..core.exceptions import LDAPSchemaError, LDAPDefinitionError
def constant_to_class_kind(value):
if value == CLASS_STRUCTURAL:
return 'Structural'
elif value == CLASS_ABSTRACT:
return 'Abstract'
elif value == CLASS_AUXILIARY:
return 'Auxiliary'
else:
return '<unknown>'
def constant_to_attribute_usage(value):
if value == ATTRIBUTE_USER_APPLICATION:
return 'User Application'
elif value == ATTRIBUTE_DIRECTORY_OPERATION:
return "Directory operation"
elif value == ATTRIBUTE_DISTRIBUTED_OPERATION:
return 'Distributed operation'
elif value == ATTRIBUTE_DSA_OPERATION:
return 'DSA operation'
else:
return 'unknown'
def attribute_usage_to_constant(value):
if value == 'userApplications':
return ATTRIBUTE_USER_APPLICATION
elif value == 'directoryOperation':
return ATTRIBUTE_DIRECTORY_OPERATION
elif value == 'distributedOperation':
return ATTRIBUTE_DISTRIBUTED_OPERATION
elif value == 'dsaOperation':
return ATTRIBUTE_DSA_OPERATION
else:
return 'unknown'
def quoted_string_to_list(quoted_string):
string = quoted_string.strip()
if not string:
return list()
if string[0] == '(' and string[-1] == ')':
string = string[1:-1]
elements = string.split("'")
# return [check_escape(element.strip("'").strip()) for element in elements if element.strip()]
return [element.strip("'").strip() for element in elements if element.strip()]
def oids_string_to_list(oid_string):
string = oid_string.strip()
if string[0] == '(' and string[-1] == ')':
string = string[1:-1]
elements = string.split('$')
return [element.strip() for element in elements if element.strip()]
def extension_to_tuple(extension_string):
string = extension_string.strip()
name, _, values = string.partition(' ')
return name, quoted_string_to_list(values)
def list_to_string(list_object):
if not isinstance(list_object, SEQUENCE_TYPES):
return list_object
r = ''
for element in list_object:
r += (list_to_string(element) if isinstance(element, SEQUENCE_TYPES) else str(element)) + ', '
return r[:-2] if r else ''
class BaseServerInfo(object):
def __init__(self, raw_attributes):
self.raw = dict(raw_attributes)
@classmethod
def from_json(cls, json_definition, schema=None, custom_formatter=None):
conf_case_insensitive_schema = get_config_parameter('CASE_INSENSITIVE_SCHEMA_NAMES')
definition = json.loads(json_definition, object_hook=json_hook)
if 'raw' not in definition or 'type' not in definition:
raise LDAPDefinitionError('invalid JSON definition')
if conf_case_insensitive_schema:
attributes = CaseInsensitiveDict()
else:
attributes = dict()
if schema:
for attribute in definition['raw']:
# attributes[attribute] = format_attribute_values(schema, check_escape(attribute), [check_escape(value) for value in definition['raw'][attribute]], custom_formatter)
attributes[attribute] = format_attribute_values(schema, attribute, [value for value in definition['raw'][attribute]], custom_formatter)
else:
for attribute in definition['raw']:
# attributes[attribute] = [check_escape(value) for value in definition['raw'][attribute]]
attributes[attribute] = [value for value in definition['raw'][attribute]]
if cls.__name__ != definition['type']:
raise LDAPDefinitionError('JSON info not of type ' + cls.__name__)
if definition['type'] == 'DsaInfo':
return DsaInfo(attributes, definition['raw'])
elif definition['type'] == 'SchemaInfo':
if 'schema_entry' not in definition:
raise LDAPDefinitionError('invalid schema in JSON')
return SchemaInfo(definition['schema_entry'], attributes, definition['raw'])
raise LDAPDefinitionError('invalid Info type ' + str(definition['type']) + ' in JSON definition')
@classmethod
def from_file(cls, target, schema=None, custom_formatter=None):
if isinstance(target, STRING_TYPES):
target = open(target, 'r')
new = cls.from_json(target.read(), schema=schema, custom_formatter=custom_formatter)
target.close()
return new
def to_file(self,
target,
indent=4,
sort=True):
if isinstance(target, STRING_TYPES):
target = open(target, 'w+')
target.writelines(self.to_json(indent=indent, sort=sort))
target.close()
def __str__(self):
return self.__repr__()
def to_json(self,
indent=4,
sort=True):
json_dict = dict()
json_dict['type'] = self.__class__.__name__
json_dict['raw'] = self.raw
if isinstance(self, SchemaInfo):
json_dict['schema_entry'] = self.schema_entry
elif isinstance(self, DsaInfo):
pass
else:
raise LDAPDefinitionError('unable to convert ' + str(self) + ' to JSON')
if str is bytes: # Python 2
check_json_dict(json_dict)
return json.dumps(json_dict, ensure_ascii=False, sort_keys=sort, indent=indent, check_circular=True, default=format_json, separators=(',', ': '))
class DsaInfo(BaseServerInfo):
"""
This class contains info about the ldap server (DSA) read from DSE
as defined in RFC4512 and RFC3045. Unknown attributes are stored in the "other" dict
"""
def __init__(self, attributes, raw_attributes):
BaseServerInfo.__init__(self, raw_attributes)
self.alt_servers = attributes.pop('altServer', None)
self.naming_contexts = attributes.pop('namingContexts', None)
self.supported_controls = decode_oids(attributes.pop('supportedControl', None))
self.supported_extensions = decode_oids(attributes.pop('supportedExtension', None))
self.supported_features = decode_oids(attributes.pop('supportedFeatures', None)) + decode_oids(attributes.pop('supportedCapabilities', None))
self.supported_ldap_versions = attributes.pop('supportedLDAPVersion', None)
self.supported_sasl_mechanisms = attributes.pop('supportedSASLMechanisms', None)
self.vendor_name = attributes.pop('vendorName', None)
self.vendor_version = attributes.pop('vendorVersion', None)
self.schema_entry = attributes.pop('subschemaSubentry', None)
self.other = attributes # remaining schema definition attributes not in RFC4512
def __repr__(self):
r = 'DSA info (from DSE):' + linesep
if self.supported_ldap_versions:
if isinstance(self.supported_ldap_versions, SEQUENCE_TYPES):
r += (' Supported LDAP versions: ' + ', '.join([str(s) for s in self.supported_ldap_versions])) if self.supported_ldap_versions else ''
else:
r += (' Supported LDAP versions: ' + str(self.supported_ldap_versions))
r += linesep
if self.naming_contexts:
if isinstance(self.naming_contexts, SEQUENCE_TYPES):
r += (' Naming contexts: ' + linesep + linesep.join([' ' + str(s) for s in self.naming_contexts])) if self.naming_contexts else ''
else:
r += (' Naming contexts: ' + str(self.naming_contexts))
r += linesep
if self.alt_servers:
if isinstance(self.alt_servers, SEQUENCE_TYPES):
r += (' Alternative servers: ' + linesep + linesep.join([' ' + str(s) for s in self.alt_servers])) if self.alt_servers else ''
else:
r += (' Alternative servers: ' + str(self.alt_servers))
r += linesep
if self.supported_controls:
if isinstance(self.supported_controls, SEQUENCE_TYPES):
r += (' Supported controls: ' + linesep + linesep.join([' ' + oid_to_string(s) for s in self.supported_controls])) if self.supported_controls else ''
else:
r += (' Supported controls: ' + str(self.supported_controls))
r += linesep
if self.supported_extensions:
if isinstance(self.supported_extensions, SEQUENCE_TYPES):
r += (' Supported extensions: ' + linesep + linesep.join([' ' + oid_to_string(s) for s in self.supported_extensions])) if self.supported_extensions else ''
else:
r += (' Supported extensions: ' + str(self.supported_extensions))
r += linesep
if self.supported_features:
if isinstance(self.supported_features, SEQUENCE_TYPES):
r += (' Supported features: ' + linesep + linesep.join([' ' + oid_to_string(s) for s in self.supported_features])) if self.supported_features else ''
else:
r += (' Supported features: ' + str(self.supported_features))
r += linesep
if self.supported_sasl_mechanisms:
if isinstance(self.supported_sasl_mechanisms, SEQUENCE_TYPES):
r += (' Supported SASL mechanisms: ' + linesep + ' ' + ', '.join([str(s) for s in self.supported_sasl_mechanisms])) if self.supported_sasl_mechanisms else ''
else:
r += (' Supported SASL mechanisms: ' + str(self.supported_sasl_mechanisms))
r += linesep
if self.schema_entry:
if isinstance(self.schema_entry, SEQUENCE_TYPES):
r += (' Schema entry: ' + linesep + linesep.join([' ' + str(s) for s in self.schema_entry])) if self.schema_entry else ''
else:
r += (' Schema entry: ' + str(self.schema_entry))
r += linesep
if self.vendor_name:
if isinstance(self.vendor_name, SEQUENCE_TYPES) and len(self.vendor_name) == 1:
r += 'Vendor name: ' + self.vendor_name[0]
else:
r += 'Vendor name: ' + str(self.vendor_name)
r += linesep
if self.vendor_version:
if isinstance(self.vendor_version, SEQUENCE_TYPES) and len(self.vendor_version) == 1:
r += 'Vendor version: ' + self.vendor_version[0]
else:
r += 'Vendor version: ' + str(self.vendor_version)
r += linesep
r += 'Other:' + linesep
for k, v in self.other.items():
r += ' ' + str(k) + ': ' + linesep
try:
r += (linesep.join([' ' + str(s) for s in v])) if isinstance(v, SEQUENCE_TYPES) else str(v)
except UnicodeDecodeError:
r += (linesep.join([' ' + str(escape_bytes(s)) for s in v])) if isinstance(v, SEQUENCE_TYPES) else str(escape_bytes(v))
r += linesep
return r
class SchemaInfo(BaseServerInfo):
"""
This class contains info about the ldap server schema read from an entry (default entry is DSE)
as defined in RFC4512. Unknown attributes are stored in the "other" dict
"""
def __init__(self, schema_entry, attributes, raw_attributes):
BaseServerInfo.__init__(self, raw_attributes)
self.schema_entry = schema_entry
self.create_time_stamp = attributes.pop('createTimestamp', None)
self.modify_time_stamp = attributes.pop('modifyTimestamp', None)
self.attribute_types = AttributeTypeInfo.from_definition(attributes.pop('attributeTypes', []))
self.object_classes = ObjectClassInfo.from_definition(attributes.pop('objectClasses', []))
self.matching_rules = MatchingRuleInfo.from_definition(attributes.pop('matchingRules', []))
self.matching_rule_uses = MatchingRuleUseInfo.from_definition(attributes.pop('matchingRuleUse', []))
self.dit_content_rules = DitContentRuleInfo.from_definition(attributes.pop('dITContentRules', []))
self.dit_structure_rules = DitStructureRuleInfo.from_definition(attributes.pop('dITStructureRules', []))
self.name_forms = NameFormInfo.from_definition(attributes.pop('nameForms', []))
self.ldap_syntaxes = LdapSyntaxInfo.from_definition(attributes.pop('ldapSyntaxes', []))
self.other = attributes # remaining schema definition attributes not in RFC4512
# links attributes to class objects
if self.object_classes and self.attribute_types:
for object_class in self.object_classes: # CaseInsensitiveDict returns keys while iterating
for attribute in self.object_classes[object_class].must_contain:
try:
self.attribute_types[attribute].mandatory_in.append(object_class)
except KeyError:
pass
for attribute in self.object_classes[object_class].may_contain:
try:
self.attribute_types[attribute].optional_in.append(object_class)
except KeyError:
pass
def is_valid(self):
if self.object_classes or self.attribute_types or self.matching_rules or self.matching_rule_uses or self.dit_content_rules or self.dit_structure_rules or self.name_forms or self.ldap_syntaxes:
return True
return False
def __repr__(self):
r = 'DSA Schema from: ' + self.schema_entry
r += linesep
if isinstance(self.attribute_types, SEQUENCE_TYPES):
r += (' Attribute types:' + linesep + ' ' + ', '.join([str(self.attribute_types[s]) for s in self.attribute_types])) if self.attribute_types else ''
else:
r += (' Attribute types:' + str(self.attribute_types))
r += linesep
if isinstance(self.object_classes, SEQUENCE_TYPES):
r += (' Object classes:' + linesep + ' ' + ', '.join([str(self.object_classes[s]) for s in self.object_classes])) if self.object_classes else ''
else:
r += (' Object classes:' + str(self.object_classes))
r += linesep
if isinstance(self.matching_rules, SEQUENCE_TYPES):
r += (' Matching rules:' + linesep + ' ' + ', '.join([str(self.matching_rules[s]) for s in self.matching_rules])) if self.matching_rules else ''
else:
r += (' Matching rules:' + str(self.matching_rules))
r += linesep
if isinstance(self.matching_rule_uses, SEQUENCE_TYPES):
r += (' Matching rule uses:' + linesep + ' ' + ', '.join([str(self.matching_rule_uses[s]) for s in self.matching_rule_uses])) if self.matching_rule_uses else ''
else:
r += (' Matching rule uses:' + str(self.matching_rule_uses))
r += linesep
if isinstance(self.dit_content_rules, SEQUENCE_TYPES):
r += (' DIT content rules:' + linesep + ' ' + ', '.join([str(self.dit_content_rules[s]) for s in self.dit_content_rules])) if self.dit_content_rules else ''
else:
r += (' DIT content rules:' + str(self.dit_content_rules))
r += linesep
if isinstance(self.dit_structure_rules, SEQUENCE_TYPES):
r += (' DIT structure rules:' + linesep + ' ' + ', '.join([str(self.dit_structure_rules[s]) for s in self.dit_structure_rules])) if self.dit_structure_rules else ''
else:
r += (' DIT structure rules:' + str(self.dit_structure_rules))
r += linesep
if isinstance(self.name_forms, SEQUENCE_TYPES):
r += (' Name forms:' + linesep + ' ' + ', '.join([str(self.name_forms[s]) for s in self.name_forms])) if self.name_forms else ''
else:
r += (' Name forms:' + str(self.name_forms))
r += linesep
if isinstance(self.ldap_syntaxes, SEQUENCE_TYPES):
r += (' LDAP syntaxes:' + linesep + ' ' + ', '.join([str(self.ldap_syntaxes[s]) for s in self.ldap_syntaxes])) if self.ldap_syntaxes else ''
else:
r += (' LDAP syntaxes:' + str(self.ldap_syntaxes))
r += linesep
r += 'Other:' + linesep
for k, v in self.other.items():
r += ' ' + str(k) + ': ' + linesep
try:
r += (linesep.join([' ' + str(s) for s in v])) if isinstance(v, SEQUENCE_TYPES) else str(v)
except UnicodeDecodeError:
r += (linesep.join([' ' + str(escape_bytes(s)) for s in v])) if isinstance(v, SEQUENCE_TYPES) else str(escape_bytes(v))
r += linesep
return r
class BaseObjectInfo(object):
"""
Base class for objects defined in the schema as per RFC4512
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
extensions=None,
experimental=None,
definition=None):
self.oid = oid
self.name = name
self.description = description
self.obsolete = obsolete
self.extensions = extensions
self.experimental = experimental
self.raw_definition = definition
self._oid_info = None
@property
def oid_info(self):
if self._oid_info is None and self.oid:
self._oid_info = Oids.get(self.oid, '')
return self._oid_info if self._oid_info else None
def __str__(self):
return self.__repr__()
def __repr__(self):
r = ': ' + self.oid
r += ' [OBSOLETE]' if self.obsolete else ''
r += (linesep + ' Short name: ' + list_to_string(self.name)) if self.name else ''
r += (linesep + ' Description: ' + self.description) if self.description else ''
r += '<__desc__>'
r += (linesep + ' Extensions:' + linesep + linesep.join([' ' + s[0] + ': ' + list_to_string(s[1]) for s in self.extensions])) if self.extensions else ''
r += (linesep + ' Experimental:' + linesep + linesep.join([' ' + s[0] + ': ' + list_to_string(s[1]) for s in self.experimental])) if self.experimental else ''
r += (linesep + ' OidInfo: ' + str(self.oid_info)) if self.oid_info else ''
r += linesep
return r
@classmethod
def from_definition(cls, definitions):
conf_case_insensitive_schema = get_config_parameter('CASE_INSENSITIVE_SCHEMA_NAMES')
conf_ignore_malformed_schema = get_config_parameter('IGNORE_MALFORMED_SCHEMA')
ret_dict = CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
if not definitions:
return CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
for object_definition in definitions:
object_definition = to_unicode(object_definition.strip(), from_server=True)
if object_definition[0] == '(' and object_definition[-1] == ')':
if cls is MatchingRuleInfo:
pattern = '| SYNTAX '
elif cls is ObjectClassInfo:
pattern = '| SUP | ABSTRACT| STRUCTURAL| AUXILIARY| MUST | MAY '
elif cls is AttributeTypeInfo:
pattern = '| SUP | EQUALITY | ORDERING | SUBSTR | SYNTAX | SINGLE-VALUE| COLLECTIVE| NO-USER-MODIFICATION| USAGE '
elif cls is MatchingRuleUseInfo:
pattern = '| APPLIES '
elif cls is LdapSyntaxInfo:
pattern = ''
elif cls is DitContentRuleInfo:
pattern = '| AUX | MUST | MAY | NOT '
elif cls is DitStructureRuleInfo:
pattern = '| FORM | SUP '
elif cls is NameFormInfo:
pattern = '| OC | MUST | MAY '
else:
raise LDAPSchemaError('unknown schema definition class')
splitted = re.split('( NAME | DESC | OBSOLETE| X-| E-' + pattern + ')', object_definition[1:-1])
values = splitted[::2]
separators = splitted[1::2]
separators.insert(0, 'OID')
defs = list(zip(separators, values))
object_def = cls()
for d in defs:
key = d[0].strip()
value = d[1].strip()
if key == 'OID':
object_def.oid = value
elif key == 'NAME':
object_def.name = quoted_string_to_list(value)
elif key == 'DESC':
object_def.description = value.strip("'")
elif key == 'OBSOLETE':
object_def.obsolete = True
elif key == 'SYNTAX':
object_def.syntax = oids_string_to_list(value)
elif key == 'SUP':
object_def.superior = oids_string_to_list(value)
elif key == 'ABSTRACT':
object_def.kind = CLASS_ABSTRACT
elif key == 'STRUCTURAL':
object_def.kind = CLASS_STRUCTURAL
elif key == 'AUXILIARY':
object_def.kind = CLASS_AUXILIARY
elif key == 'MUST':
object_def.must_contain = oids_string_to_list(value)
elif key == 'MAY':
object_def.may_contain = oids_string_to_list(value)
elif key == 'EQUALITY':
object_def.equality = oids_string_to_list(value)
elif key == 'ORDERING':
object_def.ordering = oids_string_to_list(value)
elif key == 'SUBSTR':
object_def.substr = oids_string_to_list(value)
elif key == 'SINGLE-VALUE':
object_def.single_value = True
elif key == 'COLLECTIVE':
object_def.collective = True
elif key == 'NO-USER-MODIFICATION':
object_def.no_user_modification = True
elif key == 'USAGE':
object_def.usage = attribute_usage_to_constant(value)
elif key == 'APPLIES':
object_def.apply_to = oids_string_to_list(value)
elif key == 'AUX':
object_def.auxiliary_classes = oids_string_to_list(value)
elif key == 'FORM':
object_def.name_form = oids_string_to_list(value)
elif key == 'OC':
object_def.object_class = oids_string_to_list(value)
elif key == 'NOT':
object_def.not_contains = oids_string_to_list(value)
elif key == 'X-':
if not object_def.extensions:
object_def.extensions = []
object_def.extensions.append(extension_to_tuple('X-' + value))
elif key == 'E-':
if not object_def.experimental:
object_def.experimental = []
object_def.experimental.append(extension_to_tuple('E-' + value))
else:
if not conf_ignore_malformed_schema:
raise LDAPSchemaError('malformed schema definition key:' + key + ' - use get_info=NONE in Server definition')
else:
return CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
object_def.raw_definition = object_definition
if hasattr(object_def, 'syntax') and object_def.syntax and len(object_def.syntax) == 1:
object_def.min_length = None
if object_def.syntax[0].endswith('}'):
try:
object_def.min_length = int(object_def.syntax[0][object_def.syntax[0].index('{') + 1:-1])
object_def.syntax[0] = object_def.syntax[0][:object_def.syntax[0].index('{')]
except Exception:
pass
else:
object_def.min_length = None
object_def.syntax[0] = object_def.syntax[0].strip("'")
object_def.syntax = object_def.syntax[0]
if hasattr(object_def, 'name') and object_def.name:
for name in object_def.name:
ret_dict[name] = object_def
else:
ret_dict[object_def.oid] = object_def
else:
if not conf_ignore_malformed_schema:
raise LDAPSchemaError('malformed schema definition, use get_info=NONE in Server definition')
else:
return CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
return ret_dict
class MatchingRuleInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.3)
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
syntax=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=name,
description=description,
obsolete=obsolete,
extensions=extensions,
experimental=experimental,
definition=definition)
self.syntax = syntax
def __repr__(self):
r = (linesep + ' Syntax: ' + list_to_string(self.syntax)) if self.syntax else ''
return 'Matching rule' + BaseObjectInfo.__repr__(self).replace('<__desc__>', r)
class MatchingRuleUseInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.4)
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
apply_to=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=name,
description=description,
obsolete=obsolete,
extensions=extensions,
experimental=experimental,
definition=definition)
self.apply_to = apply_to
def __repr__(self):
r = (linesep + ' Apply to: ' + list_to_string(self.apply_to)) if self.apply_to else ''
return 'Matching rule use' + BaseObjectInfo.__repr__(self).replace('<__desc__>', r)
class ObjectClassInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.1)
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
superior=None,
kind=None,
must_contain=None,
may_contain=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=name,
description=description,
obsolete=obsolete,
extensions=extensions,
experimental=experimental,
definition=definition)
self.superior = superior
self.kind = kind
self.must_contain = must_contain or []
self.may_contain = may_contain or []
def __repr__(self):
r = ''
r += (linesep + ' Type: ' + constant_to_class_kind(self.kind)) if self.kind else ''
r += (linesep + ' Superior: ' + list_to_string(self.superior)) if self.superior else ''
r += (linesep + ' Must contain attributes: ' + list_to_string(self.must_contain)) if self.must_contain else ''
r += (linesep + ' May contain attributes: ' + list_to_string(self.may_contain)) if self.may_contain else ''
return 'Object class' + BaseObjectInfo.__repr__(self).replace('<__desc__>', r)
class AttributeTypeInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.2)
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
superior=None,
equality=None,
ordering=None,
substring=None,
syntax=None,
min_length=None,
single_value=False,
collective=False,
no_user_modification=False,
usage=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=name,
description=description,
obsolete=obsolete,
extensions=extensions,
experimental=experimental,
definition=definition)
self.superior = superior
self.equality = equality
self.ordering = ordering
self.substring = substring
self.syntax = syntax
self.min_length = min_length
self.single_value = single_value
self.collective = collective
self.no_user_modification = no_user_modification
self.usage = usage
self.mandatory_in = []
self.optional_in = []
def __repr__(self):
r = ''
r += linesep + ' Single value: ' + str(self.single_value)
r += linesep + ' Collective: True' if self.collective else ''
r += (linesep + ' Superior: ' + list_to_string(self.superior)) if self.superior else ''
r += linesep + ' No user modification: True' if self.no_user_modification else ''
r += (linesep + ' Usage: ' + constant_to_attribute_usage(self.usage)) if self.usage else ''
r += (linesep + ' Equality rule: ' + list_to_string(self.equality)) if self.equality else ''
r += (linesep + ' Ordering rule: ' + list_to_string(self.ordering)) if self.ordering else ''
r += (linesep + ' Substring rule: ' + list_to_string(self.substring)) if self.substring else ''
r += (linesep + ' Syntax: ' + (self.syntax + (' [' + str(decode_syntax(self.syntax)))) + ']') if self.syntax else ''
r += (linesep + ' Minimum length: ' + str(self.min_length)) if isinstance(self.min_length, int) else ''
r += linesep + ' Mandatory in: ' + list_to_string(self.mandatory_in) if self.mandatory_in else ''
r += linesep + ' Optional in: ' + list_to_string(self.optional_in) if self.optional_in else ''
return 'Attribute type' + BaseObjectInfo.__repr__(self).replace('<__desc__>', r)
class LdapSyntaxInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.5)
"""
def __init__(self,
oid=None,
description=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=None,
description=description,
obsolete=False,
extensions=extensions,
experimental=experimental,
definition=definition)
def __repr__(self):
return 'LDAP syntax' + BaseObjectInfo.__repr__(self).replace('<__desc__>', '')
class DitContentRuleInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.6)
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
auxiliary_classes=None,
must_contain=None,
may_contain=None,
not_contains=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=name,
description=description,
obsolete=obsolete,
extensions=extensions,
experimental=experimental,
definition=definition)
self.auxiliary_classes = auxiliary_classes
self.must_contain = must_contain
self.may_contain = may_contain
self.not_contains = not_contains
def __repr__(self):
r = (linesep + ' Auxiliary classes: ' + list_to_string(self.auxiliary_classes)) if self.auxiliary_classes else ''
r += (linesep + ' Must contain: ' + list_to_string(self.must_contain)) if self.must_contain else ''
r += (linesep + ' May contain: ' + list_to_string(self.may_contain)) if self.may_contain else ''
r += (linesep + ' Not contains: ' + list_to_string(self.not_contains)) if self.not_contains else ''
return 'DIT content rule' + BaseObjectInfo.__repr__(self).replace('<__desc__>', r)
class DitStructureRuleInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.7.1)
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
name_form=None,
superior=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=name,
description=description,
obsolete=obsolete,
extensions=extensions,
experimental=experimental,
definition=definition)
self.superior = superior
self.name_form = name_form
def __repr__(self):
r = (linesep + ' Superior rules: ' + list_to_string(self.superior)) if self.superior else ''
r += (linesep + ' Name form: ' + list_to_string(self.name_form)) if self.name_form else ''
return 'DIT structure rule' + BaseObjectInfo.__repr__(self).replace('<__desc__>', r)
class NameFormInfo(BaseObjectInfo):
"""
As per RFC 4512 (4.1.7.2)
"""
def __init__(self,
oid=None,
name=None,
description=None,
obsolete=False,
object_class=None,
must_contain=None,
may_contain=None,
extensions=None,
experimental=None,
definition=None):
BaseObjectInfo.__init__(self,
oid=oid,
name=name,
description=description,
obsolete=obsolete,
extensions=extensions,
experimental=experimental,
definition=definition)
self.object_class = object_class
self.must_contain = must_contain
self.may_contain = may_contain
def __repr__(self):
r = (linesep + ' Object class: ' + list_to_string(self.object_class)) if self.object_class else ''
r += (linesep + ' Must contain: ' + list_to_string(self.must_contain)) if self.must_contain else ''
r += (linesep + ' May contain: ' + list_to_string(self.may_contain)) if self.may_contain else ''
return 'Name form' + BaseObjectInfo.__repr__(self).replace('<__desc__>', r)
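# parsing sketch (illustrative; the definition string is a plausible example, not taken
# from a real server): feed a single RFC 4512 attributeType definition to
# AttributeTypeInfo.from_definition and inspect the resulting object.
defs = ["( 2.5.4.3 NAME 'cn' DESC 'common name' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )"]
parsed = AttributeTypeInfo.from_definition(defs)
print(parsed['cn'].oid, parsed['cn'].equality, parsed['cn'].syntax, parsed['cn'].min_length)
# expected roughly: 2.5.4.3 ['caseIgnoreMatch'] 1.3.6.1.4.1.1466.115.121.1.15 32768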

View File

@ -0,0 +1,57 @@
"""
"""
# Created on 2016.12.23
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .. import NO_ATTRIBUTES, ALL_ATTRIBUTES, STRING_TYPES
from ..operation.search import build_attribute_selection
from .controls import build_control
def _read_control(oid, attributes, criticality=False):
if not attributes:
attributes = [NO_ATTRIBUTES]
elif attributes == ALL_ATTRIBUTES:
attributes = [ALL_ATTRIBUTES]
if isinstance(attributes, STRING_TYPES):
attributes = [attributes]
value = build_attribute_selection(attributes, None)
return build_control(oid, criticality, value)
def pre_read_control(attributes, criticality=False):
"""Create a pre-read control for a request.
When passed as a control to the controls parameter of an operation, it will
return the value in `Connection.result` before the operation took place.
"""
return _read_control('1.3.6.1.1.13.1', attributes, criticality)
def post_read_control(attributes, criticality=False):
"""Create a post-read control for a request.
When passed as a control to the controls parameter of an operation, it will
return the value in `Connection.result` after the operation took place.
"""
return _read_control('1.3.6.1.1.13.2', attributes, criticality)
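# usage sketch (illustrative; the DN, attribute and connection "conn" are assumptions,
# and MODIFY_REPLACE is the ldap3 package constant): ask the server to return the
# 'description' value as it is after the modify, in the same response.
prc = post_read_control(['description'], criticality=True)
# conn.modify('cn=user1,ou=people,dc=example,dc=com',
# {'description': [(MODIFY_REPLACE, ['new text'])]}, controls=[prc])
# the value read back by the server is then carried in Connection.result, as noted above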

View File

@ -0,0 +1,152 @@
"""
"""
# Created on 2014.01.04
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from binascii import hexlify
import hashlib
import hmac
from ... import SEQUENCE_TYPES
from ...protocol.sasl.sasl import abort_sasl_negotiation, send_sasl_negotiation, random_hex_string
STATE_KEY = 0
STATE_VALUE = 1
def md5_h(value):
if not isinstance(value, bytes):
value = value.encode()
return hashlib.md5(value).digest()
def md5_kd(k, s):
if not isinstance(k, bytes):
k = k.encode()
if not isinstance(s, bytes):
s = s.encode()
return md5_h(k + b':' + s)
def md5_hex(value):
if not isinstance(value, bytes):
value = value.encode()
return hexlify(value)
def md5_hmac(k, s):
if not isinstance(k, bytes):
k = k.encode()
if not isinstance(s, bytes):
s = s.encode()
return hmac.new(k, s, hashlib.md5).hexdigest()  # digestmod is mandatory on Python 3.8+
def sasl_digest_md5(connection, controls):
# sasl_credential must be a tuple made up of the following elements: (realm, user, password, authorization_id)
# if realm is None, the realm received from the server is used, if available
if not isinstance(connection.sasl_credentials, SEQUENCE_TYPES) or not len(connection.sasl_credentials) == 4:
return None
# step One of RFC2831
result = send_sasl_negotiation(connection, controls, None)
if 'saslCreds' in result and result['saslCreds'] is not None:
server_directives = decode_directives(result['saslCreds'])
else:
return None
if 'realm' not in server_directives or 'nonce' not in server_directives or 'algorithm' not in server_directives: # mandatory directives, as per RFC2831
abort_sasl_negotiation(connection, controls)
return None
# step Two of RFC2831
charset = server_directives['charset'] if 'charset' in server_directives and server_directives['charset'].lower() == 'utf-8' else 'iso8859-1'
user = connection.sasl_credentials[1].encode(charset)
realm = (connection.sasl_credentials[0] if connection.sasl_credentials[0] else (server_directives['realm'] if 'realm' in server_directives else '')).encode(charset)
password = connection.sasl_credentials[2].encode(charset)
authz_id = connection.sasl_credentials[3].encode(charset) if connection.sasl_credentials[3] else b''
nonce = server_directives['nonce'].encode(charset)
cnonce = random_hex_string(16).encode(charset)
uri = b'ldap/'
qop = b'auth'
digest_response = b'username="' + user + b'",'
digest_response += b'realm="' + realm + b'",'
digest_response += (b'authzid="' + authz_id + b'",') if authz_id else b''
digest_response += b'nonce="' + nonce + b'",'
digest_response += b'cnonce="' + cnonce + b'",'
digest_response += b'digest-uri="' + uri + b'",'
digest_response += b'qop=' + qop + b','
digest_response += b'nc=00000001' + b','
if charset == 'utf-8':
digest_response += b'charset="utf-8",'
a0 = md5_h(b':'.join([user, realm, password]))
a1 = b':'.join([a0, nonce, cnonce, authz_id]) if authz_id else b':'.join([a0, nonce, cnonce])
a2 = b'AUTHENTICATE:' + uri + (':00000000000000000000000000000000' if qop in [b'auth-int', b'auth-conf'] else b'')
digest_response += b'response="' + md5_hex(md5_kd(md5_hex(md5_h(a1)), b':'.join([nonce, b'00000001', cnonce, qop, md5_hex(md5_h(a2))]))) + b'"'
result = send_sasl_negotiation(connection, controls, digest_response)
return result
def decode_directives(directives_string):
"""
converts directives to a dict, removing quotes from values
"""
# old_directives = dict((attr[0], attr[1].strip('"')) for attr in [line.split('=') for line in directives_string.split(',')])
state = STATE_KEY
tmp_buffer = ''
quoting = False
key = ''
directives = dict()
for c in directives_string.decode('utf-8'):
if state == STATE_KEY and c == '=':
key = tmp_buffer
tmp_buffer = ''
state = STATE_VALUE
elif state == STATE_VALUE and c == '"' and not quoting and not tmp_buffer:
quoting = True
elif state == STATE_VALUE and c == '"' and quoting:
quoting = False
elif state == STATE_VALUE and c == ',' and not quoting:
directives[key] = tmp_buffer
tmp_buffer = ''
key = ''
state = STATE_KEY
else:
tmp_buffer += c
if key and tmp_buffer:
directives[key] = tmp_buffer
return directives
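# decoding sketch (illustrative; the challenge is a made-up but typical step-one server
# challenge in the RFC 2831 format): directives are parsed into a dict with quotes removed.
sample_challenge = b'realm="example.com",nonce="OA6MG9tEQGm2hh",qop="auth",charset=utf-8,algorithm=md5-sess'
print(decode_directives(sample_challenge))
# expected roughly: {'realm': 'example.com', 'nonce': 'OA6MG9tEQGm2hh', 'qop': 'auth', 'charset': 'utf-8', 'algorithm': 'md5-sess'}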

View File

@ -0,0 +1,32 @@
"""
"""
# Created on 2014.01.04
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ...protocol.sasl.sasl import send_sasl_negotiation
def sasl_external(connection, controls):
result = send_sasl_negotiation(connection, controls, connection.sasl_credentials)
return result

View File

@ -0,0 +1,112 @@
"""
"""
# Created on 2015.04.08
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
# original code by Hugh Cole-Baker, modified by Peter Foley
# it needs the gssapi package
import socket
from ...core.exceptions import LDAPPackageUnavailableError, LDAPCommunicationError
try:
# noinspection PyPackageRequirements,PyUnresolvedReferences
import gssapi
except ImportError:
raise LDAPPackageUnavailableError('package gssapi missing')
from .sasl import send_sasl_negotiation, abort_sasl_negotiation
NO_SECURITY_LAYER = 1
INTEGRITY_PROTECTION = 2
CONFIDENTIALITY_PROTECTION = 4
def sasl_gssapi(connection, controls):
"""
Performs a bind using the Kerberos v5 ("GSSAPI") SASL mechanism
from RFC 4752. Does not support any security layers, only authentication!
sasl_credentials can be empty or a tuple with one or two elements.
The first element determines which service principal to request a ticket for and can be one of the following:
- None or False, to use the hostname from the Server object
- True to perform a reverse DNS lookup to retrieve the canonical hostname for the host's IP address
- A string containing the hostname
The optional second element is what authorization ID to request.
- If omitted or None, the authentication ID is used as the authorization ID
- If a string, the authorization ID to use. Should start with "dn:" or "user:".
"""
target_name = None
authz_id = b""
if connection.sasl_credentials:
if len(connection.sasl_credentials) >= 1 and connection.sasl_credentials[0]:
if connection.sasl_credentials[0] is True:
hostname = socket.gethostbyaddr(connection.socket.getpeername()[0])[0]
target_name = gssapi.Name('ldap@' + hostname, gssapi.NameType.hostbased_service)
else:
target_name = gssapi.Name('ldap@' + connection.sasl_credentials[0], gssapi.NameType.hostbased_service)
if len(connection.sasl_credentials) >= 2 and connection.sasl_credentials[1]:
authz_id = connection.sasl_credentials[1].encode("utf-8")
if target_name is None:
target_name = gssapi.Name('ldap@' + connection.server.host, gssapi.NameType.hostbased_service)
creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate') if connection.user else None
ctx = gssapi.SecurityContext(name=target_name, mech=gssapi.MechType.kerberos, creds=creds)
in_token = None
try:
while True:
out_token = ctx.step(in_token)
if out_token is None:
out_token = ''
result = send_sasl_negotiation(connection, controls, out_token)
in_token = result['saslCreds']
try:
# This raised an exception in gssapi<1.1.2 if the context was
# incomplete, but was fixed in
# https://github.com/pythongssapi/python-gssapi/pull/70
if ctx.complete:
break
except gssapi.exceptions.MissingContextError:
pass
unwrapped_token = ctx.unwrap(in_token)
if len(unwrapped_token.message) != 4:
raise LDAPCommunicationError("Incorrect response from server")
server_security_layers = unwrapped_token.message[0]
if not isinstance(server_security_layers, int):
server_security_layers = ord(server_security_layers)
if server_security_layers in (0, NO_SECURITY_LAYER):
if unwrapped_token.message[1:] != b'\x00\x00\x00':  # bytes literal: the unwrapped message is bytes on Python 3
raise LDAPCommunicationError("Server max buffer size must be 0 if no security layer")
if not (server_security_layers & NO_SECURITY_LAYER):
raise LDAPCommunicationError("Server requires a security layer, but this is not implemented")
client_security_layers = bytearray([NO_SECURITY_LAYER, 0, 0, 0])
out_token = ctx.wrap(bytes(client_security_layers)+authz_id, False)
return send_sasl_negotiation(connection, controls, out_token.message)
except (gssapi.exceptions.GSSError, LDAPCommunicationError):
abort_sasl_negotiation(connection, controls)
raise
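# usage sketch (illustrative; the hostname is an assumption): bind with the GSSAPI
# mechanism, letting sasl_gssapi above request a ticket for the server host.
# Requires the gssapi package and a valid Kerberos credential (e.g. obtained with kinit).
# from ldap3 import Server, Connection, SASL, KERBEROS
# conn = Connection(Server('ldap.example.com'), authentication=SASL, sasl_mechanism=KERBEROS)
# conn.bind()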

View File

@ -0,0 +1,70 @@
"""
"""
# Created on 2014.01.04
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
# payload for PLAIN mechanism
# message = [authzid] UTF8NUL authcid UTF8NUL passwd
# authcid = 1*SAFE ; MUST accept up to 255 octets
# authzid = 1*SAFE ; MUST accept up to 255 octets
# passwd = 1*SAFE ; MUST accept up to 255 octets
# UTF8NUL = %x00 ; UTF-8 encoded NUL character
#
# SAFE = UTF1 / UTF2 / UTF3 / UTF4
# ;; any UTF-8 encoded Unicode character except NUL
#
# UTF1 = %x01-7F ;; except NUL
# UTF2 = %xC2-DF UTF0
# UTF3 = %xE0 %xA0-BF UTF0 / %xE1-EC 2(UTF0) /
# %xED %x80-9F UTF0 / %xEE-EF 2(UTF0)
# UTF4 = %xF0 %x90-BF 2(UTF0) / %xF1-F3 3(UTF0) /
# %xF4 %x80-8F 2(UTF0)
# UTF0 = %x80-BF
from ...protocol.sasl.sasl import send_sasl_negotiation
from .sasl import sasl_prep
from ...utils.conv import to_raw, to_unicode
def sasl_plain(connection, controls):
authzid = connection.sasl_credentials[0]
authcid = connection.sasl_credentials[1]
passwd = connection.sasl_credentials[2]
payload = b''
if authzid:
payload += to_raw(sasl_prep(to_unicode(authzid)))
payload += b'\0'
if authcid:
payload += to_raw(sasl_prep(to_unicode(authcid)))
payload += b'\0'
if passwd:
payload += to_raw(sasl_prep(to_unicode(passwd)))
result = send_sasl_negotiation(connection, controls, payload)
return result
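# usage sketch (illustrative; the server and credentials are assumptions): PLAIN sends
# the password in clear text, so it should only be used over TLS/SSL. The credentials
# tuple is (authorization id, authentication id, password), matching the code above.
from ldap3 import Server, Connection, SASL, PLAIN
conn = Connection(Server('ldap.example.com', use_ssl=True), authentication=SASL,
sasl_mechanism=PLAIN, sasl_credentials=(None, 'user1', 'secret'))
# conn.bind()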

View File

@ -0,0 +1,171 @@
"""
"""
# Created on 2013.09.11
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import stringprep
from unicodedata import ucd_3_2_0 as unicode32
from os import urandom
from binascii import hexlify
from ... import SASL
from ...core.results import RESULT_AUTH_METHOD_NOT_SUPPORTED
from ...core.exceptions import LDAPSASLPrepError, LDAPPasswordIsMandatoryError


def sasl_prep(data):
    """
    implement SASLPrep profile as per RFC4013:
    it defines the "SASLprep" profile of the "stringprep" algorithm [StringPrep].
    The profile is designed for use in Simple Authentication and Security
    Layer ([SASL]) mechanisms, such as [PLAIN], [CRAM-MD5], and
    [DIGEST-MD5]. It may be applicable where simple user names and
    passwords are used. This profile is not intended for use in
    preparing identity strings that are not simple user names (e.g.,
    email addresses, domain names, distinguished names), or where
    identity or password strings that are not character data, or require
    different handling (e.g., case folding).
    """

    # mapping
    prepared_data = ''
    for c in data:
        if stringprep.in_table_c12(c):
            # non-ASCII space characters [StringPrep, C.1.2] that can be mapped to SPACE (U+0020)
            prepared_data += ' '
        elif stringprep.in_table_b1(c):
            # the "commonly mapped to nothing" characters [StringPrep, B.1] that can be mapped to nothing.
            pass
        else:
            prepared_data += c

    # normalizing
    # This profile specifies using Unicode normalization form KC
    # The repertoire is Unicode 3.2 as per RFC 4013 (2)
    prepared_data = unicode32.normalize('NFKC', prepared_data)

    if not prepared_data:
        raise LDAPSASLPrepError('SASLprep error: unable to normalize string')

    # prohibit
    for c in prepared_data:
        if stringprep.in_table_c12(c):
            # Non-ASCII space characters [StringPrep, C.1.2]
            raise LDAPSASLPrepError('SASLprep error: non-ASCII space character present')
        elif stringprep.in_table_c21(c):
            # ASCII control characters [StringPrep, C.2.1]
            raise LDAPSASLPrepError('SASLprep error: ASCII control character present')
        elif stringprep.in_table_c22(c):
            # Non-ASCII control characters [StringPrep, C.2.2]
            raise LDAPSASLPrepError('SASLprep error: non-ASCII control character present')
        elif stringprep.in_table_c3(c):
            # Private Use characters [StringPrep, C.3]
            raise LDAPSASLPrepError('SASLprep error: private character present')
        elif stringprep.in_table_c4(c):
            # Non-character code points [StringPrep, C.4]
            raise LDAPSASLPrepError('SASLprep error: non-character code point present')
        elif stringprep.in_table_c5(c):
            # Surrogate code points [StringPrep, C.5]
            raise LDAPSASLPrepError('SASLprep error: surrogate code point present')
        elif stringprep.in_table_c6(c):
            # Inappropriate for plain text characters [StringPrep, C.6]
            raise LDAPSASLPrepError('SASLprep error: inappropriate for plain text character present')
        elif stringprep.in_table_c7(c):
            # Inappropriate for canonical representation characters [StringPrep, C.7]
            raise LDAPSASLPrepError('SASLprep error: inappropriate for canonical representation character present')
        elif stringprep.in_table_c8(c):
            # Change display properties or deprecated characters [StringPrep, C.8]
            raise LDAPSASLPrepError('SASLprep error: change display property or deprecated character present')
        elif stringprep.in_table_c9(c):
            # Tagging characters [StringPrep, C.9]
            raise LDAPSASLPrepError('SASLprep error: tagging character present')
    # check bidi
    # if a string contains any r_and_al_cat character, the string MUST NOT contain any l_cat character
    flag_r_and_al_cat = False
    flag_l_cat = False
    for c in prepared_data:
        if stringprep.in_table_d1(c):
            flag_r_and_al_cat = True
        elif stringprep.in_table_d2(c):
            flag_l_cat = True

    if flag_r_and_al_cat and flag_l_cat:
        raise LDAPSASLPrepError('SASLprep error: string cannot contain (R or AL) and L bidirectional chars')

    # if a string contains any r_and_al_cat character, a r_and_al_cat character MUST be both the first
    # and the last character of the string (RFC 3454, section 6, requirement 3)
    if flag_r_and_al_cat and not (stringprep.in_table_d1(prepared_data[0]) and stringprep.in_table_d1(prepared_data[-1])):
        raise LDAPSASLPrepError('SASLprep error: r_and_al_cat character present, must be first and last character of the string')

    return prepared_data
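
# Behaviour sketch (example values chosen here, not taken from the library's tests):
# - U+00A0 (no-break space) is in table C.1.2 and is mapped to a plain SPACE
# - U+00AD (soft hyphen) is in table B.1 and is mapped to nothing
#   so sasl_prep(u'pa\u00adss\u00a0word') == u'pass word'
# - a NUL or any other control character raises LDAPSASLPrepError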


def validate_simple_password(password, accept_empty=False):
    """
    validate simple password as per RFC4013 using sasl_prep:
    """
    if accept_empty and not password:
        return password
    elif not password:
        raise LDAPPasswordIsMandatoryError("simple password can't be empty")

    if not isinstance(password, bytes):  # bytes are returned raw, as per RFC (4.2)
        password = sasl_prep(password)
        if not isinstance(password, bytes):
            password = password.encode('utf-8')

    return password
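
# Sketch of the expected behaviour (example values): a text password is SASLprepped and
# UTF-8 encoded, while a bytes password is passed through unchanged:
#   validate_simple_password(u'p\u00e1ssword') == b'p\xc3\xa1ssword'
#   validate_simple_password(b'raw-bytes') == b'raw-bytes'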


def abort_sasl_negotiation(connection, controls):
    from ...operation.bind import bind_operation

    request = bind_operation(connection.version, SASL, None, None, '', None)
    response = connection.post_send_single_response(connection.send('bindRequest', request, controls))
    if connection.strategy.sync:
        result = connection.result
    else:
        result = connection.get_response(response)[0][0]

    return result['result'] == RESULT_AUTH_METHOD_NOT_SUPPORTED


def send_sasl_negotiation(connection, controls, payload):
    from ...operation.bind import bind_operation

    request = bind_operation(connection.version, SASL, None, None, connection.sasl_mechanism, payload)
    response = connection.post_send_single_response(connection.send('bindRequest', request, controls))
    if connection.strategy.sync:
        result = connection.result
    else:
        _, result = connection.get_response(response)

    return result


def random_hex_string(size):
    return str(hexlify(urandom(size)).decode('ascii'))  # str fix for Python 2
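
# Illustrative note: random_hex_string(8) returns a 16-character lowercase hex string
# (e.g. something like '9f86d081884c7d65'; the value here is made up), used for instance
# as a client nonce during SASL DIGEST-MD5 negotiation.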

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,758 @@
"""
"""
# Created on 2014.10.21
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
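# This module embeds a static copy of the subschema published by an OpenLDAP slapd 2.4
# server, stored as JSON text. Usage sketch (host name is a placeholder): the offline
# schema is normally selected through the Server constructor rather than by importing
# this module directly, e.g.
#
#   from ldap3 import Server, Connection, OFFLINE_SLAPD_2_4
#
#   server = Server('ldap.example.com', get_info=OFFLINE_SLAPD_2_4)
#   conn = Connection(server)
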
slapd_2_4_schema = """
{
"raw": {
"attributeTypes": [
"( 2.5.4.0 NAME 'objectClass' DESC 'RFC4512: object classes of the entity' EQUALITY objectIdentifierMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 )",
"( 2.5.21.9 NAME 'structuralObjectClass' DESC 'RFC4512: structural object class of entry' EQUALITY objectIdentifierMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.1 NAME 'createTimestamp' DESC 'RFC4512: time which object was created' EQUALITY generalizedTimeMatch ORDERING generalizedTimeOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.2 NAME 'modifyTimestamp' DESC 'RFC4512: time which object was last modified' EQUALITY generalizedTimeMatch ORDERING generalizedTimeOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.3 NAME 'creatorsName' DESC 'RFC4512: name of creator' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.4 NAME 'modifiersName' DESC 'RFC4512: name of last modifier' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.9 NAME 'hasSubordinates' DESC 'X.501: entry has children' EQUALITY booleanMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 2.5.18.10 NAME 'subschemaSubentry' DESC 'RFC4512: name of controlling subschema entry' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.1.20 NAME 'entryDN' DESC 'DN of the entry' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.1.16.4 NAME 'entryUUID' DESC 'UUID of the entry' EQUALITY UUIDMatch ORDERING UUIDOrderingMatch SYNTAX 1.3.6.1.1.16.1 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.4.1.1466.101.120.6 NAME 'altServer' DESC 'RFC4512: alternative servers' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 USAGE dSAOperation )",
"( 1.3.6.1.4.1.1466.101.120.5 NAME 'namingContexts' DESC 'RFC4512: naming contexts' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE dSAOperation )",
"( 1.3.6.1.4.1.1466.101.120.13 NAME 'supportedControl' DESC 'RFC4512: supported controls' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 USAGE dSAOperation )",
"( 1.3.6.1.4.1.1466.101.120.7 NAME 'supportedExtension' DESC 'RFC4512: supported extended operations' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 USAGE dSAOperation )",
"( 1.3.6.1.4.1.1466.101.120.15 NAME 'supportedLDAPVersion' DESC 'RFC4512: supported LDAP versions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 USAGE dSAOperation )",
"( 1.3.6.1.4.1.1466.101.120.14 NAME 'supportedSASLMechanisms' DESC 'RFC4512: supported SASL mechanisms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 USAGE dSAOperation )",
"( 1.3.6.1.4.1.4203.1.3.5 NAME 'supportedFeatures' DESC 'RFC4512: features supported by the server' EQUALITY objectIdentifierMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 USAGE dSAOperation )",
"( 1.3.6.1.1.4 NAME 'vendorName' DESC 'RFC3045: name of implementation vendor' EQUALITY caseExactMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE NO-USER-MODIFICATION USAGE dSAOperation )",
"( 1.3.6.1.1.5 NAME 'vendorVersion' DESC 'RFC3045: version of implementation' EQUALITY caseExactMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE NO-USER-MODIFICATION USAGE dSAOperation )",
"( 2.5.21.4 NAME 'matchingRules' DESC 'RFC4512: matching rules' EQUALITY objectIdentifierFirstComponentMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.30 USAGE directoryOperation )",
"( 2.5.21.5 NAME 'attributeTypes' DESC 'RFC4512: attribute types' EQUALITY objectIdentifierFirstComponentMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.3 USAGE directoryOperation )",
"( 2.5.21.6 NAME 'objectClasses' DESC 'RFC4512: object classes' EQUALITY objectIdentifierFirstComponentMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.37 USAGE directoryOperation )",
"( 2.5.21.8 NAME 'matchingRuleUse' DESC 'RFC4512: matching rule uses' EQUALITY objectIdentifierFirstComponentMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.31 USAGE directoryOperation )",
"( 1.3.6.1.4.1.1466.101.120.16 NAME 'ldapSyntaxes' DESC 'RFC4512: LDAP syntaxes' EQUALITY objectIdentifierFirstComponentMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.54 USAGE directoryOperation )",
"( 2.5.4.1 NAME ( 'aliasedObjectName' 'aliasedEntryName' ) DESC 'RFC4512: name of aliased object' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 2.16.840.1.113730.3.1.34 NAME 'ref' DESC 'RFC3296: subordinate referral URL' EQUALITY caseExactMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 USAGE distributedOperation )",
"( 1.3.6.1.4.1.1466.101.119.3 NAME 'entryTtl' DESC 'RFC2589: entry time-to-live' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE dSAOperation )",
"( 1.3.6.1.4.1.1466.101.119.4 NAME 'dynamicSubtrees' DESC 'RFC2589: dynamic subtrees' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE dSAOperation )",
"( 2.5.4.49 NAME 'distinguishedName' DESC 'RFC4519: common supertype of DN attributes' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.5.4.41 NAME 'name' DESC 'RFC4519: common supertype of name attributes' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )",
"( 2.5.4.3 NAME ( 'cn' 'commonName' ) DESC 'RFC4519: common name(s) for which the entity is known by' SUP name )",
"( 0.9.2342.19200300.100.1.1 NAME ( 'uid' 'userid' ) DESC 'RFC4519: user identifier' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 1.3.6.1.1.1.1.0 NAME 'uidNumber' DESC 'RFC2307: An integer uniquely identifying a user in an administrative domain' EQUALITY integerMatch ORDERING integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.1 NAME 'gidNumber' DESC 'RFC2307: An integer uniquely identifying a group in an administrative domain' EQUALITY integerMatch ORDERING integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 2.5.4.35 NAME 'userPassword' DESC 'RFC4519/2307: password of user' EQUALITY octetStringMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{128} )",
"( 1.3.6.1.4.1.250.1.57 NAME 'labeledURI' DESC 'RFC2079: Uniform Resource Identifier with optional label' EQUALITY caseExactMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.5.4.13 NAME 'description' DESC 'RFC4519: descriptive information' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{1024} )",
"( 2.5.4.34 NAME 'seeAlso' DESC 'RFC4519: DN of related object' SUP distinguishedName )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.78 NAME 'olcConfigFile' DESC 'File for slapd configuration directives' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.79 NAME 'olcConfigDir' DESC 'Directory for slapd configuration backend' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.1 NAME 'olcAccess' DESC 'Access Control List' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.86 NAME 'olcAddContentAcl' DESC 'Check ACLs against content of Add ops' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.2 NAME 'olcAllows' DESC 'Allowed set of deprecated features' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.3 NAME 'olcArgsFile' DESC 'File for slapd command line options' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.5 NAME 'olcAttributeOptions' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.4 NAME 'olcAttributeTypes' DESC 'OpenLDAP attributeTypes' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.6 NAME 'olcAuthIDRewrite' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.7 NAME 'olcAuthzPolicy' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.8 NAME 'olcAuthzRegexp' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.9 NAME 'olcBackend' DESC 'A type of backend' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE X-ORDERED 'SIBLINGS' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.10 NAME 'olcConcurrency' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.11 NAME 'olcConnMaxPending' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.12 NAME 'olcConnMaxPendingAuth' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.13 NAME 'olcDatabase' DESC 'The backend type for a database instance' SUP olcBackend SINGLE-VALUE X-ORDERED 'SIBLINGS' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.14 NAME 'olcDefaultSearchBase' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.15 NAME 'olcDisallows' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.16 NAME 'olcDitContentRules' DESC 'OpenLDAP DIT content rules' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.20 NAME 'olcExtraAttrs' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.17 NAME 'olcGentleHUP' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.17 NAME 'olcHidden' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.18 NAME 'olcIdleTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.19 NAME 'olcInclude' SUP labeledURI )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.20 NAME 'olcIndexSubstrIfMinLen' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.21 NAME 'olcIndexSubstrIfMaxLen' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.22 NAME 'olcIndexSubstrAnyLen' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.23 NAME 'olcIndexSubstrAnyStep' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.84 NAME 'olcIndexIntLen' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.4 NAME 'olcLastMod' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.85 NAME 'olcLdapSyntaxes' DESC 'OpenLDAP ldapSyntax' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.5 NAME 'olcLimits' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.93 NAME 'olcListenerThreads' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.26 NAME 'olcLocalSSF' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.27 NAME 'olcLogFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.28 NAME 'olcLogLevel' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.6 NAME 'olcMaxDerefDepth' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.16 NAME 'olcMirrorMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.30 NAME 'olcModuleLoad' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.31 NAME 'olcModulePath' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.18 NAME 'olcMonitoring' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.32 NAME 'olcObjectClasses' DESC 'OpenLDAP object classes' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.33 NAME 'olcObjectIdentifier' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.34 NAME 'olcOverlay' SUP olcDatabase SINGLE-VALUE X-ORDERED 'SIBLINGS' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.35 NAME 'olcPasswordCryptSaltFormat' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.36 NAME 'olcPasswordHash' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.37 NAME 'olcPidFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.38 NAME 'olcPlugin' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.39 NAME 'olcPluginLogFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.40 NAME 'olcReadOnly' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.41 NAME 'olcReferral' SUP labeledURI SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.7 NAME 'olcReplica' SUP labeledURI EQUALITY caseIgnoreMatch X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.43 NAME 'olcReplicaArgsFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.44 NAME 'olcReplicaPidFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.45 NAME 'olcReplicationInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.46 NAME 'olcReplogFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.47 NAME 'olcRequires' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.48 NAME 'olcRestrict' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.49 NAME 'olcReverseLookup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.8 NAME 'olcRootDN' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.51 NAME 'olcRootDSE' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.9 NAME 'olcRootPW' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.89 NAME 'olcSaslAuxprops' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.53 NAME 'olcSaslHost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.54 NAME 'olcSaslRealm' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.56 NAME 'olcSaslSecProps' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.58 NAME 'olcSchemaDN' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.59 NAME 'olcSecurity' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.81 NAME 'olcServerID' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.60 NAME 'olcSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.61 NAME 'olcSockbufMaxIncoming' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.62 NAME 'olcSockbufMaxIncomingAuth' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.83 NAME 'olcSortVals' DESC 'Attributes whose values will always be sorted' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.15 NAME 'olcSubordinate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.10 NAME 'olcSuffix' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.19 NAME 'olcSyncUseSubentry' DESC 'Store sync context in a subentry' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.11 NAME 'olcSyncrepl' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.90 NAME 'olcTCPBuffer' DESC 'Custom TCP buffer size' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.66 NAME 'olcThreads' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.67 NAME 'olcTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.68 NAME 'olcTLSCACertificateFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.69 NAME 'olcTLSCACertificatePath' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.70 NAME 'olcTLSCertificateFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.71 NAME 'olcTLSCertificateKeyFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.72 NAME 'olcTLSCipherSuite' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.73 NAME 'olcTLSCRLCheck' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.82 NAME 'olcTLSCRLFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.74 NAME 'olcTLSRandFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.75 NAME 'olcTLSVerifyClient' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.77 NAME 'olcTLSDHParamFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.87 NAME 'olcTLSProtocolMin' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.80 NAME 'olcToolThreads' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.12 NAME 'olcUpdateDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.13 NAME 'olcUpdateRef' SUP labeledURI EQUALITY caseIgnoreMatch )",
"( 1.3.6.1.4.1.4203.1.12.2.3.0.88 NAME 'olcWriteTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.1 NAME 'olcDbDirectory' DESC 'Directory for database content' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.11 NAME 'olcDbCacheFree' DESC 'Number of extra entries to free when max is reached' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.1 NAME 'olcDbCacheSize' DESC 'Entry cache size in entries' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.2 NAME 'olcDbCheckpoint' DESC 'Database checkpoint interval in kbytes and minutes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.16 NAME 'olcDbChecksum' DESC 'Enable database checksum validation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.13 NAME 'olcDbCryptFile' DESC 'Pathname of file containing the DB encryption key' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.14 NAME 'olcDbCryptKey' DESC 'DB encryption key' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.3 NAME 'olcDbConfig' DESC 'BerkeleyDB DB_CONFIG configuration directives' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.4 NAME 'olcDbNoSync' DESC 'Disable synchronous database writes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.15 NAME 'olcDbPageSize' DESC 'Page size of specified DB, in Kbytes' EQUALITY caseExactMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.5 NAME 'olcDbDirtyRead' DESC 'Allow reads of uncommitted data' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.12 NAME 'olcDbDNcacheSize' DESC 'DN cache size' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.6 NAME 'olcDbIDLcacheSize' DESC 'IDL cache size in IDLs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.2 NAME 'olcDbIndex' DESC 'Attribute index parameters' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.7 NAME 'olcDbLinearIndex' DESC 'Index attributes one at a time' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.8 NAME 'olcDbLockDetect' DESC 'Deadlock detection algorithm' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.3 NAME 'olcDbMode' DESC 'Unix permissions of database files' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.9 NAME 'olcDbSearchStack' DESC 'Depth of search stack in IDLs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.1.10 NAME 'olcDbShmKey' DESC 'Key for shared memory region' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.0.14 NAME 'olcDbURI' DESC 'URI (list) for remote DSA' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.1 NAME 'olcDbStartTLS' DESC 'StartTLS' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.2 NAME 'olcDbACLAuthcDn' DESC 'Remote ACL administrative identity' OBSOLETE SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.3 NAME 'olcDbACLPasswd' DESC 'Remote ACL administrative identity credentials' OBSOLETE SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.4 NAME 'olcDbACLBind' DESC 'Remote ACL administrative identity auth bind configuration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.5 NAME 'olcDbIDAssertAuthcDn' DESC 'Remote Identity Assertion administrative identity' OBSOLETE SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.6 NAME 'olcDbIDAssertPasswd' DESC 'Remote Identity Assertion administrative identity credentials' OBSOLETE SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.7 NAME 'olcDbIDAssertBind' DESC 'Remote Identity Assertion administrative identity auth bind configuration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.8 NAME 'olcDbIDAssertMode' DESC 'Remote Identity Assertion mode' OBSOLETE SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.9 NAME 'olcDbIDAssertAuthzFrom' DESC 'Remote Identity Assertion authz rules' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.10 NAME 'olcDbRebindAsUser' DESC 'Rebind as user' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.11 NAME 'olcDbChaseReferrals' DESC 'Chase referrals' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.12 NAME 'olcDbTFSupport' DESC 'Absolute filters support' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.13 NAME 'olcDbProxyWhoAmI' DESC 'Proxy whoAmI exop' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.14 NAME 'olcDbTimeout' DESC 'Per-operation timeouts' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.15 NAME 'olcDbIdleTimeout' DESC 'connection idle timeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.16 NAME 'olcDbConnTtl' DESC 'connection ttl' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.17 NAME 'olcDbNetworkTimeout' DESC 'connection network timeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.18 NAME 'olcDbProtocolVersion' DESC 'protocol version' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.19 NAME 'olcDbSingleConn' DESC 'cache a single connection per identity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.20 NAME 'olcDbCancel' DESC 'abandon/ignore/exop operations when appropriate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.21 NAME 'olcDbQuarantine' DESC 'Quarantine database if connection fails and retry according to rule' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.22 NAME 'olcDbUseTemporaryConn' DESC 'Use temporary connections if the cached one is busy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.23 NAME 'olcDbConnectionPoolMax' DESC 'Max size of privileged connections pool' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.25 NAME 'olcDbNoRefs' DESC 'Do not return search reference responses' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.26 NAME 'olcDbNoUndefFilter' DESC 'Do not propagate undefined search filters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.3.27 NAME 'olcDbIDAssertPassThru' DESC 'Remote Identity Assertion passthru rules' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.3.1 NAME 'olcChainingBehavior' DESC 'Chaining behavior control parameters (draft-sermersheim-ldap-chaining)' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.3.2 NAME 'olcChainCacheURI' DESC 'Enables caching of URIs not present in configuration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.3.3 NAME 'olcChainMaxReferralDepth' DESC 'max referral depth' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.3.4 NAME 'olcChainReturnError' DESC 'Errors are returned instead of the original referral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.2.5.1 NAME 'olcRelay' DESC 'Relay DN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.4.1 NAME 'olcAccessLogDB' DESC 'Suffix of database for log content' SUP distinguishedName SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.4.2 NAME 'olcAccessLogOps' DESC 'Operation types to log' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.4.3 NAME 'olcAccessLogPurge' DESC 'Log cleanup parameters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.4.4 NAME 'olcAccessLogSuccess' DESC 'Log successful ops only' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.4.5 NAME 'olcAccessLogOld' DESC 'Log old values when modifying entries matching the filter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.4.6 NAME 'olcAccessLogOldAttr' DESC 'Log old values of these attributes even if unmodified' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.4.7 NAME 'olcAccessLogBase' DESC 'Operation types to log under a specific branch' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.15.1 NAME 'olcAuditlogFile' DESC 'Filename for auditlogging' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.19.1 NAME 'olcCollectInfo' DESC 'DN of entry and attribute to distribute' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.13.1 NAME 'olcConstraintAttribute' DESC 'constraint for list of attributes' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.9.1 NAME 'olcDDSstate' DESC 'RFC2589 Dynamic directory services state' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.9.2 NAME 'olcDDSmaxTtl' DESC 'RFC2589 Dynamic directory services max TTL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.9.3 NAME 'olcDDSminTtl' DESC 'RFC2589 Dynamic directory services min TTL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.9.4 NAME 'olcDDSdefaultTtl' DESC 'RFC2589 Dynamic directory services default TTL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.9.5 NAME 'olcDDSinterval' DESC 'RFC2589 Dynamic directory services expiration task run interval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.9.6 NAME 'olcDDStolerance' DESC 'RFC2589 Dynamic directory services additional TTL in expiration scheduling' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.9.7 NAME 'olcDDSmaxDynamicObjects' DESC 'RFC2589 Dynamic directory services max number of dynamic objects' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.17.1 NAME 'olcDGAttrPair' DESC 'Member and MemberURL attribute pair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.8.1 NAME 'olcDlAttrSet' DESC 'Dynamic list: <group objectClass>, <URL attributeDescription>, <member attributeDescription>' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.2.840.113556.1.2.102 NAME 'memberOf' DESC 'Group that the entry belongs to' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE dSAOperation X-ORIGIN 'iPlanet Delegated Administrator' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.18.0 NAME 'olcMemberOfDN' DESC 'DN to be used as modifiersName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.18.1 NAME 'olcMemberOfDangling' DESC 'Behavior with respect to dangling members, constrained to ignore, drop, error' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.18.2 NAME 'olcMemberOfRefInt' DESC 'Take care of referential integrity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.18.3 NAME 'olcMemberOfGroupOC' DESC 'Group objectClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.18.4 NAME 'olcMemberOfMemberAD' DESC 'member attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.18.5 NAME 'olcMemberOfMemberOfAD' DESC 'memberOf attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.18.7 NAME 'olcMemberOfDanglingError' DESC 'Error code returned in case of dangling back reference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.42.2.27.8.1.16 NAME 'pwdChangedTime' DESC 'The time the password was last changed' EQUALITY generalizedTimeMatch ORDERING generalizedTimeOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.17 NAME 'pwdAccountLockedTime' DESC 'The time an user account was locked' EQUALITY generalizedTimeMatch ORDERING generalizedTimeOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.19 NAME 'pwdFailureTime' DESC 'The timestamps of the last consecutive authentication failures' EQUALITY generalizedTimeMatch ORDERING generalizedTimeOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.20 NAME 'pwdHistory' DESC 'The history of users passwords' EQUALITY octetStringMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.21 NAME 'pwdGraceUseTime' DESC 'The timestamps of the grace login once the password has expired' EQUALITY generalizedTimeMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 NO-USER-MODIFICATION USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.22 NAME 'pwdReset' DESC 'The indication that the password has been reset' EQUALITY booleanMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE USAGE directoryOperation )",
"( 1.3.6.1.4.1.42.2.27.8.1.23 NAME 'pwdPolicySubentry' DESC 'The pwdPolicy subentry in effect for this object' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.12.1 NAME 'olcPPolicyDefault' DESC 'DN of a pwdPolicy object for uncustomized objects' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.12.2 NAME 'olcPPolicyHashCleartext' DESC 'Hash passwords on add or modify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.12.4 NAME 'olcPPolicyForwardUpdates' DESC 'Allow policy state updates to be forwarded via updateref' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.12.3 NAME 'olcPPolicyUseLockout' DESC 'Warn clients with AccountLocked' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.1 NAME ( 'olcPcache' 'olcProxyCache' ) DESC 'Proxy Cache basic parameters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.2 NAME ( 'olcPcacheAttrset' 'olcProxyAttrset' ) DESC 'A set of attributes to cache' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.3 NAME ( 'olcPcacheTemplate' 'olcProxyCacheTemplate' ) DESC 'Filter template, attrset, cache TTL, optional negative TTL, optional sizelimit TTL, optional TTR' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.4 NAME 'olcPcachePosition' DESC 'Response callback position in overlay stack' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.5 NAME ( 'olcPcacheMaxQueries' 'olcProxyCacheQueries' ) DESC 'Maximum number of queries to cache' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.6 NAME ( 'olcPcachePersist' 'olcProxySaveQueries' ) DESC 'Save cached queries for hot restart' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.7 NAME ( 'olcPcacheValidate' 'olcProxyCheckCacheability' ) DESC 'Check whether the results of a query are cacheable, e.g. for schema issues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.8 NAME 'olcPcacheOffline' DESC 'Set cache to offline mode and disable expiration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.2.9 NAME 'olcPcacheBind' DESC 'Parameters for caching Binds' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.11.1 NAME 'olcRefintAttribute' DESC 'Attributes for referential integrity' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.11.2 NAME 'olcRefintNothing' DESC 'Replacement DN to supply when needed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.11.3 NAME 'olcRefintModifiersName' DESC 'The DN to use as modifiersName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.20.1 NAME 'olcRetcodeParent' DESC '' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.20.2 NAME 'olcRetcodeItem' DESC '' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.20.3 NAME 'olcRetcodeInDir' DESC '' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.20.4 NAME 'olcRetcodeSleep' DESC '' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.16.1 NAME 'olcRwmRewrite' DESC 'Rewrites strings' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.16.2 NAME 'olcRwmTFSupport' DESC 'Absolute filters support' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.16.3 NAME 'olcRwmMap' DESC 'maps attributes/objectClasses' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORDERED 'VALUES' )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.16.4 NAME 'olcRwmNormalizeMapped' DESC 'Normalize mapped attributes/objectClasses' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.16.5 NAME 'olcRwmDropUnrequested' DESC 'Drop unrequested attributes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.21.1 NAME 'olcSssVlvMax' DESC 'Maximum number of concurrent Sort requests' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.21.2 NAME 'olcSssVlvMaxKeys' DESC 'Maximum number of Keys in a Sort request' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.21.3 NAME 'olcSssVlvMaxPerConn' DESC 'Maximum number of concurrent paged search requests per connection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.1.1 NAME 'olcSpCheckpoint' DESC 'ContextCSN checkpoint interval in ops and minutes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.1.2 NAME 'olcSpSessionlog' DESC 'Session log size in ops' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.1.3 NAME 'olcSpNoPresent' DESC 'Omit Present phase processing' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.1.4 NAME 'olcSpReloadHint' DESC 'Observe Reload Hint in Request control' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.14.1 NAME 'olcTranslucentStrict' DESC 'Reveal attribute deletion constraint violations' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.14.2 NAME 'olcTranslucentNoGlue' DESC 'Disable automatic glue records for ADD and MODRDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.14.3 NAME 'olcTranslucentLocal' DESC 'Attributes to use in local search filter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.14.4 NAME 'olcTranslucentRemote' DESC 'Attributes to use in remote search filter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.14.5 NAME 'olcTranslucentBindLocal' DESC 'Enable local bind' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.14.6 NAME 'olcTranslucentPwModLocal' DESC 'Enable local RFC 3062 Password Modify extended operation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.10.1 NAME 'olcUniqueBase' DESC 'Subtree for uniqueness searches' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.10.2 NAME 'olcUniqueIgnore' DESC 'Attributes for which uniqueness shall not be enforced' EQUALITY caseIgnoreMatch ORDERING caseIgnoreOrderingMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.10.3 NAME 'olcUniqueAttribute' DESC 'Attributes for which uniqueness shall be enforced' EQUALITY caseIgnoreMatch ORDERING caseIgnoreOrderingMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.10.4 NAME 'olcUniqueStrict' DESC 'Enforce uniqueness of null values' EQUALITY booleanMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.10.5 NAME 'olcUniqueURI' DESC 'List of keywords and LDAP URIs for a uniqueness domain' EQUALITY caseExactMatch ORDERING caseExactOrderingMatch SUBSTR caseExactSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.4203.1.12.2.3.3.5.1 NAME 'olcValSortAttr' DESC 'Sorting rule for attribute under given DN' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.5.4.2 NAME 'knowledgeInformation' DESC 'RFC2256: knowledge information' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )",
"( 2.5.4.4 NAME ( 'sn' 'surname' ) DESC 'RFC2256: last (family) name(s) for which the entity is known by' SUP name )",
"( 2.5.4.5 NAME 'serialNumber' DESC 'RFC2256: serial number of the entity' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} )",
"( 2.5.4.6 NAME ( 'c' 'countryName' ) DESC 'RFC4519: two-letter ISO-3166 country code' SUP name SYNTAX 1.3.6.1.4.1.1466.115.121.1.11 SINGLE-VALUE )",
"( 2.5.4.7 NAME ( 'l' 'localityName' ) DESC 'RFC2256: locality which this object resides in' SUP name )",
"( 2.5.4.8 NAME ( 'st' 'stateOrProvinceName' ) DESC 'RFC2256: state or province which this object resides in' SUP name )",
"( 2.5.4.9 NAME ( 'street' 'streetAddress' ) DESC 'RFC2256: street address of this object' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} )",
"( 2.5.4.10 NAME ( 'o' 'organizationName' ) DESC 'RFC2256: organization this object belongs to' SUP name )",
"( 2.5.4.11 NAME ( 'ou' 'organizationalUnitName' ) DESC 'RFC2256: organizational unit this object belongs to' SUP name )",
"( 2.5.4.12 NAME 'title' DESC 'RFC2256: title associated with the entity' SUP name )",
"( 2.5.4.14 NAME 'searchGuide' DESC 'RFC2256: search guide, deprecated by enhancedSearchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.25 )",
"( 2.5.4.15 NAME 'businessCategory' DESC 'RFC2256: business category' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} )",
"( 2.5.4.16 NAME 'postalAddress' DESC 'RFC2256: postal address' EQUALITY caseIgnoreListMatch SUBSTR caseIgnoreListSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.41 )",
"( 2.5.4.17 NAME 'postalCode' DESC 'RFC2256: postal code' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} )",
"( 2.5.4.18 NAME 'postOfficeBox' DESC 'RFC2256: Post Office Box' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} )",
"( 2.5.4.19 NAME 'physicalDeliveryOfficeName' DESC 'RFC2256: Physical Delivery Office Name' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} )",
"( 2.5.4.20 NAME 'telephoneNumber' DESC 'RFC2256: Telephone Number' EQUALITY telephoneNumberMatch SUBSTR telephoneNumberSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{32} )",
"( 2.5.4.21 NAME 'telexNumber' DESC 'RFC2256: Telex Number' SYNTAX 1.3.6.1.4.1.1466.115.121.1.52 )",
"( 2.5.4.22 NAME 'teletexTerminalIdentifier' DESC 'RFC2256: Teletex Terminal Identifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.51 )",
"( 2.5.4.23 NAME ( 'facsimileTelephoneNumber' 'fax' ) DESC 'RFC2256: Facsimile (Fax) Telephone Number' SYNTAX 1.3.6.1.4.1.1466.115.121.1.22 )",
"( 2.5.4.24 NAME 'x121Address' DESC 'RFC2256: X.121 Address' EQUALITY numericStringMatch SUBSTR numericStringSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{15} )",
"( 2.5.4.25 NAME 'internationaliSDNNumber' DESC 'RFC2256: international ISDN number' EQUALITY numericStringMatch SUBSTR numericStringSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{16} )",
"( 2.5.4.26 NAME 'registeredAddress' DESC 'RFC2256: registered postal address' SUP postalAddress SYNTAX 1.3.6.1.4.1.1466.115.121.1.41 )",
"( 2.5.4.27 NAME 'destinationIndicator' DESC 'RFC2256: destination indicator' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{128} )",
"( 2.5.4.28 NAME 'preferredDeliveryMethod' DESC 'RFC2256: preferred delivery method' SYNTAX 1.3.6.1.4.1.1466.115.121.1.14 SINGLE-VALUE )",
"( 2.5.4.29 NAME 'presentationAddress' DESC 'RFC2256: presentation address' EQUALITY presentationAddressMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.43 SINGLE-VALUE )",
"( 2.5.4.30 NAME 'supportedApplicationContext' DESC 'RFC2256: supported application context' EQUALITY objectIdentifierMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 )",
"( 2.5.4.31 NAME 'member' DESC 'RFC2256: member of a group' SUP distinguishedName )",
"( 2.5.4.32 NAME 'owner' DESC 'RFC2256: owner (of the object)' SUP distinguishedName )",
"( 2.5.4.33 NAME 'roleOccupant' DESC 'RFC2256: occupant of role' SUP distinguishedName )",
"( 2.5.4.36 NAME 'userCertificate' DESC 'RFC2256: X.509 user certificate, use ;binary' EQUALITY certificateExactMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.8 )",
"( 2.5.4.37 NAME 'cACertificate' DESC 'RFC2256: X.509 CA certificate, use ;binary' EQUALITY certificateExactMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.8 )",
"( 2.5.4.38 NAME 'authorityRevocationList' DESC 'RFC2256: X.509 authority revocation list, use ;binary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.9 )",
"( 2.5.4.39 NAME 'certificateRevocationList' DESC 'RFC2256: X.509 certificate revocation list, use ;binary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.9 )",
"( 2.5.4.40 NAME 'crossCertificatePair' DESC 'RFC2256: X.509 cross certificate pair, use ;binary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.10 )",
"( 2.5.4.42 NAME ( 'givenName' 'gn' ) DESC 'RFC2256: first name(s) for which the entity is known by' SUP name )",
"( 2.5.4.43 NAME 'initials' DESC 'RFC2256: initials of some or all of names, but not the surname(s).' SUP name )",
"( 2.5.4.44 NAME 'generationQualifier' DESC 'RFC2256: name qualifier indicating a generation' SUP name )",
"( 2.5.4.45 NAME 'x500UniqueIdentifier' DESC 'RFC2256: X.500 unique identifier' EQUALITY bitStringMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.6 )",
"( 2.5.4.46 NAME 'dnQualifier' DESC 'RFC2256: DN qualifier' EQUALITY caseIgnoreMatch ORDERING caseIgnoreOrderingMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.44 )",
"( 2.5.4.47 NAME 'enhancedSearchGuide' DESC 'RFC2256: enhanced search guide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.21 )",
"( 2.5.4.48 NAME 'protocolInformation' DESC 'RFC2256: protocol information' EQUALITY protocolInformationMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.42 )",
"( 2.5.4.50 NAME 'uniqueMember' DESC 'RFC2256: unique member of a group' EQUALITY uniqueMemberMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.34 )",
"( 2.5.4.51 NAME 'houseIdentifier' DESC 'RFC2256: house identifier' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )",
"( 2.5.4.52 NAME 'supportedAlgorithms' DESC 'RFC2256: supported algorithms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.49 )",
"( 2.5.4.53 NAME 'deltaRevocationList' DESC 'RFC2256: delta revocation list; use ;binary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.9 )",
"( 2.5.4.54 NAME 'dmdName' DESC 'RFC2256: name of DMD' SUP name )",
"( 2.5.4.65 NAME 'pseudonym' DESC 'X.520(4th): pseudonym for the object' SUP name )",
"( 0.9.2342.19200300.100.1.3 NAME ( 'mail' 'rfc822Mailbox' ) DESC 'RFC1274: RFC822 Mailbox' EQUALITY caseIgnoreIA5Match SUBSTR caseIgnoreIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{256} )",
"( 0.9.2342.19200300.100.1.25 NAME ( 'dc' 'domainComponent' ) DESC 'RFC1274/2247: domain component' EQUALITY caseIgnoreIA5Match SUBSTR caseIgnoreIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.37 NAME 'associatedDomain' DESC 'RFC1274: domain associated with object' EQUALITY caseIgnoreIA5Match SUBSTR caseIgnoreIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.2.840.113549.1.9.1 NAME ( 'email' 'emailAddress' 'pkcs9email' ) DESC 'RFC3280: legacy attribute for email addresses in DNs' EQUALITY caseIgnoreIA5Match SUBSTR caseIgnoreIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{128} )",
"( 0.9.2342.19200300.100.1.2 NAME 'textEncodedORAddress' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.4 NAME 'info' DESC 'RFC1274: general information' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{2048} )",
"( 0.9.2342.19200300.100.1.5 NAME ( 'drink' 'favouriteDrink' ) DESC 'RFC1274: favorite drink' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.6 NAME 'roomNumber' DESC 'RFC1274: room number' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.7 NAME 'photo' DESC 'RFC1274: photo (G3 fax)' SYNTAX 1.3.6.1.4.1.1466.115.121.1.23{25000} )",
"( 0.9.2342.19200300.100.1.8 NAME 'userClass' DESC 'RFC1274: category of user' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.9 NAME 'host' DESC 'RFC1274: host computer' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.10 NAME 'manager' DESC 'RFC1274: DN of manager' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 0.9.2342.19200300.100.1.11 NAME 'documentIdentifier' DESC 'RFC1274: unique identifier of document' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.12 NAME 'documentTitle' DESC 'RFC1274: title of document' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.13 NAME 'documentVersion' DESC 'RFC1274: version of document' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.14 NAME 'documentAuthor' DESC 'RFC1274: DN of author of document' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 0.9.2342.19200300.100.1.15 NAME 'documentLocation' DESC 'RFC1274: location of document original' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.20 NAME ( 'homePhone' 'homeTelephoneNumber' ) DESC 'RFC1274: home telephone number' EQUALITY telephoneNumberMatch SUBSTR telephoneNumberSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.50 )",
"( 0.9.2342.19200300.100.1.21 NAME 'secretary' DESC 'RFC1274: DN of secretary' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 0.9.2342.19200300.100.1.22 NAME 'otherMailbox' SYNTAX 1.3.6.1.4.1.1466.115.121.1.39 )",
"( 0.9.2342.19200300.100.1.26 NAME 'aRecord' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 0.9.2342.19200300.100.1.27 NAME 'mDRecord' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 0.9.2342.19200300.100.1.28 NAME 'mXRecord' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 0.9.2342.19200300.100.1.29 NAME 'nSRecord' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 0.9.2342.19200300.100.1.30 NAME 'sOARecord' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 0.9.2342.19200300.100.1.31 NAME 'cNAMERecord' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 0.9.2342.19200300.100.1.38 NAME 'associatedName' DESC 'RFC1274: DN of entry associated with domain' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 0.9.2342.19200300.100.1.39 NAME 'homePostalAddress' DESC 'RFC1274: home postal address' EQUALITY caseIgnoreListMatch SUBSTR caseIgnoreListSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.41 )",
"( 0.9.2342.19200300.100.1.40 NAME 'personalTitle' DESC 'RFC1274: personal title' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.41 NAME ( 'mobile' 'mobileTelephoneNumber' ) DESC 'RFC1274: mobile telephone number' EQUALITY telephoneNumberMatch SUBSTR telephoneNumberSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.50 )",
"( 0.9.2342.19200300.100.1.42 NAME ( 'pager' 'pagerTelephoneNumber' ) DESC 'RFC1274: pager telephone number' EQUALITY telephoneNumberMatch SUBSTR telephoneNumberSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.50 )",
"( 0.9.2342.19200300.100.1.43 NAME ( 'co' 'friendlyCountryName' ) DESC 'RFC1274: friendly country name' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 0.9.2342.19200300.100.1.44 NAME 'uniqueIdentifier' DESC 'RFC1274: unique identifer' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.45 NAME 'organizationalStatus' DESC 'RFC1274: organizational status' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.46 NAME 'janetMailbox' DESC 'RFC1274: Janet mailbox' EQUALITY caseIgnoreIA5Match SUBSTR caseIgnoreIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{256} )",
"( 0.9.2342.19200300.100.1.47 NAME 'mailPreferenceOption' DESC 'RFC1274: mail preference option' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
"( 0.9.2342.19200300.100.1.48 NAME 'buildingName' DESC 'RFC1274: name of building' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )",
"( 0.9.2342.19200300.100.1.49 NAME 'dSAQuality' DESC 'RFC1274: DSA Quality' SYNTAX 1.3.6.1.4.1.1466.115.121.1.19 SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.50 NAME 'singleLevelQuality' DESC 'RFC1274: Single Level Quality' SYNTAX 1.3.6.1.4.1.1466.115.121.1.13 SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.51 NAME 'subtreeMinimumQuality' DESC 'RFC1274: Subtree Mininum Quality' SYNTAX 1.3.6.1.4.1.1466.115.121.1.13 SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.52 NAME 'subtreeMaximumQuality' DESC 'RFC1274: Subtree Maximun Quality' SYNTAX 1.3.6.1.4.1.1466.115.121.1.13 SINGLE-VALUE )",
"( 0.9.2342.19200300.100.1.53 NAME 'personalSignature' DESC 'RFC1274: Personal Signature (G3 fax)' SYNTAX 1.3.6.1.4.1.1466.115.121.1.23 )",
"( 0.9.2342.19200300.100.1.54 NAME 'dITRedirect' DESC 'RFC1274: DIT Redirect' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 0.9.2342.19200300.100.1.55 NAME 'audio' DESC 'RFC1274: audio (u-law)' SYNTAX 1.3.6.1.4.1.1466.115.121.1.4{25000} )",
"( 0.9.2342.19200300.100.1.56 NAME 'documentPublisher' DESC 'RFC1274: publisher of document' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.16.840.1.113730.3.1.1 NAME 'carLicense' DESC 'RFC2798: vehicle license or registration plate' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.16.840.1.113730.3.1.2 NAME 'departmentNumber' DESC 'RFC2798: identifies a department within an organization' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.16.840.1.113730.3.1.241 NAME 'displayName' DESC 'RFC2798: preferred name to be used when displaying entries' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 2.16.840.1.113730.3.1.3 NAME 'employeeNumber' DESC 'RFC2798: numerically identifies an employee within an organization' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 2.16.840.1.113730.3.1.4 NAME 'employeeType' DESC 'RFC2798: type of employment for a person' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 0.9.2342.19200300.100.1.60 NAME 'jpegPhoto' DESC 'RFC2798: a JPEG image' SYNTAX 1.3.6.1.4.1.1466.115.121.1.28 )",
"( 2.16.840.1.113730.3.1.39 NAME 'preferredLanguage' DESC 'RFC2798: preferred written or spoken language for a person' EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 2.16.840.1.113730.3.1.40 NAME 'userSMIMECertificate' DESC 'RFC2798: PKCS#7 SignedData used to support S/MIME' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 )",
"( 2.16.840.1.113730.3.1.216 NAME 'userPKCS12' DESC 'RFC2798: personal identity information, a PKCS #12 PFX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 )",
"( 1.3.6.1.1.1.1.2 NAME 'gecos' DESC 'The GECOS field; the common name' EQUALITY caseIgnoreIA5Match SUBSTR caseIgnoreIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.3 NAME 'homeDirectory' DESC 'The absolute path to the home directory' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.4 NAME 'loginShell' DESC 'The path to the login shell' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.5 NAME 'shadowLastChange' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.6 NAME 'shadowMin' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.7 NAME 'shadowMax' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.8 NAME 'shadowWarning' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.9 NAME 'shadowInactive' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.10 NAME 'shadowExpire' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.11 NAME 'shadowFlag' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.12 NAME 'memberUid' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.1.1.1.13 NAME 'memberNisNetgroup' EQUALITY caseExactIA5Match SUBSTR caseExactIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.1.1.1.14 NAME 'nisNetgroupTriple' DESC 'Netgroup triple' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.1.1.1.15 NAME 'ipServicePort' DESC 'Service port number' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.16 NAME 'ipServiceProtocol' DESC 'Service protocol name' SUP name )",
"( 1.3.6.1.1.1.1.17 NAME 'ipProtocolNumber' DESC 'IP protocol number' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.18 NAME 'oncRpcNumber' DESC 'ONC RPC number' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.19 NAME 'ipHostNumber' DESC 'IPv4 addresses as a dotted decimal omitting leading zeros or IPv6 addresses as defined in RFC2373' SUP name )",
"( 1.3.6.1.1.1.1.20 NAME 'ipNetworkNumber' DESC 'IP network as a dotted decimal, eg. 192.168, omitting leading zeros' SUP name SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.21 NAME 'ipNetmaskNumber' DESC 'IP netmask as a dotted decimal, eg. 255.255.255.0, omitting leading zeros' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.22 NAME 'macAddress' DESC 'MAC address in maximal, colon separated hex notation, eg. 00:00:92:90:ee:e2' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.1.1.1.23 NAME 'bootParameter' DESC 'rpc.bootparamd parameter' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.1.1.1.24 NAME 'bootFile' DESC 'Boot image name' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.1.1.1.26 NAME 'nisMapName' DESC 'Name of a A generic NIS map' SUP name )",
"( 1.3.6.1.1.1.1.27 NAME 'nisMapEntry' DESC 'A generic NIS entry' EQUALITY caseExactIA5Match SUBSTR caseExactIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.28 NAME 'nisPublicKey' DESC 'NIS public key' EQUALITY octetStringMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.29 NAME 'nisSecretKey' DESC 'NIS secret key' EQUALITY octetStringMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.30 NAME 'nisDomain' DESC 'NIS domain' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.1.1.1.31 NAME 'automountMapName' DESC 'automount Map Name' EQUALITY caseExactIA5Match SUBSTR caseExactIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.32 NAME 'automountKey' DESC 'Automount Key value' EQUALITY caseExactIA5Match SUBSTR caseExactIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.1.1.1.33 NAME 'automountInformation' DESC 'Automount information' EQUALITY caseExactIA5Match SUBSTR caseExactIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.2 NAME 'suseDefaultBase' DESC 'Base DN where new Objects should be created by default' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.3 NAME 'suseNextUniqueId' DESC 'Next unused unique ID, can be used to generate directory wide uniqe IDs' EQUALITY integerMatch ORDERING integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.4 NAME 'suseMinUniqueId' DESC 'lower Border for Unique IDs' EQUALITY integerMatch ORDERING integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.5 NAME 'suseMaxUniqueId' DESC 'upper Border for Unique IDs' EQUALITY integerMatch ORDERING integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.6 NAME 'suseDefaultTemplate' DESC 'The DN of a template that should be used by default' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.7 NAME 'suseSearchFilter' DESC 'Search filter to localize Objects' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.11 NAME 'suseDefaultValue' DESC 'an Attribute-Value-Assertions to define defaults for specific Attributes' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.7057.10.1.2.2.12 NAME 'suseNamingAttribute' DESC 'AttributeType that should be used as the RDN' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.15 NAME 'suseSecondaryGroup' DESC 'seconday group DN' EQUALITY distinguishedNameMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 1.3.6.1.4.1.7057.10.1.2.2.16 NAME 'suseMinPasswordLength' DESC 'minimum Password length for new users' EQUALITY integerMatch ORDERING integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.17 NAME 'suseMaxPasswordLength' DESC 'maximum Password length for new users' EQUALITY integerMatch ORDERING integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.18 NAME 'susePasswordHash' DESC 'Hash method to use for new users' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.19 NAME 'suseSkelDir' DESC '' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.4.1.7057.10.1.2.2.20 NAME 'susePlugin' DESC 'plugin to use upon user/ group creation' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.7057.10.1.2.2.21 NAME 'suseMapAttribute' DESC '' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.3.6.1.4.1.7057.10.1.2.2.22 NAME 'suseImapServer' DESC '' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.23 NAME 'suseImapAdmin' DESC '' EQUALITY caseIgnoreMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.24 NAME 'suseImapDefaultQuota' DESC '' EQUALITY integerMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
"( 1.3.6.1.4.1.7057.10.1.2.2.25 NAME 'suseImapUseSsl' DESC '' EQUALITY booleanMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )"
],
"cn": [
"Subschema"
],
"createTimestamp": [
"20141024204149Z"
],
"entryDN": [
"cn=Subschema"
],
"ldapSyntaxes": [
"( 1.3.6.1.4.1.1466.115.121.1.4 DESC 'Audio' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.1466.115.121.1.5 DESC 'Binary' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.1466.115.121.1.6 DESC 'Bit String' )",
"( 1.3.6.1.4.1.1466.115.121.1.7 DESC 'Boolean' )",
"( 1.3.6.1.4.1.1466.115.121.1.8 DESC 'Certificate' X-BINARY-TRANSFER-REQUIRED 'TRUE' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.1466.115.121.1.9 DESC 'Certificate List' X-BINARY-TRANSFER-REQUIRED 'TRUE' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.1466.115.121.1.10 DESC 'Certificate Pair' X-BINARY-TRANSFER-REQUIRED 'TRUE' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.4203.666.11.10.2.1 DESC 'X.509 AttributeCertificate' X-BINARY-TRANSFER-REQUIRED 'TRUE' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.1466.115.121.1.12 DESC 'Distinguished Name' )",
"( 1.2.36.79672281.1.5.0 DESC 'RDN' )",
"( 1.3.6.1.4.1.1466.115.121.1.14 DESC 'Delivery Method' )",
"( 1.3.6.1.4.1.1466.115.121.1.15 DESC 'Directory String' )",
"( 1.3.6.1.4.1.1466.115.121.1.22 DESC 'Facsimile Telephone Number' )",
"( 1.3.6.1.4.1.1466.115.121.1.24 DESC 'Generalized Time' )",
"( 1.3.6.1.4.1.1466.115.121.1.26 DESC 'IA5 String' )",
"( 1.3.6.1.4.1.1466.115.121.1.27 DESC 'Integer' )",
"( 1.3.6.1.4.1.1466.115.121.1.28 DESC 'JPEG' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.1466.115.121.1.34 DESC 'Name And Optional UID' )",
"( 1.3.6.1.4.1.1466.115.121.1.36 DESC 'Numeric String' )",
"( 1.3.6.1.4.1.1466.115.121.1.38 DESC 'OID' )",
"( 1.3.6.1.4.1.1466.115.121.1.39 DESC 'Other Mailbox' )",
"( 1.3.6.1.4.1.1466.115.121.1.40 DESC 'Octet String' )",
"( 1.3.6.1.4.1.1466.115.121.1.41 DESC 'Postal Address' )",
"( 1.3.6.1.4.1.1466.115.121.1.44 DESC 'Printable String' )",
"( 1.3.6.1.4.1.1466.115.121.1.11 DESC 'Country String' )",
"( 1.3.6.1.4.1.1466.115.121.1.45 DESC 'SubtreeSpecification' )",
"( 1.3.6.1.4.1.1466.115.121.1.49 DESC 'Supported Algorithm' X-BINARY-TRANSFER-REQUIRED 'TRUE' X-NOT-HUMAN-READABLE 'TRUE' )",
"( 1.3.6.1.4.1.1466.115.121.1.50 DESC 'Telephone Number' )",
"( 1.3.6.1.4.1.1466.115.121.1.52 DESC 'Telex Number' )",
"( 1.3.6.1.1.1.0.0 DESC 'RFC2307 NIS Netgroup Triple' )",
"( 1.3.6.1.1.1.0.1 DESC 'RFC2307 Boot Parameter' )",
"( 1.3.6.1.1.16.1 DESC 'UUID' )"
],
"matchingRuleUse": [
"( 1.2.840.113556.1.4.804 NAME 'integerBitOrMatch' APPLIES ( supportedLDAPVersion $ entryTtl $ uidNumber $ gidNumber $ olcConcurrency $ olcConnMaxPending $ olcConnMaxPendingAuth $ olcIdleTimeout $ olcIndexSubstrIfMinLen $ olcIndexSubstrIfMaxLen $ olcIndexSubstrAnyLen $ olcIndexSubstrAnyStep $ olcIndexIntLen $ olcListenerThreads $ olcLocalSSF $ olcMaxDerefDepth $ olcReplicationInterval $ olcSockbufMaxIncoming $ olcSockbufMaxIncomingAuth $ olcThreads $ olcToolThreads $ olcWriteTimeout $ olcDbCacheFree $ olcDbCacheSize $ olcDbDNcacheSize $ olcDbIDLcacheSize $ olcDbSearchStack $ olcDbShmKey $ olcDbProtocolVersion $ olcDbConnectionPoolMax $ olcChainMaxReferralDepth $ olcDDSmaxDynamicObjects $ olcPcacheMaxQueries $ olcRetcodeSleep $ olcSssVlvMax $ olcSssVlvMaxKeys $ olcSssVlvMaxPerConn $ olcSpSessionlog $ mailPreferenceOption $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipServicePort $ ipProtocolNumber $ oncRpcNumber $ suseNextUniqueId $ suseMinUniqueId $ suseMaxUniqueId $ suseMinPasswordLength $ suseMaxPasswordLength $ suseImapDefaultQuota ) )",
"( 1.2.840.113556.1.4.803 NAME 'integerBitAndMatch' APPLIES ( supportedLDAPVersion $ entryTtl $ uidNumber $ gidNumber $ olcConcurrency $ olcConnMaxPending $ olcConnMaxPendingAuth $ olcIdleTimeout $ olcIndexSubstrIfMinLen $ olcIndexSubstrIfMaxLen $ olcIndexSubstrAnyLen $ olcIndexSubstrAnyStep $ olcIndexIntLen $ olcListenerThreads $ olcLocalSSF $ olcMaxDerefDepth $ olcReplicationInterval $ olcSockbufMaxIncoming $ olcSockbufMaxIncomingAuth $ olcThreads $ olcToolThreads $ olcWriteTimeout $ olcDbCacheFree $ olcDbCacheSize $ olcDbDNcacheSize $ olcDbIDLcacheSize $ olcDbSearchStack $ olcDbShmKey $ olcDbProtocolVersion $ olcDbConnectionPoolMax $ olcChainMaxReferralDepth $ olcDDSmaxDynamicObjects $ olcPcacheMaxQueries $ olcRetcodeSleep $ olcSssVlvMax $ olcSssVlvMaxKeys $ olcSssVlvMaxPerConn $ olcSpSessionlog $ mailPreferenceOption $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipServicePort $ ipProtocolNumber $ oncRpcNumber $ suseNextUniqueId $ suseMinUniqueId $ suseMaxUniqueId $ suseMinPasswordLength $ suseMaxPasswordLength $ suseImapDefaultQuota ) )",
"( 1.3.6.1.4.1.1466.109.114.2 NAME 'caseIgnoreIA5Match' APPLIES ( altServer $ olcDbConfig $ c $ mail $ dc $ associatedDomain $ email $ aRecord $ mDRecord $ mXRecord $ nSRecord $ sOARecord $ cNAMERecord $ janetMailbox $ gecos $ homeDirectory $ loginShell $ memberUid $ memberNisNetgroup $ nisNetgroupTriple $ ipNetmaskNumber $ macAddress $ bootParameter $ bootFile $ nisMapEntry $ nisDomain $ automountMapName $ automountKey $ automountInformation $ suseNamingAttribute $ susePasswordHash $ suseSkelDir ) )",
"( 1.3.6.1.4.1.1466.109.114.1 NAME 'caseExactIA5Match' APPLIES ( altServer $ olcDbConfig $ c $ mail $ dc $ associatedDomain $ email $ aRecord $ mDRecord $ mXRecord $ nSRecord $ sOARecord $ cNAMERecord $ janetMailbox $ gecos $ homeDirectory $ loginShell $ memberUid $ memberNisNetgroup $ nisNetgroupTriple $ ipNetmaskNumber $ macAddress $ bootParameter $ bootFile $ nisMapEntry $ nisDomain $ automountMapName $ automountKey $ automountInformation $ suseNamingAttribute $ susePasswordHash $ suseSkelDir ) )",
"( 2.5.13.38 NAME 'certificateListExactMatch' APPLIES ( authorityRevocationList $ certificateRevocationList $ deltaRevocationList ) )",
"( 2.5.13.34 NAME 'certificateExactMatch' APPLIES ( userCertificate $ cACertificate ) )",
"( 2.5.13.30 NAME 'objectIdentifierFirstComponentMatch' APPLIES ( supportedControl $ supportedExtension $ supportedFeatures $ ldapSyntaxes $ supportedApplicationContext ) )",
"( 2.5.13.29 NAME 'integerFirstComponentMatch' APPLIES ( supportedLDAPVersion $ entryTtl $ uidNumber $ gidNumber $ olcConcurrency $ olcConnMaxPending $ olcConnMaxPendingAuth $ olcIdleTimeout $ olcIndexSubstrIfMinLen $ olcIndexSubstrIfMaxLen $ olcIndexSubstrAnyLen $ olcIndexSubstrAnyStep $ olcIndexIntLen $ olcListenerThreads $ olcLocalSSF $ olcMaxDerefDepth $ olcReplicationInterval $ olcSockbufMaxIncoming $ olcSockbufMaxIncomingAuth $ olcThreads $ olcToolThreads $ olcWriteTimeout $ olcDbCacheFree $ olcDbCacheSize $ olcDbDNcacheSize $ olcDbIDLcacheSize $ olcDbSearchStack $ olcDbShmKey $ olcDbProtocolVersion $ olcDbConnectionPoolMax $ olcChainMaxReferralDepth $ olcDDSmaxDynamicObjects $ olcPcacheMaxQueries $ olcRetcodeSleep $ olcSssVlvMax $ olcSssVlvMaxKeys $ olcSssVlvMaxPerConn $ olcSpSessionlog $ mailPreferenceOption $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipServicePort $ ipProtocolNumber $ oncRpcNumber $ suseNextUniqueId $ suseMinUniqueId $ suseMaxUniqueId $ suseMinPasswordLength $ suseMaxPasswordLength $ suseImapDefaultQuota ) )",
"( 2.5.13.28 NAME 'generalizedTimeOrderingMatch' APPLIES ( createTimestamp $ modifyTimestamp $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ pwdGraceUseTime ) )",
"( 2.5.13.27 NAME 'generalizedTimeMatch' APPLIES ( createTimestamp $ modifyTimestamp $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ pwdGraceUseTime ) )",
"( 2.5.13.24 NAME 'protocolInformationMatch' APPLIES protocolInformation )",
"( 2.5.13.23 NAME 'uniqueMemberMatch' APPLIES uniqueMember )",
"( 2.5.13.22 NAME 'presentationAddressMatch' APPLIES presentationAddress )",
"( 2.5.13.20 NAME 'telephoneNumberMatch' APPLIES ( telephoneNumber $ homePhone $ mobile $ pager ) )",
"( 2.5.13.18 NAME 'octetStringOrderingMatch' APPLIES ( userPassword $ olcDbCryptKey $ pwdHistory $ nisPublicKey $ nisSecretKey ) )",
"( 2.5.13.17 NAME 'octetStringMatch' APPLIES ( userPassword $ olcDbCryptKey $ pwdHistory $ nisPublicKey $ nisSecretKey ) )",
"( 2.5.13.16 NAME 'bitStringMatch' APPLIES x500UniqueIdentifier )",
"( 2.5.13.15 NAME 'integerOrderingMatch' APPLIES ( supportedLDAPVersion $ entryTtl $ uidNumber $ gidNumber $ olcConcurrency $ olcConnMaxPending $ olcConnMaxPendingAuth $ olcIdleTimeout $ olcIndexSubstrIfMinLen $ olcIndexSubstrIfMaxLen $ olcIndexSubstrAnyLen $ olcIndexSubstrAnyStep $ olcIndexIntLen $ olcListenerThreads $ olcLocalSSF $ olcMaxDerefDepth $ olcReplicationInterval $ olcSockbufMaxIncoming $ olcSockbufMaxIncomingAuth $ olcThreads $ olcToolThreads $ olcWriteTimeout $ olcDbCacheFree $ olcDbCacheSize $ olcDbDNcacheSize $ olcDbIDLcacheSize $ olcDbSearchStack $ olcDbShmKey $ olcDbProtocolVersion $ olcDbConnectionPoolMax $ olcChainMaxReferralDepth $ olcDDSmaxDynamicObjects $ olcPcacheMaxQueries $ olcRetcodeSleep $ olcSssVlvMax $ olcSssVlvMaxKeys $ olcSssVlvMaxPerConn $ olcSpSessionlog $ mailPreferenceOption $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipServicePort $ ipProtocolNumber $ oncRpcNumber $ suseNextUniqueId $ suseMinUniqueId $ suseMaxUniqueId $ suseMinPasswordLength $ suseMaxPasswordLength $ suseImapDefaultQuota ) )",
"( 2.5.13.14 NAME 'integerMatch' APPLIES ( supportedLDAPVersion $ entryTtl $ uidNumber $ gidNumber $ olcConcurrency $ olcConnMaxPending $ olcConnMaxPendingAuth $ olcIdleTimeout $ olcIndexSubstrIfMinLen $ olcIndexSubstrIfMaxLen $ olcIndexSubstrAnyLen $ olcIndexSubstrAnyStep $ olcIndexIntLen $ olcListenerThreads $ olcLocalSSF $ olcMaxDerefDepth $ olcReplicationInterval $ olcSockbufMaxIncoming $ olcSockbufMaxIncomingAuth $ olcThreads $ olcToolThreads $ olcWriteTimeout $ olcDbCacheFree $ olcDbCacheSize $ olcDbDNcacheSize $ olcDbIDLcacheSize $ olcDbSearchStack $ olcDbShmKey $ olcDbProtocolVersion $ olcDbConnectionPoolMax $ olcChainMaxReferralDepth $ olcDDSmaxDynamicObjects $ olcPcacheMaxQueries $ olcRetcodeSleep $ olcSssVlvMax $ olcSssVlvMaxKeys $ olcSssVlvMaxPerConn $ olcSpSessionlog $ mailPreferenceOption $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag $ ipServicePort $ ipProtocolNumber $ oncRpcNumber $ suseNextUniqueId $ suseMinUniqueId $ suseMaxUniqueId $ suseMinPasswordLength $ suseMaxPasswordLength $ suseImapDefaultQuota ) )",
"( 2.5.13.13 NAME 'booleanMatch' APPLIES ( hasSubordinates $ olcAddContentAcl $ olcGentleHUP $ olcHidden $ olcLastMod $ olcMirrorMode $ olcMonitoring $ olcReadOnly $ olcReverseLookup $ olcSyncUseSubentry $ olcDbChecksum $ olcDbNoSync $ olcDbDirtyRead $ olcDbLinearIndex $ olcDbRebindAsUser $ olcDbChaseReferrals $ olcDbProxyWhoAmI $ olcDbSingleConn $ olcDbUseTemporaryConn $ olcDbNoRefs $ olcDbNoUndefFilter $ olcChainCacheURI $ olcChainReturnError $ olcAccessLogSuccess $ olcDDSstate $ olcMemberOfRefInt $ pwdReset $ olcPPolicyHashCleartext $ olcPPolicyForwardUpdates $ olcPPolicyUseLockout $ olcPcachePersist $ olcPcacheValidate $ olcPcacheOffline $ olcRetcodeInDir $ olcRwmNormalizeMapped $ olcRwmDropUnrequested $ olcSpNoPresent $ olcSpReloadHint $ olcTranslucentStrict $ olcTranslucentNoGlue $ olcTranslucentBindLocal $ olcTranslucentPwModLocal $ olcUniqueStrict $ suseImapUseSsl ) )",
"( 2.5.13.11 NAME 'caseIgnoreListMatch' APPLIES ( postalAddress $ registeredAddress $ homePostalAddress ) )",
"( 2.5.13.9 NAME 'numericStringOrderingMatch' APPLIES ( x121Address $ internationaliSDNNumber ) )",
"( 2.5.13.8 NAME 'numericStringMatch' APPLIES ( x121Address $ internationaliSDNNumber ) )",
"( 2.5.13.7 NAME 'caseExactSubstringsMatch' APPLIES ( serialNumber $ destinationIndicator $ dnQualifier ) )",
"( 2.5.13.6 NAME 'caseExactOrderingMatch' APPLIES ( supportedSASLMechanisms $ vendorName $ vendorVersion $ ref $ name $ cn $ uid $ labeledURI $ description $ olcConfigFile $ olcConfigDir $ olcAccess $ olcAllows $ olcArgsFile $ olcAttributeOptions $ olcAttributeTypes $ olcAuthIDRewrite $ olcAuthzPolicy $ olcAuthzRegexp $ olcBackend $ olcDatabase $ olcDisallows $ olcDitContentRules $ olcExtraAttrs $ olcInclude $ olcLdapSyntaxes $ olcLimits $ olcLogFile $ olcLogLevel $ olcModuleLoad $ olcModulePath $ olcObjectClasses $ olcObjectIdentifier $ olcOverlay $ olcPasswordCryptSaltFormat $ olcPasswordHash $ olcPidFile $ olcPlugin $ olcPluginLogFile $ olcReferral $ olcReplica $ olcReplicaArgsFile $ olcReplicaPidFile $ olcReplogFile $ olcRequires $ olcRestrict $ olcRootDSE $ olcRootPW $ olcSaslAuxprops $ olcSaslHost $ olcSaslRealm $ olcSaslSecProps $ olcSecurity $ olcServerID $ olcSizeLimit $ olcSortVals $ olcSubordinate $ olcSyncrepl $ olcTCPBuffer $ olcTimeLimit $ olcTLSCACertificateFile $ olcTLSCACertificatePath $ olcTLSCertificateFile $ olcTLSCertificateKeyFile $ olcTLSCipherSuite $ olcTLSCRLCheck $ olcTLSCRLFile $ olcTLSRandFile $ olcTLSVerifyClient $ olcTLSDHParamFile $ olcTLSProtocolMin $ olcUpdateRef $ olcDbDirectory $ olcDbCheckpoint $ olcDbCryptFile $ olcDbPageSize $ olcDbIndex $ olcDbLockDetect $ olcDbMode $ olcDbURI $ olcDbStartTLS $ olcDbACLPasswd $ olcDbACLBind $ olcDbIDAssertPasswd $ olcDbIDAssertBind $ olcDbIDAssertMode $ olcDbIDAssertAuthzFrom $ olcDbTFSupport $ olcDbTimeout $ olcDbIdleTimeout $ olcDbConnTtl $ olcDbNetworkTimeout $ olcDbCancel $ olcDbQuarantine $ olcDbIDAssertPassThru $ olcChainingBehavior $ olcAccessLogOps $ olcAccessLogPurge $ olcAccessLogOld $ olcAccessLogOldAttr $ olcAccessLogBase $ olcAuditlogFile $ olcCollectInfo $ olcConstraintAttribute $ olcDDSmaxTtl $ olcDDSminTtl $ olcDDSdefaultTtl $ olcDDSinterval $ olcDDStolerance $ olcDGAttrPair $ olcDlAttrSet $ olcMemberOfDangling $ olcMemberOfGroupOC $ olcMemberOfMemberAD $ olcMemberOfMemberOfAD $ olcMemberOfDanglingError $ olcPcache $ olcPcacheAttrset $ olcPcacheTemplate $ olcPcachePosition $ olcPcacheBind $ olcRefintAttribute $ olcRetcodeItem $ olcRwmRewrite $ olcRwmTFSupport $ olcRwmMap $ olcSpCheckpoint $ olcTranslucentLocal $ olcTranslucentRemote $ olcUniqueIgnore $ olcUniqueAttribute $ olcUniqueURI $ olcValSortAttr $ knowledgeInformation $ sn $ serialNumber $ c $ l $ st $ street $ o $ ou $ title $ businessCategory $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ destinationIndicator $ givenName $ initials $ generationQualifier $ dnQualifier $ houseIdentifier $ dmdName $ pseudonym $ textEncodedORAddress $ info $ drink $ roomNumber $ userClass $ host $ documentIdentifier $ documentTitle $ documentVersion $ documentLocation $ personalTitle $ co $ uniqueIdentifier $ organizationalStatus $ buildingName $ documentPublisher $ carLicense $ departmentNumber $ displayName $ employeeNumber $ employeeType $ preferredLanguage $ ipServiceProtocol $ ipHostNumber $ ipNetworkNumber $ nisMapName $ suseSearchFilter $ suseDefaultValue $ susePlugin $ suseMapAttribute $ suseImapServer $ suseImapAdmin ) )",
"( 2.5.13.5 NAME 'caseExactMatch' APPLIES ( supportedSASLMechanisms $ vendorName $ vendorVersion $ ref $ name $ cn $ uid $ labeledURI $ description $ olcConfigFile $ olcConfigDir $ olcAccess $ olcAllows $ olcArgsFile $ olcAttributeOptions $ olcAttributeTypes $ olcAuthIDRewrite $ olcAuthzPolicy $ olcAuthzRegexp $ olcBackend $ olcDatabase $ olcDisallows $ olcDitContentRules $ olcExtraAttrs $ olcInclude $ olcLdapSyntaxes $ olcLimits $ olcLogFile $ olcLogLevel $ olcModuleLoad $ olcModulePath $ olcObjectClasses $ olcObjectIdentifier $ olcOverlay $ olcPasswordCryptSaltFormat $ olcPasswordHash $ olcPidFile $ olcPlugin $ olcPluginLogFile $ olcReferral $ olcReplica $ olcReplicaArgsFile $ olcReplicaPidFile $ olcReplogFile $ olcRequires $ olcRestrict $ olcRootDSE $ olcRootPW $ olcSaslAuxprops $ olcSaslHost $ olcSaslRealm $ olcSaslSecProps $ olcSecurity $ olcServerID $ olcSizeLimit $ olcSortVals $ olcSubordinate $ olcSyncrepl $ olcTCPBuffer $ olcTimeLimit $ olcTLSCACertificateFile $ olcTLSCACertificatePath $ olcTLSCertificateFile $ olcTLSCertificateKeyFile $ olcTLSCipherSuite $ olcTLSCRLCheck $ olcTLSCRLFile $ olcTLSRandFile $ olcTLSVerifyClient $ olcTLSDHParamFile $ olcTLSProtocolMin $ olcUpdateRef $ olcDbDirectory $ olcDbCheckpoint $ olcDbCryptFile $ olcDbPageSize $ olcDbIndex $ olcDbLockDetect $ olcDbMode $ olcDbURI $ olcDbStartTLS $ olcDbACLPasswd $ olcDbACLBind $ olcDbIDAssertPasswd $ olcDbIDAssertBind $ olcDbIDAssertMode $ olcDbIDAssertAuthzFrom $ olcDbTFSupport $ olcDbTimeout $ olcDbIdleTimeout $ olcDbConnTtl $ olcDbNetworkTimeout $ olcDbCancel $ olcDbQuarantine $ olcDbIDAssertPassThru $ olcChainingBehavior $ olcAccessLogOps $ olcAccessLogPurge $ olcAccessLogOld $ olcAccessLogOldAttr $ olcAccessLogBase $ olcAuditlogFile $ olcCollectInfo $ olcConstraintAttribute $ olcDDSmaxTtl $ olcDDSminTtl $ olcDDSdefaultTtl $ olcDDSinterval $ olcDDStolerance $ olcDGAttrPair $ olcDlAttrSet $ olcMemberOfDangling $ olcMemberOfGroupOC $ olcMemberOfMemberAD $ olcMemberOfMemberOfAD $ olcMemberOfDanglingError $ olcPcache $ olcPcacheAttrset $ olcPcacheTemplate $ olcPcachePosition $ olcPcacheBind $ olcRefintAttribute $ olcRetcodeItem $ olcRwmRewrite $ olcRwmTFSupport $ olcRwmMap $ olcSpCheckpoint $ olcTranslucentLocal $ olcTranslucentRemote $ olcUniqueIgnore $ olcUniqueAttribute $ olcUniqueURI $ olcValSortAttr $ knowledgeInformation $ sn $ serialNumber $ c $ l $ st $ street $ o $ ou $ title $ businessCategory $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ destinationIndicator $ givenName $ initials $ generationQualifier $ dnQualifier $ houseIdentifier $ dmdName $ pseudonym $ textEncodedORAddress $ info $ drink $ roomNumber $ userClass $ host $ documentIdentifier $ documentTitle $ documentVersion $ documentLocation $ personalTitle $ co $ uniqueIdentifier $ organizationalStatus $ buildingName $ documentPublisher $ carLicense $ departmentNumber $ displayName $ employeeNumber $ employeeType $ preferredLanguage $ ipServiceProtocol $ ipHostNumber $ ipNetworkNumber $ nisMapName $ suseSearchFilter $ suseDefaultValue $ susePlugin $ suseMapAttribute $ suseImapServer $ suseImapAdmin ) )",
"( 2.5.13.4 NAME 'caseIgnoreSubstringsMatch' APPLIES ( serialNumber $ destinationIndicator $ dnQualifier ) )",
"( 2.5.13.3 NAME 'caseIgnoreOrderingMatch' APPLIES ( supportedSASLMechanisms $ vendorName $ vendorVersion $ ref $ name $ cn $ uid $ labeledURI $ description $ olcConfigFile $ olcConfigDir $ olcAccess $ olcAllows $ olcArgsFile $ olcAttributeOptions $ olcAttributeTypes $ olcAuthIDRewrite $ olcAuthzPolicy $ olcAuthzRegexp $ olcBackend $ olcDatabase $ olcDisallows $ olcDitContentRules $ olcExtraAttrs $ olcInclude $ olcLdapSyntaxes $ olcLimits $ olcLogFile $ olcLogLevel $ olcModuleLoad $ olcModulePath $ olcObjectClasses $ olcObjectIdentifier $ olcOverlay $ olcPasswordCryptSaltFormat $ olcPasswordHash $ olcPidFile $ olcPlugin $ olcPluginLogFile $ olcReferral $ olcReplica $ olcReplicaArgsFile $ olcReplicaPidFile $ olcReplogFile $ olcRequires $ olcRestrict $ olcRootDSE $ olcRootPW $ olcSaslAuxprops $ olcSaslHost $ olcSaslRealm $ olcSaslSecProps $ olcSecurity $ olcServerID $ olcSizeLimit $ olcSortVals $ olcSubordinate $ olcSyncrepl $ olcTCPBuffer $ olcTimeLimit $ olcTLSCACertificateFile $ olcTLSCACertificatePath $ olcTLSCertificateFile $ olcTLSCertificateKeyFile $ olcTLSCipherSuite $ olcTLSCRLCheck $ olcTLSCRLFile $ olcTLSRandFile $ olcTLSVerifyClient $ olcTLSDHParamFile $ olcTLSProtocolMin $ olcUpdateRef $ olcDbDirectory $ olcDbCheckpoint $ olcDbCryptFile $ olcDbPageSize $ olcDbIndex $ olcDbLockDetect $ olcDbMode $ olcDbURI $ olcDbStartTLS $ olcDbACLPasswd $ olcDbACLBind $ olcDbIDAssertPasswd $ olcDbIDAssertBind $ olcDbIDAssertMode $ olcDbIDAssertAuthzFrom $ olcDbTFSupport $ olcDbTimeout $ olcDbIdleTimeout $ olcDbConnTtl $ olcDbNetworkTimeout $ olcDbCancel $ olcDbQuarantine $ olcDbIDAssertPassThru $ olcChainingBehavior $ olcAccessLogOps $ olcAccessLogPurge $ olcAccessLogOld $ olcAccessLogOldAttr $ olcAccessLogBase $ olcAuditlogFile $ olcCollectInfo $ olcConstraintAttribute $ olcDDSmaxTtl $ olcDDSminTtl $ olcDDSdefaultTtl $ olcDDSinterval $ olcDDStolerance $ olcDGAttrPair $ olcDlAttrSet $ olcMemberOfDangling $ olcMemberOfGroupOC $ olcMemberOfMemberAD $ olcMemberOfMemberOfAD $ olcMemberOfDanglingError $ olcPcache $ olcPcacheAttrset $ olcPcacheTemplate $ olcPcachePosition $ olcPcacheBind $ olcRefintAttribute $ olcRetcodeItem $ olcRwmRewrite $ olcRwmTFSupport $ olcRwmMap $ olcSpCheckpoint $ olcTranslucentLocal $ olcTranslucentRemote $ olcUniqueIgnore $ olcUniqueAttribute $ olcUniqueURI $ olcValSortAttr $ knowledgeInformation $ sn $ serialNumber $ c $ l $ st $ street $ o $ ou $ title $ businessCategory $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ destinationIndicator $ givenName $ initials $ generationQualifier $ dnQualifier $ houseIdentifier $ dmdName $ pseudonym $ textEncodedORAddress $ info $ drink $ roomNumber $ userClass $ host $ documentIdentifier $ documentTitle $ documentVersion $ documentLocation $ personalTitle $ co $ uniqueIdentifier $ organizationalStatus $ buildingName $ documentPublisher $ carLicense $ departmentNumber $ displayName $ employeeNumber $ employeeType $ preferredLanguage $ ipServiceProtocol $ ipHostNumber $ ipNetworkNumber $ nisMapName $ suseSearchFilter $ suseDefaultValue $ susePlugin $ suseMapAttribute $ suseImapServer $ suseImapAdmin ) )",
"( 2.5.13.2 NAME 'caseIgnoreMatch' APPLIES ( supportedSASLMechanisms $ vendorName $ vendorVersion $ ref $ name $ cn $ uid $ labeledURI $ description $ olcConfigFile $ olcConfigDir $ olcAccess $ olcAllows $ olcArgsFile $ olcAttributeOptions $ olcAttributeTypes $ olcAuthIDRewrite $ olcAuthzPolicy $ olcAuthzRegexp $ olcBackend $ olcDatabase $ olcDisallows $ olcDitContentRules $ olcExtraAttrs $ olcInclude $ olcLdapSyntaxes $ olcLimits $ olcLogFile $ olcLogLevel $ olcModuleLoad $ olcModulePath $ olcObjectClasses $ olcObjectIdentifier $ olcOverlay $ olcPasswordCryptSaltFormat $ olcPasswordHash $ olcPidFile $ olcPlugin $ olcPluginLogFile $ olcReferral $ olcReplica $ olcReplicaArgsFile $ olcReplicaPidFile $ olcReplogFile $ olcRequires $ olcRestrict $ olcRootDSE $ olcRootPW $ olcSaslAuxprops $ olcSaslHost $ olcSaslRealm $ olcSaslSecProps $ olcSecurity $ olcServerID $ olcSizeLimit $ olcSortVals $ olcSubordinate $ olcSyncrepl $ olcTCPBuffer $ olcTimeLimit $ olcTLSCACertificateFile $ olcTLSCACertificatePath $ olcTLSCertificateFile $ olcTLSCertificateKeyFile $ olcTLSCipherSuite $ olcTLSCRLCheck $ olcTLSCRLFile $ olcTLSRandFile $ olcTLSVerifyClient $ olcTLSDHParamFile $ olcTLSProtocolMin $ olcUpdateRef $ olcDbDirectory $ olcDbCheckpoint $ olcDbCryptFile $ olcDbPageSize $ olcDbIndex $ olcDbLockDetect $ olcDbMode $ olcDbURI $ olcDbStartTLS $ olcDbACLPasswd $ olcDbACLBind $ olcDbIDAssertPasswd $ olcDbIDAssertBind $ olcDbIDAssertMode $ olcDbIDAssertAuthzFrom $ olcDbTFSupport $ olcDbTimeout $ olcDbIdleTimeout $ olcDbConnTtl $ olcDbNetworkTimeout $ olcDbCancel $ olcDbQuarantine $ olcDbIDAssertPassThru $ olcChainingBehavior $ olcAccessLogOps $ olcAccessLogPurge $ olcAccessLogOld $ olcAccessLogOldAttr $ olcAccessLogBase $ olcAuditlogFile $ olcCollectInfo $ olcConstraintAttribute $ olcDDSmaxTtl $ olcDDSminTtl $ olcDDSdefaultTtl $ olcDDSinterval $ olcDDStolerance $ olcDGAttrPair $ olcDlAttrSet $ olcMemberOfDangling $ olcMemberOfGroupOC $ olcMemberOfMemberAD $ olcMemberOfMemberOfAD $ olcMemberOfDanglingError $ olcPcache $ olcPcacheAttrset $ olcPcacheTemplate $ olcPcachePosition $ olcPcacheBind $ olcRefintAttribute $ olcRetcodeItem $ olcRwmRewrite $ olcRwmTFSupport $ olcRwmMap $ olcSpCheckpoint $ olcTranslucentLocal $ olcTranslucentRemote $ olcUniqueIgnore $ olcUniqueAttribute $ olcUniqueURI $ olcValSortAttr $ knowledgeInformation $ sn $ serialNumber $ c $ l $ st $ street $ o $ ou $ title $ businessCategory $ postalCode $ postOfficeBox $ physicalDeliveryOfficeName $ destinationIndicator $ givenName $ initials $ generationQualifier $ dnQualifier $ houseIdentifier $ dmdName $ pseudonym $ textEncodedORAddress $ info $ drink $ roomNumber $ userClass $ host $ documentIdentifier $ documentTitle $ documentVersion $ documentLocation $ personalTitle $ co $ uniqueIdentifier $ organizationalStatus $ buildingName $ documentPublisher $ carLicense $ departmentNumber $ displayName $ employeeNumber $ employeeType $ preferredLanguage $ ipServiceProtocol $ ipHostNumber $ ipNetworkNumber $ nisMapName $ suseSearchFilter $ suseDefaultValue $ susePlugin $ suseMapAttribute $ suseImapServer $ suseImapAdmin ) )",
"( 2.5.13.1 NAME 'distinguishedNameMatch' APPLIES ( creatorsName $ modifiersName $ subschemaSubentry $ entryDN $ namingContexts $ aliasedObjectName $ dynamicSubtrees $ distinguishedName $ seeAlso $ olcDefaultSearchBase $ olcRootDN $ olcSchemaDN $ olcSuffix $ olcUpdateDN $ olcDbACLAuthcDn $ olcDbIDAssertAuthcDn $ olcRelay $ olcAccessLogDB $ memberOf $ olcMemberOfDN $ pwdPolicySubentry $ olcPPolicyDefault $ olcRefintNothing $ olcRefintModifiersName $ olcRetcodeParent $ olcUniqueBase $ member $ owner $ roleOccupant $ manager $ documentAuthor $ secretary $ associatedName $ dITRedirect $ suseDefaultBase $ suseDefaultTemplate $ suseSecondaryGroup ) )",
"( 2.5.13.0 NAME 'objectIdentifierMatch' APPLIES ( supportedControl $ supportedExtension $ supportedFeatures $ supportedApplicationContext ) )"
],
"matchingRules": [
"( 1.3.6.1.1.16.3 NAME 'UUIDOrderingMatch' SYNTAX 1.3.6.1.1.16.1 )",
"( 1.3.6.1.1.16.2 NAME 'UUIDMatch' SYNTAX 1.3.6.1.1.16.1 )",
"( 1.2.840.113556.1.4.804 NAME 'integerBitOrMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
"( 1.2.840.113556.1.4.803 NAME 'integerBitAndMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
"( 1.3.6.1.4.1.4203.1.2.1 NAME 'caseExactIA5SubstringsMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.4.1.1466.109.114.3 NAME 'caseIgnoreIA5SubstringsMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.4.1.1466.109.114.2 NAME 'caseIgnoreIA5Match' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 1.3.6.1.4.1.1466.109.114.1 NAME 'caseExactIA5Match' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )",
"( 2.5.13.38 NAME 'certificateListExactMatch' SYNTAX 1.3.6.1.1.15.5 )",
"( 2.5.13.34 NAME 'certificateExactMatch' SYNTAX 1.3.6.1.1.15.1 )",
"( 2.5.13.30 NAME 'objectIdentifierFirstComponentMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 )",
"( 2.5.13.29 NAME 'integerFirstComponentMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
"( 2.5.13.28 NAME 'generalizedTimeOrderingMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 )",
"( 2.5.13.27 NAME 'generalizedTimeMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 )",
"( 2.5.13.23 NAME 'uniqueMemberMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.34 )",
"( 2.5.13.21 NAME 'telephoneNumberSubstringsMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.58 )",
"( 2.5.13.20 NAME 'telephoneNumberMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50 )",
"( 2.5.13.19 NAME 'octetStringSubstringsMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 )",
"( 2.5.13.18 NAME 'octetStringOrderingMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 )",
"( 2.5.13.17 NAME 'octetStringMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 )",
"( 2.5.13.16 NAME 'bitStringMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.6 )",
"( 2.5.13.15 NAME 'integerOrderingMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
"( 2.5.13.14 NAME 'integerMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
"( 2.5.13.13 NAME 'booleanMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 )",
"( 2.5.13.11 NAME 'caseIgnoreListMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41 )",
"( 2.5.13.10 NAME 'numericStringSubstringsMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.58 )",
"( 2.5.13.9 NAME 'numericStringOrderingMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36 )",
"( 2.5.13.8 NAME 'numericStringMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36 )",
"( 2.5.13.7 NAME 'caseExactSubstringsMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.58 )",
"( 2.5.13.6 NAME 'caseExactOrderingMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.5.13.5 NAME 'caseExactMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.5.13.4 NAME 'caseIgnoreSubstringsMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.58 )",
"( 2.5.13.3 NAME 'caseIgnoreOrderingMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 2.5.13.2 NAME 'caseIgnoreMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
"( 1.2.36.79672281.1.13.3 NAME 'rdnMatch' SYNTAX 1.2.36.79672281.1.5.0 )",
"( 2.5.13.1 NAME 'distinguishedNameMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
"( 2.5.13.0 NAME 'objectIdentifierMatch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 )"
],
"modifyTimestamp": [
"20141024204149Z"
],
"objectClass": [
"top",
"subentry",
"subschema",
"extensibleObject"
],
"objectClasses": [
"( 2.5.6.0 NAME 'top' DESC 'top of the superclass chain' ABSTRACT MUST objectClass )",
"( 1.3.6.1.4.1.1466.101.120.111 NAME 'extensibleObject' DESC 'RFC4512: extensible object' SUP top AUXILIARY )",
"( 2.5.6.1 NAME 'alias' DESC 'RFC4512: an alias' SUP top STRUCTURAL MUST aliasedObjectName )",
"( 2.16.840.1.113730.3.2.6 NAME 'referral' DESC 'namedref: named subordinate referral' SUP top STRUCTURAL MUST ref )",
"( 1.3.6.1.4.1.4203.1.4.1 NAME ( 'OpenLDAProotDSE' 'LDAProotDSE' ) DESC 'OpenLDAP Root DSE object' SUP top STRUCTURAL MAY cn )",
"( 2.5.17.0 NAME 'subentry' DESC 'RFC3672: subentry' SUP top STRUCTURAL MUST ( cn $ subtreeSpecification ) )",
"( 2.5.20.1 NAME 'subschema' DESC 'RFC4512: controlling subschema (sub)entry' AUXILIARY MAY ( dITStructureRules $ nameForms $ dITContentRules $ objectClasses $ attributeTypes $ matchingRules $ matchingRuleUse ) )",
"( 1.3.6.1.4.1.1466.101.119.2 NAME 'dynamicObject' DESC 'RFC2589: Dynamic Object' SUP top AUXILIARY )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.0 NAME 'olcConfig' DESC 'OpenLDAP configuration object' SUP top ABSTRACT )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.1 NAME 'olcGlobal' DESC 'OpenLDAP Global configuration options' SUP olcConfig STRUCTURAL MAY ( cn $ olcConfigFile $ olcConfigDir $ olcAllows $ olcArgsFile $ olcAttributeOptions $ olcAuthIDRewrite $ olcAuthzPolicy $ olcAuthzRegexp $ olcConcurrency $ olcConnMaxPending $ olcConnMaxPendingAuth $ olcDisallows $ olcGentleHUP $ olcIdleTimeout $ olcIndexSubstrIfMaxLen $ olcIndexSubstrIfMinLen $ olcIndexSubstrAnyLen $ olcIndexSubstrAnyStep $ olcIndexIntLen $ olcLocalSSF $ olcLogFile $ olcLogLevel $ olcPasswordCryptSaltFormat $ olcPasswordHash $ olcPidFile $ olcPluginLogFile $ olcReadOnly $ olcReferral $ olcReplogFile $ olcRequires $ olcRestrict $ olcReverseLookup $ olcRootDSE $ olcSaslAuxprops $ olcSaslHost $ olcSaslRealm $ olcSaslSecProps $ olcSecurity $ olcServerID $ olcSizeLimit $ olcSockbufMaxIncoming $ olcSockbufMaxIncomingAuth $ olcTCPBuffer $ olcThreads $ olcTimeLimit $ olcTLSCACertificateFile $ olcTLSCACertificatePath $ olcTLSCertificateFile $ olcTLSCertificateKeyFile $ olcTLSCipherSuite $ olcTLSCRLCheck $ olcTLSRandFile $ olcTLSVerifyClient $ olcTLSDHParamFile $ olcTLSCRLFile $ olcToolThreads $ olcWriteTimeout $ olcObjectIdentifier $ olcAttributeTypes $ olcObjectClasses $ olcDitContentRules $ olcLdapSyntaxes ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.2 NAME 'olcSchemaConfig' DESC 'OpenLDAP schema object' SUP olcConfig STRUCTURAL MAY ( cn $ olcObjectIdentifier $ olcAttributeTypes $ olcObjectClasses $ olcDitContentRules $ olcLdapSyntaxes ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.3 NAME 'olcBackendConfig' DESC 'OpenLDAP Backend-specific options' SUP olcConfig STRUCTURAL MUST olcBackend )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.4 NAME 'olcDatabaseConfig' DESC 'OpenLDAP Database-specific options' SUP olcConfig STRUCTURAL MUST olcDatabase MAY ( olcHidden $ olcSuffix $ olcSubordinate $ olcAccess $ olcAddContentAcl $ olcLastMod $ olcLimits $ olcMaxDerefDepth $ olcPlugin $ olcReadOnly $ olcReplica $ olcReplicaArgsFile $ olcReplicaPidFile $ olcReplicationInterval $ olcReplogFile $ olcRequires $ olcRestrict $ olcRootDN $ olcRootPW $ olcSchemaDN $ olcSecurity $ olcSizeLimit $ olcSyncUseSubentry $ olcSyncrepl $ olcTimeLimit $ olcUpdateDN $ olcUpdateRef $ olcMirrorMode $ olcMonitoring $ olcExtraAttrs ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.5 NAME 'olcOverlayConfig' DESC 'OpenLDAP Overlay-specific options' SUP olcConfig STRUCTURAL MUST olcOverlay )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.6 NAME 'olcIncludeFile' DESC 'OpenLDAP configuration include file' SUP olcConfig STRUCTURAL MUST olcInclude MAY ( cn $ olcRootDSE ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.7 NAME 'olcFrontendConfig' DESC 'OpenLDAP frontend configuration' AUXILIARY MAY ( olcDefaultSearchBase $ olcPasswordHash $ olcSortVals ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.0.8 NAME 'olcModuleList' DESC 'OpenLDAP dynamic module info' SUP olcConfig STRUCTURAL MAY ( cn $ olcModulePath $ olcModuleLoad ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.2.2.1 NAME 'olcLdifConfig' DESC 'LDIF backend configuration' SUP olcDatabaseConfig STRUCTURAL MUST olcDbDirectory )",
"( 1.3.6.1.4.1.4203.1.12.2.4.2.4.1 NAME 'olcMonitorConfig' DESC 'Monitor backend configuration' SUP olcDatabaseConfig STRUCTURAL )",
"( 1.3.6.1.4.1.4203.1.12.2.4.2.1.1 NAME 'olcBdbConfig' DESC 'BDB backend configuration' SUP olcDatabaseConfig STRUCTURAL MUST olcDbDirectory MAY ( olcDbCacheSize $ olcDbCheckpoint $ olcDbConfig $ olcDbCryptFile $ olcDbCryptKey $ olcDbNoSync $ olcDbDirtyRead $ olcDbIDLcacheSize $ olcDbIndex $ olcDbLinearIndex $ olcDbLockDetect $ olcDbMode $ olcDbSearchStack $ olcDbShmKey $ olcDbCacheFree $ olcDbDNcacheSize $ olcDbPageSize ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.2.1.2 NAME 'olcHdbConfig' DESC 'HDB backend configuration' SUP olcDatabaseConfig STRUCTURAL MUST olcDbDirectory MAY ( olcDbCacheSize $ olcDbCheckpoint $ olcDbConfig $ olcDbCryptFile $ olcDbCryptKey $ olcDbNoSync $ olcDbDirtyRead $ olcDbIDLcacheSize $ olcDbIndex $ olcDbLinearIndex $ olcDbLockDetect $ olcDbMode $ olcDbSearchStack $ olcDbShmKey $ olcDbCacheFree $ olcDbDNcacheSize $ olcDbPageSize ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.2.3.1 NAME 'olcLDAPConfig' DESC 'LDAP backend configuration' SUP olcDatabaseConfig STRUCTURAL MAY ( olcDbURI $ olcDbStartTLS $ olcDbACLAuthcDn $ olcDbACLPasswd $ olcDbACLBind $ olcDbIDAssertAuthcDn $ olcDbIDAssertPasswd $ olcDbIDAssertBind $ olcDbIDAssertMode $ olcDbIDAssertAuthzFrom $ olcDbIDAssertPassThru $ olcDbRebindAsUser $ olcDbChaseReferrals $ olcDbTFSupport $ olcDbProxyWhoAmI $ olcDbTimeout $ olcDbIdleTimeout $ olcDbConnTtl $ olcDbNetworkTimeout $ olcDbProtocolVersion $ olcDbSingleConn $ olcDbCancel $ olcDbQuarantine $ olcDbUseTemporaryConn $ olcDbConnectionPoolMax $ olcDbNoRefs $ olcDbNoUndefFilter ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.3.1 NAME 'olcChainConfig' DESC 'Chain configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcChainingBehavior $ olcChainCacheURI $ olcChainMaxReferralDepth $ olcChainReturnError ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.3.2 NAME 'olcChainDatabase' DESC 'Chain remote server configuration' AUXILIARY )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.3.3 NAME 'olcPBindConfig' DESC 'Proxy Bind configuration' SUP olcOverlayConfig STRUCTURAL MUST olcDbURI MAY ( olcDbStartTLS $ olcDbNetworkTimeout $ olcDbQuarantine ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.7.1 NAME 'olcDistProcConfig' DESC 'Distributed procedures <draft-sermersheim-ldap-distproc> configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcChainingBehavior $ olcChainCacheURI ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.7.2 NAME 'olcDistProcDatabase' DESC 'Distributed procedure remote server configuration' AUXILIARY )",
"( 1.3.6.1.4.1.4203.1.12.2.4.2.5.1 NAME 'olcRelayConfig' DESC 'Relay backend configuration' SUP olcDatabaseConfig STRUCTURAL MAY olcRelay )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.4.1 NAME 'olcAccessLogConfig' DESC 'Access log configuration' SUP olcOverlayConfig STRUCTURAL MUST olcAccessLogDB MAY ( olcAccessLogOps $ olcAccessLogPurge $ olcAccessLogSuccess $ olcAccessLogOld $ olcAccessLogOldAttr $ olcAccessLogBase ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.15.1 NAME 'olcAuditlogConfig' DESC 'Auditlog configuration' SUP olcOverlayConfig STRUCTURAL MAY olcAuditlogFile )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.19.1 NAME 'olcCollectConfig' DESC 'Collective Attribute configuration' SUP olcOverlayConfig STRUCTURAL MAY olcCollectInfo )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.13.1 NAME 'olcConstraintConfig' DESC 'Constraint overlay configuration' SUP olcOverlayConfig STRUCTURAL MAY olcConstraintAttribute )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.9.1 NAME 'olcDDSConfig' DESC 'RFC2589 Dynamic directory services configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcDDSstate $ olcDDSmaxTtl $ olcDDSminTtl $ olcDDSdefaultTtl $ olcDDSinterval $ olcDDStolerance $ olcDDSmaxDynamicObjects ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.17.1 NAME 'olcDGConfig' DESC 'Dynamic Group configuration' SUP olcOverlayConfig STRUCTURAL MAY olcDGAttrPair )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.8.1 NAME 'olcDynamicList' DESC 'Dynamic list configuration' SUP olcOverlayConfig STRUCTURAL MAY olcDLattrSet )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.18.1 NAME 'olcMemberOf' DESC 'Member-of configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcMemberOfDN $ olcMemberOfDangling $ olcMemberOfDanglingError $ olcMemberOfRefInt $ olcMemberOfGroupOC $ olcMemberOfMemberAD $ olcMemberOfMemberOfAD ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.12.1 NAME 'olcPPolicyConfig' DESC 'Password Policy configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcPPolicyDefault $ olcPPolicyHashCleartext $ olcPPolicyUseLockout $ olcPPolicyForwardUpdates ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.2.1 NAME 'olcPcacheConfig' DESC 'ProxyCache configuration' SUP olcOverlayConfig STRUCTURAL MUST ( olcPcache $ olcPcacheAttrset $ olcPcacheTemplate ) MAY ( olcPcachePosition $ olcPcacheMaxQueries $ olcPcachePersist $ olcPcacheValidate $ olcPcacheOffline $ olcPcacheBind ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.2.2 NAME 'olcPcacheDatabase' DESC 'Cache database configuration' AUXILIARY )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.11.1 NAME 'olcRefintConfig' DESC 'Referential integrity configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcRefintAttribute $ olcRefintNothing $ olcRefintModifiersName ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.20.1 NAME 'olcRetcodeConfig' DESC 'Retcode configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcRetcodeParent $ olcRetcodeItem $ olcRetcodeInDir $ olcRetcodeSleep ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.16.1 NAME 'olcRwmConfig' DESC 'Rewrite/remap configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcRwmRewrite $ olcRwmTFSupport $ olcRwmMap $ olcRwmNormalizeMapped ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.21.1 NAME 'olcSssVlvConfig' DESC 'SSS VLV configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcSssVlvMax $ olcSssVlvMaxKeys ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.1.1 NAME 'olcSyncProvConfig' DESC 'SyncRepl Provider configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcSpCheckpoint $ olcSpSessionlog $ olcSpNoPresent $ olcSpReloadHint ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.14.1 NAME 'olcTranslucentConfig' DESC 'Translucent configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcTranslucentStrict $ olcTranslucentNoGlue $ olcTranslucentLocal $ olcTranslucentRemote $ olcTranslucentBindLocal $ olcTranslucentPwModLocal ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.14.2 NAME 'olcTranslucentDatabase' DESC 'Translucent target database configuration' AUXILIARY )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.10.1 NAME 'olcUniqueConfig' DESC 'Attribute value uniqueness configuration' SUP olcOverlayConfig STRUCTURAL MAY ( olcUniqueBase $ olcUniqueIgnore $ olcUniqueAttribute $ olcUniqueStrict $ olcUniqueURI ) )",
"( 1.3.6.1.4.1.4203.1.12.2.4.3.5.1 NAME 'olcValSortConfig' DESC 'Value Sorting configuration' SUP olcOverlayConfig STRUCTURAL MUST olcValSortAttr )",
"( 2.5.6.2 NAME 'country' DESC 'RFC2256: a country' SUP top STRUCTURAL MUST c MAY ( searchGuide $ description ) )",
"( 2.5.6.3 NAME 'locality' DESC 'RFC2256: a locality' SUP top STRUCTURAL MAY ( street $ seeAlso $ searchGuide $ st $ l $ description ) )",
"( 2.5.6.4 NAME 'organization' DESC 'RFC2256: an organization' SUP top STRUCTURAL MUST o MAY ( userPassword $ searchGuide $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ st $ l $ description ) )",
"( 2.5.6.5 NAME 'organizationalUnit' DESC 'RFC2256: an organizational unit' SUP top STRUCTURAL MUST ou MAY ( userPassword $ searchGuide $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ st $ l $ description ) )",
"( 2.5.6.6 NAME 'person' DESC 'RFC2256: a person' SUP top STRUCTURAL MUST ( sn $ cn ) MAY ( userPassword $ telephoneNumber $ seeAlso $ description ) )",
"( 2.5.6.7 NAME 'organizationalPerson' DESC 'RFC2256: an organizational person' SUP person STRUCTURAL MAY ( title $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ ou $ st $ l ) )",
"( 2.5.6.8 NAME 'organizationalRole' DESC 'RFC2256: an organizational role' SUP top STRUCTURAL MUST cn MAY ( x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ seeAlso $ roleOccupant $ preferredDeliveryMethod $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ ou $ st $ l $ description ) )",
"( 2.5.6.9 NAME 'groupOfNames' DESC 'RFC2256: a group of names (DNs)' SUP top STRUCTURAL MUST ( member $ cn ) MAY ( businessCategory $ seeAlso $ owner $ ou $ o $ description ) )",
"( 2.5.6.10 NAME 'residentialPerson' DESC 'RFC2256: an residential person' SUP person STRUCTURAL MUST l MAY ( businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ preferredDeliveryMethod $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ st $ l ) )",
"( 2.5.6.11 NAME 'applicationProcess' DESC 'RFC2256: an application process' SUP top STRUCTURAL MUST cn MAY ( seeAlso $ ou $ l $ description ) )",
"( 2.5.6.12 NAME 'applicationEntity' DESC 'RFC2256: an application entity' SUP top STRUCTURAL MUST ( presentationAddress $ cn ) MAY ( supportedApplicationContext $ seeAlso $ ou $ o $ l $ description ) )",
"( 2.5.6.13 NAME 'dSA' DESC 'RFC2256: a directory system agent (a server)' SUP applicationEntity STRUCTURAL MAY knowledgeInformation )",
"( 2.5.6.14 NAME 'device' DESC 'RFC2256: a device' SUP top STRUCTURAL MUST cn MAY ( serialNumber $ seeAlso $ owner $ ou $ o $ l $ description ) )",
"( 2.5.6.15 NAME 'strongAuthenticationUser' DESC 'RFC2256: a strong authentication user' SUP top AUXILIARY MUST userCertificate )",
"( 2.5.6.16 NAME 'certificationAuthority' DESC 'RFC2256: a certificate authority' SUP top AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY crossCertificatePair )",
"( 2.5.6.17 NAME 'groupOfUniqueNames' DESC 'RFC2256: a group of unique names (DN and Unique Identifier)' SUP top STRUCTURAL MUST ( uniqueMember $ cn ) MAY ( businessCategory $ seeAlso $ owner $ ou $ o $ description ) )",
"( 2.5.6.18 NAME 'userSecurityInformation' DESC 'RFC2256: a user security information' SUP top AUXILIARY MAY supportedAlgorithms )",
"( 2.5.6.16.2 NAME 'certificationAuthority-V2' SUP certificationAuthority AUXILIARY MAY deltaRevocationList )",
"( 2.5.6.19 NAME 'cRLDistributionPoint' SUP top STRUCTURAL MUST cn MAY ( certificateRevocationList $ authorityRevocationList $ deltaRevocationList ) )",
"( 2.5.6.20 NAME 'dmd' SUP top STRUCTURAL MUST dmdName MAY ( userPassword $ searchGuide $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ st $ l $ description ) )",
"( 2.5.6.21 NAME 'pkiUser' DESC 'RFC2587: a PKI user' SUP top AUXILIARY MAY userCertificate )",
"( 2.5.6.22 NAME 'pkiCA' DESC 'RFC2587: PKI certificate authority' SUP top AUXILIARY MAY ( authorityRevocationList $ certificateRevocationList $ cACertificate $ crossCertificatePair ) )",
"( 2.5.6.23 NAME 'deltaCRL' DESC 'RFC2587: PKI user' SUP top AUXILIARY MAY deltaRevocationList )",
"( 1.3.6.1.4.1.250.3.15 NAME 'labeledURIObject' DESC 'RFC2079: object that contains the URI attribute type' SUP top AUXILIARY MAY labeledURI )",
"( 0.9.2342.19200300.100.4.19 NAME 'simpleSecurityObject' DESC 'RFC1274: simple security object' SUP top AUXILIARY MUST userPassword )",
"( 1.3.6.1.4.1.1466.344 NAME 'dcObject' DESC 'RFC2247: domain component object' SUP top AUXILIARY MUST dc )",
"( 1.3.6.1.1.3.1 NAME 'uidObject' DESC 'RFC2377: uid object' SUP top AUXILIARY MUST uid )",
"( 0.9.2342.19200300.100.4.4 NAME ( 'pilotPerson' 'newPilotPerson' ) SUP person STRUCTURAL MAY ( userid $ textEncodedORAddress $ rfc822Mailbox $ favouriteDrink $ roomNumber $ userClass $ homeTelephoneNumber $ homePostalAddress $ secretary $ personalTitle $ preferredDeliveryMethod $ businessCategory $ janetMailbox $ otherMailbox $ mobileTelephoneNumber $ pagerTelephoneNumber $ organizationalStatus $ mailPreferenceOption $ personalSignature ) )",
"( 0.9.2342.19200300.100.4.5 NAME 'account' SUP top STRUCTURAL MUST userid MAY ( description $ seeAlso $ localityName $ organizationName $ organizationalUnitName $ host ) )",
"( 0.9.2342.19200300.100.4.6 NAME 'document' SUP top STRUCTURAL MUST documentIdentifier MAY ( commonName $ description $ seeAlso $ localityName $ organizationName $ organizationalUnitName $ documentTitle $ documentVersion $ documentAuthor $ documentLocation $ documentPublisher ) )",
"( 0.9.2342.19200300.100.4.7 NAME 'room' SUP top STRUCTURAL MUST commonName MAY ( roomNumber $ description $ seeAlso $ telephoneNumber ) )",
"( 0.9.2342.19200300.100.4.9 NAME 'documentSeries' SUP top STRUCTURAL MUST commonName MAY ( description $ seeAlso $ telephonenumber $ localityName $ organizationName $ organizationalUnitName ) )",
"( 0.9.2342.19200300.100.4.13 NAME 'domain' SUP top STRUCTURAL MUST domainComponent MAY ( associatedName $ organizationName $ description $ businessCategory $ seeAlso $ searchGuide $ userPassword $ localityName $ stateOrProvinceName $ streetAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ streetAddress $ facsimileTelephoneNumber $ internationalISDNNumber $ telephoneNumber $ teletexTerminalIdentifier $ telexNumber $ preferredDeliveryMethod $ destinationIndicator $ registeredAddress $ x121Address ) )",
"( 0.9.2342.19200300.100.4.14 NAME 'RFC822localPart' SUP domain STRUCTURAL MAY ( commonName $ surname $ description $ seeAlso $ telephoneNumber $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ streetAddress $ facsimileTelephoneNumber $ internationalISDNNumber $ telephoneNumber $ teletexTerminalIdentifier $ telexNumber $ preferredDeliveryMethod $ destinationIndicator $ registeredAddress $ x121Address ) )",
"( 0.9.2342.19200300.100.4.15 NAME 'dNSDomain' SUP domain STRUCTURAL MAY ( ARecord $ MDRecord $ MXRecord $ NSRecord $ SOARecord $ CNAMERecord ) )",
"( 0.9.2342.19200300.100.4.17 NAME 'domainRelatedObject' DESC 'RFC1274: an object related to an domain' SUP top AUXILIARY MUST associatedDomain )",
"( 0.9.2342.19200300.100.4.18 NAME 'friendlyCountry' SUP country STRUCTURAL MUST friendlyCountryName )",
"( 0.9.2342.19200300.100.4.20 NAME 'pilotOrganization' SUP ( organization $ organizationalUnit ) STRUCTURAL MAY buildingName )",
"( 0.9.2342.19200300.100.4.21 NAME 'pilotDSA' SUP dsa STRUCTURAL MAY dSAQuality )",
"( 0.9.2342.19200300.100.4.22 NAME 'qualityLabelledData' SUP top AUXILIARY MUST dsaQuality MAY ( subtreeMinimumQuality $ subtreeMaximumQuality ) )",
"( 2.16.840.1.113730.3.2.2 NAME 'inetOrgPerson' DESC 'RFC2798: Internet Organizational Person' SUP organizationalPerson STRUCTURAL MAY ( audio $ businessCategory $ carLicense $ departmentNumber $ displayName $ employeeNumber $ employeeType $ givenName $ homePhone $ homePostalAddress $ initials $ jpegPhoto $ labeledURI $ mail $ manager $ mobile $ o $ pager $ photo $ roomNumber $ secretary $ uid $ userCertificate $ x500uniqueIdentifier $ preferredLanguage $ userSMIMECertificate $ userPKCS12 ) )",
"( 1.3.6.1.1.1.2.0 NAME 'posixAccount' DESC 'Abstraction of an account with POSIX attributes' SUP top AUXILIARY MUST ( cn $ uid $ uidNumber $ gidNumber $ homeDirectory ) MAY ( userPassword $ loginShell $ gecos $ description ) )",
"( 1.3.6.1.1.1.2.1 NAME 'shadowAccount' DESC 'Additional attributes for shadow passwords' SUP top AUXILIARY MUST uid MAY ( userPassword $ description $ shadowLastChange $ shadowMin $ shadowMax $ shadowWarning $ shadowInactive $ shadowExpire $ shadowFlag ) )",
"( 1.3.6.1.1.1.2.2 NAME 'posixGroup' DESC 'Abstraction of a group of accounts' SUP top AUXILIARY MUST gidNumber MAY ( userPassword $ memberUid $ description ) )",
"( 1.3.6.1.1.1.2.3 NAME 'ipService' DESC 'Abstraction an Internet Protocol service. Maps an IP port and protocol (such as tcp or udp) to one or more names; the distinguished value of the cn attribute denotes the services canonical name' SUP top STRUCTURAL MUST ( cn $ ipServicePort $ ipServiceProtocol ) MAY description )",
"( 1.3.6.1.1.1.2.4 NAME 'ipProtocol' DESC 'Abstraction of an IP protocol. Maps a protocol number to one or more names. The distinguished value of the cn attribute denotes the protocols canonical name' SUP top STRUCTURAL MUST ( cn $ ipProtocolNumber ) MAY description )",
"( 1.3.6.1.1.1.2.5 NAME 'oncRpc' DESC 'Abstraction of an Open Network Computing (ONC) [RFC1057] Remote Procedure Call (RPC) binding. This class maps an ONC RPC number to a name. The distinguished value of the cn attribute denotes the RPC services canonical name' SUP top STRUCTURAL MUST ( cn $ oncRpcNumber ) MAY description )",
"( 1.3.6.1.1.1.2.6 NAME 'ipHost' DESC 'Abstraction of a host, an IP device. The distinguished value of the cn attribute denotes the hosts canonical name. Device SHOULD be used as a structural class' SUP top AUXILIARY MUST ( cn $ ipHostNumber ) MAY ( userPassword $ l $ description $ manager ) )",
"( 1.3.6.1.1.1.2.7 NAME 'ipNetwork' DESC 'Abstraction of a network. The distinguished value of the cn attribute denotes the networks canonical name' SUP top STRUCTURAL MUST ipNetworkNumber MAY ( cn $ ipNetmaskNumber $ l $ description $ manager ) )",
"( 1.3.6.1.1.1.2.8 NAME 'nisNetgroup' DESC 'Abstraction of a netgroup. May refer to other netgroups' SUP top STRUCTURAL MUST cn MAY ( nisNetgroupTriple $ memberNisNetgroup $ description ) )",
"( 1.3.6.1.1.1.2.9 NAME 'nisMap' DESC 'A generic abstraction of a NIS map' SUP top STRUCTURAL MUST nisMapName MAY description )",
"( 1.3.6.1.1.1.2.10 NAME 'nisObject' DESC 'An entry in a NIS map' SUP top STRUCTURAL MUST ( cn $ nisMapEntry $ nisMapName ) MAY description )",
"( 1.3.6.1.1.1.2.11 NAME 'ieee802Device' DESC 'A device with a MAC address; device SHOULD be used as a structural class' SUP top AUXILIARY MAY macAddress )",
"( 1.3.6.1.1.1.2.12 NAME 'bootableDevice' DESC 'A device with boot parameters; device SHOULD be used as a structural class' SUP top AUXILIARY MAY ( bootFile $ bootParameter ) )",
"( 1.3.6.1.1.1.2.14 NAME 'nisKeyObject' DESC 'An object with a public and secret key' SUP top AUXILIARY MUST ( cn $ nisPublicKey $ nisSecretKey ) MAY ( uidNumber $ description ) )",
"( 1.3.6.1.1.1.2.15 NAME 'nisDomainObject' DESC 'Associates a NIS domain with a naming context' SUP top AUXILIARY MUST nisDomain )",
"( 1.3.6.1.1.1.2.16 NAME 'automountMap' SUP top STRUCTURAL MUST automountMapName MAY description )",
"( 1.3.6.1.1.1.2.17 NAME 'automount' DESC 'Automount information' SUP top STRUCTURAL MUST ( automountKey $ automountInformation ) MAY description )",
"( 1.3.6.1.4.1.5322.13.1.1 NAME 'namedObject' SUP top STRUCTURAL MAY cn )",
"( 1.3.6.1.4.1.7057.10.1.2.1.2 NAME 'suseModuleConfiguration' DESC 'Contains configuration of Management Modules' SUP top STRUCTURAL MUST cn MAY suseDefaultBase )",
"( 1.3.6.1.4.1.7057.10.1.2.1.3 NAME 'suseUserConfiguration' DESC 'Configuration of user management tools' SUP suseModuleConfiguration STRUCTURAL MAY ( suseMinPasswordLength $ suseMaxPasswordLength $ susePasswordHash $ suseSkelDir $ suseNextUniqueId $ suseMinUniqueId $ suseMaxUniqueId $ suseDefaultTemplate $ suseSearchFilter $ suseMapAttribute ) )",
"( 1.3.6.1.4.1.7057.10.1.2.1.4 NAME 'suseObjectTemplate' DESC 'Base Class for Object-Templates' SUP top STRUCTURAL MUST cn MAY ( susePlugin $ suseDefaultValue $ suseNamingAttribute ) )",
"( 1.3.6.1.4.1.7057.10.1.2.1.5 NAME 'suseUserTemplate' DESC 'User object template' SUP suseObjectTemplate STRUCTURAL MUST cn MAY suseSecondaryGroup )",
"( 1.3.6.1.4.1.7057.10.1.2.1.6 NAME 'suseGroupTemplate' DESC 'Group object template' SUP suseObjectTemplate STRUCTURAL MUST cn )",
"( 1.3.6.1.4.1.7057.10.1.2.1.7 NAME 'suseGroupConfiguration' DESC 'Configuration of user management tools' SUP suseModuleConfiguration STRUCTURAL MAY ( suseNextUniqueId $ suseMinUniqueId $ suseMaxUniqueId $ suseDefaultTemplate $ suseSearchFilter $ suseMapAttribute ) )",
"( 1.3.6.1.4.1.7057.10.1.2.1.8 NAME 'suseCaConfiguration' DESC 'Configuration of CA management tools' SUP suseModuleConfiguration STRUCTURAL )",
"( 1.3.6.1.4.1.7057.10.1.2.1.9 NAME 'suseDnsConfiguration' DESC 'Configuration of mail server management tools' SUP suseModuleConfiguration STRUCTURAL )",
"( 1.3.6.1.4.1.7057.10.1.2.1.10 NAME 'suseDhcpConfiguration' DESC 'Configuration of DHCP server management tools' SUP suseModuleConfiguration STRUCTURAL )",
"( 1.3.6.1.4.1.7057.10.1.2.1.11 NAME 'suseMailConfiguration' DESC 'Configuration of IMAP user management tools' SUP suseModuleConfiguration STRUCTURAL MUST ( suseImapServer $ suseImapAdmin $ suseImapDefaultQuota $ suseImapUseSsl ) )"
],
"structuralObjectClass": [
"subentry"
],
"subschemaSubentry": [
"cn=Subschema"
]
},
"schema_entry": "cn=Subschema",
"type": "SchemaInfo"
}
"""
slapd_2_4_dsa_info = """
{
"raw": {
"configContext": [
"cn=config"
],
"entryDN": [
""
],
"namingContexts": [
"o=services",
"o=test"
],
"objectClass": [
"top",
"OpenLDAProotDSE"
],
"structuralObjectClass": [
"OpenLDAProotDSE"
],
"subschemaSubentry": [
"cn=Subschema"
],
"supportedControl": [
"1.3.6.1.4.1.4203.1.9.1.1",
"2.16.840.1.113730.3.4.18",
"2.16.840.1.113730.3.4.2",
"1.3.6.1.4.1.4203.1.10.1",
"1.2.840.113556.1.4.319",
"1.2.826.0.1.3344810.2.3",
"1.3.6.1.1.13.2",
"1.3.6.1.1.13.1",
"1.3.6.1.1.12"
],
"supportedExtension": [
"1.3.6.1.4.1.1466.20037",
"1.3.6.1.4.1.4203.1.11.1",
"1.3.6.1.4.1.4203.1.11.3",
"1.3.6.1.1.8"
],
"supportedFeatures": [
"1.3.6.1.1.14",
"1.3.6.1.4.1.4203.1.5.1",
"1.3.6.1.4.1.4203.1.5.2",
"1.3.6.1.4.1.4203.1.5.3",
"1.3.6.1.4.1.4203.1.5.4",
"1.3.6.1.4.1.4203.1.5.5"
],
"supportedLDAPVersion": [
"3"
],
"supportedSASLMechanisms": [
"GSSAPI",
"DIGEST-MD5"
]
},
"type": "DsaInfo"
}
"""

View File

@ -0,0 +1,116 @@
"""
"""
# Created on 2016.07.10
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
try:
from queue import Queue
except ImportError: # Python 2
# noinspection PyUnresolvedReferences
from Queue import Queue
from io import StringIO
from os import linesep
from ..protocol.rfc2849 import decode_persistent_search_control
from ..strategy.asynchronous import AsyncStrategy
from ..core.exceptions import LDAPLDIFError
from ..utils.conv import prepare_for_stream
from ..protocol.rfc2849 import persistent_search_response_to_ldif, add_ldif_header
# noinspection PyProtectedMember
class AsyncStreamStrategy(AsyncStrategy):
"""
This strategy is asynchronous. It streams responses in a generator as they appear in the self._responses container
"""
def __init__(self, ldap_connection):
AsyncStrategy.__init__(self, ldap_connection)
self.can_stream = True
self.line_separator = linesep
self.all_base64 = False
self.stream = None
self.order = dict()
self._header_added = False
self.persistent_search_message_id = None
self.streaming = False
self.callback = None
self.events = Queue()
del self._requests # remove _requests dict from Async Strategy
def _start_listen(self):
AsyncStrategy._start_listen(self)
if self.streaming:
if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed):
self.set_stream(StringIO())
def _stop_listen(self):
AsyncStrategy._stop_listen(self)
if self.streaming:
self.stream.close()
def accumulate_stream(self, message_id, change):
if message_id == self.persistent_search_message_id:
with self.async_lock:
self._responses[message_id] = []
if self.streaming:
if not self._header_added and self.stream.tell() == 0:
header = add_ldif_header(['-'])[0]
self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
ldif_lines = persistent_search_response_to_ldif(change)
if self.stream and ldif_lines and not self.connection.closed:
fragment = self.line_separator.join(ldif_lines)
if not self._header_added and self.stream.tell() == 0:
self._header_added = True
header = add_ldif_header(['-'])[0]
self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator))
else: # strategy is not streaming, events are added to a queue
notification = decode_persistent_search_control(change)
if notification:
change.update(notification)
del change['controls']['2.16.840.1.113730.3.4.7']
if not self.callback:
self.events.put(change)
else:
self.callback(change)
def get_stream(self):
if self.streaming:
return self.stream
return None
def set_stream(self, value):
error = False
try:
if not value.writable():
error = True
except (ValueError, AttributeError):
error = True
if error:
raise LDAPLDIFError('stream must be writable')
self.stream = value
self.streaming = True
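# --- Illustrative usage sketch (not part of the original module) ---
# A minimal example of wiring an ASYNC_STREAM connection to a writable stream,
# assuming only the public ldap3 API plus the set_stream()/get_stream() methods
# defined above; the server URL is a placeholder and no bind is attempted here:
if __name__ == '__main__':
    from io import StringIO
    from ldap3 import Server, Connection, ASYNC_STREAM
    conn = Connection(Server('ldap://ldap.example.com'), client_strategy=ASYNC_STREAM)
    # direct the LDIF output of persistent search notifications to an in-memory buffer
    conn.strategy.set_stream(StringIO())
    assert conn.strategy.get_stream() is conn.strategy.stream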

View File

@ -0,0 +1,221 @@
"""
"""
# Created on 2013.07.15
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from threading import Thread, Lock
import socket
from .. import get_config_parameter
from ..core.exceptions import LDAPSSLConfigurationError, LDAPStartTLSError, LDAPOperationResult
from ..strategy.base import BaseStrategy, RESPONSE_COMPLETE
from ..protocol.rfc4511 import LDAPMessage
from ..utils.log import log, log_enabled, format_ldap_message, ERROR, NETWORK, EXTENDED
from ..utils.asn1 import decoder, decode_message_fast
# noinspection PyProtectedMember
class AsyncStrategy(BaseStrategy):
"""
This strategy is asynchronous. You send the request and get back the messageId of the request sent
Receiving data from the socket is managed in a separate thread in blocking mode
Requests return an int value that indicates the messageId of the requested operation
You get the response with get_response, which waits up to a timeout for the response to appear
Connection.response will contain the whole LDAP response for the requested messageId in dict form
Connection.request will contain the request LDAP message in dict form
Responses appear in the strategy._responses dictionary (a usage sketch appears at the end of this module)
"""
# noinspection PyProtectedMember
class ReceiverSocketThread(Thread):
"""
The thread that actually manages the receiver socket
"""
def __init__(self, ldap_connection):
Thread.__init__(self)
self.connection = ldap_connection
self.socket_size = get_config_parameter('SOCKET_SIZE')
def run(self):
"""
Wait for data on the socket, compute the length of the message and wait for enough bytes to decode the message
Messages are appended to strategy._responses
"""
unprocessed = b''
get_more_data = True
listen = True
data = b''
while listen:
if get_more_data:
try:
data = self.connection.socket.recv(self.socket_size)
except (OSError, socket.error, AttributeError):
if self.connection.receive_timeout: # a receive timeout has been detected - keep listening on the socket
continue
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', str(e), self.connection)
raise # unexpected exception - re-raise
if len(data) > 0:
unprocessed += data
data = b''
else:
listen = False
length = BaseStrategy.compute_ldap_message_size(unprocessed)
if length == -1 or len(unprocessed) < length:
get_more_data = True
elif len(unprocessed) >= length: # add message to message list
if self.connection.usage:
self.connection._usage.update_received_message(length)
if log_enabled(NETWORK):
log(NETWORK, 'received %d bytes via <%s>', length, self.connection)
if self.connection.fast_decoder:
ldap_resp = decode_message_fast(unprocessed[:length])
dict_response = self.connection.strategy.decode_response_fast(ldap_resp)
else:
ldap_resp = decoder.decode(unprocessed[:length], asn1Spec=LDAPMessage())[0]
dict_response = self.connection.strategy.decode_response(ldap_resp)
message_id = int(ldap_resp['messageID'])
if log_enabled(NETWORK):
log(NETWORK, 'received 1 ldap message via <%s>', self.connection)
if log_enabled(EXTENDED):
log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
if dict_response['type'] == 'extendedResp' and (dict_response['responseName'] == '1.3.6.1.4.1.1466.20037' or hasattr(self.connection, '_awaiting_for_async_start_tls')):
if dict_response['result'] == 0: # StartTls in progress
if self.connection.server.tls:
self.connection.server.tls._start_tls(self.connection)
else:
self.connection.last_error = 'no Tls object defined in Server'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSSLConfigurationError(self.connection.last_error)
else:
self.connection.last_error = 'asynchronous StartTls failed'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPStartTLSError(self.connection.last_error)
del self.connection._awaiting_for_async_start_tls
if message_id != 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
with self.connection.strategy.async_lock:
if message_id in self.connection.strategy._responses:
self.connection.strategy._responses[message_id].append(dict_response)
else:
self.connection.strategy._responses[message_id] = [dict_response]
if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
self.connection.strategy._responses[message_id].append(RESPONSE_COMPLETE)
if self.connection.strategy.can_stream: # for AsyncStreamStrategy, used for PersistentSearch
self.connection.strategy.accumulate_stream(message_id, dict_response)
unprocessed = unprocessed[length:]
get_more_data = False if unprocessed else True
listen = True if self.connection.listening or unprocessed else False
else: # Unsolicited Notification
if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
listen = False
else:
self.connection.last_error = 'unknown unsolicited notification from server'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPStartTLSError(self.connection.last_error)
self.connection.strategy.close()
def __init__(self, ldap_connection):
BaseStrategy.__init__(self, ldap_connection)
self.sync = False
self.no_real_dsa = False
self.pooled = False
self._responses = None
self._requests = None
self.can_stream = False
self.receiver = None
self.async_lock = Lock()
def open(self, reset_usage=True, read_server_info=True):
"""
Open the connection and start listening on the socket in a separate thread
"""
with self.connection.connection_lock:
self._responses = dict()
self._requests = dict()
BaseStrategy.open(self, reset_usage, read_server_info)
if read_server_info:
try:
self.connection.refresh_server_info()
except LDAPOperationResult: # catch errors from server if raise_exception = True
self.connection.server._dsa_info = None
self.connection.server._schema_info = None
def close(self):
"""
Close connection and stop socket thread
"""
with self.connection.connection_lock:
BaseStrategy.close(self)
def post_send_search(self, message_id):
"""
Clears connection.response and returns messageId
"""
self.connection.response = None
self.connection.request = None
self.connection.result = None
return message_id
def post_send_single_response(self, message_id):
"""
Clears connection.response and returns messageId.
"""
self.connection.response = None
self.connection.request = None
self.connection.result = None
return message_id
def _start_listen(self):
"""
Start thread in daemon mode
"""
if not self.connection.listening:
self.receiver = AsyncStrategy.ReceiverSocketThread(self.connection)
self.connection.listening = True
self.receiver.daemon = True
self.receiver.start()
def _get_response(self, message_id):
"""
Performs the capture of LDAP response for this strategy
Checks lock to avoid race condition with receiver thread
"""
with self.async_lock:
responses = self._responses.pop(message_id) if message_id in self._responses and self._responses[message_id][-1] == RESPONSE_COMPLETE else None
return responses
def receiving(self):
raise NotImplementedError
def get_stream(self):
raise NotImplementedError
def set_stream(self, value):
raise NotImplementedError
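# --- Illustrative usage sketch (not part of the original module) ---
# The asynchronous strategy returns a messageId from each operation; the caller
# later collects the reply with Connection.get_response(). Host, credentials and
# search parameters below are placeholders:
if __name__ == '__main__':
    from ldap3 import Server, Connection, ASYNC, SUBTREE
    conn = Connection(Server('ldap.example.com'), user='cn=admin,o=test',
                      password='secret', client_strategy=ASYNC, auto_bind=True)
    msg_id = conn.search('o=test', '(objectClass=person)', SUBTREE, attributes=['cn'])
    # get_response() waits up to the configured timeout for the reply to appear
    responses, result = conn.get_response(msg_id)
    print(result['description'], len(responses))
    conn.unbind()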

View File

@ -0,0 +1,867 @@
"""
"""
# Created on 2013.07.15
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import socket
from struct import pack
from platform import system
from time import sleep
from random import choice
from datetime import datetime
from .. import SYNC, ANONYMOUS, get_config_parameter, BASE, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES
from ..core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_REFERRAL
from ..core.exceptions import LDAPOperationResult, LDAPSASLBindInProgressError, LDAPSocketOpenError, LDAPSessionTerminatedByServerError,\
LDAPUnknownResponseError, LDAPUnknownRequestError, LDAPReferralError, communication_exception_factory, \
LDAPSocketSendError, LDAPExceptionError, LDAPControlError, LDAPResponseTimeoutError, LDAPTransactionError
from ..utils.uri import parse_uri
from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID, SearchResultEntry
from ..operation.add import add_response_to_dict, add_request_to_dict
from ..operation.modify import modify_request_to_dict, modify_response_to_dict
from ..operation.search import search_result_reference_response_to_dict, search_result_done_response_to_dict,\
search_result_entry_response_to_dict, search_request_to_dict, search_result_entry_response_to_dict_fast,\
search_result_reference_response_to_dict_fast, attributes_to_dict, attributes_to_dict_fast
from ..operation.bind import bind_response_to_dict, bind_request_to_dict, sicily_bind_response_to_dict, bind_response_to_dict_fast, \
sicily_bind_response_to_dict_fast
from ..operation.compare import compare_response_to_dict, compare_request_to_dict
from ..operation.extended import extended_request_to_dict, extended_response_to_dict, intermediate_response_to_dict, extended_response_to_dict_fast, intermediate_response_to_dict_fast
from ..core.server import Server
from ..operation.modifyDn import modify_dn_request_to_dict, modify_dn_response_to_dict
from ..operation.delete import delete_response_to_dict, delete_request_to_dict
from ..protocol.convert import prepare_changes_for_request, build_controls_list
from ..operation.abandon import abandon_request_to_dict
from ..core.tls import Tls
from ..protocol.oid import Oids
from ..protocol.rfc2696 import RealSearchControlValue
from ..protocol.microsoft import DirSyncControlResponseValue
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED, format_ldap_message
from ..utils.asn1 import encode, decoder, ldap_result_to_dict_fast, decode_sequence
from ..utils.conv import to_unicode
SESSION_TERMINATED_BY_SERVER = 'TERMINATED_BY_SERVER'
TRANSACTION_ERROR = 'TRANSACTION_ERROR'
RESPONSE_COMPLETE = 'RESPONSE_FROM_SERVER_COMPLETE'
# noinspection PyProtectedMember
class BaseStrategy(object):
"""
Base class for connection strategy
"""
def __init__(self, ldap_connection):
self.connection = ldap_connection
self._outstanding = None
self._referrals = []
self.sync = None # indicates a synchronous connection
self.no_real_dsa = None # indicates a connection to a fake LDAP server
self.pooled = None # Indicates a connection with a connection pool
self.can_stream = None # indicates if a strategy keeps a stream of responses (i.e. LdifProducer can accumulate responses with a single header). Stream must be initialized and closed in _start_listen() and _stop_listen()
self.referral_cache = {}
if log_enabled(BASIC):
log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
def __str__(self):
s = [
str(self.connection) if self.connection else 'None',
'sync' if self.sync else 'async',
'no real DSA' if self.no_real_dsa else 'real DSA',
'pooled' if self.pooled else 'not pooled',
'can stream output' if self.can_stream else 'cannot stream output',
]
return ' - '.join(s)
def open(self, reset_usage=True, read_server_info=True):
"""
Open a socket to a server. Choose a server from the server pool if available
"""
if log_enabled(NETWORK):
log(NETWORK, 'opening connection for <%s>', self.connection)
if self.connection.lazy and not self.connection._executing_deferred:
self.connection._deferred_open = True
self.connection.closed = False
if log_enabled(NETWORK):
log(NETWORK, 'deferring open connection for <%s>', self.connection)
else:
if not self.connection.closed and not self.connection._executing_deferred: # try to close connection if still open
self.close()
self._outstanding = dict()
if self.connection.usage:
if reset_usage or not self.connection._usage.initial_connection_start_time:
self.connection._usage.start()
if self.connection.server_pool:
new_server = self.connection.server_pool.get_server(self.connection) # get a server from the server_pool if available
if self.connection.server != new_server:
self.connection.server = new_server
if self.connection.usage:
self.connection._usage.servers_from_pool += 1
exception_history = []
if not self.no_real_dsa: # tries to connect to a real server
for candidate_address in self.connection.server.candidate_addresses():
try:
if log_enabled(BASIC):
log(BASIC, 'try to open candidate address %s', candidate_address[:-2])
self._open_socket(candidate_address, self.connection.server.ssl, unix_socket=self.connection.server.ipc)
self.connection.server.current_address = candidate_address
self.connection.server.update_availability(candidate_address, True)
break
except Exception as e:
self.connection.server.update_availability(candidate_address, False)
# exception_history.append((datetime.now(), exc_type, exc_value, candidate_address[4]))
exception_history.append((type(e)(str(e)), candidate_address[4]))
if not self.connection.server.current_address and exception_history:
# if len(exception_history) == 1: # only one exception, reraise
# if log_enabled(ERROR):
# log(ERROR, '<%s> for <%s>', exception_history[0][1](exception_history[0][2]), self.connection)
# raise exception_history[0][1](exception_history[0][2])
# else:
# if log_enabled(ERROR):
# log(ERROR, 'unable to open socket for <%s>', self.connection)
# raise LDAPSocketOpenError('unable to open socket', exception_history)
if log_enabled(ERROR):
log(ERROR, 'unable to open socket for <%s>', self.connection)
raise LDAPSocketOpenError('unable to open socket', exception_history)
elif not self.connection.server.current_address:
if log_enabled(ERROR):
log(ERROR, 'invalid server address for <%s>', self.connection)
raise LDAPSocketOpenError('invalid server address')
self.connection._deferred_open = False
self._start_listen()
# self.connection.do_auto_bind()
if log_enabled(NETWORK):
log(NETWORK, 'connection open for <%s>', self.connection)
def close(self):
"""
Close connection
"""
if log_enabled(NETWORK):
log(NETWORK, 'closing connection for <%s>', self.connection)
if self.connection.lazy and not self.connection._executing_deferred and (self.connection._deferred_bind or self.connection._deferred_open):
self.connection.listening = False
self.connection.closed = True
if log_enabled(NETWORK):
log(NETWORK, 'deferred connection closed for <%s>', self.connection)
else:
if not self.connection.closed:
self._stop_listen()
if not self.no_real_dsa:
self._close_socket()
if log_enabled(NETWORK):
log(NETWORK, 'connection closed for <%s>', self.connection)
self.connection.bound = False
self.connection.request = None
self.connection.response = None
self.connection.tls_started = False
self._outstanding = None
self._referrals = []
if not self.connection.strategy.no_real_dsa:
self.connection.server.current_address = None
if self.connection.usage:
self.connection._usage.stop()
def _open_socket(self, address, use_ssl=False, unix_socket=False):
"""
Tries to open and connect a socket to a Server
raise LDAPExceptionError if unable to open or connect socket
"""
try:
self.connection.socket = socket.socket(*address[:3])
except Exception as e:
self.connection.last_error = 'socket creation error: ' + str(e)
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
# raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
try: # set socket timeout for opening connection
if self.connection.server.connect_timeout:
self.connection.socket.settimeout(self.connection.server.connect_timeout)
self.connection.socket.connect(address[4])
except socket.error as e:
self.connection.last_error = 'socket connection error while opening: ' + str(e)
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
# raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
# Set connection recv timeout (must be set after connect,
# because socket.settimeout() affects both, connect() as
# well as recv(). Set it before tls.wrap_socket() because
# the recv timeout should take effect during the TLS
# handshake.
if self.connection.receive_timeout is not None:
try: # set receive timeout for the connection socket
self.connection.socket.settimeout(self.connection.receive_timeout)
if system().lower() == 'windows':
self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * self.connection.receive_timeout))
else:
self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', self.connection.receive_timeout, 0))
except socket.error as e:
self.connection.last_error = 'unable to set receive timeout for socket connection: ' + str(e)
# if exc:
# if log_enabled(ERROR):
# log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
# raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
if use_ssl:
try:
self.connection.server.tls.wrap_socket(self.connection, do_handshake=True)
if self.connection.usage:
self.connection._usage.wrapped_sockets += 1
except Exception as e:
self.connection.last_error = 'socket ssl wrapping error: ' + str(e)
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
# raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
if self.connection.usage:
self.connection._usage.open_sockets += 1
self.connection.closed = False
def _close_socket(self):
"""
Try to close a socket
don't raise exception if unable to close socket, assume socket is already closed
"""
try:
self.connection.socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
try:
self.connection.socket.close()
except Exception:
pass
self.connection.socket = None
self.connection.closed = True
if self.connection.usage:
self.connection._usage.closed_sockets += 1
def _stop_listen(self):
self.connection.listening = False
def send(self, message_type, request, controls=None):
"""
Send an LDAP message
Returns the message_id
"""
self.connection.request = None
if self.connection.listening:
if self.connection.sasl_in_progress and message_type not in ['bindRequest']: # as per RFC4511 (4.2.1)
self.connection.last_error = 'cannot send operation requests while SASL bind is in progress'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSASLBindInProgressError(self.connection.last_error)
message_id = self.connection.server.next_message_id()
ldap_message = LDAPMessage()
ldap_message['messageID'] = MessageID(message_id)
ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
message_controls = build_controls_list(controls)
if message_controls is not None:
ldap_message['controls'] = message_controls
self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
self._outstanding[message_id] = self.connection.request
self.sending(ldap_message)
else:
self.connection.last_error = 'unable to send message, socket is not open'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSocketOpenError(self.connection.last_error)
return message_id
def get_response(self, message_id, timeout=None, get_request=False):
"""
Get the response LDAP messages for a message_id
Responses are returned by the underlying connection strategy
Checks if the LDAP message for message_id is still outstanding and waits up to timeout for it to appear in _get_response
The result is stored in connection.result
The responses, without the result, are stored in connection.response
A tuple (responses, result) is returned
"""
conf_sleep_interval = get_config_parameter('RESPONSE_SLEEPTIME')
if timeout is None:
timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
response = None
result = None
request = None
if self._outstanding and message_id in self._outstanding:
while timeout >= 0: # waiting for completed message to appear in responses
responses = self._get_response(message_id)
if not responses:
sleep(conf_sleep_interval)
timeout -= conf_sleep_interval
continue
if responses == SESSION_TERMINATED_BY_SERVER:
try: # try to close the session but don't raise any error if server has already closed the session
self.close()
except (socket.error, LDAPExceptionError):
pass
self.connection.last_error = 'session terminated by server'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSessionTerminatedByServerError(self.connection.last_error)
elif responses == TRANSACTION_ERROR: # Novell LDAP Transaction unsolicited notification
self.connection.last_error = 'transaction error'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPTransactionError(self.connection.last_error)
# if referral in response opens a new connection to resolve referrals if requested
if responses[-2]['result'] == RESULT_REFERRAL:
if self.connection.usage:
self.connection._usage.referrals_received += 1
if self.connection.auto_referrals:
ref_response, ref_result = self.do_operation_on_referral(self._outstanding[message_id], responses[-2]['referrals'])
if ref_response is not None:
responses = ref_response + [ref_result]
responses.append(RESPONSE_COMPLETE)
elif ref_result is not None:
responses = [ref_result, RESPONSE_COMPLETE]
self._referrals = []
if responses:
result = responses[-2]
response = responses[:-2]
self.connection.result = None
self.connection.response = None
break
if timeout <= 0:
if log_enabled(ERROR):
log(ERROR, 'socket timeout, no response from server for <%s>', self.connection)
raise LDAPResponseTimeoutError('no response from server')
if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
if log_enabled(PROTOCOL):
log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
self._outstanding.pop(message_id)
self.connection.result = result.copy()
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
# checks if any response has a range tag
# self._auto_range_searching is set as a flag to avoid recursive searches
if self.connection.auto_range and not hasattr(self, '_auto_range_searching') and any((True for resp in response if 'raw_attributes' in resp for name in resp['raw_attributes'] if ';range=' in name)):
self._auto_range_searching = result.copy()
temp_response = response[:] # copy
if self.do_search_on_auto_range(self._outstanding[message_id], response):
for resp in temp_response:
if resp['type'] == 'searchResEntry':
keys = [key for key in resp['raw_attributes'] if ';range=' in key]
for key in keys:
del resp['raw_attributes'][key]
del resp['attributes'][key]
response = temp_response
result = self._auto_range_searching
del self._auto_range_searching
if self.connection.empty_attributes:
for entry in response:
if entry['type'] == 'searchResEntry':
for attribute_type in self._outstanding[message_id]['attributes']:
if attribute_type not in entry['raw_attributes'] and attribute_type not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES):
entry['raw_attributes'][attribute_type] = list()
entry['attributes'][attribute_type] = list()
if log_enabled(PROTOCOL):
log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>', attribute_type, self)
if not self.connection.auto_range:
attrs_to_remove = []
# removes original empty attribute in case a range tag is returned
for attribute_type in entry['attributes']:
if ';range' in attribute_type.lower():
orig_attr, _, _ = attribute_type.partition(';')
attrs_to_remove.append(orig_attr)
for attribute_type in attrs_to_remove:
if log_enabled(PROTOCOL):
log(PROTOCOL, 'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>', attribute_type, self)
del entry['raw_attributes'][attribute_type]
del entry['attributes'][attribute_type]
request = self._outstanding.pop(message_id)
else:
if log_enabled(ERROR):
log(ERROR, 'message id not in outstanding queue for <%s>', self.connection)
raise(LDAPResponseTimeoutError('message id not in outstanding queue'))
if get_request:
return response, result, request
else:
return response, result
@staticmethod
def compute_ldap_message_size(data):
"""
Compute LDAP Message size according to BER definite length rules
Returns -1 if there is not enough data to compute the message length
"""
if isinstance(data, str): # fix for Python 2, data is string not bytes
data = bytearray(data) # Python 2 bytearray is equivalent to Python 3 bytes
ret_value = -1
if len(data) > 2:
if data[1] <= 127: # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
ret_value = data[1] + 2
else: # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length
bytes_length = data[1] - 128
if len(data) >= bytes_length + 2:
value_length = 0
cont = bytes_length
for byte in data[2:2 + bytes_length]:
cont -= 1
value_length += byte * (256 ** cont)
ret_value = value_length + 2 + bytes_length
return ret_value
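    # Worked example for compute_ldap_message_size (illustrative byte values):
    #   short form: data = b'\x30\x0c...'         -> data[1] = 0x0c (<= 127),
    #               message size = 0x0c + 2 = 14 (2 header octets + 12 value octets)
    #   long form:  data = b'\x30\x82\x01\xf4...' -> data[1] = 0x82, so 2 length octets follow,
    #               value length = 0x01 * 256 + 0xf4 = 500,
    #               message size = 500 + 2 + 2 = 504 octets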
def decode_response(self, ldap_message):
"""
Convert received LDAPMessage to a dict
"""
message_type = ldap_message.getComponentByName('protocolOp').getName()
component = ldap_message['protocolOp'].getComponent()
controls = ldap_message['controls']
if message_type == 'bindResponse':
if not bytes(component['matchedDN']).startswith(b'NTLM'): # patch for microsoft ntlm authentication
result = bind_response_to_dict(component)
else:
result = sicily_bind_response_to_dict(component)
elif message_type == 'searchResEntry':
result = search_result_entry_response_to_dict(component, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
elif message_type == 'searchResDone':
result = search_result_done_response_to_dict(component)
elif message_type == 'searchResRef':
result = search_result_reference_response_to_dict(component)
elif message_type == 'modifyResponse':
result = modify_response_to_dict(component)
elif message_type == 'addResponse':
result = add_response_to_dict(component)
elif message_type == 'delResponse':
result = delete_response_to_dict(component)
elif message_type == 'modDNResponse':
result = modify_dn_response_to_dict(component)
elif message_type == 'compareResponse':
result = compare_response_to_dict(component)
elif message_type == 'extendedResp':
result = extended_response_to_dict(component)
elif message_type == 'intermediateResponse':
result = intermediate_response_to_dict(component)
else:
if log_enabled(ERROR):
log(ERROR, 'unknown response <%s> for <%s>', message_type, self.connection)
raise LDAPUnknownResponseError('unknown response')
result['type'] = message_type
if controls:
result['controls'] = dict()
for control in controls:
decoded_control = self.decode_control(control)
result['controls'][decoded_control[0]] = decoded_control[1]
return result
def decode_response_fast(self, ldap_message):
"""
Convert received LDAPMessage from fast ber decoder to a dict
"""
if ldap_message['protocolOp'] == 1: # bindResponse
if not ldap_message['payload'][1][3].startswith(b'NTLM'): # patch for microsoft ntlm authentication
result = bind_response_to_dict_fast(ldap_message['payload'])
else:
result = sicily_bind_response_to_dict_fast(ldap_message['payload'])
result['type'] = 'bindResponse'
elif ldap_message['protocolOp'] == 4: # searchResEntry'
result = search_result_entry_response_to_dict_fast(ldap_message['payload'], self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
result['type'] = 'searchResEntry'
elif ldap_message['protocolOp'] == 5: # searchResDone
result = ldap_result_to_dict_fast(ldap_message['payload'])
result['type'] = 'searchResDone'
elif ldap_message['protocolOp'] == 19: # searchResRef
result = search_result_reference_response_to_dict_fast(ldap_message['payload'])
result['type'] = 'searchResRef'
elif ldap_message['protocolOp'] == 7: # modifyResponse
result = ldap_result_to_dict_fast(ldap_message['payload'])
result['type'] = 'modifyResponse'
elif ldap_message['protocolOp'] == 9: # addResponse
result = ldap_result_to_dict_fast(ldap_message['payload'])
result['type'] = 'addResponse'
elif ldap_message['protocolOp'] == 11: # delResponse
result = ldap_result_to_dict_fast(ldap_message['payload'])
result['type'] = 'delResponse'
elif ldap_message['protocolOp'] == 13: # modDNResponse
result = ldap_result_to_dict_fast(ldap_message['payload'])
result['type'] = 'modDNResponse'
elif ldap_message['protocolOp'] == 15: # compareResponse
result = ldap_result_to_dict_fast(ldap_message['payload'])
result['type'] = 'compareResponse'
elif ldap_message['protocolOp'] == 24: # extendedResp
result = extended_response_to_dict_fast(ldap_message['payload'])
result['type'] = 'extendedResp'
elif ldap_message['protocolOp'] == 25: # intermediateResponse
result = intermediate_response_to_dict_fast(ldap_message['payload'])
result['type'] = 'intermediateResponse'
else:
if log_enabled(ERROR):
log(ERROR, 'unknown response <%s> for <%s>', ldap_message['protocolOp'], self.connection)
raise LDAPUnknownResponseError('unknown response')
if ldap_message['controls']:
result['controls'] = dict()
for control in ldap_message['controls']:
decoded_control = self.decode_control_fast(control[3])
result['controls'][decoded_control[0]] = decoded_control[1]
return result
@staticmethod
def decode_control(control):
"""
decode control, return a 2-element tuple where the first element is the control oid
and the second element is a dictionary with description (from Oids), criticality and decoded control value
"""
control_type = str(control['controlType'])
criticality = bool(control['criticality'])
control_value = bytes(control['controlValue'])
unprocessed = None
if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696
control_resp, unprocessed = decoder.decode(control_value, asn1Spec=RealSearchControlValue())
control_value = dict()
control_value['size'] = int(control_resp['size'])
control_value['cookie'] = bytes(control_resp['cookie'])
elif control_type == '1.2.840.113556.1.4.841': # DirSync AD
control_resp, unprocessed = decoder.decode(control_value, asn1Spec=DirSyncControlResponseValue())
control_value = dict()
control_value['more_results'] = bool(control_resp['MoreResults']) # more_result if nonzero
control_value['cookie'] = bytes(control_resp['CookieServer'])
elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527
control_resp, unprocessed = decoder.decode(control_value, asn1Spec=SearchResultEntry())
control_value = dict()
control_value['result'] = attributes_to_dict(control_resp['attributes'])
if unprocessed:
if log_enabled(ERROR):
log(ERROR, 'unprocessed control response in substrate')
raise LDAPControlError('unprocessed control response in substrate')
return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
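    # Shape of the tuple returned by decode_control for a simple paged results
    # response control (RFC 2696), shown for illustration; the cookie is an opaque
    # placeholder and the description text comes from the Oids table:
    #   ('1.2.840.113556.1.4.319',
    #    {'description': <text from Oids>, 'criticality': False,
    #     'value': {'size': 0, 'cookie': b'<server cookie>'}})
    # After decode_response() this is typically reached as
    #   result['controls']['1.2.840.113556.1.4.319']['value']['cookie']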
@staticmethod
def decode_control_fast(control):
"""
decode control, return a 2-element tuple where the first element is the control oid
and the second element is a dictionary with description (from Oids), criticality and decoded control value
"""
control_type = str(to_unicode(control[0][3], from_server=True))
criticality = False
control_value = None
for r in control[1:]:
if r[2] == 4: # controlValue
control_value = r[3]
else:
criticality = False if r[3] == 0 else True # criticality (boolean, defaults to False)
if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696
control_resp = decode_sequence(control_value, 0, len(control_value))
control_value = dict()
control_value['size'] = int(control_resp[0][3][0][3])
control_value['cookie'] = bytes(control_resp[0][3][1][3])
elif control_type == '1.2.840.113556.1.4.841': # DirSync AD
control_resp = decode_sequence(control_value, 0, len(control_value))
control_value = dict()
control_value['more_results'] = True if control_resp[0][3][0][3] else False # more_result if nonzero
control_value['cookie'] = control_resp[0][3][2][3]
elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527
control_resp = decode_sequence(control_value, 0, len(control_value))
control_value = dict()
control_value['result'] = attributes_to_dict_fast(control_resp[0][3][1][3])
return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
@staticmethod
def decode_request(message_type, component, controls=None):
# message_type = ldap_message.getComponentByName('protocolOp').getName()
# component = ldap_message['protocolOp'].getComponent()
if message_type == 'bindRequest':
result = bind_request_to_dict(component)
elif message_type == 'unbindRequest':
result = dict()
elif message_type == 'addRequest':
result = add_request_to_dict(component)
elif message_type == 'compareRequest':
result = compare_request_to_dict(component)
elif message_type == 'delRequest':
result = delete_request_to_dict(component)
elif message_type == 'extendedReq':
result = extended_request_to_dict(component)
elif message_type == 'modifyRequest':
result = modify_request_to_dict(component)
elif message_type == 'modDNRequest':
result = modify_dn_request_to_dict(component)
elif message_type == 'searchRequest':
result = search_request_to_dict(component)
elif message_type == 'abandonRequest':
result = abandon_request_to_dict(component)
else:
if log_enabled(ERROR):
log(ERROR, 'unknown request <%s>', message_type)
raise LDAPUnknownRequestError('unknown request')
result['type'] = message_type
result['controls'] = controls
return result
def valid_referral_list(self, referrals):
referral_list = []
for referral in referrals:
candidate_referral = parse_uri(referral)
if candidate_referral:
for ref_host in self.connection.server.allowed_referral_hosts:
if ref_host[0] == candidate_referral['host'] or ref_host[0] == '*':
if candidate_referral['host'] not in self._referrals:
candidate_referral['anonymousBindOnly'] = not ref_host[1]
referral_list.append(candidate_referral)
break
return referral_list
def do_next_range_search(self, request, response, attr_name):
done = False
current_response = response
while not done:
attr_type, _, returned_range = attr_name.partition(';range=')
_, _, high_range = returned_range.partition('-')
response['raw_attributes'][attr_type] += current_response['raw_attributes'][attr_name]
response['attributes'][attr_type] += current_response['attributes'][attr_name]
if high_range != '*':
if log_enabled(PROTOCOL):
log(PROTOCOL, 'performing next search on auto-range <%s> via <%s>', str(int(high_range) + 1), self.connection)
requested_range = attr_type + ';range=' + str(int(high_range) + 1) + '-*'
result = self.connection.search(search_base=response['dn'],
search_filter='(objectclass=*)',
search_scope=BASE,
dereference_aliases=request['dereferenceAlias'],
attributes=[attr_type + ';range=' + str(int(high_range) + 1) + '-*'])
if isinstance(result, bool):
if result:
current_response = self.connection.response[0]
else:
done = True
else:
current_response, _ = self.get_response(result)
current_response = current_response[0]
if not done:
if requested_range in current_response['raw_attributes'] and len(current_response['raw_attributes'][requested_range]) == 0:
del current_response['raw_attributes'][requested_range]
del current_response['attributes'][requested_range]
attr_name = list(filter(lambda a: ';range=' in a, current_response['raw_attributes'].keys()))[0]
continue
done = True
def do_search_on_auto_range(self, request, response):
for resp in [r for r in response if r['type'] == 'searchResEntry']:
for attr_name in list(resp['raw_attributes'].keys()): # generate list to avoid changing of dict size error
if ';range=' in attr_name:
attr_type, _, range_values = attr_name.partition(';range=')
if range_values in ('1-1', '0-0'): # DirSync returns these values for adding and removing members
return False
if attr_type not in resp['raw_attributes'] or resp['raw_attributes'][attr_type] is None:
resp['raw_attributes'][attr_type] = list()
if attr_type not in resp['attributes'] or resp['attributes'][attr_type] is None:
resp['attributes'][attr_type] = list()
self.do_next_range_search(request, resp, attr_name)
return True
def do_operation_on_referral(self, request, referrals):
if log_enabled(PROTOCOL):
log(PROTOCOL, 'following referral for <%s>', self.connection)
valid_referral_list = self.valid_referral_list(referrals)
if valid_referral_list:
preferred_referral_list = [referral for referral in valid_referral_list if referral['ssl'] == self.connection.server.ssl]
selected_referral = choice(preferred_referral_list) if preferred_referral_list else choice(valid_referral_list)
cachekey = (selected_referral['host'], selected_referral['port'] or self.connection.server.port, selected_referral['ssl'])
if self.connection.use_referral_cache and cachekey in self.referral_cache:
referral_connection = self.referral_cache[cachekey]
else:
referral_server = Server(host=selected_referral['host'],
port=selected_referral['port'] or self.connection.server.port,
use_ssl=selected_referral['ssl'],
get_info=self.connection.server.get_info,
formatter=self.connection.server.custom_formatter,
connect_timeout=self.connection.server.connect_timeout,
mode=self.connection.server.mode,
allowed_referral_hosts=self.connection.server.allowed_referral_hosts,
tls=Tls(local_private_key_file=self.connection.server.tls.private_key_file,
local_certificate_file=self.connection.server.tls.certificate_file,
validate=self.connection.server.tls.validate,
version=self.connection.server.tls.version,
ca_certs_file=self.connection.server.tls.ca_certs_file) if selected_referral['ssl'] else None)
from ..core.connection import Connection
referral_connection = Connection(server=referral_server,
user=self.connection.user if not selected_referral['anonymousBindOnly'] else None,
password=self.connection.password if not selected_referral['anonymousBindOnly'] else None,
version=self.connection.version,
authentication=self.connection.authentication if not selected_referral['anonymousBindOnly'] else ANONYMOUS,
client_strategy=SYNC,
auto_referrals=True,
read_only=self.connection.read_only,
check_names=self.connection.check_names,
raise_exceptions=self.connection.raise_exceptions,
fast_decoder=self.connection.fast_decoder,
receive_timeout=self.connection.receive_timeout,
sasl_mechanism=self.connection.sasl_mechanism,
sasl_credentials=self.connection.sasl_credentials)
if self.connection.usage:
self.connection._usage.referrals_connections += 1
referral_connection.open()
referral_connection.strategy._referrals = self._referrals
if self.connection.tls_started and not referral_server.ssl: # if the original server was in start_tls mode and the referral server is not in ssl then start_tls on the referral connection
referral_connection.start_tls()
if self.connection.bound:
referral_connection.bind()
if self.connection.usage:
self.connection._usage.referrals_followed += 1
if request['type'] == 'searchRequest':
referral_connection.search(selected_referral['base'] or request['base'],
selected_referral['filter'] or request['filter'],
selected_referral['scope'] or request['scope'],
request['dereferenceAlias'],
selected_referral['attributes'] or request['attributes'],
request['sizeLimit'],
request['timeLimit'],
request['typesOnly'],
controls=request['controls'])
elif request['type'] == 'addRequest':
referral_connection.add(selected_referral['base'] or request['entry'],
None,
request['attributes'],
controls=request['controls'])
elif request['type'] == 'compareRequest':
referral_connection.compare(selected_referral['base'] or request['entry'],
request['attribute'],
request['value'],
controls=request['controls'])
elif request['type'] == 'delRequest':
referral_connection.delete(selected_referral['base'] or request['entry'],
controls=request['controls'])
elif request['type'] == 'extendedReq':
referral_connection.extended(request['name'],
request['value'],
controls=request['controls'],
no_encode=True
)
elif request['type'] == 'modifyRequest':
referral_connection.modify(selected_referral['base'] or request['entry'],
prepare_changes_for_request(request['changes']),
controls=request['controls'])
elif request['type'] == 'modDNRequest':
referral_connection.modify_dn(selected_referral['base'] or request['entry'],
request['newRdn'],
request['deleteOldRdn'],
request['newSuperior'],
controls=request['controls'])
else:
self.connection.last_error = 'referral operation not permitted'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPReferralError(self.connection.last_error)
response = referral_connection.response
result = referral_connection.result
if self.connection.use_referral_cache:
self.referral_cache[cachekey] = referral_connection
else:
referral_connection.unbind()
else:
response = None
result = None
return response, result
def sending(self, ldap_message):
if log_enabled(NETWORK):
log(NETWORK, 'sending 1 ldap message for <%s>', self.connection)
try:
encoded_message = encode(ldap_message)
self.connection.socket.sendall(encoded_message)
if log_enabled(EXTENDED):
log(EXTENDED, 'ldap message sent via <%s>:%s', self.connection, format_ldap_message(ldap_message, '>>'))
if log_enabled(NETWORK):
log(NETWORK, 'sent %d bytes via <%s>', len(encoded_message), self.connection)
except socket.error as e:
self.connection.last_error = 'socket sending error: ' + str(e)
encoded_message = None
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
# raise communication_exception_factory(LDAPSocketSendError, exc)(self.connection.last_error)
raise communication_exception_factory(LDAPSocketSendError, type(e)(str(e)))(self.connection.last_error)
if self.connection.usage:
self.connection._usage.update_transmitted_message(self.connection.request, len(encoded_message))
def _start_listen(self):
# overridden in strategy class
raise NotImplementedError
def _get_response(self, message_id):
# overridden in strategy class
raise NotImplementedError
def receiving(self):
# overridden in strategy class
raise NotImplementedError
def post_send_single_response(self, message_id):
# overridden in strategy class
raise NotImplementedError
def post_send_search(self, message_id):
# overridden in strategy class
raise NotImplementedError
def get_stream(self):
raise NotImplementedError
def set_stream(self, value):
raise NotImplementedError
def unbind_referral_cache(self):
while len(self.referral_cache) > 0:
cachekey, referral_connection = self.referral_cache.popitem()
referral_connection.unbind()

View File

@ -0,0 +1,148 @@
"""
"""
# Created on 2013.07.15
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from io import StringIO
from os import linesep
import random
from ..core.exceptions import LDAPLDIFError
from ..utils.conv import prepare_for_stream
from ..protocol.rfc4511 import LDAPMessage, MessageID, ProtocolOp, LDAP_MAX_INT
from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
from ..protocol.convert import build_controls_list
from .base import BaseStrategy
class LdifProducerStrategy(BaseStrategy):
"""
This strategy is used to create the LDIF stream for the Add, Delete, Modify, ModifyDn operations.
You send the request and get back the ldif-change representation of the operation.
NO OPERATION IS SENT TO THE LDAP SERVER!
Connection.request will contain the request LDAP message in a dict form
Connection.response will contain the ldif-change format of the requested operation if available
You don't need a real server to connect to for this strategy
"""
def __init__(self, ldap_connection):
BaseStrategy.__init__(self, ldap_connection)
self.sync = True
self.no_real_dsa = True
self.pooled = False
self.can_stream = True
self.line_separator = linesep
self.all_base64 = False
self.stream = None
self.order = dict()
self._header_added = False
random.seed()
def _open_socket(self, address, use_ssl=False, unix_socket=False): # fake open socket
self.connection.socket = NotImplemented # placeholder for a dummy socket
if self.connection.usage:
self.connection._usage.open_sockets += 1
self.connection.closed = False
def _close_socket(self):
if self.connection.usage:
self.connection._usage.closed_sockets += 1
self.connection.socket = None
self.connection.closed = True
def _start_listen(self):
self.connection.listening = True
self.connection.closed = False
self._header_added = False
if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed):
self.set_stream(StringIO())
def _stop_listen(self):
self.stream.close()
self.connection.listening = False
self.connection.closed = True
def receiving(self):
return None
def send(self, message_type, request, controls=None):
"""
Build the LDAPMessage without sending to server
"""
message_id = random.randint(0, LDAP_MAX_INT)
ldap_message = LDAPMessage()
ldap_message['messageID'] = MessageID(message_id)
ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
message_controls = build_controls_list(controls)
if message_controls is not None:
ldap_message['controls'] = message_controls
self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
self.connection.request['controls'] = controls
self._outstanding[message_id] = self.connection.request
return message_id
def post_send_single_response(self, message_id):
self.connection.response = None
self.connection.result = None
if self._outstanding and message_id in self._outstanding:
request = self._outstanding.pop(message_id)
ldif_lines = operation_to_ldif(self.connection.request['type'], request, self.all_base64, self.order.get(self.connection.request['type']))
if self.stream and ldif_lines and not self.connection.closed:
self.accumulate_stream(self.line_separator.join(ldif_lines))
ldif_lines = add_ldif_header(ldif_lines)
self.connection.response = self.line_separator.join(ldif_lines)
return self.connection.response
return None
def post_send_search(self, message_id):
raise LDAPLDIFError('LDIF-CONTENT cannot be produced for Search operations')
def _get_response(self, message_id):
pass
def accumulate_stream(self, fragment):
if not self._header_added and self.stream.tell() == 0:
self._header_added = True
header = add_ldif_header(['-'])[0]
self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator))
def get_stream(self):
return self.stream
def set_stream(self, value):
error = False
try:
if not value.writable():
error = True
except (ValueError, AttributeError):
error = True
if error:
raise LDAPLDIFError('stream must be writable')
self.stream = value

View File

@ -0,0 +1,200 @@
"""
"""
# Created on 2016.04.30
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .. import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES
from .mockBase import MockBaseStrategy
from .asynchronous import AsyncStrategy
from ..operation.search import search_result_done_response_to_dict, search_result_entry_response_to_dict
from ..core.results import DO_NOT_RAISE_EXCEPTIONS
from ..utils.log import log, log_enabled, ERROR, PROTOCOL
from ..core.exceptions import LDAPResponseTimeoutError, LDAPOperationResult
from ..operation.bind import bind_response_to_dict
from ..operation.delete import delete_response_to_dict
from ..operation.add import add_response_to_dict
from ..operation.compare import compare_response_to_dict
from ..operation.modifyDn import modify_dn_response_to_dict
from ..operation.modify import modify_response_to_dict
from ..operation.extended import extended_response_to_dict
# LDAPResult ::= SEQUENCE {
# resultCode ENUMERATED {
# success (0),
# operationsError (1),
# protocolError (2),
# timeLimitExceeded (3),
# sizeLimitExceeded (4),
# compareFalse (5),
# compareTrue (6),
# authMethodNotSupported (7),
# strongerAuthRequired (8),
# -- 9 reserved --
# referral (10),
# adminLimitExceeded (11),
# unavailableCriticalExtension (12),
# confidentialityRequired (13),
# saslBindInProgress (14),
# noSuchAttribute (16),
# undefinedAttributeType (17),
# inappropriateMatching (18),
# constraintViolation (19),
# attributeOrValueExists (20),
# invalidAttributeSyntax (21),
# -- 22-31 unused --
# noSuchObject (32),
# aliasProblem (33),
# invalidDNSyntax (34),
# -- 35 reserved for undefined isLeaf --
# aliasDereferencingProblem (36),
# -- 37-47 unused --
# inappropriateAuthentication (48),
# invalidCredentials (49),
# insufficientAccessRights (50),
# busy (51),
# unavailable (52),
# unwillingToPerform (53),
# loopDetect (54),
# -- 55-63 unused --
# namingViolation (64),
# objectClassViolation (65),
# notAllowedOnNonLeaf (66),
# notAllowedOnRDN (67),
# entryAlreadyExists (68),
# objectClassModsProhibited (69),
# -- 70 reserved for CLDAP --
# affectsMultipleDSAs (71),
# -- 72-79 unused --
# other (80),
# ... },
# matchedDN LDAPDN,
# diagnosticMessage LDAPString,
# referral [3] Referral OPTIONAL }
class MockAsyncStrategy(MockBaseStrategy, AsyncStrategy): # class inheritance sequence is important, MockBaseStrategy must be the first one
"""
This strategy creates a mock LDAP server with asynchronous access
It can be useful for testing LDAP code without accessing a real server
"""
def __init__(self, ldap_connection):
AsyncStrategy.__init__(self, ldap_connection)
MockBaseStrategy.__init__(self)
#outstanding = dict() # a dictionary with the message id as key and a tuple (result, response) as value
def post_send_search(self, payload):
message_id, message_type, request, controls = payload
async_response = []
async_result = dict()
if message_type == 'searchRequest':
responses, result = self.mock_search(request, controls)
result['type'] = 'searchResDone'
for entry in responses:
response = search_result_entry_response_to_dict(entry, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
response['type'] = 'searchResEntry'
if self.connection.empty_attributes:
for attribute_type in request['attributes']:
attribute_name = str(attribute_type)
if attribute_name not in response['raw_attributes'] and attribute_name not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES):
response['raw_attributes'][attribute_name] = list()
response['attributes'][attribute_name] = list()
if log_enabled(PROTOCOL):
log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>',
attribute_type, self)
if not self.connection.auto_range:
attrs_to_remove = []
# removes original empty attribute in case a range tag is returned
for attribute_type in response['attributes']:
attribute_name = str(attribute_type)
if ';range' in attribute_name.lower():
orig_attr, _, _ = attribute_name.partition(';')
attrs_to_remove.append(orig_attr)
for attribute_type in attrs_to_remove:
if log_enabled(PROTOCOL):
log(PROTOCOL,
'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>',
attribute_type, self)
del response['raw_attributes'][attribute_type]
del response['attributes'][attribute_type]
async_response.append(response)
async_result = search_result_done_response_to_dict(result)
async_result['type'] = 'searchResDone'
self._responses[message_id] = (request, async_result, async_response)
return message_id
def post_send_single_response(self, payload): # payload is a tuple sent by self.send() made of message_type, request, controls
message_id, message_type, request, controls = payload
responses = []
result = None
if message_type == 'bindRequest':
result = bind_response_to_dict(self.mock_bind(request, controls))
result['type'] = 'bindResponse'
elif message_type == 'unbindRequest':
self.bound = None
elif message_type == 'abandonRequest':
pass
elif message_type == 'delRequest':
result = delete_response_to_dict(self.mock_delete(request, controls))
result['type'] = 'delResponse'
elif message_type == 'addRequest':
result = add_response_to_dict(self.mock_add(request, controls))
result['type'] = 'addResponse'
elif message_type == 'compareRequest':
result = compare_response_to_dict(self.mock_compare(request, controls))
result['type'] = 'compareResponse'
elif message_type == 'modDNRequest':
result = modify_dn_response_to_dict(self.mock_modify_dn(request, controls))
result['type'] = 'modDNResponse'
elif message_type == 'modifyRequest':
result = modify_response_to_dict(self.mock_modify(request, controls))
result['type'] = 'modifyResponse'
elif message_type == 'extendedReq':
result = extended_response_to_dict(self.mock_extended(request, controls))
result['type'] = 'extendedResp'
responses.append(result)
if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
if log_enabled(PROTOCOL):
log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
self._responses[message_id] = (request, result, responses)
return message_id
def get_response(self, message_id, timeout=None, get_request=False):
if message_id in self._responses:
request, result, response = self._responses.pop(message_id)
else:
raise LDAPResponseTimeoutError('message id not in outstanding queue')
if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
if log_enabled(PROTOCOL):
log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
if get_request:
return response, result, request
else:
return response, result

View File

@ -0,0 +1,901 @@
"""
"""
# Created on 2016.04.30
#
# Author: Giovanni Cannata
#
# Copyright 2016 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import json
import re
from threading import Lock
from random import SystemRandom
from pyasn1.type.univ import OctetString
from .. import SEQUENCE_TYPES, ALL_ATTRIBUTES
from ..operation.bind import bind_request_to_dict
from ..operation.delete import delete_request_to_dict
from ..operation.add import add_request_to_dict
from ..operation.compare import compare_request_to_dict
from ..operation.modifyDn import modify_dn_request_to_dict
from ..operation.modify import modify_request_to_dict
from ..operation.extended import extended_request_to_dict
from ..operation.search import search_request_to_dict, parse_filter, ROOT, AND, OR, NOT, MATCH_APPROX, \
MATCH_GREATER_OR_EQUAL, MATCH_LESS_OR_EQUAL, MATCH_EXTENSIBLE, MATCH_PRESENT,\
MATCH_SUBSTRING, MATCH_EQUAL
from ..utils.conv import json_hook, to_unicode, to_raw
from ..core.exceptions import LDAPDefinitionError, LDAPPasswordIsMandatoryError, LDAPInvalidValueError, LDAPSocketOpenError
from ..core.results import RESULT_SUCCESS, RESULT_OPERATIONS_ERROR, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \
RESULT_INVALID_CREDENTIALS, RESULT_NO_SUCH_OBJECT, RESULT_ENTRY_ALREADY_EXISTS, RESULT_COMPARE_TRUE, \
RESULT_COMPARE_FALSE, RESULT_NO_SUCH_ATTRIBUTE, RESULT_UNWILLING_TO_PERFORM
from ..utils.ciDict import CaseInsensitiveDict
from ..utils.dn import to_dn, safe_dn, safe_rdn
from ..protocol.sasl.sasl import validate_simple_password
from ..protocol.formatters.standard import find_attribute_validator, format_attribute_values
from ..protocol.rfc2696 import paged_search_control
from ..utils.log import log, log_enabled, ERROR, BASIC
from ..utils.asn1 import encode
from ..utils.conv import ldap_escape_to_bytes
from ..strategy.base import BaseStrategy # needed for decode_control() method
from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID
from ..protocol.convert import build_controls_list
# LDAPResult ::= SEQUENCE {
# resultCode ENUMERATED {
# success (0),
# operationsError (1),
# protocolError (2),
# timeLimitExceeded (3),
# sizeLimitExceeded (4),
# compareFalse (5),
# compareTrue (6),
# authMethodNotSupported (7),
# strongerAuthRequired (8),
# -- 9 reserved --
# referral (10),
# adminLimitExceeded (11),
# unavailableCriticalExtension (12),
# confidentialityRequired (13),
# saslBindInProgress (14),
# noSuchAttribute (16),
# undefinedAttributeType (17),
# inappropriateMatching (18),
# constraintViolation (19),
# attributeOrValueExists (20),
# invalidAttributeSyntax (21),
# -- 22-31 unused --
# noSuchObject (32),
# aliasProblem (33),
# invalidDNSyntax (34),
# -- 35 reserved for undefined isLeaf --
# aliasDereferencingProblem (36),
# -- 37-47 unused --
# inappropriateAuthentication (48),
# invalidCredentials (49),
# insufficientAccessRights (50),
# busy (51),
# unavailable (52),
# unwillingToPerform (53),
# loopDetect (54),
# -- 55-63 unused --
# namingViolation (64),
# objectClassViolation (65),
# notAllowedOnNonLeaf (66),
# notAllowedOnRDN (67),
# entryAlreadyExists (68),
# objectClassModsProhibited (69),
# -- 70 reserved for CLDAP --
# affectsMultipleDSAs (71),
# -- 72-79 unused --
# other (80),
# ... },
# matchedDN LDAPDN,
# diagnosticMessage LDAPString,
# referral [3] Referral OPTIONAL }
# noinspection PyProtectedMember,PyUnresolvedReferences
SEARCH_CONTROLS = ['1.2.840.113556.1.4.319' # simple paged search [RFC 2696]
]
SERVER_ENCODING = 'utf-8'
def random_cookie():
return to_raw(SystemRandom().random())[-6:]
class PagedSearchSet(object):
def __init__(self, response, size, criticality):
self.size = size
self.response = response
self.cookie = None
self.sent = 0
self.done = False
def next(self, size=None):
if size:
self.size = size
message = ''
response = self.response[self.sent: self.sent + self.size]
self.sent += self.size
if self.sent > len(self.response):
self.done = True
self.cookie = ''
else:
self.cookie = random_cookie()
response_control = paged_search_control(False, len(self.response), self.cookie)
result = {'resultCode': RESULT_SUCCESS,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None,
'controls': [BaseStrategy.decode_control(response_control)]
}
return response, result
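# Illustrative flow: mock_search() below creates a PagedSearchSet when a search request carries
# the simple paged results control (1.2.840.113556.1.4.319) with an empty cookie, returns the
# first 'size' entries together with a random cookie, and serves the remaining entries on later
# requests presenting the same cookie; the set is dropped once every entry has been returned.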
class MockBaseStrategy(object):
"""
Base class for connection strategy
"""
def __init__(self):
if not hasattr(self.connection.server, 'dit'): # create entries dict if not already present
self.connection.server.dit = CaseInsensitiveDict()
self.entries = self.connection.server.dit # for simpler reference
self.no_real_dsa = True
self.bound = None
self.custom_validators = None
self.operational_attributes = ['entryDN']
self.add_entry('cn=schema', [], validate=False) # add default entry for schema
self._paged_sets = [] # list of paged search in progress
if log_enabled(BASIC):
log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
def _start_listen(self):
self.connection.listening = True
self.connection.closed = False
if self.connection.usage:
self.connection._usage.open_sockets += 1
def _stop_listen(self):
self.connection.listening = False
self.connection.closed = True
if self.connection.usage:
self.connection._usage.closed_sockets += 1
def _prepare_value(self, attribute_type, value, validate=True):
"""
Prepare a value for being stored in the mock DIT
:param value: object to store
:return: raw value to store in the DIT
"""
if validate: # values are not validated when loading from a json dump
validator = find_attribute_validator(self.connection.server.schema, attribute_type, self.custom_validators)
validated = validator(value)
if validated is False:
raise LDAPInvalidValueError('value not valid for attribute \'%s\'' % attribute_type)
elif validated is not True: # a valid LDAP value equivalent to the actual value
value = validated
raw_value = to_raw(value)
if not isinstance(raw_value, bytes):
raise LDAPInvalidValueError('The value "%s" of type %s for "%s" must be bytes or an offline schema needs to be provided when Mock strategy is used.' % (
value,
type(value),
attribute_type,
))
return raw_value
def _update_attribute(self, dn, attribute_type, value):
pass
def add_entry(self, dn, attributes, validate=True):
with self.connection.server.dit_lock:
escaped_dn = safe_dn(dn)
if escaped_dn not in self.connection.server.dit:
new_entry = CaseInsensitiveDict()
for attribute in attributes:
if attribute in self.operational_attributes: # no restore of operational attributes, should be computed at runtime
continue
if not isinstance(attributes[attribute], SEQUENCE_TYPES): # entry attributes are always lists of bytes values
attributes[attribute] = [attributes[attribute]]
if self.connection.server.schema and self.connection.server.schema.attribute_types[attribute].single_value and len(attributes[attribute]) > 1: # multiple values in single-valued attribute
return False
if attribute.lower() == 'objectclass' and self.connection.server.schema: # builds the objectClass hierarchy only if schema is present
class_set = set()
for object_class in attributes['objectClass']:
if self.connection.server.schema.object_classes and object_class not in self.connection.server.schema.object_classes:
return False
# walks up the class hierarchy and builds a set of all classes in it
class_set.add(object_class)
class_set_size = 0
while class_set_size != len(class_set):
new_classes = set()
class_set_size = len(class_set)
for class_name in class_set:
if self.connection.server.schema.object_classes[class_name].superior:
new_classes.update(self.connection.server.schema.object_classes[class_name].superior)
class_set.update(new_classes)
new_entry['objectClass'] = [to_raw(value) for value in class_set]
else:
new_entry[attribute] = [self._prepare_value(attribute, value, validate) for value in attributes[attribute]]
for rdn in safe_rdn(escaped_dn, decompose=True): # adds rdns to entry attributes
if rdn[0] not in new_entry: # if rdn attribute is missing adds attribute and its value
new_entry[rdn[0]] = [to_raw(rdn[1])]
else:
raw_rdn = to_raw(rdn[1])
if raw_rdn not in new_entry[rdn[0]]: # add rdn value if rdn attribute is present but value is missing
new_entry[rdn[0]].append(raw_rdn)
new_entry['entryDN'] = [to_raw(escaped_dn)]
self.connection.server.dit[escaped_dn] = new_entry
return True
return False
def remove_entry(self, dn):
with self.connection.server.dit_lock:
escaped_dn = safe_dn(dn)
if escaped_dn in self.connection.server.dit:
del self.connection.server.dit[escaped_dn]
return True
return False
def entries_from_json(self, json_entry_file):
target = open(json_entry_file, 'r')
definition = json.load(target, object_hook=json_hook)
if 'entries' not in definition:
self.connection.last_error = 'invalid JSON definition, missing "entries" section'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPDefinitionError(self.connection.last_error)
if not self.connection.server.dit:
self.connection.server.dit = CaseInsensitiveDict()
for entry in definition['entries']:
if 'raw' not in entry:
self.connection.last_error = 'invalid JSON definition, missing "raw" section'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPDefinitionError(self.connection.last_error)
if 'dn' not in entry:
self.connection.last_error = 'invalid JSON definition, missing "dn" section'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPDefinitionError(self.connection.last_error)
self.add_entry(entry['dn'], entry['raw'], validate=False)
target.close()
def mock_bind(self, request_message, controls):
# BindRequest ::= [APPLICATION 0] SEQUENCE {
# version INTEGER (1 .. 127),
# name LDAPDN,
# authentication AuthenticationChoice }
#
# BindResponse ::= [APPLICATION 1] SEQUENCE {
# COMPONENTS OF LDAPResult,
# serverSaslCreds [7] OCTET STRING OPTIONAL }
#
# request: version, name, authentication
# response: LDAPResult + serverSaslCreds
request = bind_request_to_dict(request_message)
identity = request['name']
if 'simple' in request['authentication']:
try:
password = validate_simple_password(request['authentication']['simple'])
except LDAPPasswordIsMandatoryError:
password = ''
identity = '<anonymous>'
else:
self.connection.last_error = 'only Simple Bind allowed in Mock strategy'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPDefinitionError(self.connection.last_error)
# checks userPassword for password. userPassword must be a text string or a list of text strings
if identity in self.connection.server.dit:
if 'userPassword' in self.connection.server.dit[identity]:
# if self.connection.server.dit[identity]['userPassword'] == password or password in self.connection.server.dit[identity]['userPassword']:
if self.equal(identity, 'userPassword', password):
result_code = RESULT_SUCCESS
message = ''
self.bound = identity
else:
result_code = RESULT_INVALID_CREDENTIALS
message = 'invalid credentials'
else: # no userPassword attribute, returns invalidCredentials
result_code = RESULT_INVALID_CREDENTIALS
message = 'missing userPassword attribute'
elif identity == '<anonymous>':
result_code = RESULT_SUCCESS
message = ''
self.bound = identity
else:
result_code = RESULT_INVALID_CREDENTIALS
message = 'missing object'
return {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None,
'serverSaslCreds': None
}
def mock_delete(self, request_message, controls):
# DelRequest ::= [APPLICATION 10] LDAPDN
#
# DelResponse ::= [APPLICATION 11] LDAPResult
#
# request: entry
# response: LDAPResult
request = delete_request_to_dict(request_message)
dn = safe_dn(request['entry'])
if dn in self.connection.server.dit:
del self.connection.server.dit[dn]
result_code = RESULT_SUCCESS
message = ''
else:
result_code = RESULT_NO_SUCH_OBJECT
message = 'object not found'
return {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
def mock_add(self, request_message, controls):
# AddRequest ::= [APPLICATION 8] SEQUENCE {
# entry LDAPDN,
# attributes AttributeList }
#
# AddResponse ::= [APPLICATION 9] LDAPResult
#
# request: entry, attributes
# response: LDAPResult
request = add_request_to_dict(request_message)
dn = safe_dn(request['entry'])
attributes = request['attributes']
# converts attributes values to bytes
if dn not in self.connection.server.dit:
if self.add_entry(dn, attributes):
result_code = RESULT_SUCCESS
message = ''
else:
result_code = RESULT_OPERATIONS_ERROR
message = 'error adding entry'
else:
result_code = RESULT_ENTRY_ALREADY_EXISTS
message = 'entry already exists'
return {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
def mock_compare(self, request_message, controls):
# CompareRequest ::= [APPLICATION 14] SEQUENCE {
# entry LDAPDN,
# ava AttributeValueAssertion }
#
# CompareResponse ::= [APPLICATION 15] LDAPResult
#
# request: entry, attribute, value
# response: LDAPResult
request = compare_request_to_dict(request_message)
dn = safe_dn(request['entry'])
attribute = request['attribute']
value = to_raw(request['value'])
if dn in self.connection.server.dit:
if attribute in self.connection.server.dit[dn]:
if self.equal(dn, attribute, value):
result_code = RESULT_COMPARE_TRUE
message = ''
else:
result_code = RESULT_COMPARE_FALSE
message = ''
else:
result_code = RESULT_NO_SUCH_ATTRIBUTE
message = 'attribute not found'
else:
result_code = RESULT_NO_SUCH_OBJECT
message = 'object not found'
return {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
def mock_modify_dn(self, request_message, controls):
# ModifyDNRequest ::= [APPLICATION 12] SEQUENCE {
# entry LDAPDN,
# newrdn RelativeLDAPDN,
# deleteoldrdn BOOLEAN,
# newSuperior [0] LDAPDN OPTIONAL }
#
# ModifyDNResponse ::= [APPLICATION 13] LDAPResult
#
# request: entry, newRdn, deleteOldRdn, newSuperior
# response: LDAPResult
request = modify_dn_request_to_dict(request_message)
dn = safe_dn(request['entry'])
new_rdn = request['newRdn']
delete_old_rdn = request['deleteOldRdn']
new_superior = safe_dn(request['newSuperior']) if request['newSuperior'] else ''
dn_components = to_dn(dn)
if dn in self.connection.server.dit:
if new_superior and new_rdn: # performs move in the DIT
new_dn = safe_dn(dn_components[0] + ',' + new_superior)
self.connection.server.dit[new_dn] = self.connection.server.dit[dn].copy()
moved_entry = self.connection.server.dit[new_dn]
if delete_old_rdn:
del self.connection.server.dit[dn]
result_code = RESULT_SUCCESS
message = 'entry moved'
moved_entry['entryDN'] = [to_raw(new_dn)]
elif new_rdn and not new_superior: # performs rename
new_dn = safe_dn(new_rdn + ',' + safe_dn(dn_components[1:]))
self.connection.server.dit[new_dn] = self.connection.server.dit[dn].copy()
renamed_entry = self.connection.server.dit[new_dn]
del self.connection.server.dit[dn]
renamed_entry['entryDN'] = [to_raw(new_dn)]
for rdn in safe_rdn(new_dn, decompose=True): # adds rdns to entry attributes
renamed_entry[rdn[0]] = [to_raw(rdn[1])]
result_code = RESULT_SUCCESS
message = 'entry rdn renamed'
else:
result_code = RESULT_UNWILLING_TO_PERFORM
message = 'newRdn or newSuperior missing'
else:
result_code = RESULT_NO_SUCH_OBJECT
message = 'object not found'
return {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
def mock_modify(self, request_message, controls):
# ModifyRequest ::= [APPLICATION 6] SEQUENCE {
# object LDAPDN,
# changes SEQUENCE OF change SEQUENCE {
# operation ENUMERATED {
# add (0),
# delete (1),
# replace (2),
# ... },
# modification PartialAttribute } }
#
# ModifyResponse ::= [APPLICATION 7] LDAPResult
#
# request: entry, changes
# response: LDAPResult
#
# changes is a list of modifications, each a dict with an 'operation' key and an 'attribute' key holding {'type': ..., 'value': [...]}
# operation is 0 (add), 1 (delete), 2 (replace), 3 (increment)
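# e.g. (illustrative values): [{'operation': 2, 'attribute': {'type': 'sn', 'value': ['Smith']}},
#                              {'operation': 0, 'attribute': {'type': 'mail', 'value': ['a@example.org']}}]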
request = modify_request_to_dict(request_message)
dn = safe_dn(request['entry'])
changes = request['changes']
result_code = 0
message = ''
rdns = [rdn[0] for rdn in safe_rdn(dn, decompose=True)]
if dn in self.connection.server.dit:
entry = self.connection.server.dit[dn]
original_entry = entry.copy() # to preserve atomicity of operation
for modification in changes:
operation = modification['operation']
attribute = modification['attribute']['type']
elements = modification['attribute']['value']
if operation == 0: # add
if attribute not in entry and elements: # attribute not present, creates the new attribute and add elements
if self.connection.server.schema and self.connection.server.schema.attribute_types and self.connection.server.schema.attribute_types[attribute].single_value and len(elements) > 1: # multiple values in single-valued attribute
result_code = 19
message = 'attribute is single-valued'
else:
entry[attribute] = [to_raw(element) for element in elements]
else: # attribute present, adds elements to current values
if self.connection.server.schema and self.connection.server.schema.attribute_types and self.connection.server.schema.attribute_types[attribute].single_value: # multiple values in single-valued attribute
result_code = 19
message = 'attribute is single-valued'
else:
entry[attribute].extend([to_raw(element) for element in elements])
elif operation == 1: # delete
if attribute not in entry: # attribute must exist
result_code = RESULT_NO_SUCH_ATTRIBUTE
message = 'attribute must exist to delete its values'
elif attribute in rdns: # attribute can't be used in dn
result_code = 67
message = 'cannot delete an rdn'
else:
if not elements: # deletes whole attribute if element list is empty
del entry[attribute]
else:
for element in elements:
raw_element = to_raw(element)
if self.equal(dn, attribute, raw_element): # removes single element
entry[attribute].remove(raw_element)
else:
result_code = 1
message = 'value to delete not found'
if not entry[attribute]: # removes the whole attribute if no elements remained
del entry[attribute]
elif operation == 2: # replace
if attribute not in entry and elements: # attribute not present, creates the new attribute and add elements
if self.connection.server.schema and self.connection.server.schema.attribute_types and self.connection.server.schema.attribute_types[attribute].single_value and len(elements) > 1: # multiple values in single-valued attribute
result_code = 19
message = 'attribute is single-valued'
else:
entry[attribute] = [to_raw(element) for element in elements]
elif not elements and attribute in rdns: # attribute can't be used in dn
result_code = 67
message = 'cannot replace an rdn'
elif not elements: # deletes whole attribute if element list is empty
if attribute in entry:
del entry[attribute]
else: # substitutes elements
entry[attribute] = [to_raw(element) for element in elements]
if result_code: # an error has happened, restore the original entry
self.connection.server.dit[dn] = original_entry
else:
result_code = RESULT_NO_SUCH_OBJECT
message = 'object not found'
return {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
def mock_search(self, request_message, controls):
# SearchRequest ::= [APPLICATION 3] SEQUENCE {
# baseObject LDAPDN,
# scope ENUMERATED {
# baseObject (0),
# singleLevel (1),
# wholeSubtree (2),
# ... },
# derefAliases ENUMERATED {
# neverDerefAliases (0),
# derefInSearching (1),
# derefFindingBaseObj (2),
# derefAlways (3) },
# sizeLimit INTEGER (0 .. maxInt),
# timeLimit INTEGER (0 .. maxInt),
# typesOnly BOOLEAN,
# filter Filter,
# attributes AttributeSelection }
#
# SearchResultEntry ::= [APPLICATION 4] SEQUENCE {
# objectName LDAPDN,
# attributes PartialAttributeList }
#
#
# SearchResultReference ::= [APPLICATION 19] SEQUENCE
# SIZE (1..MAX) OF uri URI
#
# SearchResultDone ::= [APPLICATION 5] LDAPResult
#
# request: base, scope, dereferenceAlias, sizeLimit, timeLimit, typesOnly, filter, attributes
# response_entry: object, attributes
# response_done: LDAPResult
request = search_request_to_dict(request_message)
if controls:
decoded_controls = [self.decode_control(control) for control in controls if control]
for decoded_control in decoded_controls:
if decoded_control[1]['criticality'] and decoded_control[0] not in SEARCH_CONTROLS:
message = 'Critical requested control ' + str(decoded_control[0]) + ' not available'
result = {'resultCode': RESULT_UNAVAILABLE_CRITICAL_EXTENSION,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
return [], result
elif decoded_control[0] == '1.2.840.113556.1.4.319': # Simple paged search
if not decoded_control[1]['value']['cookie']: # new paged search
response, result = self._execute_search(request)
if result['resultCode'] == RESULT_SUCCESS: # success
paged_set = PagedSearchSet(response, int(decoded_control[1]['value']['size']), decoded_control[1]['criticality'])
response, result = paged_set.next()
if paged_set.done: # paged search already completed, no need to store the set
del paged_set
else:
self._paged_sets.append(paged_set)
return response, result
else:
return [], result
else:
for paged_set in self._paged_sets:
if paged_set.cookie == decoded_control[1]['value']['cookie']: # existing paged set
response, result = paged_set.next() # returns next bunch of entries as per paged set specifications
if paged_set.done:
self._paged_sets.remove(paged_set)
return response, result
# paged set not found
message = 'Invalid cookie in simple paged search'
result = {'resultCode': RESULT_OPERATIONS_ERROR,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
return [], result
else:
return self._execute_search(request)
def _execute_search(self, request):
responses = []
base = safe_dn(request['base'])
scope = request['scope']
attributes = request['attributes']
if '+' in attributes: # operational attributes requested
attributes.extend(self.operational_attributes)
attributes.remove('+')
attributes = [attr.lower() for attr in request['attributes']]
filter_root = parse_filter(request['filter'], self.connection.server.schema, auto_escape=True, auto_encode=False, validator=self.connection.server.custom_validator, check_names=self.connection.check_names)
candidates = []
if scope == 0: # base object
if base in self.connection.server.dit or base.lower() == 'cn=schema':
candidates.append(base)
elif scope == 1: # single level
for entry in self.connection.server.dit:
if entry.lower().endswith(base.lower()) and ',' not in entry[:-len(base) - 1]: # only direct children: no commas left in the remaining dn
candidates.append(entry)
elif scope == 2: # whole subtree
for entry in self.connection.server.dit:
if entry.lower().endswith(base.lower()):
candidates.append(entry)
if not candidates: # incorrect base
result_code = RESULT_NO_SUCH_OBJECT
message = 'incorrect base object'
else:
matched = self.evaluate_filter_node(filter_root, candidates)
if self.connection.raise_exceptions and 0 < request['sizeLimit'] < len(matched):
result_code = 4
message = 'size limit exceeded'
else:
for match in matched:
responses.append({
'object': match,
'attributes': [{'type': attribute,
'vals': [] if request['typesOnly'] else self.connection.server.dit[match][attribute]}
for attribute in self.connection.server.dit[match]
if attribute.lower() in attributes or ALL_ATTRIBUTES in attributes]
})
result_code = 0
message = ''
result = {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None
}
return responses[:request['sizeLimit']] if request['sizeLimit'] > 0 else responses, result
def mock_extended(self, request_message, controls):
# ExtendedRequest ::= [APPLICATION 23] SEQUENCE {
# requestName [0] LDAPOID,
# requestValue [1] OCTET STRING OPTIONAL }
#
# ExtendedResponse ::= [APPLICATION 24] SEQUENCE {
# COMPONENTS OF LDAPResult,
# responseName [10] LDAPOID OPTIONAL,
# responseValue [11] OCTET STRING OPTIONAL }
#
# IntermediateResponse ::= [APPLICATION 25] SEQUENCE {
# responseName [0] LDAPOID OPTIONAL,
# responseValue [1] OCTET STRING OPTIONAL }
request = extended_request_to_dict(request_message)
result_code = RESULT_UNWILLING_TO_PERFORM
message = 'not implemented'
response_name = None
response_value = None
if self.connection.server.info:
for extension in self.connection.server.info.supported_extensions:
if request['name'] == extension[0]: # server can answer the extended request
if extension[0] == '2.16.840.1.113719.1.27.100.31': # getBindDNRequest [NOVELL]
result_code = 0
message = ''
response_name = '2.16.840.1.113719.1.27.100.32' # getBindDNResponse [NOVELL]
response_value = OctetString(self.bound)
elif extension[0] == '1.3.6.1.4.1.4203.1.11.3': # WhoAmI [RFC4532]
result_code = 0
message = ''
response_name = '1.3.6.1.4.1.4203.1.11.3' # WhoAmI [RFC4532]
response_value = OctetString(self.bound)
break
return {'resultCode': result_code,
'matchedDN': '',
'diagnosticMessage': to_unicode(message, SERVER_ENCODING),
'referral': None,
'responseName': response_name,
'responseValue': response_value
}
def evaluate_filter_node(self, node, candidates):
"""After evaluation each 2 sets are added to each MATCH node, one for the matched object and one for unmatched object.
The unmatched object set is needed if a superior node is a NOT that reverts the evaluation. The BOOLEAN nodes mix the sets
returned by the MATCH nodes"""
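# Illustrative example: for the filter (!(sn=user1)) the MATCH_EQUAL node puts every candidate
# whose sn equals 'user1' in its matched set and every other candidate in its unmatched set;
# the parent NOT node then simply swaps the two sets.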
node.matched = set()
node.unmatched = set()
if node.elements:
for element in node.elements:
self.evaluate_filter_node(element, candidates)
if node.tag == ROOT:
return node.elements[0].matched
elif node.tag == AND:
first_element = node.elements[0]
node.matched.update(first_element.matched)
node.unmatched.update(first_element.unmatched)
for element in node.elements[1:]:
node.matched.intersection_update(element.matched)
node.unmatched.intersection_update(element.unmatched)
elif node.tag == OR:
for element in node.elements:
node.matched.update(element.matched)
node.unmatched.update(element.unmatched)
elif node.tag == NOT:
node.matched = node.elements[0].unmatched
node.unmatched = node.elements[0].matched
elif node.tag == MATCH_GREATER_OR_EQUAL:
attr_name = node.assertion['attr']
attr_value = node.assertion['value']
for candidate in candidates:
if attr_name in self.connection.server.dit[candidate]:
for value in self.connection.server.dit[candidate][attr_name]:
if value.isdigit() and attr_value.isdigit(): # int comparison
if int(value) >= int(attr_value):
node.matched.add(candidate)
else:
node.unmatched.add(candidate)
else:
if to_unicode(value, SERVER_ENCODING).lower() >= to_unicode(attr_value, SERVER_ENCODING).lower(): # case insensitive string comparison
node.matched.add(candidate)
else:
node.unmatched.add(candidate)
elif node.tag == MATCH_LESS_OR_EQUAL:
attr_name = node.assertion['attr']
attr_value = node.assertion['value']
for candidate in candidates:
if attr_name in self.connection.server.dit[candidate]:
for value in self.connection.server.dit[candidate][attr_name]:
if value.isdigit() and attr_value.isdigit(): # int comparison
if int(value) <= int(attr_value):
node.matched.add(candidate)
else:
node.unmatched.add(candidate)
else:
if to_unicode(value, SERVER_ENCODING).lower() <= to_unicode(attr_value, SERVER_ENCODING).lower(): # case insensitive string comparison
node.matched.add(candidate)
else:
node.unmatched.add(candidate)
elif node.tag == MATCH_EXTENSIBLE:
self.connection.last_error = 'Extensible match not allowed in Mock strategy'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPDefinitionError(self.connection.last_error)
elif node.tag == MATCH_PRESENT:
attr_name = node.assertion['attr']
for candidate in candidates:
if attr_name in self.connection.server.dit[candidate]:
node.matched.add(candidate)
else:
node.unmatched.add(candidate)
elif node.tag == MATCH_SUBSTRING:
attr_name = node.assertion['attr']
# rebuild the original substring filter
if 'initial' in node.assertion and node.assertion['initial'] is not None:
substring_filter = re.escape(to_unicode(node.assertion['initial'], SERVER_ENCODING))
else:
substring_filter = ''
if 'any' in node.assertion and node.assertion['any'] is not None:
for middle in node.assertion['any']:
substring_filter += '.*' + re.escape(to_unicode(middle, SERVER_ENCODING))
if 'final' in node.assertion and node.assertion['final'] is not None:
substring_filter += '.*' + re.escape(to_unicode(node.assertion['final'], SERVER_ENCODING))
if substring_filter and not node.assertion.get('any', None) and not node.assertion.get('final', None): # only initial, adds .*
substring_filter += '.*'
regex_filter = re.compile(substring_filter, flags=re.UNICODE | re.IGNORECASE) # unicode AND ignorecase
for candidate in candidates:
if attr_name in self.connection.server.dit[candidate]:
for value in self.connection.server.dit[candidate][attr_name]:
if regex_filter.match(to_unicode(value, SERVER_ENCODING)):
node.matched.add(candidate)
else:
node.unmatched.add(candidate)
else:
node.unmatched.add(candidate)
elif node.tag == MATCH_EQUAL or node.tag == MATCH_APPROX:
attr_name = node.assertion['attr']
attr_value = node.assertion['value']
for candidate in candidates:
# if attr_name in self.connection.server.dit[candidate] and attr_value in self.connection.server.dit[candidate][attr_name]:
if attr_name in self.connection.server.dit[candidate] and self.equal(candidate, attr_name, attr_value):
node.matched.add(candidate)
else:
node.unmatched.add(candidate)
def equal(self, dn, attribute_type, value_to_check):
# value is the value to match
attribute_values = self.connection.server.dit[dn][attribute_type]
if not isinstance(attribute_values, SEQUENCE_TYPES):
attribute_values = [attribute_values]
escaped_value_to_check = ldap_escape_to_bytes(value_to_check)
for attribute_value in attribute_values:
if self._check_equality(escaped_value_to_check, attribute_value):
return True
if self._check_equality(self._prepare_value(attribute_type, value_to_check), attribute_value):
return True
return False
@staticmethod
def _check_equality(value1, value2):
if value1 == value2: # exact matching
return True
if str(value1).isdigit() and str(value2).isdigit():
if int(value1) == int(value2): # int comparison
return True
try:
if to_unicode(value1, SERVER_ENCODING).lower() == to_unicode(value2, SERVER_ENCODING).lower(): # case insensitive comparison
return True
except UnicodeError:
pass
return False
def send(self, message_type, request, controls=None):
self.connection.request = self.decode_request(message_type, request, controls)
if self.connection.listening:
message_id = self.connection.server.next_message_id()
if self.connection.usage: # ldap message is built for updating metrics only
ldap_message = LDAPMessage()
ldap_message['messageID'] = MessageID(message_id)
ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
message_controls = build_controls_list(controls)
if message_controls is not None:
ldap_message['controls'] = message_controls
asn1_request = BaseStrategy.decode_request(message_type, request, controls)
self.connection._usage.update_transmitted_message(asn1_request, len(encode(ldap_message)))
return message_id, message_type, request, controls
else:
self.connection.last_error = 'unable to send message, connection is not open'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSocketOpenError(self.connection.last_error)

View File

@ -0,0 +1,133 @@
"""
"""
# Created on 2014.11.17
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from ..core.results import DO_NOT_RAISE_EXCEPTIONS
from .mockBase import MockBaseStrategy
from .. import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES
from .sync import SyncStrategy
from ..operation.bind import bind_response_to_dict
from ..operation.delete import delete_response_to_dict
from ..operation.add import add_response_to_dict
from ..operation.compare import compare_response_to_dict
from ..operation.modifyDn import modify_dn_response_to_dict
from ..operation.modify import modify_response_to_dict
from ..operation.search import search_result_done_response_to_dict, search_result_entry_response_to_dict
from ..operation.extended import extended_response_to_dict
from ..core.exceptions import LDAPSocketOpenError, LDAPOperationResult
from ..utils.log import log, log_enabled, ERROR, PROTOCOL
class MockSyncStrategy(MockBaseStrategy, SyncStrategy): # class inheritance sequence is important, MockBaseStrategy must be the first one
"""
This strategy creates a mock LDAP server with synchronous access
It can be useful for testing LDAP code without accessing a real server
"""
def __init__(self, ldap_connection):
SyncStrategy.__init__(self, ldap_connection)
MockBaseStrategy.__init__(self)
def post_send_search(self, payload):
message_id, message_type, request, controls = payload
self.connection.response = []
self.connection.result = dict()
if message_type == 'searchRequest':
responses, result = self.mock_search(request, controls)
for entry in responses:
response = search_result_entry_response_to_dict(entry, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
response['type'] = 'searchResEntry'
###
if self.connection.empty_attributes:
for attribute_type in request['attributes']:
attribute_name = str(attribute_type)
if attribute_name not in response['raw_attributes'] and attribute_name not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES):
response['raw_attributes'][attribute_name] = list()
response['attributes'][attribute_name] = list()
if log_enabled(PROTOCOL):
log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>',
attribute_type, self)
if not self.connection.auto_range:
attrs_to_remove = []
# removes original empty attribute in case a range tag is returned
for attribute_type in response['attributes']:
attribute_name = str(attribute_type)
if ';range' in attribute_name.lower():
orig_attr, _, _ = attribute_name.partition(';')
attrs_to_remove.append(orig_attr)
for attribute_type in attrs_to_remove:
if log_enabled(PROTOCOL):
log(PROTOCOL,
'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>',
attribute_type, self)
del response['raw_attributes'][attribute_type]
del response['attributes'][attribute_type]
###
self.connection.response.append(response)
result = search_result_done_response_to_dict(result)
result['type'] = 'searchResDone'
self.connection.result = result
if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
if log_enabled(PROTOCOL):
log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
return self.connection.response
def post_send_single_response(self, payload): # payload is a tuple sent by self.send() made of message_type, request, controls
message_id, message_type, request, controls = payload
responses = []
result = None
if message_type == 'bindRequest':
result = bind_response_to_dict(self.mock_bind(request, controls))
result['type'] = 'bindResponse'
elif message_type == 'unbindRequest':
self.bound = None
elif message_type == 'abandonRequest':
pass
elif message_type == 'delRequest':
result = delete_response_to_dict(self.mock_delete(request, controls))
result['type'] = 'delResponse'
elif message_type == 'addRequest':
result = add_response_to_dict(self.mock_add(request, controls))
result['type'] = 'addResponse'
elif message_type == 'compareRequest':
result = compare_response_to_dict(self.mock_compare(request, controls))
result['type'] = 'compareResponse'
elif message_type == 'modDNRequest':
result = modify_dn_response_to_dict(self.mock_modify_dn(request, controls))
result['type'] = 'modDNResponse'
elif message_type == 'modifyRequest':
result = modify_response_to_dict(self.mock_modify(request, controls))
result['type'] = 'modifyResponse'
elif message_type == 'extendedReq':
result = extended_response_to_dict(self.mock_extended(request, controls))
result['type'] = 'extendedResp'
self.connection.result = result
responses.append(result)
if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
if log_enabled(PROTOCOL):
log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
return responses

View File

@ -0,0 +1,255 @@
"""
"""
# Created on 2014.03.04
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from time import sleep
import socket
from .. import get_config_parameter
from .sync import SyncStrategy
from ..core.exceptions import LDAPSocketOpenError, LDAPOperationResult, LDAPMaximumRetriesError
from ..utils.log import log, log_enabled, ERROR, BASIC
# noinspection PyBroadException,PyProtectedMember
class RestartableStrategy(SyncStrategy):
def __init__(self, ldap_connection):
SyncStrategy.__init__(self, ldap_connection)
self.sync = True
self.no_real_dsa = False
self.pooled = False
self.can_stream = False
self.restartable_sleep_time = get_config_parameter('RESTARTABLE_SLEEPTIME')
self.restartable_tries = get_config_parameter('RESTARTABLE_TRIES')
self._restarting = False
self._last_bind_controls = None
self._current_message_type = None
self._current_request = None
self._current_controls = None
self._restart_tls = None
self.exception_history = []
def open(self, reset_usage=False, read_server_info=True):
SyncStrategy.open(self, reset_usage, read_server_info)
def _open_socket(self, address, use_ssl=False, unix_socket=False):
"""
Try to open and connect a socket to a Server
raise LDAPExceptionError if unable to open or connect socket
if connection is restartable tries for the number of restarting requested or forever
"""
try:
SyncStrategy._open_socket(self, address, use_ssl, unix_socket) # try to open socket using SyncWait
self._reset_exception_history()
return
except Exception as e: # machinery for restartable connection
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
if not self._restarting: # if not already performing a restart
self._restarting = True
counter = self.restartable_tries
while counter > 0: # includes restartable_tries == True
if log_enabled(BASIC):
log(BASIC, 'try #%d to open Restartable connection <%s>', self.restartable_tries - counter, self.connection)
sleep(self.restartable_sleep_time)
if not self.connection.closed:
try: # resetting connection
self.connection.unbind()
except (socket.error, LDAPSocketOpenError): # don't trace socket errors because the socket could already be closed
pass
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
try: # reissuing same operation
if self.connection.server_pool:
new_server = self.connection.server_pool.get_server(self.connection) # get a server from the server_pool if available
if self.connection.server != new_server:
self.connection.server = new_server
if self.connection.usage:
self.connection._usage.servers_from_pool += 1
SyncStrategy._open_socket(self, address, use_ssl, unix_socket) # calls super (not restartable) _open_socket()
if self.connection.usage:
self.connection._usage.restartable_successes += 1
self.connection.closed = False
self._restarting = False
self._reset_exception_history()
return
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
if self.connection.usage:
self.connection._usage.restartable_failures += 1
if not isinstance(self.restartable_tries, bool):
counter -= 1
self._restarting = False
self.connection.last_error = 'restartable connection strategy failed while opening socket'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPMaximumRetriesError(self.connection.last_error, self.exception_history, self.restartable_tries)
def send(self, message_type, request, controls=None):
self._current_message_type = message_type
self._current_request = request
self._current_controls = controls
if not self._restart_tls: # RFCs don't define how to stop TLS once it has started
self._restart_tls = self.connection.tls_started
if message_type == 'bindRequest': # stores controls used in bind operation to be used again when restarting the connection
self._last_bind_controls = controls
try:
message_id = SyncStrategy.send(self, message_type, request, controls) # tries to send using SyncWait
self._reset_exception_history()
return message_id
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
if not self._restarting: # machinery for restartable connection
self._restarting = True
counter = self.restartable_tries
while counter > 0:
if log_enabled(BASIC):
log(BASIC, 'try #%d to send in Restartable connection <%s>', self.restartable_tries - counter, self.connection)
sleep(self.restartable_sleep_time)
if not self.connection.closed:
try: # resetting connection
self.connection.unbind()
except (socket.error, LDAPSocketOpenError): # don't trace socket errors because socket could already be closed
pass
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
failure = False
try: # reopening connection
self.connection.open(reset_usage=False, read_server_info=False)
if self._restart_tls: # restart tls if start_tls was previously used
self.connection.start_tls(read_server_info=False)
if message_type != 'bindRequest':
self.connection.bind(read_server_info=False, controls=self._last_bind_controls) # binds with previously used controls unless the request is already a bindRequest
if not self.connection.server.schema and not self.connection.server.info:
self.connection.refresh_server_info()
else:
self.connection._fire_deferred(read_info=False) # in case of a lazy connection that was not opened by refresh_server_info
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
failure = True
if not failure:
try: # reissuing same operation
ret_value = self.connection.send(message_type, request, controls)
if self.connection.usage:
self.connection._usage.restartable_successes += 1
self._restarting = False
self._reset_exception_history()
return ret_value # successful send
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
failure = True
if failure and self.connection.usage:
self.connection._usage.restartable_failures += 1
if not isinstance(self.restartable_tries, bool):
counter -= 1
self._restarting = False
self.connection.last_error = 'restartable connection failed to send'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPMaximumRetriesError(self.connection.last_error, self.exception_history, self.restartable_tries)
def post_send_single_response(self, message_id):
try:
ret_value = SyncStrategy.post_send_single_response(self, message_id)
self._reset_exception_history()
return ret_value
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
# if an LDAPExceptionError is raised then resend the request
try:
ret_value = SyncStrategy.post_send_single_response(self, self.send(self._current_message_type, self._current_request, self._current_controls))
self._reset_exception_history()
return ret_value
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
if not isinstance(e, LDAPOperationResult):
self.connection.last_error = 'restartable connection strategy failed in post_send_single_response'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise
def post_send_search(self, message_id):
try:
ret_value = SyncStrategy.post_send_search(self, message_id)
self._reset_exception_history()
return ret_value
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
# if an LDAPExceptionError is raised then resend the request
try:
ret_value = SyncStrategy.post_send_search(self, self.connection.send(self._current_message_type, self._current_request, self._current_controls))
self._reset_exception_history()
return ret_value
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
self._add_exception_to_history(type(e)(str(e)))
if not isinstance(e, LDAPOperationResult):
self.connection.last_error = e.args
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise e
def _add_exception_to_history(self, exc):
if not isinstance(self.restartable_tries, bool): # doesn't accumulate when restarting forever
if not isinstance(exc, LDAPMaximumRetriesError): # doesn't add the LDAPMaximumRetriesError exception
self.exception_history.append(exc)
def _reset_exception_history(self):
if self.exception_history:
self.exception_history = []
def get_stream(self):
raise NotImplementedError
def set_stream(self, value):
raise NotImplementedError
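
# Illustrative usage sketch (not part of the strategy module above). It assumes the
# public ldap3 API (Server, Connection, RESTARTABLE, set_config_parameter); the host
# and credentials below are placeholders.
from ldap3 import Server, Connection, RESTARTABLE, set_config_parameter

set_config_parameter('RESTARTABLE_TRIES', 5)      # retry at most 5 times (True means retry forever)
set_config_parameter('RESTARTABLE_SLEEPTIME', 1)  # seconds to wait between retries

server = Server('ldap.example.com')
conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                  client_strategy=RESTARTABLE, auto_bind=True)
# if the server drops the link, open()/send() retry transparently and only raise
# LDAPMaximumRetriesError (with the accumulated exception_history) once the retries are exhausted
conn.search('dc=example,dc=com', '(objectClass=*)', attributes=['cn'])
conn.unbind()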


@@ -0,0 +1,493 @@
"""
"""
# Created on 2014.03.23
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
from os import linesep
from threading import Thread, Lock
from time import sleep
from .. import RESTARTABLE, get_config_parameter, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_TLS_BEFORE_BIND
from .base import BaseStrategy
from ..core.usage import ConnectionUsage
from ..core.exceptions import LDAPConnectionPoolNameIsMandatoryError, LDAPConnectionPoolNotStartedError, LDAPOperationResult, LDAPExceptionError, LDAPResponseTimeoutError
from ..utils.log import log, log_enabled, ERROR, BASIC
from ..protocol.rfc4511 import LDAP_MAX_INT
TERMINATE_REUSABLE = 'TERMINATE_REUSABLE_CONNECTION'
BOGUS_BIND = -1
BOGUS_UNBIND = -2
BOGUS_EXTENDED = -3
BOGUS_ABANDON = -4
try:
from queue import Queue, Empty
except ImportError: # Python 2
# noinspection PyUnresolvedReferences
from Queue import Queue, Empty
# noinspection PyProtectedMember
class ReusableStrategy(BaseStrategy):
"""
A pool of reusable SyncWaitRestartable connections with lazy behaviour and limited lifetime.
The connection using this strategy presents itself as a normal connection, but internally the strategy has a pool of
connections that can be used as needed. Each connection lives in its own thread and has a busy/available status.
The strategy performs the requested operation on the first available connection.
The pool of connections is instantiated at strategy initialization.
Strategy has two customizable properties, the total number of connections in the pool and the lifetime of each connection.
When the lifetime expires the connection is closed and will be opened again when needed.
"""
pools = dict()
def receiving(self):
raise NotImplementedError
def _start_listen(self):
raise NotImplementedError
def _get_response(self, message_id):
raise NotImplementedError
def get_stream(self):
raise NotImplementedError
def set_stream(self, value):
raise NotImplementedError
# noinspection PyProtectedMember
class ConnectionPool(object):
"""
Container for the Connection Threads
"""
def __new__(cls, connection):
if connection.pool_name in ReusableStrategy.pools: # returns existing connection pool
pool = ReusableStrategy.pools[connection.pool_name]
if not pool.started: # if the pool is not started remove it from the pools singleton and create a new one
del ReusableStrategy.pools[connection.pool_name]
return object.__new__(cls)
if connection.pool_keepalive and pool.keepalive != connection.pool_keepalive: # change keepalive
pool.keepalive = connection.pool_keepalive
if connection.pool_lifetime and pool.lifetime != connection.pool_lifetime: # change lifetime
pool.lifetime = connection.pool_lifetime
if connection.pool_size and pool.pool_size != connection.pool_size: # if pool size has changed terminate and recreate the connections
pool.terminate_pool()
pool.pool_size = connection.pool_size
return pool
else:
return object.__new__(cls)
def __init__(self, connection):
if not hasattr(self, 'workers'):
self.name = connection.pool_name
self.master_connection = connection
self.workers = []
self.pool_size = connection.pool_size or get_config_parameter('REUSABLE_THREADED_POOL_SIZE')
self.lifetime = connection.pool_lifetime or get_config_parameter('REUSABLE_THREADED_LIFETIME')
self.keepalive = connection.pool_keepalive
self.request_queue = Queue()
self.open_pool = False
self.bind_pool = False
self.tls_pool = False
self._incoming = dict()
self.counter = 0
self.terminated_usage = ConnectionUsage() if connection._usage else None
self.terminated = False
self.pool_lock = Lock()
ReusableStrategy.pools[self.name] = self
self.started = False
if log_enabled(BASIC):
log(BASIC, 'instantiated ConnectionPool: <%r>', self)
def __str__(self):
s = 'POOL: ' + str(self.name) + ' - status: ' + ('started' if self.started else 'terminated')
s += ' - responses in queue: ' + str(len(self._incoming))
s += ' - pool size: ' + str(self.pool_size)
s += ' - lifetime: ' + str(self.lifetime)
s += ' - keepalive: ' + str(self.keepalive)
s += ' - open: ' + str(self.open_pool)
s += ' - bind: ' + str(self.bind_pool)
s += ' - tls: ' + str(self.tls_pool) + linesep
s += 'MASTER CONN: ' + str(self.master_connection) + linesep
s += 'WORKERS:'
if self.workers:
for i, worker in enumerate(self.workers):
s += linesep + str(i).rjust(5) + ': ' + str(worker)
else:
s += linesep + ' no active workers in pool'
return s
def __repr__(self):
return self.__str__()
def get_info_from_server(self):
for worker in self.workers:
with worker.worker_lock:
if not worker.connection.server.schema or not worker.connection.server.info:
worker.get_info_from_server = True
else:
worker.get_info_from_server = False
def rebind_pool(self):
for worker in self.workers:
with worker.worker_lock:
worker.connection.rebind(self.master_connection.user,
self.master_connection.password,
self.master_connection.authentication,
self.master_connection.sasl_mechanism,
self.master_connection.sasl_credentials)
def start_pool(self):
if not self.started:
self.create_pool()
for worker in self.workers:
with worker.worker_lock:
worker.thread.start()
self.started = True
self.terminated = False
if log_enabled(BASIC):
log(BASIC, 'worker started for pool <%s>', self)
return True
return False
def create_pool(self):
if log_enabled(BASIC):
log(BASIC, 'created pool <%s>', self)
self.workers = [ReusableStrategy.PooledConnectionWorker(self.master_connection, self.request_queue) for _ in range(self.pool_size)]
def terminate_pool(self):
if not self.terminated:
if log_enabled(BASIC):
log(BASIC, 'terminating pool <%s>', self)
self.started = False
self.request_queue.join() # waits for all queue pending operations
for _ in range(len([worker for worker in self.workers if worker.thread.is_alive()])): # put a TERMINATE signal on the queue for each active thread
self.request_queue.put((TERMINATE_REUSABLE, None, None, None))
self.request_queue.join() # waits for all queue terminate operations
self.terminated = True
if log_enabled(BASIC):
log(BASIC, 'pool terminated for <%s>', self)
class PooledConnectionThread(Thread):
"""
The thread that holds the Reusable connection and receives operation requests via the queue
Results are sent back in the pool._incoming list when ready
"""
def __init__(self, worker, master_connection):
Thread.__init__(self)
self.daemon = True
self.worker = worker
self.master_connection = master_connection
if log_enabled(BASIC):
log(BASIC, 'instantiated PooledConnectionThread: <%r>', self)
# noinspection PyProtectedMember
def run(self):
self.worker.running = True
terminate = False
pool = self.master_connection.strategy.pool
while not terminate:
try:
counter, message_type, request, controls = pool.request_queue.get(block=True, timeout=self.master_connection.strategy.pool.keepalive)
except Empty: # issue an Abandon(0) operation to keep the connection alive - Abandon(0) is a harmless operation
if not self.worker.connection.closed:
self.worker.connection.abandon(0)
continue
with self.worker.worker_lock:
self.worker.busy = True
if counter == TERMINATE_REUSABLE:
terminate = True
if self.worker.connection.bound:
try:
self.worker.connection.unbind()
if log_enabled(BASIC):
log(BASIC, 'thread terminated')
except LDAPExceptionError:
pass
else:
if (datetime.now() - self.worker.creation_time).seconds >= self.master_connection.strategy.pool.lifetime: # destroy and create a new connection
try:
self.worker.connection.unbind()
except LDAPExceptionError:
pass
self.worker.new_connection()
if log_enabled(BASIC):
log(BASIC, 'thread respawn')
if message_type not in ['bindRequest', 'unbindRequest']:
try:
if pool.open_pool and self.worker.connection.closed:
self.worker.connection.open(read_server_info=False)
if pool.tls_pool and not self.worker.connection.tls_started:
self.worker.connection.start_tls(read_server_info=False)
if pool.bind_pool and not self.worker.connection.bound:
self.worker.connection.bind(read_server_info=False)
elif pool.open_pool and not self.worker.connection.closed: # connection already open, issues a start_tls
if pool.tls_pool and not self.worker.connection.tls_started:
self.worker.connection.start_tls(read_server_info=False)
if self.worker.get_info_from_server and counter:
self.worker.connection._fire_deferred()
self.worker.get_info_from_server = False
response = None
result = None
if message_type == 'searchRequest':
response = self.worker.connection.post_send_search(self.worker.connection.send(message_type, request, controls))
else:
response = self.worker.connection.post_send_single_response(self.worker.connection.send(message_type, request, controls))
result = self.worker.connection.result
with pool.pool_lock:
pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread
with pool.pool_lock:
pool._incoming[counter] = (type(e)(str(e)), None, None)
# except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread
# exc = e
# with pool.pool_lock:
# if exc:
# pool._incoming[counter] = (exc, None, None)
# else:
# pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
self.worker.busy = False
pool.request_queue.task_done()
self.worker.task_counter += 1
if log_enabled(BASIC):
log(BASIC, 'thread terminated')
if self.master_connection.usage:
pool.terminated_usage += self.worker.connection.usage
self.worker.running = False
class PooledConnectionWorker(object):
"""
Container for the restartable connection. It includes a thread and a lock to execute the connection in the pool
"""
def __init__(self, connection, request_queue):
self.master_connection = connection
self.request_queue = request_queue
self.running = False
self.busy = False
self.get_info_from_server = False
self.connection = None
self.creation_time = None
self.task_counter = 0
self.new_connection()
self.thread = ReusableStrategy.PooledConnectionThread(self, self.master_connection)
self.worker_lock = Lock()
if log_enabled(BASIC):
log(BASIC, 'instantiated PooledConnectionWorker: <%s>', self)
def __str__(self):
s = 'CONN: ' + str(self.connection) + linesep + ' THREAD: '
s += 'running' if self.running else 'halted'
s += ' - ' + ('busy' if self.busy else 'available')
s += ' - ' + ('created at: ' + self.creation_time.isoformat())
s += ' - time to live: ' + str(self.master_connection.strategy.pool.lifetime - (datetime.now() - self.creation_time).seconds)
s += ' - requests served: ' + str(self.task_counter)
return s
def new_connection(self):
from ..core.connection import Connection
# noinspection PyProtectedMember
self.creation_time = datetime.now()
self.connection = Connection(server=self.master_connection.server_pool if self.master_connection.server_pool else self.master_connection.server,
user=self.master_connection.user,
password=self.master_connection.password,
auto_bind=AUTO_BIND_NONE, # do not perform auto_bind because it reads again the schema
version=self.master_connection.version,
authentication=self.master_connection.authentication,
client_strategy=RESTARTABLE,
auto_referrals=self.master_connection.auto_referrals,
auto_range=self.master_connection.auto_range,
sasl_mechanism=self.master_connection.sasl_mechanism,
sasl_credentials=self.master_connection.sasl_credentials,
check_names=self.master_connection.check_names,
collect_usage=self.master_connection._usage,
read_only=self.master_connection.read_only,
raise_exceptions=self.master_connection.raise_exceptions,
lazy=False,
fast_decoder=self.master_connection.fast_decoder,
receive_timeout=self.master_connection.receive_timeout,
return_empty_attributes=self.master_connection.empty_attributes)
# simulates auto_bind, always with read_server_info=False
if self.master_connection.auto_bind and self.master_connection.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
if log_enabled(BASIC):
log(BASIC, 'performing automatic bind for <%s>', self.connection)
self.connection.open(read_server_info=False)
if self.master_connection.auto_bind == AUTO_BIND_NO_TLS:
self.connection.bind(read_server_info=False)
elif self.master_connection.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
self.connection.start_tls(read_server_info=False)
self.connection.bind(read_server_info=False)
elif self.master_connection.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
self.connection.bind(read_server_info=False)
self.connection.start_tls(read_server_info=False)
if self.master_connection.server_pool:
self.connection.server_pool = self.master_connection.server_pool
self.connection.server_pool.initialize(self.connection)
# ReusableStrategy methods
def __init__(self, ldap_connection):
BaseStrategy.__init__(self, ldap_connection)
self.sync = False
self.no_real_dsa = False
self.pooled = True
self.can_stream = False
if hasattr(ldap_connection, 'pool_name') and ldap_connection.pool_name:
self.pool = ReusableStrategy.ConnectionPool(ldap_connection)
else:
if log_enabled(ERROR):
log(ERROR, 'reusable connection must have a pool_name')
raise LDAPConnectionPoolNameIsMandatoryError('reusable connection must have a pool_name')
def open(self, reset_usage=True, read_server_info=True):
# read_server_info not used
self.pool.open_pool = True
self.pool.start_pool()
self.connection.closed = False
if self.connection.usage:
if reset_usage or not self.connection._usage.initial_connection_start_time:
self.connection._usage.start()
def terminate(self):
self.pool.terminate_pool()
self.pool.open_pool = False
self.connection.bound = False
self.connection.closed = True
self.pool.bind_pool = False
self.pool.tls_pool = False
def _close_socket(self):
"""
Doesn't really close the socket
"""
self.connection.closed = True
if self.connection.usage:
self.connection._usage.closed_sockets += 1
def send(self, message_type, request, controls=None):
if self.pool.started:
if message_type == 'bindRequest':
self.pool.bind_pool = True
counter = BOGUS_BIND
elif message_type == 'unbindRequest':
self.pool.bind_pool = False
counter = BOGUS_UNBIND
elif message_type == 'abandonRequest':
counter = BOGUS_ABANDON
elif message_type == 'extendedReq' and self.connection.starting_tls:
self.pool.tls_pool = True
counter = BOGUS_EXTENDED
else:
with self.pool.pool_lock:
self.pool.counter += 1
if self.pool.counter > LDAP_MAX_INT:
self.pool.counter = 1
counter = self.pool.counter
self.pool.request_queue.put((counter, message_type, request, controls))
return counter
if log_enabled(ERROR):
log(ERROR, 'reusable connection pool not started')
raise LDAPConnectionPoolNotStartedError('reusable connection pool not started')
def validate_bind(self, controls):
# in case of a new connection or different credentials
if (self.connection.user != self.pool.master_connection.user or
self.connection.password != self.pool.master_connection.password or
self.connection.authentication != self.pool.master_connection.authentication or
self.connection.sasl_mechanism != self.pool.master_connection.sasl_mechanism or
self.connection.sasl_credentials != self.pool.master_connection.sasl_credentials):
self.pool.master_connection.user = self.connection.user
self.pool.master_connection.password = self.connection.password
self.pool.master_connection.authentication = self.connection.authentication
self.pool.master_connection.sasl_mechanism = self.connection.sasl_mechanism
self.pool.master_connection.sasl_credentials = self.connection.sasl_credentials
self.pool.rebind_pool()
temp_connection = self.pool.workers[0].connection
temp_connection.lazy = False
if not self.connection.server.schema or not self.connection.server.info:
result = self.pool.workers[0].connection.bind(controls=controls)
else:
result = self.pool.workers[0].connection.bind(controls=controls, read_server_info=False)
temp_connection.unbind()
temp_connection.lazy = True
if result:
self.pool.bind_pool = True # bind pool if bind is validated
return result
def get_response(self, counter, timeout=None, get_request=False):
sleeptime = get_config_parameter('RESPONSE_SLEEPTIME')
request = None
if timeout is None:
timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
if counter == BOGUS_BIND: # send a bogus bindResponse
response = list()
result = {'description': 'success', 'referrals': None, 'type': 'bindResponse', 'result': 0, 'dn': '', 'message': '<bogus Bind response>', 'saslCreds': None}
elif counter == BOGUS_UNBIND: # bogus unbind response
response = None
result = None
elif counter == BOGUS_ABANDON: # abandon cannot be executed because of multiple connections
response = list()
result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': '<bogus StartTls response>'}
elif counter == BOGUS_EXTENDED: # bogus startTls extended response
response = list()
result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': '<bogus StartTls response>'}
self.connection.starting_tls = False
else:
response = None
result = None
while timeout >= 0: # waiting for completed message to appear in _incoming
try:
with self.connection.strategy.pool.pool_lock:
response, result, request = self.connection.strategy.pool._incoming.pop(counter)
except KeyError:
sleep(sleeptime)
timeout -= sleeptime
continue
break
if timeout <= 0:
if log_enabled(ERROR):
log(ERROR, 'no response from worker threads in Reusable connection')
raise LDAPResponseTimeoutError('no response from worker threads in Reusable connection')
if isinstance(response, LDAPOperationResult):
raise response # an exception has been raised with raise_exceptions
if get_request:
return response, result, request
return response, result
def post_send_single_response(self, counter):
return counter
def post_send_search(self, counter):
return counter
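
# Illustrative usage sketch (not part of the strategy module above). It assumes the
# public ldap3 API (Server, Connection, REUSABLE); host, credentials and pool values
# are placeholders.
from ldap3 import Server, Connection, REUSABLE

server = Server('ldap.example.com')
conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                  client_strategy=REUSABLE,
                  pool_name='my_pool',    # mandatory for the reusable strategy
                  pool_size=5,            # number of worker connections in the pool
                  pool_lifetime=3600,     # seconds before a worker connection is recycled
                  pool_keepalive=30,      # idle seconds before a worker sends a harmless Abandon(0)
                  auto_bind=True)
# operations are queued and executed by the first available worker connection
conn.search('dc=example,dc=com', '(objectClass=*)', attributes=['cn'])
conn.strategy.terminate()  # terminate_pool(): waits for queued operations, then stops the workers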


@@ -0,0 +1,212 @@
"""
"""
# Created on 2013.07.15
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
import socket
from .. import SEQUENCE_TYPES, get_config_parameter
from ..core.exceptions import LDAPSocketReceiveError, communication_exception_factory, LDAPExceptionError, LDAPExtensionError, LDAPOperationResult
from ..strategy.base import BaseStrategy, SESSION_TERMINATED_BY_SERVER, RESPONSE_COMPLETE, TRANSACTION_ERROR
from ..protocol.rfc4511 import LDAPMessage
from ..utils.log import log, log_enabled, ERROR, NETWORK, EXTENDED, format_ldap_message
from ..utils.asn1 import decoder, decode_message_fast
LDAP_MESSAGE_TEMPLATE = LDAPMessage()
# noinspection PyProtectedMember
class SyncStrategy(BaseStrategy):
"""
This strategy is synchronous. You send the request and get the response
Requests return a boolean value to indicate the result of the requested Operation
Connection.response will contain the whole LDAP response for the messageId requested in a dict form
Connection.request will contain the result LDAP message in a dict form
"""
def __init__(self, ldap_connection):
BaseStrategy.__init__(self, ldap_connection)
self.sync = True
self.no_real_dsa = False
self.pooled = False
self.can_stream = False
self.socket_size = get_config_parameter('SOCKET_SIZE')
def open(self, reset_usage=True, read_server_info=True):
BaseStrategy.open(self, reset_usage, read_server_info)
if read_server_info:
try:
self.connection.refresh_server_info()
except LDAPOperationResult: # catch errors from server if raise_exception = True
self.connection.server._dsa_info = None
self.connection.server._schema_info = None
def _start_listen(self):
if not self.connection.listening and not self.connection.closed:
self.connection.listening = True
def receiving(self):
"""
Receive data over the socket
Checks if the socket is closed
"""
messages = []
receiving = True
unprocessed = b''
data = b''
get_more_data = True
exc = None
while receiving:
if get_more_data:
try:
data = self.connection.socket.recv(self.socket_size)
except (OSError, socket.error, AttributeError) as e:
self.connection.last_error = 'error receiving data: ' + str(e)
try: # try to close the connection before raising exception
self.close()
except (socket.error, LDAPExceptionError):
pass
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
# raise communication_exception_factory(LDAPSocketReceiveError, exc)(self.connection.last_error)
raise communication_exception_factory(LDAPSocketReceiveError, type(e)(str(e)))(self.connection.last_error)
unprocessed += data
if len(data) > 0:
length = BaseStrategy.compute_ldap_message_size(unprocessed)
if length == -1: # not enough data to decode the message length
get_more_data = True
continue
if len(unprocessed) < length:
get_more_data = True
else:
if log_enabled(NETWORK):
log(NETWORK, 'received %d bytes via <%s>', len(unprocessed[:length]), self.connection)
messages.append(unprocessed[:length])
unprocessed = unprocessed[length:]
get_more_data = False
if len(unprocessed) == 0:
receiving = False
else:
receiving = False
if log_enabled(NETWORK):
log(NETWORK, 'received %d ldap messages via <%s>', len(messages), self.connection)
return messages
def post_send_single_response(self, message_id):
"""
Executed after an Operation Request (except Search)
Returns the result message or None
"""
responses, result = self.get_response(message_id)
self.connection.result = result
if result['type'] == 'intermediateResponse': # checks that all responses are intermediates (there should be only one)
for response in responses:
if response['type'] != 'intermediateResponse':
self.connection.last_error = 'multiple messages received error'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSocketReceiveError(self.connection.last_error)
responses.append(result)
return responses
def post_send_search(self, message_id):
"""
Executed after a search request
Returns the result message and store in connection.response the objects found
"""
responses, result = self.get_response(message_id)
self.connection.result = result
if isinstance(responses, SEQUENCE_TYPES):
self.connection.response = responses[:] # copy search result entries
return responses
self.connection.last_error = 'error receiving response'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSocketReceiveError(self.connection.last_error)
def _get_response(self, message_id):
"""
Performs the capture of LDAP response for SyncStrategy
"""
ldap_responses = []
response_complete = False
while not response_complete:
responses = self.receiving()
if responses:
for response in responses:
if len(response) > 0:
if self.connection.usage:
self.connection._usage.update_received_message(len(response))
if self.connection.fast_decoder:
ldap_resp = decode_message_fast(response)
dict_response = self.decode_response_fast(ldap_resp)
else:
ldap_resp, _ = decoder.decode(response, asn1Spec=LDAP_MESSAGE_TEMPLATE) # unprocessed unused because receiving() waits for the whole message
dict_response = self.decode_response(ldap_resp)
if log_enabled(EXTENDED):
log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
if int(ldap_resp['messageID']) == message_id:
ldap_responses.append(dict_response)
if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
response_complete = True
elif int(ldap_resp['messageID']) == 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
return SESSION_TERMINATED_BY_SERVER
elif dict_response['responseName'] == '2.16.840.1.113719.1.27.103.4': # Novell LDAP transaction error unsolicited notification
return TRANSACTION_ERROR
else:
self.connection.last_error = 'unknown unsolicited notification from server'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSocketReceiveError(self.connection.last_error)
elif int(ldap_resp['messageID']) != message_id and dict_response['type'] == 'extendedResp':
self.connection.last_error = 'multiple extended responses to a single extended request'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPExtensionError(self.connection.last_error)
# pass # ignore message with invalid messageId when receiving multiple extendedResp. This is not allowed by RFC4511 but some LDAP server do it
else:
self.connection.last_error = 'invalid messageId received'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSocketReceiveError(self.connection.last_error)
# response = unprocessed
# if response: # if this statement is removed unprocessed data will be processed as another message
# self.connection.last_error = 'unprocessed substrate error'
# if log_enabled(ERROR):
# log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
# raise LDAPSocketReceiveError(self.connection.last_error)
else:
return SESSION_TERMINATED_BY_SERVER
ldap_responses.append(RESPONSE_COMPLETE)
return ldap_responses
def set_stream(self, value):
raise NotImplementedError
def get_stream(self):
raise NotImplementedError
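
# Illustrative usage sketch (not part of the strategy module above). SYNC is the default
# client strategy; Server, Connection, SYNC and SUBTREE are assumed to be importable from
# the ldap3 package, and the host/base DN are placeholders.
from ldap3 import Server, Connection, SYNC, SUBTREE

conn = Connection(Server('ldap.example.com'), client_strategy=SYNC, auto_bind=True)  # anonymous simple bind
# every operation returns a boolean; the decoded LDAP messages are stored on the connection
if conn.search('dc=example,dc=com', '(objectClass=person)', search_scope=SUBTREE, attributes=['cn']):
    print(conn.result)    # the searchResDone message as a dict (stored by post_send_search)
    print(conn.response)  # the searchResEntry/searchResRef messages as a list of dicts
conn.unbind()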


@@ -0,0 +1,245 @@
"""
"""
# Created on 2015.08.19
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1 import __version__ as pyasn1_version
from pyasn1.codec.ber import decoder # for usage in other modules
from pyasn1.codec.ber.encoder import Encoder # for monkeypatching of boolean value
from ..core.results import RESULT_CODES
from ..utils.conv import to_unicode
from ..protocol.convert import referrals_to_list
CLASSES = {(False, False): 0, # Universal
(False, True): 1, # Application
(True, False): 2, # Context
(True, True): 3} # Private
# Monkeypatching of pyasn1 for encoding Boolean with the value 0xFF for TRUE
# THIS IS NOT PART OF THE FAST BER DECODER
if pyasn1_version == 'xxx0.2.3':
from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode
from pyasn1.type.univ import Boolean
from pyasn1.compat.octets import ints2octs
class BooleanCEREncoder(BooleanEncoder):
_true = ints2octs((255,))
tagMap[Boolean.tagSet] = BooleanCEREncoder()
else:
from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder
from pyasn1.type.univ import Boolean
from copy import deepcopy
class LDAPBooleanEncoder(AbstractItemEncoder):
supportIndefLenMode = False
if pyasn1_version <= '0.2.3':
from pyasn1.compat.octets import ints2octs
_true = ints2octs((255,))
_false = ints2octs((0,))
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value and self._true or self._false, 0
elif pyasn1_version <= '0.3.1':
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value and (255,) or (0,), False, False
elif pyasn1_version <= '0.3.4':
def encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False):
return value and (255,) or (0,), False, False
elif pyasn1_version <= '0.3.7':
def encodeValue(self, value, encodeFun, **options):
return value and (255,) or (0,), False, False
else:
def encodeValue(self, value, asn1Spec, encodeFun, **options):
return value and (255,) or (0,), False, False
customTagMap = deepcopy(tagMap)
customTypeMap = deepcopy(typeMap)
customTagMap[Boolean.tagSet] = LDAPBooleanEncoder()
customTypeMap[Boolean.typeId] = LDAPBooleanEncoder()
encode = Encoder(customTagMap, customTypeMap)
# end of monkey patching
# a fast BER decoder for LDAP responses only
def compute_ber_size(data):
"""
Compute size according to BER definite length rules
Returns size of value and value offset
"""
if data[1] <= 127: # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
return data[1], 2
else: # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length
bytes_length = data[1] - 128
value_length = 0
cont = bytes_length
for byte in data[2: 2 + bytes_length]:
cont -= 1
value_length += byte * (256 ** cont)
return value_length, bytes_length + 2
def decode_message_fast(message):
ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start of sequence, at maximum 3 bytes for length
decoded = decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT)
return {
'messageID': decoded[0][3],
'protocolOp': decoded[1][2],
'payload': decoded[1][3],
'controls': decoded[2][3] if len(decoded) == 3 else None
}
def decode_sequence(message, start, stop, context_decoders=None):
decoded = []
while start < stop:
octet = get_byte(message[start])
ber_class = CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))]
ber_constructed = bool(octet & 0b00100000)
ber_type = octet & 0b00011111
ber_decoder = DECODERS[(ber_class, octet & 0b00011111)] if ber_class < 2 else None
ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10]))
start += ber_value_offset
if ber_decoder:
value = ber_decoder(message, start, start + ber_len, context_decoders) # call value decode function
else:
# try:
value = context_decoders[ber_type](message, start, start + ber_len) # call value decode function for context class
# except KeyError:
# if ber_type == 3: # Referral in result
# value = decode_sequence(message, start, start + ber_len)
# else:
# raise # re-raise, should never happen
decoded.append((ber_class, ber_constructed, ber_type, value))
start += ber_len
return decoded
def decode_integer(message, start, stop, context_decoders=None):
first = message[start]
value = -1 if get_byte(first) & 0x80 else 0
for octet in message[start: stop]:
value = value << 8 | get_byte(octet)
return value
def decode_octet_string(message, start, stop, context_decoders=None):
return message[start: stop]
def decode_boolean(message, start, stop, context_decoders=None):
return False if message[start: stop] == 0 else True
def decode_bind_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT)
def decode_extended_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT)
def decode_intermediate_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT)
def decode_controls(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, CONTROLS_CONTEXT)
def ldap_result_to_dict_fast(response):
response_dict = dict()
response_dict['result'] = int(response[0][3]) # resultCode
response_dict['description'] = RESULT_CODES[response_dict['result']]
response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN
response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage
if len(response) == 4:
response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]]) # referrals
else:
response_dict['referrals'] = None
return response_dict
######
if str is not bytes: # Python 3
def get_byte(x):
return x
def get_bytes(x):
return x
else: # Python 2
def get_byte(x):
return ord(x)
def get_bytes(x):
return bytearray(x)
DECODERS = {
# Universal
(0, 1): decode_boolean, # Boolean
(0, 2): decode_integer, # Integer
(0, 4): decode_octet_string, # Octet String
(0, 10): decode_integer, # Enumerated
(0, 16): decode_sequence, # Sequence
(0, 17): decode_sequence, # Set
# Application
(1, 1): decode_bind_response, # Bind response
(1, 4): decode_sequence, # Search result entry
(1, 5): decode_sequence, # Search result done
(1, 7): decode_sequence, # Modify response
(1, 9): decode_sequence, # Add response
(1, 11): decode_sequence, # Delete response
(1, 13): decode_sequence, # ModifyDN response
(1, 15): decode_sequence, # Compare response
(1, 19): decode_sequence, # Search result reference
(1, 24): decode_extended_response, # Extended response
(1, 25): decode_intermediate_response, # intermediate response
(2, 3): decode_octet_string #
}
BIND_RESPONSE_CONTEXT = {
7: decode_octet_string # SaslCredentials
}
EXTENDED_RESPONSE_CONTEXT = {
10: decode_octet_string, # ResponseName
11: decode_octet_string # Response Value
}
INTERMEDIATE_RESPONSE_CONTEXT = {
0: decode_octet_string, # IntermediateResponseName
1: decode_octet_string # IntermediateResponseValue
}
LDAP_MESSAGE_CONTEXT = {
0: decode_controls, # Controls
3: decode_sequence # Referral
}
CONTROLS_CONTEXT = {
0: decode_sequence # Control
}
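
# Illustrative sketch (not part of the module above) showing how compute_ber_size reads
# BER definite lengths. It assumes Python 3, where get_bytes is the identity function
# defined above and indexing bytes yields integers.
short_form = bytes([0x30, 0x03, 0x02, 0x01, 0x00])         # SEQUENCE, length byte 0x03 <= 127 (short form)
print(compute_ber_size(get_bytes(short_form[:10])))        # -> (3, 2): 3 value bytes after a 2-byte header

long_form = bytes([0x30, 0x82, 0x01, 0xF4]) + bytes(500)   # 0x82: the next 2 octets hold the length (long form)
print(compute_ber_size(get_bytes(long_form[:10])))         # -> (500, 4): 0x01F4 == 500, 4-byte header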


@@ -0,0 +1,194 @@
"""
"""
# Created on 2014.08.23
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
try:
from collections.abc import MutableMapping, Mapping
except ImportError:
from collections import MutableMapping, Mapping
from .. import SEQUENCE_TYPES
class CaseInsensitiveDict(MutableMapping):
def __init__(self, other=None, **kwargs):
self._store = dict() # the store uses the original key
self._case_insensitive_keymap = dict() # is a mapping ci_key -> key
if other or kwargs:
if other is None:
other = dict()
self.update(other, **kwargs)
def __contains__(self, item):
try:
self.__getitem__(item)
return True
except KeyError:
return False
@staticmethod
def _ci_key(key):
return key.strip().lower() if hasattr(key, 'lower') else key
def __delitem__(self, key):
ci_key = self._ci_key(key)
del self._store[self._case_insensitive_keymap[ci_key]]
del self._case_insensitive_keymap[ci_key]
def __setitem__(self, key, item):
ci_key = self._ci_key(key)
if ci_key in self._case_insensitive_keymap: # updates existing value
self._store[self._case_insensitive_keymap[ci_key]] = item
else: # new key
self._store[key] = item
self._case_insensitive_keymap[ci_key] = key
def __getitem__(self, key):
return self._store[self._case_insensitive_keymap[self._ci_key(key)]]
def __iter__(self):
return self._store.__iter__()
def __len__(self): # if len is 0 the dict evaluates as False in an if statement
return len(self._store)
def __repr__(self):
return repr(self._store)
def __str__(self):
return str(self._store)
def keys(self):
return self._store.keys()
def values(self):
return self._store.values()
def items(self):
return self._store.items()
def __eq__(self, other):
if not isinstance(other, (Mapping, dict)):
return NotImplemented
if isinstance(other, CaseInsensitiveDict):
if len(self.items()) != len(other.items()):
return False
else:
for key, value in self.items():
if not (key in other and other[key] == value):
return False
return True
return self == CaseInsensitiveDict(other)
def copy(self):
return CaseInsensitiveDict(self._store)
class CaseInsensitiveWithAliasDict(CaseInsensitiveDict):
def __init__(self, other=None, **kwargs):
self._aliases = dict()
self._alias_keymap = dict() # is a mapping key -> [alias1, alias2, ...]
CaseInsensitiveDict.__init__(self, other, **kwargs)
def aliases(self):
return self._aliases.keys()
def __setitem__(self, key, value):
if isinstance(key, SEQUENCE_TYPES):
ci_key = self._ci_key(key[0])
if ci_key not in self._aliases:
CaseInsensitiveDict.__setitem__(self, key[0], value)
self.set_alias(ci_key, key[1:])
else:
raise KeyError('\'' + str(key[0]) + '\' already used as alias')
else:
ci_key = self._ci_key(key)
if ci_key not in self._aliases:
CaseInsensitiveDict.__setitem__(self, key, value)
else:
self[self._aliases[ci_key]] = value
def __delitem__(self, key):
ci_key = self._ci_key(key)
try:
CaseInsensitiveDict.__delitem__(self, ci_key)
if ci_key in self._alias_keymap:
for alias in self._alias_keymap[ci_key][:]: # removes aliases, uses a copy of _alias_keymap because iterator gets confused when aliases are removed from _alias_keymap
self.remove_alias(alias)
return
except KeyError: # try to remove alias
if ci_key in self._aliases:
self.remove_alias(ci_key)
def set_alias(self, key, alias):
if not isinstance(alias, SEQUENCE_TYPES):
alias = [alias]
for alias_to_add in alias:
ci_key = self._ci_key(key)
if ci_key in self._case_insensitive_keymap:
ci_alias = self._ci_key(alias_to_add)
if ci_alias not in self._case_insensitive_keymap: # checks if alias is used as a key
if ci_alias not in self._aliases: # checks if alias is used as another alias
self._aliases[ci_alias] = ci_key
if ci_key in self._alias_keymap: # extend alias keymap
self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
else:
self._alias_keymap[ci_key] = list()
self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
else:
if ci_key == self._ci_key(self._alias_keymap[ci_alias]): # passes if alias is already defined to the same key
pass
else:
raise KeyError('\'' + str(alias_to_add) + '\' already used as alias')
else:
if ci_key == self._ci_key(self._case_insensitive_keymap[ci_alias]): # passes if alias is already defined to the same key
pass
else:
raise KeyError('\'' + str(alias_to_add) + '\' already used as key')
else:
raise KeyError('\'' + str(ci_key) + '\' is not an existing key')
def remove_alias(self, alias):
if not isinstance(alias, SEQUENCE_TYPES):
alias = [alias]
for alias_to_remove in alias:
ci_alias = self._ci_key(alias_to_remove)
self._alias_keymap[self._aliases[ci_alias]].remove(ci_alias)
if not self._alias_keymap[self._aliases[ci_alias]]: # remove keymap if empty
del self._alias_keymap[self._aliases[ci_alias]]
del self._aliases[ci_alias]
def __getitem__(self, key):
try:
return CaseInsensitiveDict.__getitem__(self, key)
except KeyError:
return CaseInsensitiveDict.__getitem__(self, self._aliases[self._ci_key(key)])
def copy(self):
new = CaseInsensitiveWithAliasDict(self._store)
new._aliases = self._aliases.copy()
new._alias_keymap = self._alias_keymap
return new
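
# Illustrative sketch (not part of the module above): case-insensitive lookups and aliases
# using the classes defined in this file; keys and values are made up for the example.
cid = CaseInsensitiveDict()
cid['ObjectClass'] = ['top', 'person']
print(cid['objectclass'])          # -> ['top', 'person']: lookup ignores case and surrounding spaces
print(list(cid.keys()))            # -> ['ObjectClass']: the original key is preserved in the store

ciad = CaseInsensitiveWithAliasDict()
ciad[['surname', 'sn']] = 'Smith'  # a sequence key: the first element is the key, the rest become aliases
print(ciad['SN'])                  # -> 'Smith', resolved through the alias map
ciad.remove_alias('sn')            # 'sn' no longer resolves, 'surname' still does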


@@ -0,0 +1,292 @@
"""
"""
# Created on 2016.08.31
#
# Author: Giovanni Cannata
#
# Copyright 2013 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from sys import stdin, getdefaultencoding
from .. import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES, SEQUENCE_TYPES
from ..core.exceptions import LDAPConfigurationParameterError
# checks
_CLASSES_EXCLUDED_FROM_CHECK = ['subschema']
_ATTRIBUTES_EXCLUDED_FROM_CHECK = [ALL_ATTRIBUTES,
ALL_OPERATIONAL_ATTRIBUTES,
NO_ATTRIBUTES,
'ldapSyntaxes',
'matchingRules',
'matchingRuleUse',
'dITContentRules',
'dITStructureRules',
'nameForms',
'altServer',
'namingContexts',
'supportedControl',
'supportedExtension',
'supportedFeatures',
'supportedCapabilities',
'supportedLdapVersion',
'supportedSASLMechanisms',
'vendorName',
'vendorVersion',
'subschemaSubentry',
'ACL']
_UTF8_ENCODED_SYNTAXES = ['1.2.840.113556.1.4.904', # DN String [MICROSOFT]
'1.2.840.113556.1.4.1362', # String (Case) [MICROSOFT]
'1.3.6.1.4.1.1466.115.121.1.12', # DN String [RFC4517]
'1.3.6.1.4.1.1466.115.121.1.15', # Directory String [RFC4517]
'1.3.6.1.4.1.1466.115.121.1.41', # Postal Address [RFC4517]
'1.3.6.1.4.1.1466.115.121.1.58', # Substring Assertion [RFC4517]
'2.16.840.1.113719.1.1.5.1.6', # Case Ignore List [NOVELL]
'2.16.840.1.113719.1.1.5.1.14', # Tagged String [NOVELL]
'2.16.840.1.113719.1.1.5.1.15', # Tagged Name and String [NOVELL]
'2.16.840.1.113719.1.1.5.1.23', # Tagged Name [NOVELL]
'2.16.840.1.113719.1.1.5.1.25'] # Typed Name [NOVELL]
_UTF8_ENCODED_TYPES = []
_ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF = ['msds-memberOfTransitive', 'msds-memberTransitive', 'entryDN']
_IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF = ['instanceType', 'nTSecurityDescriptor', 'objectCategory']
_CASE_INSENSITIVE_ATTRIBUTE_NAMES = True
_CASE_INSENSITIVE_SCHEMA_NAMES = True
# abstraction layer
_ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX = 'OA_'
# communication
_POOLING_LOOP_TIMEOUT = 10 # number of seconds to wait before restarting a cycle to find an active server in the pool
_RESPONSE_SLEEPTIME = 0.05 # seconds to wait while waiting for a response in asynchronous strategies
_RESPONSE_WAITING_TIMEOUT = 3 # waiting timeout for receiving a response in asynchronous strategies
_SOCKET_SIZE = 4096 # socket byte size
_CHECK_AVAILABILITY_TIMEOUT = 2.5 # default timeout for socket connect when checking availability
_RESET_AVAILABILITY_TIMEOUT = 5 # default timeout for resetting the availability status when checking candidate addresses
_RESTARTABLE_SLEEPTIME = 2 # time to wait in a restartable strategy before retrying the request
_RESTARTABLE_TRIES = 30 # number of times to retry in a restartable strategy before giving up. Set to True for unlimited retries
_REUSABLE_THREADED_POOL_SIZE = 5
_REUSABLE_THREADED_LIFETIME = 3600 # 1 hour
_DEFAULT_THREADED_POOL_NAME = 'REUSABLE_DEFAULT_POOL'
_ADDRESS_INFO_REFRESH_TIME = 300 # seconds to wait before refreshing address info from dns
_ADDITIONAL_SERVER_ENCODINGS = ['latin-1', 'koi8-r'] # some broken LDAP implementations may use encodings other than those expected by the RFCs
_ADDITIONAL_CLIENT_ENCODINGS = ['utf-8']
_IGNORE_MALFORMED_SCHEMA = False # some flaky LDAP servers return a malformed schema. If True no exception is raised and the schema is thrown away
_DEFAULT_SERVER_ENCODING = 'utf-8' # should always be utf-8
if stdin and hasattr(stdin, 'encoding') and stdin.encoding:
_DEFAULT_CLIENT_ENCODING = stdin.encoding
elif getdefaultencoding():
_DEFAULT_CLIENT_ENCODING = getdefaultencoding()
else:
_DEFAULT_CLIENT_ENCODING = 'utf-8'
PARAMETERS = ['CASE_INSENSITIVE_ATTRIBUTE_NAMES',
'CASE_INSENSITIVE_SCHEMA_NAMES',
'ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX',
'POOLING_LOOP_TIMEOUT',
'RESPONSE_SLEEPTIME',
'RESPONSE_WAITING_TIMEOUT',
'SOCKET_SIZE',
'CHECK_AVAILABILITY_TIMEOUT',
'RESTARTABLE_SLEEPTIME',
'RESTARTABLE_TRIES',
'REUSABLE_THREADED_POOL_SIZE',
'REUSABLE_THREADED_LIFETIME',
'DEFAULT_THREADED_POOL_NAME',
'ADDRESS_INFO_REFRESH_TIME',
'RESET_AVAILABILITY_TIMEOUT',
'DEFAULT_CLIENT_ENCODING',
'DEFAULT_SERVER_ENCODING',
'CLASSES_EXCLUDED_FROM_CHECK',
'ATTRIBUTES_EXCLUDED_FROM_CHECK',
'UTF8_ENCODED_SYNTAXES',
'UTF8_ENCODED_TYPES',
'ADDITIONAL_SERVER_ENCODINGS',
'ADDITIONAL_CLIENT_ENCODINGS',
'IGNORE_MALFORMED_SCHEMA',
'ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF',
'IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF'
]
def get_config_parameter(parameter):
if parameter == 'CASE_INSENSITIVE_ATTRIBUTE_NAMES': # Boolean
return _CASE_INSENSITIVE_ATTRIBUTE_NAMES
elif parameter == 'CASE_INSENSITIVE_SCHEMA_NAMES': # Boolean
return _CASE_INSENSITIVE_SCHEMA_NAMES
elif parameter == 'ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX': # String
return _ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX
elif parameter == 'POOLING_LOOP_TIMEOUT': # Integer
return _POOLING_LOOP_TIMEOUT
elif parameter == 'RESPONSE_SLEEPTIME': # Integer
return _RESPONSE_SLEEPTIME
elif parameter == 'RESPONSE_WAITING_TIMEOUT': # Integer
return _RESPONSE_WAITING_TIMEOUT
elif parameter == 'SOCKET_SIZE': # Integer
return _SOCKET_SIZE
elif parameter == 'CHECK_AVAILABILITY_TIMEOUT': # Integer
return _CHECK_AVAILABILITY_TIMEOUT
elif parameter == 'RESTARTABLE_SLEEPTIME': # Integer
return _RESTARTABLE_SLEEPTIME
elif parameter == 'RESTARTABLE_TRIES': # Integer
return _RESTARTABLE_TRIES
elif parameter == 'REUSABLE_THREADED_POOL_SIZE': # Integer
return _REUSABLE_THREADED_POOL_SIZE
elif parameter == 'REUSABLE_THREADED_LIFETIME': # Integer
return _REUSABLE_THREADED_LIFETIME
elif parameter == 'DEFAULT_THREADED_POOL_NAME': # String
return _DEFAULT_THREADED_POOL_NAME
elif parameter == 'ADDRESS_INFO_REFRESH_TIME': # Integer
return _ADDRESS_INFO_REFRESH_TIME
elif parameter == 'RESET_AVAILABILITY_TIMEOUT': # Integer
return _RESET_AVAILABILITY_TIMEOUT
elif parameter in ['DEFAULT_CLIENT_ENCODING', 'DEFAULT_ENCODING']: # String - DEFAULT_ENCODING for backward compatibility
return _DEFAULT_CLIENT_ENCODING
elif parameter == 'DEFAULT_SERVER_ENCODING': # String
return _DEFAULT_SERVER_ENCODING
elif parameter == 'CLASSES_EXCLUDED_FROM_CHECK': # Sequence
if isinstance(_CLASSES_EXCLUDED_FROM_CHECK, SEQUENCE_TYPES):
return _CLASSES_EXCLUDED_FROM_CHECK
else:
return [_CLASSES_EXCLUDED_FROM_CHECK]
elif parameter == 'ATTRIBUTES_EXCLUDED_FROM_CHECK': # Sequence
if isinstance(_ATTRIBUTES_EXCLUDED_FROM_CHECK, SEQUENCE_TYPES):
return _ATTRIBUTES_EXCLUDED_FROM_CHECK
else:
return [_ATTRIBUTES_EXCLUDED_FROM_CHECK]
elif parameter == 'UTF8_ENCODED_SYNTAXES': # Sequence
if isinstance(_UTF8_ENCODED_SYNTAXES, SEQUENCE_TYPES):
return _UTF8_ENCODED_SYNTAXES
else:
return [_UTF8_ENCODED_SYNTAXES]
elif parameter == 'UTF8_ENCODED_TYPES': # Sequence
if isinstance(_UTF8_ENCODED_TYPES, SEQUENCE_TYPES):
return _UTF8_ENCODED_TYPES
else:
return [_UTF8_ENCODED_TYPES]
elif parameter in ['ADDITIONAL_SERVER_ENCODINGS', 'ADDITIONAL_ENCODINGS']: # Sequence - ADDITIONAL_ENCODINGS for backward compatibility
if isinstance(_ADDITIONAL_SERVER_ENCODINGS, SEQUENCE_TYPES):
return _ADDITIONAL_SERVER_ENCODINGS
else:
return [_ADDITIONAL_SERVER_ENCODINGS]
elif parameter in ['ADDITIONAL_CLIENT_ENCODINGS']: # Sequence
if isinstance(_ADDITIONAL_CLIENT_ENCODINGS, SEQUENCE_TYPES):
return _ADDITIONAL_CLIENT_ENCODINGS
else:
return [_ADDITIONAL_CLIENT_ENCODINGS]
elif parameter == 'IGNORE_MALFORMED_SCHEMA': # Boolean
return _IGNORE_MALFORMED_SCHEMA
elif parameter == 'ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF': # Sequence
if isinstance(_ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF, SEQUENCE_TYPES):
return _ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF
else:
return [_ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF]
elif parameter == 'IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF': # Sequence
if isinstance(_IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF, SEQUENCE_TYPES):
return _IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF
else:
return [_IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF]
raise LDAPConfigurationParameterError('configuration parameter %s not valid' % parameter)
def set_config_parameter(parameter, value):
if parameter == 'CASE_INSENSITIVE_ATTRIBUTE_NAMES':
global _CASE_INSENSITIVE_ATTRIBUTE_NAMES
_CASE_INSENSITIVE_ATTRIBUTE_NAMES = value
elif parameter == 'CASE_INSENSITIVE_SCHEMA_NAMES':
global _CASE_INSENSITIVE_SCHEMA_NAMES
_CASE_INSENSITIVE_SCHEMA_NAMES = value
elif parameter == 'ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX':
global _ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX
_ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX = value
elif parameter == 'POOLING_LOOP_TIMEOUT':
global _POOLING_LOOP_TIMEOUT
_POOLING_LOOP_TIMEOUT = value
elif parameter == 'RESPONSE_SLEEPTIME':
global _RESPONSE_SLEEPTIME
_RESPONSE_SLEEPTIME = value
elif parameter == 'RESPONSE_WAITING_TIMEOUT':
global _RESPONSE_WAITING_TIMEOUT
_RESPONSE_WAITING_TIMEOUT = value
elif parameter == 'SOCKET_SIZE':
global _SOCKET_SIZE
_SOCKET_SIZE = value
elif parameter == 'CHECK_AVAILABILITY_TIMEOUT':
global _CHECK_AVAILABILITY_TIMEOUT
_CHECK_AVAILABILITY_TIMEOUT = value
elif parameter == 'RESTARTABLE_SLEEPTIME':
global _RESTARTABLE_SLEEPTIME
_RESTARTABLE_SLEEPTIME = value
elif parameter == 'RESTARTABLE_TRIES':
global _RESTARTABLE_TRIES
_RESTARTABLE_TRIES = value
elif parameter == 'REUSABLE_THREADED_POOL_SIZE':
global _REUSABLE_THREADED_POOL_SIZE
_REUSABLE_THREADED_POOL_SIZE = value
elif parameter == 'REUSABLE_THREADED_LIFETIME':
global _REUSABLE_THREADED_LIFETIME
_REUSABLE_THREADED_LIFETIME = value
elif parameter == 'DEFAULT_THREADED_POOL_NAME':
global _DEFAULT_THREADED_POOL_NAME
_DEFAULT_THREADED_POOL_NAME = value
elif parameter == 'ADDRESS_INFO_REFRESH_TIME':
global _ADDRESS_INFO_REFRESH_TIME
_ADDRESS_INFO_REFRESH_TIME = value
elif parameter == 'RESET_AVAILABILITY_TIMEOUT':
global _RESET_AVAILABILITY_TIMEOUT
_RESET_AVAILABILITY_TIMEOUT = value
elif parameter in ['DEFAULT_CLIENT_ENCODING', 'DEFAULT_ENCODING']:
global _DEFAULT_CLIENT_ENCODING
_DEFAULT_CLIENT_ENCODING = value
elif parameter == 'DEFAULT_SERVER_ENCODING':
global _DEFAULT_SERVER_ENCODING
_DEFAULT_SERVER_ENCODING = value
elif parameter == 'CLASSES_EXCLUDED_FROM_CHECK':
global _CLASSES_EXCLUDED_FROM_CHECK
_CLASSES_EXCLUDED_FROM_CHECK = value
elif parameter == 'ATTRIBUTES_EXCLUDED_FROM_CHECK':
global _ATTRIBUTES_EXCLUDED_FROM_CHECK
_ATTRIBUTES_EXCLUDED_FROM_CHECK = value
elif parameter == 'UTF8_ENCODED_SYNTAXES':
global _UTF8_ENCODED_SYNTAXES
_UTF8_ENCODED_SYNTAXES = value
elif parameter == 'UTF8_ENCODED_TYPES':
global _UTF8_ENCODED_TYPES
_UTF8_ENCODED_TYPES = value
elif parameter in ['ADDITIONAL_SERVER_ENCODINGS', 'ADDITIONAL_ENCODINGS']:
global _ADDITIONAL_SERVER_ENCODINGS
_ADDITIONAL_SERVER_ENCODINGS = value if isinstance(value, SEQUENCE_TYPES) else [value]
elif parameter in ['ADDITIONAL_CLIENT_ENCODINGS']:
global _ADDITIONAL_CLIENT_ENCODINGS
_ADDITIONAL_CLIENT_ENCODINGS = value if isinstance(value, SEQUENCE_TYPES) else [value]
elif parameter == 'IGNORE_MALFORMED_SCHEMA':
global _IGNORE_MALFORMED_SCHEMA
_IGNORE_MALFORMED_SCHEMA = value
elif parameter == 'ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF':
global _ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF
_ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF = value
elif parameter == 'IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF':
global _IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF
_IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF = value
else:
raise LDAPConfigurationParameterError('unable to set configuration parameter %s' % parameter)
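# Illustrative usage sketch (not part of the library), assuming this module is importable as
# ldap3.utils.config: sequence parameters accept a scalar value on set and always come back
# wrapped in a list on get.
#   >>> from ldap3.utils.config import get_config_parameter, set_config_parameter
#   >>> set_config_parameter('ADDITIONAL_CLIENT_ENCODINGS', 'latin-1')
#   >>> get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS')
#   ['latin-1']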

View File

@ -0,0 +1,278 @@
"""
"""
# Created on 2014.04.26
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from base64 import b64encode, b64decode
import datetime
import re
from .. import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, get_config_parameter
from ..utils.ciDict import CaseInsensitiveDict
from ..core.exceptions import LDAPDefinitionError
def to_unicode(obj, encoding=None, from_server=False):
"""Try to convert bytes (and str in python2) to unicode.
Return object unmodified if python3 string, else raise an exception
"""
conf_default_client_encoding = get_config_parameter('DEFAULT_CLIENT_ENCODING')
conf_default_server_encoding = get_config_parameter('DEFAULT_SERVER_ENCODING')
conf_additional_server_encodings = get_config_parameter('ADDITIONAL_SERVER_ENCODINGS')
conf_additional_client_encodings = get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS')
if isinstance(obj, NUMERIC_TYPES):
obj = str(obj)
if isinstance(obj, (bytes, bytearray)):
if from_server: # data from server
if encoding is None:
encoding = conf_default_server_encoding
try:
return obj.decode(encoding)
except UnicodeDecodeError:
for encoding in conf_additional_server_encodings: # AD could have DN not encoded in utf-8 (even if this is not allowed by RFC4510)
try:
return obj.decode(encoding)
except UnicodeDecodeError:
pass
raise UnicodeError("Unable to convert server data to unicode: %r" % obj)
else: # data from client
if encoding is None:
encoding = conf_default_client_encoding
try:
return obj.decode(encoding)
except UnicodeDecodeError:
for encoding in conf_additional_client_encodings: # tries additional encodings
try:
return obj.decode(encoding)
except UnicodeDecodeError:
pass
raise UnicodeError("Unable to convert client data to unicode: %r" % obj)
if isinstance(obj, STRING_TYPES): # python3 strings, python 2 unicode
return obj
raise UnicodeError("Unable to convert type %s to unicode: %r" % (type(obj).__class__.__name__, obj))
def to_raw(obj, encoding='utf-8'):
"""Tries to convert to raw bytes from unicode"""
if isinstance(obj, NUMERIC_TYPES):
obj = str(obj)
if not (isinstance(obj, bytes)):
if isinstance(obj, SEQUENCE_TYPES):
return [to_raw(element) for element in obj]
elif isinstance(obj, STRING_TYPES):
return obj.encode(encoding)
return obj
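# Illustrative usage sketch (not part of the library): numbers and strings are encoded to
# bytes, sequences are converted element by element, bytes pass through unchanged.
#   >>> from ldap3.utils.conv import to_raw
#   >>> to_raw('café')
#   b'caf\xc3\xa9'
#   >>> to_raw(['a', 1, b'b'])
#   [b'a', b'1', b'b']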
def escape_filter_chars(text, encoding=None):
""" Escape chars mentioned in RFC4515. """
if encoding is None:
encoding = get_config_parameter('DEFAULT_ENCODING')
try:
text = to_unicode(text, encoding)
escaped = text.replace('\\', '\\5c')
escaped = escaped.replace('*', '\\2a')
escaped = escaped.replace('(', '\\28')
escaped = escaped.replace(')', '\\29')
escaped = escaped.replace('\x00', '\\00')
except Exception: # probably raw bytes values, return escaped bytes value
escaped = to_unicode(escape_bytes(text))
# escape all octets greater than 0x7F that are not part of a valid UTF-8
# escaped = ''.join(c if c <= ord(b'\x7f') else escape_bytes(to_raw(to_unicode(c, encoding))) for c in escaped)
return escaped
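# Illustrative usage sketch (not part of the library): characters with a special meaning in
# RFC 4515 search filters are replaced by their backslash-hex escapes.
#   >>> from ldap3.utils.conv import escape_filter_chars
#   >>> escape_filter_chars('(admin)*')
#   '\\28admin\\29\\2a'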
def unescape_filter_chars(text, encoding=None):
""" unescape chars mentioned in RFC4515. """
if encoding is None:
encoding = get_config_parameter('DEFAULT_ENCODING')
unescaped = to_raw(text, encoding)
unescaped = unescaped.replace(b'\\5c', b'\\')
unescaped = unescaped.replace(b'\\5C', b'\\')
unescaped = unescaped.replace(b'\\2a', b'*')
unescaped = unescaped.replace(b'\\2A', b'*')
unescaped = unescaped.replace(b'\\28', b'(')
unescaped = unescaped.replace(b'\\29', b')')
unescaped = unescaped.replace(b'\\00', b'\x00')
return unescaped
def escape_bytes(bytes_value):
""" Convert a byte sequence to a properly escaped for LDAP (format BACKSLASH HEX HEX) string"""
if bytes_value:
if str is not bytes: # Python 3
if isinstance(bytes_value, str):
bytes_value = bytearray(bytes_value, encoding='utf-8')
escaped = '\\'.join([('%02x' % int(b)) for b in bytes_value])
else: # Python 2
if isinstance(bytes_value, unicode):
bytes_value = bytes_value.encode('utf-8')
escaped = '\\'.join([('%02x' % ord(b)) for b in bytes_value])
else:
escaped = ''
return ('\\' + escaped) if escaped else ''
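# Illustrative usage sketch (not part of the library): every byte is rendered as
# BACKSLASH HEX HEX, the form expected for raw values in DNs and filters.
#   >>> from ldap3.utils.conv import escape_bytes
#   >>> escape_bytes(b'\x01\xff')
#   '\\01\\ff'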
def prepare_for_stream(value):
if str is not bytes: # Python 3
return value
else: # Python 2
return value.decode()
def json_encode_b64(obj):
try:
return dict(encoding='base64', encoded=b64encode(obj))
except Exception as e:
raise LDAPDefinitionError('unable to encode ' + str(obj) + ' - ' + str(e))
# noinspection PyProtectedMember
def check_json_dict(json_dict):
# needed for python 2
for k, v in json_dict.items():
if isinstance(v, dict):
check_json_dict(v)
elif isinstance(v, CaseInsensitiveDict):
check_json_dict(v._store)
elif isinstance(v, SEQUENCE_TYPES):
for i, e in enumerate(v):
if isinstance(e, dict):
check_json_dict(e)
elif isinstance(e, CaseInsensitiveDict):
check_json_dict(e._store)
else:
v[i] = format_json(e)
else:
json_dict[k] = format_json(v)
def json_hook(obj):
if hasattr(obj, 'keys') and len(list(obj.keys())) == 2 and 'encoding' in obj.keys() and 'encoded' in obj.keys():
return b64decode(obj['encoded'])
return obj
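# Illustrative usage sketch (not part of the library): json_encode_b64() wraps raw bytes in a
# small marker dict and json_hook() restores them when the dict is decoded.
#   >>> from ldap3.utils.conv import json_encode_b64, json_hook
#   >>> wrapped = json_encode_b64(b'\x01\x02')
#   >>> wrapped['encoding']
#   'base64'
#   >>> json_hook(wrapped)
#   b'\x01\x02'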
# noinspection PyProtectedMember
def format_json(obj):
if isinstance(obj, CaseInsensitiveDict):
return obj._store
if isinstance(obj, datetime.datetime):
return str(obj)
if isinstance(obj, int):
return obj
if str is bytes: # Python 2
if isinstance(obj, long): # long exists only in python2
return obj
try:
if str is not bytes: # Python 3
if isinstance(obj, bytes):
# return check_escape(str(obj, 'utf-8', errors='strict'))
return str(obj, 'utf-8', errors='strict')
raise LDAPDefinitionError('unable to serialize ' + str(obj))
else: # Python 2
if isinstance(obj, unicode):
return obj
else:
# return unicode(check_escape(obj))
return unicode(obj)
except (TypeError, UnicodeDecodeError):
pass
try:
return json_encode_b64(bytes(obj))
except Exception:
pass
raise LDAPDefinitionError('unable to serialize ' + str(obj))
def is_filter_escaped(text):
if not type(text) == ((str is not bytes) and str or unicode): # requires str for Python 3 and unicode for Python 2
raise ValueError('unicode input expected')
return all(c not in text for c in '()*\0') and not re.search('\\\\([^0-9a-fA-F]|(.[^0-9a-fA-F]))', text)
# def ldap_escape_to_bytes(text):
# bytesequence = bytearray()
# if text.startswith('\\'):
# byte_values = text.split('\\')
# for value in byte_values[1:]:
# if len(value) != 2 and not value.isdigit():
# raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
# bytesequence.append(int(value, 16))
# return bytes(bytesequence)
# raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
def ldap_escape_to_bytes(text):
bytesequence = bytearray()
i = 0
try:
if isinstance(text, STRING_TYPES):
while i < len(text):
if text[i] == '\\':
if len(text) > i + 2:
try:
bytesequence.append(int(text[i+1:i+3], 16))
i += 3
continue
except ValueError:
pass
bytesequence.append(92) # "\" ASCII code
else:
raw = to_raw(text[i])
for c in raw:
bytesequence.append(c)
i += 1
elif isinstance(text, (bytes, bytearray)):
while i < len(text):
if text[i] == 92: # "\" ASCII code
if len(text) > i + 2:
try:
bytesequence.append(int(text[i + 1:i + 3], 16))
i += 3
continue
except ValueError:
pass
bytesequence.append(92) # "\" ASCII code
else:
bytesequence.append(text[i])
i += 1
except Exception:
raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
return bytes(bytesequence)
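# Illustrative usage sketch (not part of the library): backslash-hex pairs are turned back
# into raw bytes while plain characters are encoded as-is.
#   >>> from ldap3.utils.conv import ldap_escape_to_bytes
#   >>> ldap_escape_to_bytes('\\04\\02abc')
#   b'\x04\x02abc'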

View File

@ -0,0 +1,375 @@
"""
"""
# Created on 2014.09.08
#
# Author: Giovanni Cannata
#
# Copyright 2014 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from string import hexdigits, ascii_letters, digits
from .. import SEQUENCE_TYPES
from ..core.exceptions import LDAPInvalidDnError
STATE_ANY = 0
STATE_ESCAPE = 1
STATE_ESCAPE_HEX = 2
def _add_ava(ava, decompose, remove_space, space_around_equal):
if not ava:
return ''
space = ' ' if space_around_equal else ''
attr_name, _, value = ava.partition('=')
if decompose:
if remove_space:
component = (attr_name.strip(), value.strip())
else:
component = (attr_name, value)
else:
if remove_space:
component = attr_name.strip() + space + '=' + space + value.strip()
else:
component = attr_name + space + '=' + space + value
return component
def to_dn(iterator, decompose=False, remove_space=False, space_around_equal=False, separate_rdn=False):
"""
Convert an iterator to a list of dn parts
if decompose=True return a list of tuple (one for each dn component) else return a list of strings
if remove_space=True removes unneeded spaces
if space_around_equal=True add spaces around equal in returned strings
if separate_rdn=True consider multiple RDNs as different component of DN
"""
dn = []
component = ''
escape_sequence = False
for c in iterator:
if c == '\\': # escape sequence
escape_sequence = True
elif escape_sequence and c != ' ':
escape_sequence = False
elif c == '+' and separate_rdn:
dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
component = ''
continue
elif c == ',':
if '=' in component:
dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
component = ''
continue
component += c
dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
return dn
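# Illustrative usage sketch (not part of the library), assuming this module is importable as
# ldap3.utils.dn: a DN string is split into its components, optionally decomposed into
# (type, value) tuples.
#   >>> from ldap3.utils.dn import to_dn
#   >>> to_dn('cn=user,ou=people,dc=example,dc=com')
#   ['cn=user', 'ou=people', 'dc=example', 'dc=com']
#   >>> to_dn('cn=user,dc=example,dc=com', decompose=True)
#   [('cn', 'user'), ('dc', 'example'), ('dc', 'com')]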
def _find_first_unescaped(dn, char, pos):
while True:
pos = dn.find(char, pos)
if pos == -1:
break # no char found
if pos > 0 and dn[pos - 1] != '\\': # unescaped char
break
pos += 1
return pos
def _find_last_unescaped(dn, char, start, stop=0):
while True:
stop = dn.rfind(char, start, stop)
if stop == -1:
break
if stop >= 0 and dn[stop - 1] != '\\':
break
if stop < start:
stop = -1
break
return stop
def _get_next_ava(dn):
comma = _find_first_unescaped(dn, ',', 0)
plus = _find_first_unescaped(dn, '+', 0)
if plus > 0 and (plus < comma or comma == -1):
equal = _find_first_unescaped(dn, '=', plus + 1)
if equal > plus + 1:
plus = _find_last_unescaped(dn, '+', plus, equal)
return dn[:plus], '+'
if comma > 0:
equal = _find_first_unescaped(dn, '=', comma + 1)
if equal > comma + 1:
comma = _find_last_unescaped(dn, ',', comma, equal)
return dn[:comma], ','
return dn, ''
def _split_ava(ava, escape=False, strip=True):
equal = ava.find('=')
while equal > 0: # not first character
if ava[equal - 1] != '\\': # not an escaped equal so it must be an ava separator
# attribute_type1 = ava[0:equal].strip() if strip else ava[0:equal]
if strip:
attribute_type = ava[0:equal].strip()
attribute_value = _escape_attribute_value(ava[equal + 1:].strip()) if escape else ava[equal + 1:].strip()
else:
attribute_type = ava[0:equal]
attribute_value = _escape_attribute_value(ava[equal + 1:]) if escape else ava[equal + 1:]
return attribute_type, attribute_value
equal = ava.find('=', equal + 1)
    return '', (ava.strip() if strip else ava)  # if no equal found return only the value
def _validate_attribute_type(attribute_type):
if not attribute_type:
raise LDAPInvalidDnError('attribute type not present')
if attribute_type == '<GUID': # patch for AD DirSync
return True
for c in attribute_type:
if not (c in ascii_letters or c in digits or c == '-'): # allowed uppercase and lowercase letters, digits and hyphen as per RFC 4512
raise LDAPInvalidDnError('character \'' + c + '\' not allowed in attribute type')
if attribute_type[0] in digits or attribute_type[0] == '-': # digits and hyphen not allowed as first character
raise LDAPInvalidDnError('character \'' + attribute_type[0] + '\' not allowed as first character of attribute type')
return True
def _validate_attribute_value(attribute_value):
if not attribute_value:
return False
    if attribute_value[0] == '#':  # only hex characters are valid after the leading SHARP
        for c in attribute_value[1:]:
            if c not in hexdigits:  # allowed only hex digits as per RFC 4514
                raise LDAPInvalidDnError('character ' + c + ' not allowed in hex representation of attribute value')
if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars)
raise LDAPInvalidDnError('hex representation must be in the form of <HEX><HEX> pairs')
if attribute_value[0] == ' ': # space cannot be used as first or last character
raise LDAPInvalidDnError('SPACE not allowed as first character of attribute value')
if attribute_value[-1] == ' ':
raise LDAPInvalidDnError('SPACE not allowed as last character of attribute value')
state = STATE_ANY
for c in attribute_value:
if state == STATE_ANY:
if c == '\\':
state = STATE_ESCAPE
elif c in '"#+,;<=>\00':
raise LDAPInvalidDnError('special characters ' + c + ' must be escaped')
elif state == STATE_ESCAPE:
if c in hexdigits:
state = STATE_ESCAPE_HEX
elif c in ' "#+,;<=>\\\00':
state = STATE_ANY
else:
raise LDAPInvalidDnError('invalid escaped character ' + c)
elif state == STATE_ESCAPE_HEX:
if c in hexdigits:
state = STATE_ANY
else:
raise LDAPInvalidDnError('invalid escaped character ' + c)
# final state
if state != STATE_ANY:
raise LDAPInvalidDnError('invalid final character')
return True
def _escape_attribute_value(attribute_value):
if not attribute_value:
return ''
if attribute_value[0] == '#': # with leading SHARP only pairs of hex characters are valid
valid_hex = True
if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars)
valid_hex = False
if valid_hex:
for c in attribute_value:
if c not in hexdigits: # allowed only hex digits as per RFC 4514
valid_hex = False
break
if valid_hex:
return attribute_value
state = STATE_ANY
escaped = ''
tmp_buffer = ''
for c in attribute_value:
if state == STATE_ANY:
if c == '\\':
state = STATE_ESCAPE
elif c in '"#+,;<=>\00':
escaped += '\\' + c
else:
escaped += c
elif state == STATE_ESCAPE:
if c in hexdigits:
tmp_buffer = c
state = STATE_ESCAPE_HEX
elif c in ' "#+,;<=>\\\00':
escaped += '\\' + c
state = STATE_ANY
else:
escaped += '\\\\' + c
elif state == STATE_ESCAPE_HEX:
if c in hexdigits:
escaped += '\\' + tmp_buffer + c
else:
escaped += '\\\\' + tmp_buffer + c
tmp_buffer = ''
state = STATE_ANY
# final state
if state == STATE_ESCAPE:
escaped += '\\\\'
elif state == STATE_ESCAPE_HEX:
escaped += '\\\\' + tmp_buffer
if escaped[0] == ' ': # leading SPACE must be escaped
escaped = '\\' + escaped
if escaped[-1] == ' ' and len(escaped) > 1 and escaped[-2] != '\\': # trailing SPACE must be escaped
escaped = escaped[:-1] + '\\ '
return escaped
def parse_dn(dn, escape=False, strip=True):
rdns = []
avas = []
while dn:
        ava, separator = _get_next_ava(dn)  # if the returned ava doesn't contain any unescaped equal sign it is appended to the last ava in avas
dn = dn[len(ava) + 1:]
if _find_first_unescaped(ava, '=', 0) > 0 or len(avas) == 0:
avas.append((ava, separator))
else:
avas[len(avas) - 1] = (avas[len(avas) - 1][0] + avas[len(avas) - 1][1] + ava, separator)
for ava, separator in avas:
attribute_type, attribute_value = _split_ava(ava, escape, strip)
if not _validate_attribute_type(attribute_type):
raise LDAPInvalidDnError('unable to validate attribute type in ' + ava)
if not _validate_attribute_value(attribute_value):
raise LDAPInvalidDnError('unable to validate attribute value in ' + ava)
rdns.append((attribute_type, attribute_value, separator))
dn = dn[len(ava) + 1:]
if not rdns:
raise LDAPInvalidDnError('empty dn')
return rdns
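# Illustrative usage sketch (not part of the library): each returned tuple is
# (attribute type, attribute value, separator towards the next component).
#   >>> from ldap3.utils.dn import parse_dn
#   >>> parse_dn('cn=John Doe,dc=example,dc=com')
#   [('cn', 'John Doe', ','), ('dc', 'example', ','), ('dc', 'com', '')]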
def safe_dn(dn, decompose=False, reverse=False):
"""
normalize and escape a dn, if dn is a sequence it is joined.
the reverse parameter changes the join direction of the sequence
"""
if isinstance(dn, SEQUENCE_TYPES):
components = [rdn for rdn in dn]
if reverse:
dn = ','.join(reversed(components))
else:
dn = ','.join(components)
if decompose:
escaped_dn = []
else:
escaped_dn = ''
if dn.startswith('<GUID=') and dn.endswith('>'): # Active Directory allows looking up objects by putting its GUID in a specially-formatted DN (e.g. '<GUID=7b95f0d5-a3ed-486c-919c-077b8c9731f2>')
escaped_dn = dn
    elif '@' not in dn and '\\' not in dn:  # an Active Directory UPN (User Principal Name) consists of an account, the at sign (@) and a domain; the domain level logon name is domain\username
for component in parse_dn(dn, escape=True):
if decompose:
escaped_dn.append((component[0], component[1], component[2]))
else:
escaped_dn += component[0] + '=' + component[1] + component[2]
elif '@' in dn and '=' not in dn and len(dn.split('@')) != 2:
raise LDAPInvalidDnError('Active Directory User Principal Name must consist of name@domain')
elif '\\' in dn and '=' not in dn and len(dn.split('\\')) != 2:
raise LDAPInvalidDnError('Active Directory Domain Level Logon Name must consist of name\\domain')
else:
escaped_dn = dn
return escaped_dn
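# Illustrative usage sketch (not part of the library): sequences are joined, spaces around the
# equal sign are stripped and special characters are escaped.
#   >>> from ldap3.utils.dn import safe_dn
#   >>> safe_dn(['cn=user', 'dc=example', 'dc=com'])
#   'cn=user,dc=example,dc=com'
#   >>> safe_dn('cn = user , dc = example,dc = com')
#   'cn=user,dc=example,dc=com'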
def safe_rdn(dn, decompose=False):
"""Returns a list of rdn for the dn, usually there is only one rdn, but it can be more than one when the + sign is used"""
escaped_rdn = []
one_more = True
for component in parse_dn(dn, escape=True):
if component[2] == '+' or one_more:
if decompose:
escaped_rdn.append((component[0], component[1]))
else:
escaped_rdn.append(component[0] + '=' + component[1])
if component[2] == '+':
one_more = True
else:
one_more = False
break
if one_more:
raise LDAPInvalidDnError('bad dn ' + str(dn))
return escaped_rdn
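# Illustrative usage sketch (not part of the library): only the first RDN is returned, and each
# AVA of a multi-valued RDN becomes a separate list item.
#   >>> from ldap3.utils.dn import safe_rdn
#   >>> safe_rdn('cn=user+sn=surname,dc=example,dc=com')
#   ['cn=user', 'sn=surname']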
def escape_rdn(rdn):
"""
Escape rdn characters to prevent injection according to RFC 4514.
"""
    # '\\' must be handled first or the escape backslashes would themselves be escaped!
for char in ['\\', ',', '+', '"', '<', '>', ';', '=', '\x00']:
rdn = rdn.replace(char, '\\' + char)
if rdn[0] == '#' or rdn[0] == ' ':
rdn = ''.join(('\\', rdn))
if rdn[-1] == ' ':
rdn = ''.join((rdn[:-1], '\\ '))
return rdn
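# Illustrative usage sketch (not part of the library): characters with a special meaning in a
# DN are prefixed with a backslash before the value is used in an RDN.
#   >>> from ldap3.utils.dn import escape_rdn
#   >>> escape_rdn('Parsons, James')
#   'Parsons\\, James'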

View File

@ -0,0 +1,94 @@
"""
"""
# Created on 2015.07.16
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from .. import HASHED_NONE, HASHED_MD5, HASHED_SALTED_MD5, HASHED_SALTED_SHA, HASHED_SALTED_SHA256, \
HASHED_SALTED_SHA384, HASHED_SALTED_SHA512, HASHED_SHA, HASHED_SHA256, HASHED_SHA384, HASHED_SHA512
import hashlib
from os import urandom
from base64 import b64encode
from ..core.exceptions import LDAPInvalidHashAlgorithmError
# each tuple: (the string to include between braces in the digest, the name of the algorithm to invoke with the new() function)
algorithms_table = {
HASHED_MD5: ('md5', 'MD5'),
HASHED_SHA: ('sha', 'SHA1'),
HASHED_SHA256: ('sha256', 'SHA256'),
HASHED_SHA384: ('sha384', 'SHA384'),
HASHED_SHA512: ('sha512', 'SHA512')
}
salted_table = {
HASHED_SALTED_MD5: ('smd5', HASHED_MD5),
HASHED_SALTED_SHA: ('ssha', HASHED_SHA),
HASHED_SALTED_SHA256: ('ssha256', HASHED_SHA256),
HASHED_SALTED_SHA384: ('ssha384', HASHED_SHA384),
HASHED_SALTED_SHA512: ('ssha512', HASHED_SHA512)
}
def hashed(algorithm, value, salt=None, raw=False, encoding='utf-8'):
if str is not bytes and not isinstance(value, bytes): # Python 3
value = value.encode(encoding)
if algorithm is None or algorithm == HASHED_NONE:
return value
# algorithm name can be already coded in the ldap3 constants or can be any value passed in the 'algorithm' parameter
if algorithm in algorithms_table:
try:
digest = hashlib.new(algorithms_table[algorithm][1], value).digest()
except ValueError:
raise LDAPInvalidHashAlgorithmError('Hash algorithm ' + str(algorithm) + ' not available')
if raw:
return digest
return ('{%s}' % algorithms_table[algorithm][0]) + b64encode(digest).decode('ascii')
elif algorithm in salted_table:
if not salt:
salt = urandom(8)
digest = hashed(salted_table[algorithm][1], value + salt, raw=True) + salt
if raw:
return digest
return ('{%s}' % salted_table[algorithm][0]) + b64encode(digest).decode('ascii')
else:
        # if an algorithm unknown to the library is requested, its name is used both as the string in braces and as the hashlib algorithm name
        # if a salt is present it is used to salt the digest
try:
if not salt:
digest = hashlib.new(algorithm, value).digest()
else:
digest = hashlib.new(algorithm, value + salt).digest() + salt
except ValueError:
raise LDAPInvalidHashAlgorithmError('Hash algorithm ' + str(algorithm) + ' not available')
if raw:
return digest
return ('{%s}' % algorithm) + b64encode(digest).decode('ascii')
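# Illustrative usage sketch (not part of the library): the constants come from the ldap3
# package, the module path ldap3.utils.hashed is assumed.
#   >>> from ldap3 import HASHED_SHA, HASHED_SALTED_SHA
#   >>> from ldap3.utils.hashed import hashed
#   >>> hashed(HASHED_SHA, 'password')
#   '{sha}W6ph5Mm5Pz8GgiULbPgzG37mj9g='
#   >>> hashed(HASHED_SALTED_SHA, 'password').startswith('{ssha}')  # salted digests embed a random 8-byte salt
#   True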

View File

@ -0,0 +1,211 @@
"""
"""
# Created on 2015.05.01
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from logging import getLogger, DEBUG
from copy import deepcopy
from pprint import pformat
from ..protocol.rfc4511 import LDAPMessage
# logging levels
OFF = 0
ERROR = 10
BASIC = 20
PROTOCOL = 30
NETWORK = 40
EXTENDED = 50
_sensitive_lines = ('simple', 'credentials', 'serversaslcreds') # must be a tuple, not a list, lowercase
_sensitive_args = ('simple', 'password', 'sasl_credentials', 'saslcreds', 'server_creds')
_sensitive_attrs = ('userpassword', 'unicodepwd')
_hide_sensitive_data = None
DETAIL_LEVELS = [OFF, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED]
_max_line_length = 4096
_logging_level = None
_detail_level = None
_logging_encoding = 'ascii'
try:
from logging import NullHandler
except ImportError: # NullHandler not present in Python < 2.7
from logging import Handler
class NullHandler(Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
def _strip_sensitive_data_from_dict(d):
if not isinstance(d, dict):
return d
try:
d = deepcopy(d)
except Exception: # if deepcopy goes wrong gives up and returns the dict unchanged
return d
for k in d.keys():
if isinstance(d[k], dict):
d[k] = _strip_sensitive_data_from_dict(d[k])
elif k.lower() in _sensitive_args and d[k]:
d[k] = '<stripped %d characters of sensitive data>' % len(d[k])
return d
def get_detail_level_name(level_name):
if level_name == OFF:
return 'OFF'
elif level_name == ERROR:
return 'ERROR'
elif level_name == BASIC:
return 'BASIC'
elif level_name == PROTOCOL:
return 'PROTOCOL'
elif level_name == NETWORK:
return 'NETWORK'
elif level_name == EXTENDED:
return 'EXTENDED'
raise ValueError('unknown detail level')
def log(detail, message, *args):
if detail <= _detail_level:
if _hide_sensitive_data:
args = tuple([_strip_sensitive_data_from_dict(arg) if isinstance(arg, dict) else arg for arg in args])
encoded_message = (get_detail_level_name(detail) + ':' + message % args).encode(_logging_encoding, 'backslashreplace')
if str is not bytes: # Python 3
encoded_message = encoded_message.decode()
if len(encoded_message) > _max_line_length:
logger.log(_logging_level, encoded_message[:_max_line_length] + ' <removed %d remaining bytes in this log line>' % (len(encoded_message) - _max_line_length, ))
else:
logger.log(_logging_level, encoded_message)
def log_enabled(detail):
if detail <= _detail_level:
if logger.isEnabledFor(_logging_level):
return True
return False
def set_library_log_hide_sensitive_data(hide=True):
global _hide_sensitive_data
if hide:
_hide_sensitive_data = True
else:
_hide_sensitive_data = False
if log_enabled(ERROR):
log(ERROR, 'hide sensitive data set to ' + str(_hide_sensitive_data))
def get_library_log_hide_sensitive_data():
return True if _hide_sensitive_data else False
def set_library_log_activation_level(logging_level):
if isinstance(logging_level, int):
global _logging_level
_logging_level = logging_level
else:
if log_enabled(ERROR):
log(ERROR, 'invalid library log activation level <%s> ', logging_level)
raise ValueError('invalid library log activation level')
def get_library_log_activation_level():
return _logging_level
def set_library_log_max_line_length(length):
if isinstance(length, int):
global _max_line_length
_max_line_length = length
else:
if log_enabled(ERROR):
log(ERROR, 'invalid log max line length <%s> ', length)
raise ValueError('invalid library log max line length')
def get_library_log_max_line_length():
return _max_line_length
def set_library_log_detail_level(detail):
if detail in DETAIL_LEVELS:
global _detail_level
_detail_level = detail
if log_enabled(ERROR):
log(ERROR, 'detail level set to ' + get_detail_level_name(_detail_level))
else:
if log_enabled(ERROR):
log(ERROR, 'unable to set log detail level to <%s>', detail)
raise ValueError('invalid library log detail level')
def get_library_log_detail_level():
return _detail_level
def format_ldap_message(message, prefix):
if isinstance(message, LDAPMessage):
try: # pyasn1 prettyprint raises exception in version 0.4.3
formatted = message.prettyPrint().split('\n') # pyasn1 pretty print
except Exception as e:
formatted = ['pyasn1 exception', str(e)]
else:
formatted = pformat(message).split('\n')
prefixed = ''
for line in formatted:
if line:
if _hide_sensitive_data and line.strip().lower().startswith(_sensitive_lines): # _sensitive_lines is a tuple. startswith() method checks each tuple element
tag, _, data = line.partition('=')
if data.startswith("b'") and data.endswith("'") or data.startswith('b"') and data.endswith('"'):
prefixed += '\n' + prefix + tag + '=<stripped %d characters of sensitive data>' % (len(data) - 3, )
else:
prefixed += '\n' + prefix + tag + '=<stripped %d characters of sensitive data>' % len(data)
else:
prefixed += '\n' + prefix + line
return prefixed
# sets a logger for the library with NullHandler. It can be used by the application with its own logging configuration
logger = getLogger('ldap3')
logger.addHandler(NullHandler())
# sets defaults for the library logging
set_library_log_activation_level(DEBUG)
set_library_log_detail_level(OFF)
set_library_log_hide_sensitive_data(True)
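# Illustrative usage sketch (not part of the library), module path assumed to be
# ldap3.utils.log: an application must both enable the standard logging machinery at the
# activation level (DEBUG by default) and raise the library detail level to see log lines.
#   >>> import logging
#   >>> from ldap3.utils.log import set_library_log_detail_level, BASIC
#   >>> logging.basicConfig(filename='client.log', level=logging.DEBUG)
#   >>> set_library_log_detail_level(BASIC)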

View File

@ -0,0 +1,497 @@
"""
"""
# Created on 2015.04.02
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
# NTLMv2 authentication as per [MS-NLMP] (https://msdn.microsoft.com/en-us/library/cc236621.aspx)
from struct import pack, unpack
from platform import system, version
from socket import gethostname
from time import time
import hmac
import hashlib
import binascii
from os import urandom
try:
from locale import getpreferredencoding
oem_encoding = getpreferredencoding()
except Exception:
oem_encoding = 'utf-8'
from ..protocol.formatters.formatters import format_ad_timestamp
NTLM_SIGNATURE = b'NTLMSSP\x00'
NTLM_MESSAGE_TYPE_NTLM_NEGOTIATE = 1
NTLM_MESSAGE_TYPE_NTLM_CHALLENGE = 2
NTLM_MESSAGE_TYPE_NTLM_AUTHENTICATE = 3
FLAG_NEGOTIATE_56 = 31 # W
FLAG_NEGOTIATE_KEY_EXCH = 30 # V
FLAG_NEGOTIATE_128 = 29 # U
FLAG_NEGOTIATE_VERSION = 25 # T
FLAG_NEGOTIATE_TARGET_INFO = 23 # S
FLAG_REQUEST_NOT_NT_SESSION_KEY = 22 # R
FLAG_NEGOTIATE_IDENTIFY = 20 # Q
FLAG_NEGOTIATE_EXTENDED_SESSIONSECURITY = 19 # P
FLAG_TARGET_TYPE_SERVER = 17 # O
FLAG_TARGET_TYPE_DOMAIN = 16 # N
FLAG_NEGOTIATE_ALWAYS_SIGN = 15 # M
FLAG_NEGOTIATE_OEM_WORKSTATION_SUPPLIED = 13 # L
FLAG_NEGOTIATE_OEM_DOMAIN_SUPPLIED = 12 # K
FLAG_NEGOTIATE_ANONYMOUS = 11 # J
FLAG_NEGOTIATE_NTLM = 9 # H
FLAG_NEGOTIATE_LM_KEY = 7 # G
FLAG_NEGOTIATE_DATAGRAM = 6 # F
FLAG_NEGOTIATE_SEAL = 5 # E
FLAG_NEGOTIATE_SIGN = 4 # D
FLAG_REQUEST_TARGET = 2 # C
FLAG_NEGOTIATE_OEM = 1 # B
FLAG_NEGOTIATE_UNICODE = 0 # A
FLAG_TYPES = [FLAG_NEGOTIATE_56,
FLAG_NEGOTIATE_KEY_EXCH,
FLAG_NEGOTIATE_128,
FLAG_NEGOTIATE_VERSION,
FLAG_NEGOTIATE_TARGET_INFO,
FLAG_REQUEST_NOT_NT_SESSION_KEY,
FLAG_NEGOTIATE_IDENTIFY,
FLAG_NEGOTIATE_EXTENDED_SESSIONSECURITY,
FLAG_TARGET_TYPE_SERVER,
FLAG_TARGET_TYPE_DOMAIN,
FLAG_NEGOTIATE_ALWAYS_SIGN,
FLAG_NEGOTIATE_OEM_WORKSTATION_SUPPLIED,
FLAG_NEGOTIATE_OEM_DOMAIN_SUPPLIED,
FLAG_NEGOTIATE_ANONYMOUS,
FLAG_NEGOTIATE_NTLM,
FLAG_NEGOTIATE_LM_KEY,
FLAG_NEGOTIATE_DATAGRAM,
FLAG_NEGOTIATE_SEAL,
FLAG_NEGOTIATE_SIGN,
FLAG_REQUEST_TARGET,
FLAG_NEGOTIATE_OEM,
FLAG_NEGOTIATE_UNICODE]
AV_END_OF_LIST = 0
AV_NETBIOS_COMPUTER_NAME = 1
AV_NETBIOS_DOMAIN_NAME = 2
AV_DNS_COMPUTER_NAME = 3
AV_DNS_DOMAIN_NAME = 4
AV_DNS_TREE_NAME = 5
AV_FLAGS = 6
AV_TIMESTAMP = 7
AV_SINGLE_HOST_DATA = 8
AV_TARGET_NAME = 9
AV_CHANNEL_BINDINGS = 10
AV_TYPES = [AV_END_OF_LIST,
AV_NETBIOS_COMPUTER_NAME,
AV_NETBIOS_DOMAIN_NAME,
AV_DNS_COMPUTER_NAME,
AV_DNS_DOMAIN_NAME,
AV_DNS_TREE_NAME,
AV_FLAGS,
AV_TIMESTAMP,
AV_SINGLE_HOST_DATA,
AV_TARGET_NAME,
AV_CHANNEL_BINDINGS]
AV_FLAG_CONSTRAINED = 0
AV_FLAG_INTEGRITY = 1
AV_FLAG_TARGET_SPN_UNTRUSTED = 2
AV_FLAG_TYPES = [AV_FLAG_CONSTRAINED,
AV_FLAG_INTEGRITY,
AV_FLAG_TARGET_SPN_UNTRUSTED]
def pack_windows_version(debug=False):
if debug:
if system().lower() == 'windows':
try:
major_release, minor_release, build = version().split('.')
major_release = int(major_release)
minor_release = int(minor_release)
build = int(build)
except Exception:
major_release = 5
minor_release = 1
build = 2600
else:
major_release = 5
minor_release = 1
build = 2600
else:
major_release = 0
minor_release = 0
build = 0
return pack('<B', major_release) + \
pack('<B', minor_release) + \
pack('<H', build) + \
pack('<B', 0) + \
pack('<B', 0) + \
pack('<B', 0) + \
pack('<B', 15)
def unpack_windows_version(version_message):
if len(version_message) != 8:
raise ValueError('version field must be 8 bytes long')
if str is bytes: # Python 2
return (unpack('<B', version_message[0])[0],
unpack('<B', version_message[1])[0],
unpack('<H', version_message[2:4])[0],
unpack('<B', version_message[7])[0])
else: # Python 3
return (int(version_message[0]),
int(version_message[1]),
int(unpack('<H', version_message[2:4])[0]),
int(version_message[7]))
class NtlmClient(object):
def __init__(self, domain, user_name, password):
self.client_config_flags = 0
self.exported_session_key = None
self.negotiated_flags = None
self.user_name = user_name
self.user_domain = domain
self.no_lm_response_ntlm_v1 = None
self.client_blocked = False
self.client_block_exceptions = []
self.client_require_128_bit_encryption = None
self.max_life_time = None
self.client_signing_key = None
self.client_sealing_key = None
self.sequence_number = None
self.server_sealing_key = None
self.server_signing_key = None
self.integrity = False
self.replay_detect = False
self.sequence_detect = False
self.confidentiality = False
self.datagram = False
self.identity = False
self.client_supplied_target_name = None
self.client_channel_binding_unhashed = None
self.unverified_target_name = None
self._password = password
self.server_challenge = None
self.server_target_name = None
self.server_target_info = None
self.server_version = None
self.server_av_netbios_computer_name = None
self.server_av_netbios_domain_name = None
self.server_av_dns_computer_name = None
self.server_av_dns_domain_name = None
self.server_av_dns_forest_name = None
self.server_av_target_name = None
self.server_av_flags = None
self.server_av_timestamp = None
self.server_av_single_host_data = None
self.server_av_channel_bindings = None
self.server_av_flag_constrained = None
self.server_av_flag_integrity = None
self.server_av_flag_target_spn_untrusted = None
self.current_encoding = None
self.client_challenge = None
self.server_target_info_raw = None
def get_client_flag(self, flag):
if not self.client_config_flags:
return False
if flag in FLAG_TYPES:
return True if self.client_config_flags & (1 << flag) else False
raise ValueError('invalid flag')
def get_negotiated_flag(self, flag):
if not self.negotiated_flags:
return False
if flag not in FLAG_TYPES:
raise ValueError('invalid flag')
return True if self.negotiated_flags & (1 << flag) else False
def get_server_av_flag(self, flag):
if not self.server_av_flags:
return False
if flag not in AV_FLAG_TYPES:
raise ValueError('invalid AV flag')
return True if self.server_av_flags & (1 << flag) else False
def set_client_flag(self, flags):
if type(flags) == int:
flags = [flags]
for flag in flags:
if flag in FLAG_TYPES:
self.client_config_flags |= (1 << flag)
else:
raise ValueError('invalid flag')
def reset_client_flags(self):
self.client_config_flags = 0
def unset_client_flag(self, flags):
if type(flags) == int:
flags = [flags]
for flag in flags:
if flag in FLAG_TYPES:
self.client_config_flags &= ~(1 << flag)
else:
raise ValueError('invalid flag')
def create_negotiate_message(self):
"""
Microsoft MS-NLMP 2.2.1.1
"""
self.reset_client_flags()
self.set_client_flag([FLAG_REQUEST_TARGET,
FLAG_NEGOTIATE_56,
FLAG_NEGOTIATE_128,
FLAG_NEGOTIATE_NTLM,
FLAG_NEGOTIATE_ALWAYS_SIGN,
FLAG_NEGOTIATE_OEM,
FLAG_NEGOTIATE_UNICODE,
FLAG_NEGOTIATE_EXTENDED_SESSIONSECURITY])
message = NTLM_SIGNATURE # 8 bytes
message += pack('<I', NTLM_MESSAGE_TYPE_NTLM_NEGOTIATE) # 4 bytes
message += pack('<I', self.client_config_flags) # 4 bytes
message += self.pack_field('', 40) # domain name field # 8 bytes
if self.get_client_flag(FLAG_NEGOTIATE_VERSION): # version 8 bytes - used for debug in ntlm
message += pack_windows_version(True)
else:
message += pack_windows_version(False)
return message
def parse_challenge_message(self, message):
"""
Microsoft MS-NLMP 2.2.1.2
"""
if len(message) < 56: # minimum size of challenge message
return False
if message[0:8] != NTLM_SIGNATURE: # NTLM signature - 8 bytes
return False
if int(unpack('<I', message[8:12])[0]) != NTLM_MESSAGE_TYPE_NTLM_CHALLENGE: # type of message - 4 bytes
return False
target_name_len, _, target_name_offset = self.unpack_field(message[12:20]) # targetNameFields - 8 bytes
self.negotiated_flags = unpack('<I', message[20:24])[0] # negotiated flags - 4 bytes
self.current_encoding = 'utf-16-le' if self.get_negotiated_flag(
FLAG_NEGOTIATE_UNICODE) else oem_encoding # set encoding
self.server_challenge = message[24:32] # server challenge - 8 bytes
target_info_len, _, target_info_offset = self.unpack_field(message[40:48]) # targetInfoFields - 8 bytes
self.server_version = unpack_windows_version(message[48:56])
if self.get_negotiated_flag(FLAG_REQUEST_TARGET) and target_name_len:
self.server_target_name = message[target_name_offset: target_name_offset + target_name_len].decode(
self.current_encoding)
if self.get_negotiated_flag(FLAG_NEGOTIATE_TARGET_INFO) and target_info_len:
self.server_target_info_raw = message[target_info_offset: target_info_offset + target_info_len]
self.server_target_info = self.unpack_av_info(self.server_target_info_raw)
for attribute, value in self.server_target_info:
if attribute == AV_NETBIOS_COMPUTER_NAME:
self.server_av_netbios_computer_name = value.decode('utf-16-le') # always unicode
elif attribute == AV_NETBIOS_DOMAIN_NAME:
self.server_av_netbios_domain_name = value.decode('utf-16-le') # always unicode
elif attribute == AV_DNS_COMPUTER_NAME:
self.server_av_dns_computer_name = value.decode('utf-16-le') # always unicode
elif attribute == AV_DNS_DOMAIN_NAME:
self.server_av_dns_domain_name = value.decode('utf-16-le') # always unicode
elif attribute == AV_DNS_TREE_NAME:
self.server_av_dns_forest_name = value.decode('utf-16-le') # always unicode
elif attribute == AV_FLAGS:
if self.get_server_av_flag(AV_FLAG_CONSTRAINED):
self.server_av_flag_constrained = True
if self.get_server_av_flag(AV_FLAG_INTEGRITY):
self.server_av_flag_integrity = True
if self.get_server_av_flag(AV_FLAG_TARGET_SPN_UNTRUSTED):
self.server_av_flag_target_spn_untrusted = True
elif attribute == AV_TIMESTAMP:
self.server_av_timestamp = format_ad_timestamp(unpack('<Q', value)[0])
elif attribute == AV_SINGLE_HOST_DATA:
self.server_av_single_host_data = value
elif attribute == AV_TARGET_NAME:
self.server_av_target_name = value.decode('utf-16-le') # always unicode
elif attribute == AV_CHANNEL_BINDINGS:
self.server_av_channel_bindings = value
else:
raise ValueError('unknown AV type')
def create_authenticate_message(self):
"""
Microsoft MS-NLMP 2.2.1.3
"""
# 3.1.5.2
if not self.client_config_flags and not self.negotiated_flags:
return False
# 3.1.5.2
if self.get_client_flag(FLAG_NEGOTIATE_128) and not self.get_negotiated_flag(FLAG_NEGOTIATE_128):
return False
# 3.1.5.2
if (not self.server_av_netbios_computer_name or not self.server_av_netbios_domain_name) and self.server_av_flag_integrity:
return False
message = NTLM_SIGNATURE # 8 bytes
message += pack('<I', NTLM_MESSAGE_TYPE_NTLM_AUTHENTICATE) # 4 bytes
pos = 88 # payload starts at 88
# 3.1.5.2
if self.server_target_info:
lm_challenge_response = b''
else:
# computed LmChallengeResponse - todo
lm_challenge_response = b''
message += self.pack_field(lm_challenge_response, pos) # LmChallengeResponseField field # 8 bytes
pos += len(lm_challenge_response)
nt_challenge_response = self.compute_nt_response()
message += self.pack_field(nt_challenge_response, pos) # NtChallengeResponseField field # 8 bytes
pos += len(nt_challenge_response)
domain_name = self.user_domain.encode(self.current_encoding)
message += self.pack_field(domain_name, pos) # DomainNameField field # 8 bytes
pos += len(domain_name)
user_name = self.user_name.encode(self.current_encoding)
message += self.pack_field(user_name, pos) # UserNameField field # 8 bytes
pos += len(user_name)
if self.get_negotiated_flag(FLAG_NEGOTIATE_OEM_WORKSTATION_SUPPLIED) or self.get_negotiated_flag(
FLAG_NEGOTIATE_VERSION):
workstation = gethostname().encode(self.current_encoding)
else:
workstation = b''
message += self.pack_field(workstation, pos) # empty WorkstationField field # 8 bytes
pos += len(workstation)
encrypted_random_session_key = b''
message += self.pack_field(encrypted_random_session_key, pos) # EncryptedRandomSessionKeyField field # 8 bytes
pos += len(encrypted_random_session_key)
message += pack('<I', self.negotiated_flags) # negotiated flags - 4 bytes
if self.get_negotiated_flag(FLAG_NEGOTIATE_VERSION):
message += pack_windows_version(True) # windows version - 8 bytes
else:
message += pack_windows_version() # empty windows version - 8 bytes
message += pack('<Q', 0) # mic
message += pack('<Q', 0) # mic - total of 16 bytes
# payload starts at 88
message += lm_challenge_response
message += nt_challenge_response
message += domain_name
message += user_name
message += workstation
message += encrypted_random_session_key
return message
@staticmethod
def pack_field(value, offset):
return pack('<HHI', len(value), len(value), offset)
@staticmethod
def unpack_field(field_message):
if len(field_message) != 8:
raise ValueError('ntlm field must be 8 bytes long')
return unpack('<H', field_message[0:2])[0], \
unpack('<H', field_message[2:4])[0], \
unpack('<I', field_message[4:8])[0]
@staticmethod
def unpack_av_info(info):
if info:
avs = list()
done = False
pos = 0
while not done:
av_type = unpack('<H', info[pos: pos + 2])[0]
if av_type not in AV_TYPES:
raise ValueError('unknown AV type')
av_len = unpack('<H', info[pos + 2: pos + 4])[0]
av_value = info[pos + 4: pos + 4 + av_len]
pos += av_len + 4
if av_type == AV_END_OF_LIST:
done = True
else:
avs.append((av_type, av_value))
else:
return list()
return avs
@staticmethod
def pack_av_info(avs):
# avs is a list of tuples, each tuple is made of av_type and av_value
info = b''
for av_type, av_value in avs:
            if av_type == AV_END_OF_LIST:
continue
info += pack('<H', av_type)
info += pack('<H', len(av_value))
info += av_value
# add AV_END_OF_LIST
info += pack('<H', AV_END_OF_LIST)
info += pack('<H', 0)
return info
@staticmethod
def pack_windows_timestamp():
return pack('<Q', (int(time()) + 11644473600) * 10000000)
def compute_nt_response(self):
if not self.user_name and not self._password: # anonymous authentication
return b''
self.client_challenge = urandom(8)
temp = b''
temp += pack('<B', 1) # ResponseVersion - 1 byte
temp += pack('<B', 1) # HiResponseVersion - 1 byte
temp += pack('<H', 0) # Z(2)
temp += pack('<I', 0) # Z(4) - total Z(6)
temp += self.pack_windows_timestamp() # time - 8 bytes
temp += self.client_challenge # random client challenge - 8 bytes
temp += pack('<I', 0) # Z(4)
temp += self.server_target_info_raw
temp += pack('<I', 0) # Z(4)
response_key_nt = self.ntowf_v2()
        nt_proof_str = hmac.new(response_key_nt, self.server_challenge + temp, digestmod=hashlib.md5).digest()  # NTLMv2 NTProofStr is an HMAC-MD5; the explicit digestmod is required on recent Python versions
nt_challenge_response = nt_proof_str + temp
return nt_challenge_response
def ntowf_v2(self):
passparts = self._password.split(':')
if len(passparts) == 2 and len(passparts[0]) == 32 and len(passparts[1]) == 32:
# The specified password is an LM:NTLM hash
password_digest = binascii.unhexlify(passparts[1])
else:
password_digest = hashlib.new('MD4', self._password.encode('utf-16-le')).digest()
        return hmac.new(password_digest, (self.user_name.upper() + self.user_domain).encode('utf-16-le'), digestmod=hashlib.md5).digest()  # NTOWFv2 is an HMAC-MD5 keyed with the MD4 hash of the password
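# Illustrative handshake sketch (not part of the library): the three NTLM messages handled by
# NtlmClient during a bind; server_challenge_blob is a placeholder for the type 2 message
# returned by the server, not a real value.
#   >>> client = NtlmClient(domain='EXAMPLE', user_name='user', password='Secret01')
#   >>> negotiate = client.create_negotiate_message()          # type 1, sent to the server
#   >>> client.parse_challenge_message(server_challenge_blob)  # type 2, parsed from the server reply
#   >>> authenticate = client.create_authenticate_message()    # type 3, sent back to the server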
