mirror of https://github.com/tp4a/teleport
Build on CentOS 7 and test. Upgrade to libssh 0.8.5. Upgrade all Python modules (PIL/Pillow, etc.).
parent 5824f55722
commit 6ffa716e4b
@@ -51,6 +51,8 @@ __pycache__
/external/libuv
/client/tools/putty
/client/tools/winscp
/server/**/test/
/server/**/tests/

# for dist folder
**/_tmp_
@@ -0,0 +1,29 @@
cmake_minimum_required(VERSION 3.5)

MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
MESSAGE(STATUS "root source directory is ${PROJECT_SOURCE_DIR}")
MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${PROJECT_SOURCE_DIR}/out/server/x64/bin")
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${PROJECT_SOURCE_DIR}/out/server/x64/bin")

set(CMAKE_CONFIGURATION_TYPES Debug Release)

# Determine the platform.
if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
    MESSAGE(STATUS "build on macOS...")
    set(OS_MACOS 1)
    set(OS_POSIX 1)
    set(TP_EXTERNAL_RELEASE_DIR "${PROJECT_SOURCE_DIR}/external/macos/release")
elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Linux")
    set(OS_LINUX 1)
    set(OS_POSIX 1)
    MESSAGE(STATUS "build on Linux...")
    # add_subdirectory(server/tp_web/src)
    set(TP_EXTERNAL_RELEASE_DIR "${PROJECT_SOURCE_DIR}/external/linux/release")
elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Windows")
    # MESSAGE(FATAL_ERROR "unsupported platform: Windows")
else ()
    MESSAGE(FATAL_ERROR "unsupported platform: ${CMAKE_SYSTEM_NAME}")
endif ()
@@ -1,31 +1,37 @@
cmake_minimum_required(VERSION 3.5)
project(teleport)

MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
include(CMakeCfg.txt)

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/out/server/x64/bin")
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/out/server/x64/bin")
#MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
#MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
#
#set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/out/server/x64/bin")
#set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/out/server/x64/bin")
#
#set(CMAKE_CONFIGURATION_TYPES Debug Release)
#
## Determine the platform.
#if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
#    MESSAGE(STATUS "build on macOS...")
#    set(OS_MACOS 1)
#    set(OS_POSIX 1)
#    set(TP_EXTERNAL_RELEASE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/macos/release")
#elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Linux")
#    set(OS_LINUX 1)
#    set(OS_POSIX 1)
#    MESSAGE(STATUS "build on Linux...")
#    add_subdirectory(server/tp_web/src)
#    set(TP_EXTERNAL_RELEASE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/linux/release")
#elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Windows")
#    # MESSAGE(FATAL_ERROR "unsupported platform: Windows")
#else ()
#    MESSAGE(FATAL_ERROR "unsupported platform: ${CMAKE_SYSTEM_NAME}")
#endif ()

set(CMAKE_CONFIGURATION_TYPES Debug Release)

# Determine the platform.
if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
    MESSAGE(STATUS "build on macOS...")
    set(OS_MACOS 1)
    set(OS_POSIX 1)
    set(TP_EXTERNAL_RELEASE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/macos/release")
elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Linux")
    set(OS_LINUX 1)
    set(OS_POSIX 1)
    MESSAGE(STATUS "build on Linux...")
    if (OS_LINUX)
        add_subdirectory(server/tp_web/src)
        set(TP_EXTERNAL_RELEASE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/linux/release")
    elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Windows")
        # MESSAGE(FATAL_ERROR "unsupported platform: Windows")
    else ()
        MESSAGE(FATAL_ERROR "unsupported platform: ${CMAKE_SYSTEM_NAME}")
    endif ()
endif()

add_subdirectory(server/tp_core/core)
add_subdirectory(server/tp_core/protocol/ssh)
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <content url="file://$MODULE_DIR$/builder" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
@@ -46,7 +46,7 @@ class PYSBase:
        utils.sys_exec('{} -m pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pip --upgrade'.format(env.py_exec))

        pip = self._get_pip()
        pypi_modules = ['mako', 'pymysql', 'qrcode', 'tornado', 'wheezy.captcha', 'Pillow', 'psutil']
        pypi_modules = ['ldap3', 'mako', 'Pillow', 'psutil', 'pymysql', 'qrcode', 'tornado', 'wheezy.captcha']
        for p in pypi_modules:
            cc.n('install {} ...'.format(p))
            utils.sys_exec('{} install -i https://pypi.tuna.tsinghua.edu.cn/simple {}'.format(pip, p), direct_output=True)
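A standalone sketch of what this builder step does, using only the standard library (utils.sys_exec and cc.n are project helpers; the module list is taken from the new line above):

    import subprocess
    import sys

    MIRROR = 'https://pypi.tuna.tsinghua.edu.cn/simple'
    pypi_modules = ['ldap3', 'mako', 'Pillow', 'psutil',
                    'pymysql', 'qrcode', 'tornado', 'wheezy.captcha']

    # upgrade pip itself from the mirror, then install each module
    subprocess.check_call([sys.executable, '-m', 'pip', 'install',
                           '-i', MIRROR, '--upgrade', 'pip'])
    for p in pypi_modules:
        print('install {} ...'.format(p))
        subprocess.check_call([sys.executable, '-m', 'pip', 'install',
                               '-i', MIRROR, p])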
@@ -91,7 +91,7 @@ class BuilderLinux(BuilderBase):

        utils.makedirs(out_path)

        utils.cmake(os.path.join(env.root_path, 'server', 'cmake-build'), ctx.target_path, False)
        utils.cmake(os.path.join(env.root_path, 'cmake-build'), ctx.target_path, False)
        # utils.strip(out_file)

        for f in out_files:
@@ -2,7 +2,7 @@
openssl = 1.0.2p,1000210f
libuv = 1.23.0
mbedtls = 2.12.0
libssh = 0.8.2
libssh = 0.8.5
jsoncpp = 0.10.6
mongoose = 6.12
@@ -3,8 +3,9 @@ cmake_minimum_required(VERSION 3.5)
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS " tp_core")
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
#MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
#MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
include(../../../CMakeCfg.txt)

ADD_DEFINITIONS(
        -DMG_ENABLE_THREADS
@@ -75,7 +75,7 @@ static void ev_handler(struct mg_connection *nc, int ev, void *ev_data)
class MongooseManager {
public:
    MongooseManager() {
        mg_mgr_free(&m_mg_mgr);
        //mg_mgr_free(&m_mg_mgr);
        m_initialized = false;
    }
@@ -3,8 +3,10 @@ cmake_minimum_required(VERSION 3.5)
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS " libtpssh")
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
#MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
#MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
include(../../../../CMakeCfg.txt)


set(CMAKE_CXX_FLAGS "-fPIC")
set(CMAKE_C_FLAGS "-fPIC")
@@ -3,8 +3,9 @@ cmake_minimum_required(VERSION 3.5)
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS " libtptelnet")
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
#MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
#MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
include(../../../../CMakeCfg.txt)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC")
@@ -3,8 +3,10 @@ cmake_minimum_required(VERSION 3.5)
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS " libtptelnet")
MESSAGE(STATUS "=======================================================")
MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")
#MESSAGE(STATUS "operation system is ${CMAKE_SYSTEM}")
#MESSAGE(STATUS "current source directory is ${CMAKE_CURRENT_SOURCE_DIR}")

include(../../../CMakeCfg.txt)

#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -110,20 +110,6 @@ class BdfFontFile(FontFile.FontFile):
                if s.find(b"LogicalFontDescription") < 0:
                    comments.append(s[i+1:-1].decode('ascii'))

        # font = props["FONT"].split("-")

        # font[4] = bdf_slant[font[4].upper()]
        # font[11] = bdf_spacing[font[11].upper()]

        # ascent = int(props["FONT_ASCENT"])
        # descent = int(props["FONT_DESCENT"])

        # fontname = ";".join(font[1:])

        # print("#", fontname)
        # for i in comments:
        #     print("#", i)

        while True:
            c = bdf_char(fp)
            if not c:
@@ -270,7 +270,7 @@ class BlpImageFile(ImageFile.ImageFile):
        self._blp_alpha_encoding, = struct.unpack("<b", self.fp.read(1))
        self._blp_mips, = struct.unpack("<b", self.fp.read(1))

        self.size = struct.unpack("<II", self.fp.read(8))
        self._size = struct.unpack("<II", self.fp.read(8))

        if self.magic == b"BLP1":
            # Only present for BLP1
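From here on, the Pillow hunks repeatedly replace assignments to self.size with self._size. A minimal sketch of the pattern behind this, matching the read-only size property added to Image later in this diff (class names are illustrative only):

    class Image(object):
        def __init__(self):
            self._size = (0, 0)

        @property
        def size(self):
            # read-only view over the backing field
            return self._size

    class SomePlugin(Image):          # hypothetical plugin
        def _open(self):
            # plugins now write the backing field; assigning
            # self.size would raise AttributeError (no setter)
            self._size = (640, 480)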
@@ -60,7 +60,14 @@ class BmpImageFile(ImageFile.ImageFile):
    format_description = "Windows Bitmap"
    format = "BMP"
    # --------------------------------------------------- BMP Compression values
    COMPRESSIONS = {'RAW': 0, 'RLE8': 1, 'RLE4': 2, 'BITFIELDS': 3, 'JPEG': 4, 'PNG': 5}
    COMPRESSIONS = {
        'RAW': 0,
        'RLE8': 1,
        'RLE4': 2,
        'BITFIELDS': 3,
        'JPEG': 4,
        'PNG': 5
    }
    RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5

    def _bitmap(self, header=0, offset=0):

@@ -69,10 +76,13 @@ class BmpImageFile(ImageFile.ImageFile):
        if header:
            seek(header)
        file_info = {}
        file_info['header_size'] = i32(read(4))  # read bmp header size @offset 14 (this is part of the header size)
        # read bmp header size @offset 14 (this is part of the header size)
        file_info['header_size'] = i32(read(4))
        file_info['direction'] = -1
        # --------------------- If requested, read header at a specific position
        header_data = ImageFile._safe_read(self.fp, file_info['header_size'] - 4)  # read the rest of the bmp header, without its size
        # read the rest of the bmp header, without its size
        header_data = ImageFile._safe_read(self.fp,
                                           file_info['header_size'] - 4)
        # --------------------------------------------------- IBM OS/2 Bitmap v1
        # ------ This format has different offsets because of width/height types
        if file_info['header_size'] == 12:

@@ -88,12 +98,16 @@ class BmpImageFile(ImageFile.ImageFile):
            file_info['y_flip'] = i8(header_data[7]) == 0xff
            file_info['direction'] = 1 if file_info['y_flip'] else -1
            file_info['width'] = i32(header_data[0:4])
            file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8])
            file_info['height'] = (i32(header_data[4:8])
                                   if not file_info['y_flip']
                                   else 2**32 - i32(header_data[4:8]))
            file_info['planes'] = i16(header_data[8:10])
            file_info['bits'] = i16(header_data[10:12])
            file_info['compression'] = i32(header_data[12:16])
            file_info['data_size'] = i32(header_data[16:20])  # byte size of pixel data
            file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28]))
            # byte size of pixel data
            file_info['data_size'] = i32(header_data[16:20])
            file_info['pixels_per_meter'] = (i32(header_data[20:24]),
                                             i32(header_data[24:28]))
            file_info['colors'] = i32(header_data[28:32])
            file_info['palette_padding'] = 4
            self.info["dpi"] = tuple(

@@ -101,24 +115,35 @@ class BmpImageFile(ImageFile.ImageFile):
                                     file_info['pixels_per_meter']))
            if file_info['compression'] == self.BITFIELDS:
                if len(header_data) >= 52:
                    for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']):
                    for idx, mask in enumerate(['r_mask',
                                                'g_mask',
                                                'b_mask',
                                                'a_mask']):
                        file_info[mask] = i32(header_data[36+idx*4:40+idx*4])
                else:
                    # 40 byte headers only have the three components in the bitfields masks,
                    # 40 byte headers only have the three components in the
                    # bitfields masks,
                    # ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
                    # See also https://github.com/python-pillow/Pillow/issues/1293
                    # There is a 4th component in the RGBQuad, in the alpha location, but it
                    # is listed as a reserved component, and it is not generally an alpha channel
                    # There is a 4th component in the RGBQuad, in the alpha
                    # location, but it is listed as a reserved component,
                    # and it is not generally an alpha channel
                    file_info['a_mask'] = 0x0
                    for mask in ['r_mask', 'g_mask', 'b_mask']:
                        file_info[mask] = i32(read(4))
                file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'])
                file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask'])
                file_info['rgb_mask'] = (file_info['r_mask'],
                                         file_info['g_mask'],
                                         file_info['b_mask'])
                file_info['rgba_mask'] = (file_info['r_mask'],
                                          file_info['g_mask'],
                                          file_info['b_mask'],
                                          file_info['a_mask'])
        else:
            raise IOError("Unsupported BMP header type (%d)" % file_info['header_size'])
            raise IOError("Unsupported BMP header type (%d)" %
                          file_info['header_size'])
        # ------------------ Special case : header is reported 40, which
        # ---------------------- is shorter than real size for bpp >= 16
        self.size = file_info['width'], file_info['height']
        self._size = file_info['width'], file_info['height']
        # -------- If color count was not found in the header, compute from bits
        file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits'])
        # -------------------------------- Check abnormal values for DOS attacks

@@ -127,11 +152,15 @@ class BmpImageFile(ImageFile.ImageFile):
        # ----------------------- Check bit depth for unusual unsupported values
        self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None))
        if self.mode is None:
            raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits'])
            raise IOError("Unsupported BMP pixel depth (%d)"
                          % file_info['bits'])
        # ----------------- Process BMP with Bitfields compression (not palette)
        if file_info['compression'] == self.BITFIELDS:
            SUPPORTED = {
                32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0), (0xff000000, 0xff0000, 0xff00, 0x0)],
                32: [(0xff0000, 0xff00, 0xff, 0x0),
                     (0xff0000, 0xff00, 0xff, 0xff000000),
                     (0x0, 0x0, 0x0, 0x0),
                     (0xff000000, 0xff0000, 0xff00, 0x0)],
                24: [(0xff0000, 0xff00, 0xff)],
                16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)]
            }

@@ -145,11 +174,15 @@ class BmpImageFile(ImageFile.ImageFile):
                (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15"
            }
            if file_info['bits'] in SUPPORTED:
                if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]:
                if file_info['bits'] == 32 and \
                        file_info['rgba_mask'] in SUPPORTED[file_info['bits']]:
                    raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])]
                    self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode
                elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]:
                    raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])]
                elif (file_info['bits'] in (24, 16) and
                        file_info['rgb_mask'] in SUPPORTED[file_info['bits']]):
                    raw_mode = MASK_MODES[
                        (file_info['bits'], file_info['rgb_mask'])
                    ]
                else:
                    raise IOError("Unsupported BMP bitfields layout")
            else:

@@ -158,17 +191,20 @@ class BmpImageFile(ImageFile.ImageFile):
            if file_info['bits'] == 32 and header == 22:  # 32-bit .cur offset
                raw_mode, self.mode = "BGRA", "RGBA"
        else:
            raise IOError("Unsupported BMP compression (%d)" % file_info['compression'])
            raise IOError("Unsupported BMP compression (%d)" %
                          file_info['compression'])
        # ---------------- Once the header is processed, process the palette/LUT
        if self.mode == "P":  # Paletted for 1, 4 and 8 bit images
            # ----------------------------------------------------- 1-bit images
            if not (0 < file_info['colors'] <= 65536):
                raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors'])
                raise IOError("Unsupported BMP Palette size (%d)" %
                              file_info['colors'])
            else:
                padding = file_info['palette_padding']
                palette = read(padding * file_info['colors'])
                greyscale = True
                indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors']))
                indices = (0, 255) if file_info['colors'] == 2 else \
                    list(range(file_info['colors']))
                # ------------------ Check if greyscale and ignore palette if so
                for ind, val in enumerate(indices):
                    rgb = palette[ind*padding:ind*padding + 3]

@@ -180,13 +216,19 @@ class BmpImageFile(ImageFile.ImageFile):
                    raw_mode = self.mode
                else:
                    self.mode = "P"
                    self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette)
                    self.palette = ImagePalette.raw(
                        "BGRX" if padding == 4 else "BGR", palette)

        # ----------------------------- Finally set the tile data for the plugin
        self.info['compression'] = file_info['compression']
        self.tile = [('raw', (0, 0, file_info['width'], file_info['height']), offset or self.fp.tell(),
                      (raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction'])
                      )]
        self.tile = [
            ('raw',
             (0, 0, file_info['width'], file_info['height']),
             offset or self.fp.tell(),
             (raw_mode,
              ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3),
              file_info['direction']))
        ]

    def _open(self):
        """ Open file, check magic number and read header """
@@ -47,7 +47,7 @@ class BufrStubImageFile(ImageFile.StubImageFile):

        # make something up
        self.mode = "F"
        self.size = 1, 1
        self._size = 1, 1

        loader = self._load()
        if loader:
@@ -107,10 +107,10 @@ class ContainerIO(object):

        :returns: A list of 8-bit strings.
        """
        l = []
        lines = []
        while True:
            s = self.readline()
            if not s:
                break
            l.append(s)
        return l
            lines.append(s)
        return lines
@@ -56,14 +56,6 @@ class CurImageFile(BmpImagePlugin.BmpImageFile):
                m = s
            elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]):
                m = s
        # print("width", i8(s[0]))
        # print("height", i8(s[1]))
        # print("colors", i8(s[2]))
        # print("reserved", i8(s[3]))
        # print("hotspot x", i16(s[4:]))
        # print("hotspot y", i16(s[6:]))
        # print("bytes", i32(s[8:]))
        # print("offset", i32(s[12:]))
        if not m:
            raise TypeError("No cursors were found")

@@ -71,7 +63,7 @@ class CurImageFile(BmpImagePlugin.BmpImageFile):
        self._bitmap(i32(m[12:]) + offset)

        # patch up the bitmap height
        self.size = self.size[0], self.size[1]//2
        self._size = self.size[0], self.size[1]//2
        d, e, o, a = self.tile[0]
        self.tile[0] = d, (0, 0)+self.size, o, a
@@ -114,7 +114,7 @@ class DdsImageFile(ImageFile.ImageFile):
        header = BytesIO(header_bytes)

        flags, height, width = struct.unpack("<3I", header.read(12))
        self.size = (width, height)
        self._size = (width, height)
        self.mode = "RGBA"

        pitch, depth, mipmaps = struct.unpack("<3I", header.read(12))

@@ -142,7 +142,8 @@ class DdsImageFile(ImageFile.ImageFile):
            # ignoring flags which pertain to volume textures and cubemaps
            dxt10 = BytesIO(self.fp.read(20))
            dxgi_format, dimension = struct.unpack("<II", dxt10.read(8))
            if dxgi_format in (DXGI_FORMAT_BC7_TYPELESS, DXGI_FORMAT_BC7_UNORM):
            if dxgi_format in (DXGI_FORMAT_BC7_TYPELESS,
                               DXGI_FORMAT_BC7_UNORM):
                self.pixel_format = "BC7"
                n = 7
            elif dxgi_format == DXGI_FORMAT_BC7_UNORM_SRGB:
@@ -26,7 +26,6 @@ import os
import sys
from . import Image, ImageFile
from ._binary import i32le as i32
from ._util import py3

__version__ = "0.5"

@@ -42,7 +41,7 @@ if sys.platform.startswith('win'):
    if hasattr(shutil, 'which'):
        which = shutil.which
    else:
        # Python < 3.3
        # Python 2
        import distutils.spawn
        which = distutils.spawn.find_executable
    for binary in ('gswin32c', 'gswin64c', 'gs'):

@@ -63,7 +62,7 @@ def has_ghostscript():
            subprocess.check_call(['gs', '--version'], stdout=devnull)
            return True
    except OSError:
        # no ghostscript
        # No Ghostscript
        pass
    return False

@@ -83,7 +82,6 @@ def Ghostscript(tile, size, fp, scale=1):
    # resolution is dependent on bbox and size
    res = (float((72.0 * size[0]) / (bbox[2]-bbox[0])),
           float((72.0 * size[1]) / (bbox[3]-bbox[1])))
    # print("Ghostscript", scale, size, orig_size, bbox, orig_bbox, res)

    import subprocess
    import tempfile

@@ -99,9 +97,9 @@ def Ghostscript(tile, size, fp, scale=1):
        os.close(in_fd)
        infile = infile_temp

        # ignore length and offset!
        # ghostscript can read it
        # copy whole file to read in ghostscript
        # Ignore length and offset!
        # Ghostscript can read it
        # Copy whole file to read in Ghostscript
        with open(infile_temp, 'wb') as f:
            # fetch length of fp
            fp.seek(0, 2)

@@ -117,13 +115,13 @@ def Ghostscript(tile, size, fp, scale=1):
            lengthfile -= len(s)
            f.write(s)

    # Build ghostscript command
    # Build Ghostscript command
    command = ["gs",
               "-q",                         # quiet mode
               "-g%dx%d" % size,             # set output geometry (pixels)
               "-r%fx%f" % res,              # set input DPI (dots per inch)
               "-dBATCH",                    # exit after processing
               "-dNOPAUSE",                  # don't pause between pages,
               "-dNOPAUSE",                  # don't pause between pages
               "-dSAFER",                    # safe mode
               "-sDEVICE=ppmraw",            # ppm driver
               "-sOutputFile=%s" % outfile,  # output file

@@ -138,10 +136,15 @@ def Ghostscript(tile, size, fp, scale=1):
            raise WindowsError('Unable to locate Ghostscript on paths')
        command[0] = gs_windows_binary

    # push data through ghostscript
    # push data through Ghostscript
    try:
        with open(os.devnull, 'w+b') as devnull:
            subprocess.check_call(command, stdin=devnull, stdout=devnull)
            startupinfo = None
            if sys.platform.startswith('win'):
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            subprocess.check_call(command, stdin=devnull, stdout=devnull,
                                  startupinfo=startupinfo)
        im = Image.open(outfile)
        im.load()
    finally:

@@ -206,16 +209,7 @@ class EpsImageFile(ImageFile.ImageFile):

        # Rewrap the open file pointer in something that will
        # convert line endings and decode to latin-1.
        try:
            if py3:
                # Python3, can use bare open command.
                fp = open(self.fp.name, "Ur", encoding='latin-1')
            else:
                # Python2, no encoding conversion necessary
                fp = open(self.fp.name, "Ur")
        except:
            # Expect this for bytesio/stringio
            fp = PSFile(self.fp)
        fp = PSFile(self.fp)

        # go to offset - start of "%!PS"
        fp.seek(offset)

@@ -223,7 +217,7 @@ class EpsImageFile(ImageFile.ImageFile):
        box = None

        self.mode = "RGB"
        self.size = 1, 1  # FIXME: huh?
        self._size = 1, 1  # FIXME: huh?

        #
        # Load EPS header

@@ -238,7 +232,7 @@ class EpsImageFile(ImageFile.ImageFile):

            try:
                m = split.match(s)
            except re.error as v:
            except re.error:
                raise SyntaxError("not an EPS file")

            if m:

@@ -250,10 +244,10 @@ class EpsImageFile(ImageFile.ImageFile):
                    # fields should be integers, but some drivers
                    # put floating point values there anyway.
                    box = [int(float(i)) for i in v.split()]
                    self.size = box[2] - box[0], box[3] - box[1]
                    self._size = box[2] - box[0], box[3] - box[1]
                    self.tile = [("eps", (0, 0) + self.size, offset,
                                  (length, box))]
                except:
                except Exception:
                    pass

            else:

@@ -299,7 +293,7 @@ class EpsImageFile(ImageFile.ImageFile):
                except ValueError:
                    break

            self.size = int(x), int(y)
            self._size = int(x), int(y)
            return

            s = fp.readline().strip('\r\n')

@@ -337,7 +331,7 @@ class EpsImageFile(ImageFile.ImageFile):
            return
        self.im = Ghostscript(self.tile, self.size, self.fp, scale)
        self.mode = self.im.mode
        self.size = self.im.size
        self._size = self.im.size
        self.tile = []

    def load_seek(self, *args, **kwargs):

@@ -367,54 +361,49 @@ def _save(im, fp, filename, eps=1):
    else:
        raise ValueError("image mode is not supported")

    class NoCloseStream(object):
        def __init__(self, fp):
            self.fp = fp

        def __getattr__(self, name):
            return getattr(self.fp, name)

        def close(self):
            pass

    base_fp = fp
    wrapped_fp = False
    if fp != sys.stdout:
        fp = NoCloseStream(fp)
        if sys.version_info.major > 2:
            fp = io.TextIOWrapper(fp, encoding='latin-1')
        wrapped_fp = True

    try:
        if eps:
            #
            # write EPS header
            fp.write("%!PS-Adobe-3.0 EPSF-3.0\n")
            fp.write("%%Creator: PIL 0.1 EpsEncode\n")
            # fp.write("%%CreationDate: %s"...)
            fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size)
            fp.write("%%Pages: 1\n")
            fp.write("%%EndComments\n")
            fp.write("%%Page: 1 1\n")
            fp.write("%%ImageData: %d %d " % im.size)
            fp.write("%d %d 0 1 1 \"%s\"\n" % operator)

    if eps:
        #
        # write EPS header
        fp.write("%!PS-Adobe-3.0 EPSF-3.0\n")
        fp.write("%%Creator: PIL 0.1 EpsEncode\n")
        # fp.write("%%CreationDate: %s"...)
        fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size)
        fp.write("%%Pages: 1\n")
        fp.write("%%EndComments\n")
        fp.write("%%Page: 1 1\n")
        fp.write("%%ImageData: %d %d " % im.size)
        fp.write("%d %d 0 1 1 \"%s\"\n" % operator)

        # image header
        fp.write("gsave\n")
        fp.write("10 dict begin\n")
        fp.write("/buf %d string def\n" % (im.size[0] * operator[1]))
        fp.write("%d %d scale\n" % im.size)
        fp.write("%d %d 8\n" % im.size)  # <= bits
        fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
        fp.write("{ currentfile buf readhexstring pop } bind\n")
        fp.write(operator[2] + "\n")
        if hasattr(fp, "flush"):
            fp.flush()

    #
    # image header
    fp.write("gsave\n")
    fp.write("10 dict begin\n")
    fp.write("/buf %d string def\n" % (im.size[0] * operator[1]))
    fp.write("%d %d scale\n" % im.size)
    fp.write("%d %d 8\n" % im.size)  # <= bits
    fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
    fp.write("{ currentfile buf readhexstring pop } bind\n")
    fp.write(operator[2] + "\n")
    if hasattr(fp, "flush"):
        fp.flush()
        ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)])

    ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)])

        fp.write("\n%%%%EndBinary\n")
        fp.write("grestore end\n")
        if hasattr(fp, "flush"):
            fp.flush()
    fp.write("\n%%%%EndBinary\n")
    fp.write("grestore end\n")
    if hasattr(fp, "flush"):
        fp.flush()
    finally:
        if wrapped_fp:
            fp.detach()

#
# --------------------------------------------------------------------
@@ -50,7 +50,7 @@ class FITSStubImageFile(ImageFile.StubImageFile):

        # make something up
        self.mode = "F"
        self.size = 1, 1
        self._size = 1, 1

        loader = self._load()
        if loader:
@@ -54,7 +54,7 @@ class FliImageFile(ImageFile.ImageFile):

        # image characteristics
        self.mode = "P"
        self.size = i16(s[8:10]), i16(s[10:12])
        self._size = i16(s[8:10]), i16(s[10:12])

        # animation speed
        duration = i32(s[16:20])
@@ -90,7 +90,6 @@ class FontFile(object):
                x = xx
            s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0
            self.bitmap.paste(im.crop(src), s)
            # print(chr(i), dst, s)
            self.metrics[i] = d, dst, s

    def save(self, filename):
@@ -81,7 +81,7 @@ class FpxImageFile(ImageFile.ImageFile):

        # size (highest resolution)

        self.size = prop[0x1000002], prop[0x1000003]
        self._size = prop[0x1000002], prop[0x1000003]

        size = max(self.size)
        i = 1

@@ -114,8 +114,6 @@ class FpxImageFile(ImageFile.ImageFile):
            if id in prop:
                self.jpeg[i] = prop[id]

        # print(len(self.jpeg), "tables loaded")

        self._open_subimage(1, self.maxid)

    def _open_subimage(self, index=1, subimage=0):

@@ -143,8 +141,6 @@ class FpxImageFile(ImageFile.ImageFile):
        offset = i32(s, 28)
        length = i32(s, 32)

        # print(size, self.mode, self.rawmode)

        if size != self.size:
            raise IOError("subimage mismatch")
@@ -9,7 +9,8 @@ Full text of the CC0 license:
Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001

The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
packed custom format called FTEX. This file format uses file extensions FTC and FTU.
packed custom format called FTEX. This file format uses file extensions FTC
and FTU.
* FTC files are compressed textures (using standard texture compression).
* FTU files are not compressed.
Texture File Format

@@ -24,18 +25,21 @@ Where:
* The "magic" number is "FTEX".
* "width" and "height" are the dimensions of the texture.
* "mipmap_count" is the number of mipmaps in the texture.
* "format_count" is the number of texture formats (different versions of the same texture) in this file.
* "format_count" is the number of texture formats (different versions of the
same texture) in this file.

{format_directory} = format_count * { u32:format, u32:where }

The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB uncompressed textures.
The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB
uncompressed textures.
The texture data for a format starts at the position "where" in the file.

Each set of texture data in the file has the following structure:
{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
* "mipmap_size" is the number of bytes in that mip level. For compressed textures this is the
size of the texture data compressed with DXT1. For 24 bit uncompressed textures, this is 3 * width * height.
Following this are the image bytes for that mipmap level.
* "mipmap_size" is the number of bytes in that mip level. For compressed
textures this is the size of the texture data compressed with DXT1. For 24 bit
uncompressed textures, this is 3 * width * height. Following this are the image
bytes for that mipmap level.

Note: All data is stored in little-Endian (Intel) byte order.
"""

@@ -57,12 +61,13 @@ class FtexImageFile(ImageFile.ImageFile):
    def _open(self):
        magic = struct.unpack("<I", self.fp.read(4))
        version = struct.unpack("<i", self.fp.read(4))
        self.size = struct.unpack("<2i", self.fp.read(8))
        self._size = struct.unpack("<2i", self.fp.read(8))
        mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))

        self.mode = "RGB"

        # Only support single-format files. I don't know of any multi-format file.
        # Only support single-format files.
        # I don't know of any multi-format file.
        assert format_count == 1

        format, where = struct.unpack("<2i", self.fp.read(8))

@@ -77,7 +82,8 @@ class FtexImageFile(ImageFile.ImageFile):
        elif format == FORMAT_UNCOMPRESSED:
            self.tile = [("raw", (0, 0) + self.size, 0, ('RGB', 0, 1))]
        else:
            raise ValueError("Invalid texture compression format: %r" % (format))
            raise ValueError(
                "Invalid texture compression format: %r" % (format))

        self.fp.close()
        self.fp = BytesIO(data)
@@ -29,7 +29,8 @@ from ._binary import i32be as i32


def _accept(prefix):
    return len(prefix) >= 8 and i32(prefix[:4]) >= 20 and i32(prefix[4:8]) in (1, 2)
    return len(prefix) >= 8 and \
        i32(prefix[:4]) >= 20 and i32(prefix[4:8]) in (1, 2)


##

@@ -54,7 +55,8 @@ class GbrImageFile(ImageFile.ImageFile):
        if width <= 0 or height <= 0:
            raise SyntaxError("not a GIMP brush")
        if color_depth not in (1, 4):
            raise SyntaxError("Unsupported GIMP brush color depth: %s" % color_depth)
            raise SyntaxError(
                "Unsupported GIMP brush color depth: %s" % color_depth)

        if version == 1:
            comment_length = header_size-20

@@ -72,7 +74,7 @@ class GbrImageFile(ImageFile.ImageFile):
        else:
            self.mode = 'RGBA'

        self.size = width, height
        self._size = width, height

        self.info["comment"] = comment
@@ -49,7 +49,7 @@ class GdImageFile(ImageFile.ImageFile):
            raise SyntaxError("Not a valid GD 2.x .gd file")

        self.mode = "L"  # FIXME: "P"
        self.size = i16(s[2:4]), i16(s[4:6])
        self._size = i16(s[2:4]), i16(s[4:6])

        trueColor = i8(s[6])
        trueColorOffset = 2 if trueColor else 0

@@ -61,7 +61,8 @@ class GdImageFile(ImageFile.ImageFile):

        self.palette = ImagePalette.raw("XBGR", s[7+trueColorOffset+4:7+trueColorOffset+4+256*4])

        self.tile = [("raw", (0, 0)+self.size, 7+trueColorOffset+4+256*4, ("L", 0, 1))]
        self.tile = [("raw", (0, 0)+self.size, 7+trueColorOffset+4+256*4,
                      ("L", 0, 1))]


def open(fp, mode="r"):
@@ -65,7 +65,7 @@ class GifImageFile(ImageFile.ImageFile):
            raise SyntaxError("not a GIF file")

        self.info["version"] = s[:6]
        self.size = i16(s[6:]), i16(s[8:])
        self._size = i16(s[6:]), i16(s[8:])
        self.tile = []
        flags = i8(s[10])
        bits = (flags & 7) + 1

@@ -166,6 +166,7 @@ class GifImageFile(ImageFile.ImageFile):
                from copy import copy
                self.palette = copy(self.global_palette)

        info = {}
        while True:

            s = self.fp.read(1)

@@ -184,8 +185,8 @@ class GifImageFile(ImageFile.ImageFile):
                #
                flags = i8(block[0])
                if flags & 1:
                    self.info["transparency"] = i8(block[3])
                self.info["duration"] = i16(block[1:3]) * 10
                    info["transparency"] = i8(block[3])
                info["duration"] = i16(block[1:3]) * 10

                # disposal method - find the value of bits 4 - 6
                dispose_bits = 0b00011100 & flags

@@ -200,16 +201,16 @@ class GifImageFile(ImageFile.ImageFile):
                #
                # comment extension
                #
                self.info["comment"] = block
                info["comment"] = block
            elif i8(s) == 255:
                #
                # application extension
                #
                self.info["extension"] = block, self.fp.tell()
                info["extension"] = block, self.fp.tell()
                if block[:11] == b"NETSCAPE2.0":
                    block = self.data()
                    if len(block) >= 3 and i8(block[0]) == 1:
                        self.info["loop"] = i16(block[1:3])
                        info["loop"] = i16(block[1:3])
                while self.data():
                    pass

@@ -268,6 +269,12 @@ class GifImageFile(ImageFile.ImageFile):
            # self.__fp = None
            raise EOFError

        for k in ["transparency", "duration", "comment", "extension", "loop"]:
            if k in info:
                self.info[k] = info[k]
            elif k in self.info:
                del self.info[k]

        self.mode = "L"
        if self.palette:
            self.mode = "P"

@@ -397,7 +404,8 @@ def _write_multiple_frames(im, fp, palette):

    im_frames = []
    frame_count = 0
    for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])):
    for imSequence in itertools.chain([im],
                                      im.encoderinfo.get("append_images", [])):
        for im_frame in ImageSequence.Iterator(imSequence):
            # a copy is required here since seek can still mutate the image
            im_frame = _normalize_mode(im_frame.copy())

@@ -413,17 +421,19 @@ def _write_multiple_frames(im, fp, palette):
            if im_frames:
                # delta frame
                previous = im_frames[-1]
                if _get_palette_bytes(im_frame) == _get_palette_bytes(previous['im']):
                if _get_palette_bytes(im_frame) == \
                        _get_palette_bytes(previous['im']):
                    delta = ImageChops.subtract_modulo(im_frame,
                                                       previous['im'])
                else:
                    delta = ImageChops.subtract_modulo(im_frame.convert('RGB'),
                                                       previous['im'].convert('RGB'))
                    delta = ImageChops.subtract_modulo(
                        im_frame.convert('RGB'), previous['im'].convert('RGB'))
                bbox = delta.getbbox()
                if not bbox:
                    # This frame is identical to the previous frame
                    if duration:
                        previous['encoderinfo']['duration'] += encoderinfo['duration']
                        previous['encoderinfo']['duration'] += \
                            encoderinfo['duration']
                    continue
            else:
                bbox = None

@@ -525,7 +535,8 @@ def _write_local_header(fp, im, offset, flags):
                 o8(transparency) +       # transparency index
                 o8(0))

    if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]) <= 255:
    if "comment" in im.encoderinfo and \
            1 <= len(im.encoderinfo["comment"]) <= 255:
        fp.write(b"!" +
                 o8(254) +                # extension intro
                 o8(len(im.encoderinfo["comment"])) +

@@ -543,7 +554,6 @@ def _write_local_header(fp, im, offset, flags):
                 o8(0))
    include_color_table = im.encoderinfo.get('include_color_table')
    if include_color_table:
        palette = im.encoderinfo.get("palette", None)
        palette_bytes = _get_palette_bytes(im)
        color_table_size = _get_color_table_size(palette_bytes)
        if color_table_size:

@@ -692,7 +702,8 @@ def _get_global_header(im, info):
    for extensionKey in ["transparency", "duration", "loop", "comment"]:
        if info and extensionKey in info:
            if ((extensionKey == "duration" and info[extensionKey] == 0) or
                    (extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255))):
                    (extensionKey == "comment" and
                     not (1 <= len(info[extensionKey]) <= 255))):
                continue
            version = b"89a"
            break
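A hedged usage sketch of the per-frame info handling above: frame metadata is now collected in a local dict and synced into self.info on each seek, so keys such as duration reflect the current frame instead of leaking across frames (file name hypothetical):

    from PIL import Image

    im = Image.open("animated.gif")   # hypothetical file
    for frame in range(im.n_frames):
        im.seek(frame)
        # per-frame keys are set or cleared on each seek
        print(frame, im.info.get("duration"), im.info.get("transparency"))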
@@ -48,7 +48,7 @@ class GribStubImageFile(ImageFile.StubImageFile):

        # make something up
        self.mode = "F"
        self.size = 1, 1
        self._size = 1, 1

        loader = self._load()
        if loader:
@@ -47,7 +47,7 @@ class HDF5StubImageFile(ImageFile.StubImageFile):

        # make something up
        self.mode = "F"
        self.size = 1, 1
        self._size = 1, 1

        loader = self._load()
        if loader:
@@ -265,13 +265,33 @@ class IcnsImageFile(ImageFile.ImageFile):
    def _open(self):
        self.icns = IcnsFile(self.fp)
        self.mode = 'RGBA'
        self.info['sizes'] = self.icns.itersizes()
        self.best_size = self.icns.bestsize()
        self.size = (self.best_size[0] * self.best_size[2],
                     self.best_size[1] * self.best_size[2])
        self.info['sizes'] = self.icns.itersizes()
        # Just use this to see if it's loaded or not yet.
        self.tile = ('',)

    @property
    def size(self):
        return self._size

    @size.setter
    def size(self, value):
        info_size = value
        if info_size not in self.info['sizes'] and len(info_size) == 2:
            info_size = (info_size[0], info_size[1], 1)
        if info_size not in self.info['sizes'] and len(info_size) == 3 and \
                info_size[2] == 1:
            simple_sizes = [(size[0] * size[2], size[1] * size[2])
                            for size in self.info['sizes']]
            if value in simple_sizes:
                info_size = self.info['sizes'][simple_sizes.index(value)]
        if info_size not in self.info['sizes']:
            raise ValueError(
                "This is not one of the allowed sizes of this image")
        self._size = value

    def load(self):
        if len(self.size) == 3:
            self.best_size = self.size
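A hedged usage sketch of the ICNS size setter above: assigning a plain (width, height) pair is validated against the (width, height, scale) entries in info['sizes'] (file name hypothetical):

    from PIL import Image

    im = Image.open("icon.icns")   # hypothetical file
    print(im.info['sizes'])        # e.g. [(16, 16, 1), (32, 32, 2), ...]
    im.size = (64, 64)             # setter maps/validates against info['sizes']
    im.load()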
@@ -169,7 +169,7 @@ class IcoFile(object):
            im = BmpImagePlugin.DibImageFile(self.buf)

            # change tile dimension to only encompass XOR image
            im.size = (im.size[0], int(im.size[1] / 2))
            im._size = (im.size[0], int(im.size[1] / 2))
            d, e, o, a = im.tile[0]
            im.tile[0] = d, (0, 0) + im.size, o, a

@@ -263,6 +263,17 @@ class IcoImageFile(ImageFile.ImageFile):
        self.size = self.ico.entry[0]['dim']
        self.load()

    @property
    def size(self):
        return self._size

    @size.setter
    def size(self, value):
        if value not in self.info['sizes']:
            raise ValueError(
                "This is not one of the allowed sizes of this image")
        self._size = value

    def load(self):
        im = self.ico.getimage(self.size)
        # if tile is PNG, it won't really be loaded yet
@@ -196,7 +196,7 @@ class ImImageFile(ImageFile.ImageFile):
            raise SyntaxError("Not an IM file")

        # Basic attributes
        self.size = self.info[SIZE]
        self._size = self.info[SIZE]
        self.mode = self.info[MODE]

        # Skip forward to start of image data
@@ -443,7 +443,6 @@ def _getdecoder(mode, decoder_name, args, extra=()):
    try:
        # get decoder
        decoder = getattr(core, decoder_name + "_decoder")
        # print(decoder, mode, args + extra)
        return decoder(mode, *args + extra)
    except AttributeError:
        raise IOError("decoder %s not available" % decoder_name)

@@ -465,7 +464,6 @@ def _getencoder(mode, encoder_name, args, extra=()):
    try:
        # get encoder
        encoder = getattr(core, encoder_name + "_encoder")
        # print(encoder, mode, args + extra)
        return encoder(mode, *args + extra)
    except AttributeError:
        raise IOError("encoder %s not available" % encoder_name)

@@ -533,7 +531,7 @@ class Image(object):
        # FIXME: turn mode and size into delegating properties?
        self.im = None
        self.mode = ""
        self.size = (0, 0)
        self._size = (0, 0)
        self.palette = None
        self.info = {}
        self.category = NORMAL

@@ -548,11 +546,15 @@ class Image(object):
    def height(self):
        return self.size[1]

    @property
    def size(self):
        return self._size

    def _new(self, im):
        new = Image()
        new.im = im
        new.mode = im.mode
        new.size = im.size
        new._size = im.size
        if im.mode in ('P', 'PA'):
            if self.palette:
                new.palette = self.palette.copy()

@@ -700,7 +702,7 @@ class Image(object):
        info, mode, size, palette, data = state
        self.info = info
        self.mode = mode
        self.size = size
        self._size = size
        self.im = core.new(mode, size)
        if mode in ("L", "P") and palette:
            self.putpalette(palette)

@@ -875,12 +877,17 @@ class Image(object):
        use other thresholds, use the :py:meth:`~PIL.Image.Image.point`
        method.

        When converting from "RGBA" to "P" without a **matrix** argument,
        this passes the operation to :py:meth:`~PIL.Image.Image.quantize`,
        and **dither** and **palette** are ignored.

        :param mode: The requested mode. See: :ref:`concept-modes`.
        :param matrix: An optional conversion matrix. If given, this
           should be 4- or 12-tuple containing floating point values.
        :param dither: Dithering method, used when converting from
           mode "RGB" to "P" or from "RGB" or "L" to "1".
           Available methods are NONE or FLOYDSTEINBERG (default).
           Note that this is not used when **matrix** is supplied.
        :param palette: Palette to use when converting from mode "RGB"
           to "P". Available palettes are WEB or ADAPTIVE.
        :param colors: Number of colors to use for the ADAPTIVE palette.

@@ -900,12 +907,28 @@ class Image(object):
        if not mode or (mode == self.mode and not matrix):
            return self.copy()

        has_transparency = self.info.get('transparency') is not None
        if matrix:
            # matrix conversion
            if mode not in ("L", "RGB"):
                raise ValueError("illegal conversion")
            im = self.im.convert_matrix(mode, matrix)
            return self._new(im)
            new = self._new(im)
            if has_transparency and self.im.bands == 3:
                transparency = new.info['transparency']

                def convert_transparency(m, v):
                    v = m[0]*v[0] + m[1]*v[1] + m[2]*v[2] + m[3]*0.5
                    return max(0, min(255, int(v)))
                if mode == "L":
                    transparency = convert_transparency(matrix, transparency)
                elif len(mode) == 3:
                    transparency = tuple([
                        convert_transparency(matrix[i*4:i*4+4], transparency)
                        for i in range(0, len(transparency))
                    ])
                new.info['transparency'] = transparency
            return new

        if mode == "P" and self.mode == "RGBA":
            return self.quantize(colors)
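A hedged usage sketch of the matrix-conversion transparency handling above (the rgb2xyz matrix is the standard 12-tuple example from the convert() docstring):

    from PIL import Image

    im = Image.new("RGB", (32, 32), (255, 255, 255))
    im.info['transparency'] = (255, 255, 255)
    rgb2xyz = (
        0.412453, 0.357580, 0.180423, 0,
        0.212671, 0.715160, 0.072169, 0,
        0.019334, 0.119193, 0.950227, 0)
    out = im.convert("RGB", rgb2xyz)
    # the transparency tuple is now remapped through convert_transparency()
    print(out.info['transparency'])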
@@ -913,8 +936,7 @@ class Image(object):
        trns = None
        delete_trns = False
        # transparency handling
        if "transparency" in self.info and \
                self.info['transparency'] is not None:
        if has_transparency:
            if self.mode in ('L', 'RGB') and mode == 'RGBA':
                # Use transparent conversion to promote from transparent
                # color to an alpha channel.

@@ -1104,12 +1126,9 @@ class Image(object):

        x0, y0, x1, y1 = map(int, map(round, box))

        if x1 < x0:
            x1 = x0
        if y1 < y0:
            y1 = y0
        absolute_values = (abs(x1 - x0), abs(y1 - y0))

        _decompression_bomb_check((x1, y1))
        _decompression_bomb_check(absolute_values)

        return im.crop((x0, y0, x1, y1))

@@ -1894,7 +1913,7 @@ class Image(object):
           parameter should always be used.
        :param params: Extra parameters to the image writer.
        :returns: None
        :exception KeyError: If the output format could not be determined
        :exception ValueError: If the output format could not be determined
           from the file name. Use the format option to solve this.
        :exception IOError: If the file could not be written. The file
           may have been created, and may contain partial data.

@@ -2089,7 +2108,7 @@ class Image(object):

        self.im = im.im
        self.mode = im.mode
        self.size = size
        self._size = size

        self.readonly = 0
        self.pyaccess = None

@@ -2440,9 +2459,20 @@ def fromarray(obj, mode=None):
    Creates an image memory from an object exporting the array interface
    (using the buffer protocol).

    If obj is not contiguous, then the tobytes method is called
    If **obj** is not contiguous, then the tobytes method is called
    and :py:func:`~PIL.Image.frombuffer` is used.

    If you have an image in NumPy::

      from PIL import Image
      import numpy as np
      im = Image.open('hopper.jpg')
      a = np.asarray(im)

    Then this can be used to convert it to a Pillow image::

      im = Image.fromarray(a)

    :param obj: Object with array interface
    :param mode: Mode to use (will be determined from type if None)
      See: :ref:`concept-modes`.

@@ -2459,7 +2489,6 @@ def fromarray(obj, mode=None):
            typekey = (1, 1) + shape[2:], arr['typestr']
            mode, rawmode = _fromarray_typemap[typekey]
        except KeyError:
            # print(typekey)
            raise TypeError("Cannot handle this data type")
    else:
        rawmode = mode

@@ -2590,11 +2619,15 @@ def open(fp, mode="r"):

    preinit()

    accept_warnings = []

    def _open_core(fp, filename, prefix):
        for i in ID:
            try:
                factory, accept = OPEN[i]
                if not accept or accept(prefix):
                result = not accept or accept(prefix)
                if type(result) in [str, bytes]:
                    accept_warnings.append(result)
                elif result:
                    fp.seek(0)
                    im = factory(fp, filename)
                    _decompression_bomb_check(im.size)

@@ -2618,6 +2651,8 @@ def open(fp, mode="r"):

    if exclusive_fp:
        fp.close()
    for message in accept_warnings:
        warnings.warn(message)
    raise IOError("cannot identify image file %r"
                  % (filename if filename else fp))
@@ -305,10 +305,10 @@ def profileToProfile(
    :param renderingIntent: Integer (0-3) specifying the rendering intent you
        wish to use for the transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
            ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT)
            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
            ImageCms.INTENT_SATURATION = 2
            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3

        see the pyCMS documentation for details on rendering intents and what
        they do.

@@ -424,10 +424,10 @@ def buildTransform(
    :param renderingIntent: Integer (0-3) specifying the rendering intent you
        wish to use for the transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
            ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT)
            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
            ImageCms.INTENT_SATURATION = 2
            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3

        see the pyCMS documentation for details on rendering intents and what
        they do.

@@ -512,20 +512,20 @@ def buildProofTransform(
    :param renderingIntent: Integer (0-3) specifying the rendering intent you
        wish to use for the input->proof (simulated) transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
            ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT)
            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
            ImageCms.INTENT_SATURATION = 2
            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3

        see the pyCMS documentation for details on rendering intents and what
        they do.
    :param proofRenderingIntent: Integer (0-3) specifying the rendering intent you
        wish to use for proof->output transform
    :param proofRenderingIntent: Integer (0-3) specifying the rendering intent
        you wish to use for proof->output transform

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
            ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT)
            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
            ImageCms.INTENT_SATURATION = 2
            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3

        see the pyCMS documentation for details on rendering intents and what
        they do.

@@ -875,10 +875,10 @@ def getDefaultIntent(profile):
    :returns: Integer 0-3 specifying the default rendering intent for this
        profile.

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
            ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT)
            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
            ImageCms.INTENT_SATURATION = 2
            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3

        see the pyCMS documentation for details on rendering intents and what
        they do.

@@ -913,15 +913,15 @@ def isIntentSupported(profile, intent, direction):
    :param intent: Integer (0-3) specifying the rendering intent you wish to
        use with this profile

            INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL)
            INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC)
            INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION)
            INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC)
            ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT)
            ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1
            ImageCms.INTENT_SATURATION = 2
            ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3

        see the pyCMS documentation for details on rendering intents and what
        they do.
    :param direction: Integer specifying if the profile is to be used for input,
        output, or proof
    :param direction: Integer specifying if the profile is to be used for
        input, output, or proof

            INPUT = 0 (or use ImageCms.DIRECTION_INPUT)
            OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT)
@@ -30,6 +30,7 @@
# See the README file for information on usage and redistribution.
#

import math
import numbers

from . import Image, ImageColor

@@ -118,11 +119,11 @@ class ImageDraw(object):
            fill = self.draw.draw_ink(fill, self.mode)
        return ink, fill

    def arc(self, xy, start, end, fill=None):
    def arc(self, xy, start, end, fill=None, width=0):
        """Draw an arc."""
        ink, fill = self._getink(fill)
        if ink is not None:
            self.draw.draw_arc(xy, start, end, ink)
            self.draw.draw_arc(xy, start, end, ink, width)

    def bitmap(self, xy, bitmap, fill=None):
        """Draw a bitmap."""
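A hedged usage sketch of the new width= keyword added in this file (arc above; chord, ellipse, and pieslice in the hunks that follow), controlling outline thickness:

    from PIL import Image, ImageDraw

    im = Image.new("RGB", (100, 100), "white")
    d = ImageDraw.Draw(im)
    d.arc((20, 20, 80, 80), start=0, end=180, fill="red", width=2)
    d.ellipse((10, 10, 90, 90), outline="black", width=3)
    im.save("demo.png")   # hypothetical output path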
@ -133,27 +134,80 @@ class ImageDraw(object):
|
|||
if ink is not None:
|
||||
self.draw.draw_bitmap(xy, bitmap.im, ink)
|
||||
|
||||
def chord(self, xy, start, end, fill=None, outline=None):
|
||||
def chord(self, xy, start, end, fill=None, outline=None, width=0):
|
||||
"""Draw a chord."""
|
||||
ink, fill = self._getink(outline, fill)
|
||||
if fill is not None:
|
||||
self.draw.draw_chord(xy, start, end, fill, 1)
|
||||
if ink is not None:
|
||||
self.draw.draw_chord(xy, start, end, ink, 0)
|
||||
if ink is not None and ink != fill:
|
||||
self.draw.draw_chord(xy, start, end, ink, 0, width)
|
||||
|
||||
def ellipse(self, xy, fill=None, outline=None):
|
||||
def ellipse(self, xy, fill=None, outline=None, width=0):
|
||||
"""Draw an ellipse."""
|
||||
ink, fill = self._getink(outline, fill)
|
||||
if fill is not None:
|
||||
self.draw.draw_ellipse(xy, fill, 1)
|
||||
if ink is not None:
|
||||
self.draw.draw_ellipse(xy, ink, 0)
|
||||
if ink is not None and ink != fill:
|
||||
self.draw.draw_ellipse(xy, ink, 0, width)
|
||||
|
||||
def line(self, xy, fill=None, width=0):
|
||||
def line(self, xy, fill=None, width=0, joint=None):
|
||||
"""Draw a line, or a connected sequence of line segments."""
|
||||
ink, fill = self._getink(fill)
|
||||
ink = self._getink(fill)[0]
|
||||
if ink is not None:
|
||||
self.draw.draw_lines(xy, ink, width)
|
||||
if joint == "curve" and width > 4:
|
||||
for i in range(1, len(xy)-1):
|
||||
point = xy[i]
|
||||
angles = [
|
||||
math.degrees(math.atan2(
|
||||
end[0] - start[0], start[1] - end[1]
|
||||
)) % 360
|
||||
for start, end in ((xy[i-1], point), (point, xy[i+1]))
|
||||
]
|
||||
if angles[0] == angles[1]:
|
||||
# This is a straight line, so no joint is required
|
||||
continue
|
||||
|
||||
def coord_at_angle(coord, angle):
|
||||
x, y = coord
|
||||
angle -= 90
|
||||
distance = width/2 - 1
|
||||
return tuple([
|
||||
p +
|
||||
(math.floor(p_d) if p_d > 0 else math.ceil(p_d))
|
||||
for p, p_d in
|
||||
((x, distance * math.cos(math.radians(angle))),
|
||||
(y, distance * math.sin(math.radians(angle))))
|
||||
])
|
||||
flipped = ((angles[1] > angles[0] and
|
||||
angles[1] - 180 > angles[0]) or
|
||||
(angles[1] < angles[0] and
|
||||
angles[1] + 180 > angles[0]))
|
||||
coords = [
|
||||
(point[0] - width/2 + 1, point[1] - width/2 + 1),
|
||||
(point[0] + width/2 - 1, point[1] + width/2 - 1)
|
||||
]
|
||||
if flipped:
|
||||
start, end = (angles[1] + 90, angles[0] + 90)
|
||||
else:
|
||||
start, end = (angles[0] - 90, angles[1] - 90)
|
||||
self.pieslice(coords, start - 90, end - 90, fill)
|
||||
|
||||
if width > 8:
|
||||
# Cover potential gaps between the line and the joint
|
||||
if flipped:
|
||||
gapCoords = [
|
||||
coord_at_angle(point, angles[0]+90),
|
||||
point,
|
||||
coord_at_angle(point, angles[1]+90)
|
||||
]
|
||||
else:
|
||||
gapCoords = [
|
||||
coord_at_angle(point, angles[0]-90),
|
||||
point,
|
||||
coord_at_angle(point, angles[1]-90)
|
||||
]
|
||||
self.line(gapCoords, fill, width=3)
|
||||
|
||||
def shape(self, shape, fill=None, outline=None):
|
||||
"""(Experimental) Draw a shape."""
|
||||
|
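Note: the hunk above adds `width` to `arc()` and a `joint` parameter to `line()`; with `joint="curve"` a pieslice is drawn at each interior vertex of a wide polyline to round the corner. A minimal sketch (output file name is a placeholder):

    from PIL import Image, ImageDraw

    im = Image.new("RGB", (200, 200), "white")
    draw = ImageDraw.Draw(im)
    # 12 px wide polyline; the corner at (100, 150) gets a rounded joint
    draw.line([(30, 40), (100, 150), (170, 40)],
              fill="black", width=12, joint="curve")
    im.save("joined_line.png")  # placeholder output path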
@@ -161,16 +215,16 @@ class ImageDraw(object):
         ink, fill = self._getink(outline, fill)
         if fill is not None:
             self.draw.draw_outline(shape, fill, 1)
-        if ink is not None:
+        if ink is not None and ink != fill:
             self.draw.draw_outline(shape, ink, 0)
 
-    def pieslice(self, xy, start, end, fill=None, outline=None):
+    def pieslice(self, xy, start, end, fill=None, outline=None, width=0):
         """Draw a pieslice."""
         ink, fill = self._getink(outline, fill)
         if fill is not None:
             self.draw.draw_pieslice(xy, start, end, fill, 1)
-        if ink is not None:
-            self.draw.draw_pieslice(xy, start, end, ink, 0)
+        if ink is not None and ink != fill:
+            self.draw.draw_pieslice(xy, start, end, ink, 0, width)
 
     def point(self, xy, fill=None):
         """Draw one or more individual pixels."""

@@ -183,16 +237,16 @@ class ImageDraw(object):
         ink, fill = self._getink(outline, fill)
         if fill is not None:
             self.draw.draw_polygon(xy, fill, 1)
-        if ink is not None:
+        if ink is not None and ink != fill:
             self.draw.draw_polygon(xy, ink, 0)
 
-    def rectangle(self, xy, fill=None, outline=None):
+    def rectangle(self, xy, fill=None, outline=None, width=0):
         """Draw a rectangle."""
         ink, fill = self._getink(outline, fill)
         if fill is not None:
             self.draw.draw_rectangle(xy, fill, 1)
-        if ink is not None:
-            self.draw.draw_rectangle(xy, ink, 0)
+        if ink is not None and ink != fill:
+            self.draw.draw_rectangle(xy, ink, 0, width)
 
     def _multiline_check(self, text):
         """Draw text."""

@@ -217,7 +271,8 @@ class ImageDraw(object):
                 ink = fill
         if ink is not None:
             try:
-                mask, offset = font.getmask2(text, self.fontmode, *args, **kwargs)
+                mask, offset = font.getmask2(text, self.fontmode,
+                                             *args, **kwargs)
                 xy = xy[0] + offset[0], xy[1] + offset[1]
             except AttributeError:
                 try:

@@ -245,7 +300,7 @@ class ImageDraw(object):
             elif align == "right":
                 left += (max_width - widths[idx])
             else:
-                assert False, 'align must be "left", "center" or "right"'
+                raise ValueError('align must be "left", "center" or "right"')
             self.text((left, top), line, fill, font, anchor,
                       direction=direction, features=features)
             top += line_spacing
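Note: with these hunks every closed shape takes an outline `width`, outlines are skipped when the ink equals the fill, and a bad `align` in multiline text now raises `ValueError` instead of tripping an assert. For example:

    from PIL import Image, ImageDraw

    im = Image.new("RGB", (120, 80), "white")
    draw = ImageDraw.Draw(im)
    # Filled rectangle with a 5 px red border
    draw.rectangle([10, 10, 110, 70], fill="yellow", outline="red", width=5)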
@@ -340,6 +395,7 @@ def floodfill(image, xy, value, border=None, thresh=0):
                homogeneous, but similar, colors.
     """
     # based on an implementation by Eric S. Raymond
+    # amended by yo1995 @20180806
     pixel = image.load()
     x, y = xy
     try:

@@ -349,39 +405,36 @@ def floodfill(image, xy, value, border=None, thresh=0):
         pixel[x, y] = value
     except (ValueError, IndexError):
         return  # seed point outside image
-    edge = [(x, y)]
-    if border is None:
-        while edge:
-            newedge = []
-            for (x, y) in edge:
-                for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
-                    try:
-                        p = pixel[s, t]
-                    except IndexError:
-                        pass
-                    else:
-                        if _color_diff(p, background) <= thresh:
-                            pixel[s, t] = value
-                            newedge.append((s, t))
-            edge = newedge
-    else:
-        while edge:
-            newedge = []
-            for (x, y) in edge:
-                for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
-                    try:
-                        p = pixel[s, t]
-                    except IndexError:
-                        pass
-                    else:
-                        if p != value and p != border:
-                            pixel[s, t] = value
-                            newedge.append((s, t))
-            edge = newedge
+    edge = {(x, y)}
+    full_edge = set()  # use a set to keep record of current and previous edge pixels to reduce memory consumption
+    while edge:
+        new_edge = set()
+        for (x, y) in edge:  # 4 adjacent method
+            for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
+                if (s, t) in full_edge:
+                    continue  # if already processed, skip
+                try:
+                    p = pixel[s, t]
+                except (ValueError, IndexError):
+                    pass
+                else:
+                    full_edge.add((s, t))
+                    if border is None:
+                        fill = _color_diff(p, background) <= thresh
+                    else:
+                        fill = p != value and p != border
+                    if fill:
+                        pixel[s, t] = value
+                        new_edge.add((s, t))
+        full_edge = edge  # discard pixels processed
+        edge = new_edge
 
 
-def _color_diff(rgb1, rgb2):
-    """
-    Uses 1-norm distance to calculate difference between two rgb values.
-    """
-    return abs(rgb1[0]-rgb2[0]) + abs(rgb1[1]-rgb2[1]) + abs(rgb1[2]-rgb2[2])
+def _color_diff(color1, color2):
+    """
+    Uses 1-norm distance to calculate difference between two values.
+    """
+    if isinstance(color2, tuple):
+        return sum([abs(color1[i]-color2[i]) for i in range(0, len(color2))])
+    else:
+        return abs(color1-color2)
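Note: the floodfill rewrite replaces two near-identical border/no-border loops with one loop over a set-based frontier (`edge`/`full_edge`), and `_color_diff` now accepts scalar as well as tuple pixel values. Usage sketch:

    from PIL import Image, ImageDraw

    im = Image.new("RGB", (100, 100), "white")
    ImageDraw.Draw(im).ellipse([20, 20, 80, 80], outline="black")
    # Fill outward from the centre; pixels within 1-norm distance 40
    # of the seed's colour are treated as fillable
    ImageDraw.floodfill(im, (50, 50), value=(255, 0, 0), thresh=40)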
@@ -51,7 +51,8 @@ class Color(_Enhance):
         if 'A' in image.getbands():
             self.intermediate_mode = 'LA'
 
-        self.degenerate = image.convert(self.intermediate_mode).convert(image.mode)
+        self.degenerate = image.convert(
+            self.intermediate_mode).convert(image.mode)
 
 
 class Contrast(_Enhance):
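Note: a pure line-length rewrap; the precomputed `degenerate` (fully desaturated) image is unchanged. For reference, the enhancer interpolates between it and the original ("input.png" is a placeholder):

    from PIL import Image, ImageEnhance

    im = Image.open("input.png")
    washed_out = ImageEnhance.Color(im).enhance(0.5)  # 0.0 grayscale .. 1.0 original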
@@ -30,7 +30,6 @@
 from . import Image
 from ._util import isPath
 import io
-import os
 import sys
 import struct
 

@@ -166,8 +165,9 @@ class ImageFile(Image.Image):
         if use_mmap:
             # try memory mapping
             decoder_name, extents, offset, args = self.tile[0]
-            if decoder_name == "raw" and len(args) >= 3 and args[0] == self.mode \
-               and args[0] in Image._MAPMODES:
+            if decoder_name == "raw" and len(args) >= 3 and \
+                    args[0] == self.mode and \
+                    args[0] in Image._MAPMODES:
                 try:
                     if hasattr(Image.core, "map"):
                         # use built-in mapper WIN32 only

@@ -180,12 +180,14 @@ class ImageFile(Image.Image):
                         # use mmap, if possible
                         import mmap
                         with open(self.filename, "r") as fp:
-                            self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)
+                            self.map = mmap.mmap(fp.fileno(), 0,
+                                                 access=mmap.ACCESS_READ)
                         self.im = Image.core.map_buffer(
-                            self.map, self.size, decoder_name, extents, offset, args
-                            )
+                            self.map, self.size, decoder_name, extents,
+                            offset, args)
                         readonly = 1
-                        # After trashing self.im, we might need to reload the palette data.
+                        # After trashing self.im,
+                        # we might need to reload the palette data.
                         if self.palette:
                             self.palette.dirty = 1
                 except (AttributeError, EnvironmentError, ImportError):

@@ -217,7 +219,8 @@ class ImageFile(Image.Image):
                 while True:
                     try:
                         s = read(self.decodermaxblock)
-                    except (IndexError, struct.error):  # truncated png/gif
+                    except (IndexError, struct.error):
+                        # truncated png/gif
                         if LOAD_TRUNCATED_IMAGES:
                             break
                         else:

@@ -229,7 +232,8 @@ class ImageFile(Image.Image):
                         else:
                             self.tile = []
                             raise IOError("image file is truncated "
-                                          "(%d bytes not processed)" % len(b))
+                                          "(%d bytes not processed)" %
+                                          len(b))
 
                     b = b + s
                     n, err_code = decoder.decode(b)

@@ -588,10 +592,12 @@ class PyDecoder(object):
         """
         Override to perform the decoding process.
 
-        :param buffer: A bytes object with the data to be decoded. If `handles_eof`
-            is set, then `buffer` will be empty and `self.fd` will be set.
-        :returns: A tuple of (bytes consumed, errcode). If finished with decoding
-            return <0 for the bytes consumed. Err codes are from `ERRORS`
+        :param buffer: A bytes object with the data to be decoded.
+            If `handles_eof` is set, then `buffer` will be empty and `self.fd`
+            will be set.
+        :returns: A tuple of (bytes consumed, errcode).
+            If finished with decoding return <0 for the bytes consumed.
+            Err codes are from `ERRORS`
         """
         raise NotImplementedError()
 

@@ -650,8 +656,8 @@ class PyDecoder(object):
         Convenience method to set the internal image from a stream of raw data
 
         :param data: Bytes to be set
-        :param rawmode: The rawmode to be used for the decoder. If not specified,
-            it will default to the mode of the image
+        :param rawmode: The rawmode to be used for the decoder.
+            If not specified, it will default to the mode of the image
         :returns: None
         """
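Note: the reflowed PyDecoder docstrings spell out the decoder contract: `decode()` returns `(bytes_consumed, errcode)` and signals completion with a negative consumed count. A minimal hypothetical decoder sketch (the "XYZ" name and raw passthrough are illustrative, not part of this diff):

    from PIL import Image, ImageFile

    class XYZDecoder(ImageFile.PyDecoder):  # hypothetical decoder
        def decode(self, buffer):
            # Treat the buffer as ready-to-use raw pixel data
            self.set_as_raw(bytes(buffer))
            return -1, 0  # negative bytes consumed: decoding finished

    Image.register_decoder("XYZ", XYZDecoder)  # hypothetical registration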
@@ -33,7 +33,14 @@ class MultibandFilter(Filter):
     pass
 
 
-class Kernel(MultibandFilter):
+class BuiltinFilter(MultibandFilter):
+    def filter(self, image):
+        if image.mode == "P":
+            raise ValueError("cannot filter palette images")
+        return image.filter(*self.filterargs)
+
+
+class Kernel(BuiltinFilter):
     """
     Create a convolution kernel. The current version only
     supports 3x3 and 5x5 integer and floating point kernels.

@@ -60,16 +67,6 @@ class Kernel(MultibandFilter):
             raise ValueError("not enough coefficients in kernel")
         self.filterargs = size, scale, offset, kernel
 
-    def filter(self, image):
-        if image.mode == "P":
-            raise ValueError("cannot filter palette images")
-        return image.filter(*self.filterargs)
-
-
-class BuiltinFilter(Kernel):
-    def __init__(self):
-        pass
-
 
 class RankFilter(Filter):
     """
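Note: the refactor flips the old hierarchy: `BuiltinFilter` now carries `filter()` and `Kernel` subclasses it, instead of `BuiltinFilter` being an odd `Kernel` subclass with an empty `__init__`. Callers are unaffected:

    from PIL import Image, ImageFilter

    im = Image.open("input.png").convert("RGB")  # placeholder file name
    sharpen = ImageFilter.Kernel((3, 3), [0, -1, 0,
                                          -1, 5, -1,
                                          0, -1, 0])  # 3x3 sharpening kernel
    out = im.filter(sharpen)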
@@ -141,7 +141,8 @@ class FreeTypeFont(object):
         self.layout_engine = layout_engine
 
         if isPath(font):
-            self.font = core.getfont(font, size, index, encoding, layout_engine=layout_engine)
+            self.font = core.getfont(font, size, index, encoding,
+                                     layout_engine=layout_engine)
         else:
             self.font_bytes = font.read()
             self.font = core.getfont(

@@ -161,7 +162,8 @@ class FreeTypeFont(object):
         size, offset = self.font.getsize(text, direction, features)
         return (size[0] + offset[0], size[1] + offset[1])
 
-    def getsize_multiline(self, text, direction=None, spacing=4, features=None):
+    def getsize_multiline(self, text, direction=None,
+                          spacing=4, features=None):
         max_width = 0
         lines = self._multiline_split(text)
         line_spacing = self.getsize('A')[1] + spacing

@@ -175,9 +177,11 @@ class FreeTypeFont(object):
         return self.font.getsize(text)[1]
 
     def getmask(self, text, mode="", direction=None, features=None):
-        return self.getmask2(text, mode, direction=direction, features=features)[0]
+        return self.getmask2(text, mode, direction=direction,
+                             features=features)[0]
 
-    def getmask2(self, text, mode="", fill=Image.core.fill, direction=None, features=None, *args, **kwargs):
+    def getmask2(self, text, mode="", fill=Image.core.fill, direction=None,
+                 features=None, *args, **kwargs):
         size, offset = self.font.getsize(text, direction, features)
         im = fill("L", size, 0)
         self.font.render(text, im.id, mode == "1", direction, features)

@@ -194,12 +198,13 @@ class FreeTypeFont(object):
 
         :return: A FreeTypeFont object.
         """
-        return FreeTypeFont(font=self.path if font is None else font,
-                            size=self.size if size is None else size,
-                            index=self.index if index is None else index,
-                            encoding=self.encoding if encoding is None else encoding,
-                            layout_engine=self.layout_engine if layout_engine is None else layout_engine
-                            )
+        return FreeTypeFont(
+            font=self.path if font is None else font,
+            size=self.size if size is None else size,
+            index=self.index if index is None else index,
+            encoding=self.encoding if encoding is None else encoding,
+            layout_engine=self.layout_engine if layout_engine is None else layout_engine
+        )
 
 
 class TransposedFont(object):

@@ -303,12 +308,16 @@ def truetype(font=None, size=10, index=0, encoding="",
             for walkfilename in walkfilenames:
                 if ext and walkfilename == ttf_filename:
                     fontpath = os.path.join(walkroot, walkfilename)
-                    return FreeTypeFont(fontpath, size, index, encoding, layout_engine)
-                elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename:
+                    return FreeTypeFont(fontpath, size, index,
+                                        encoding, layout_engine)
+                elif (not ext and
+                      os.path.splitext(walkfilename)[0] == ttf_filename):
                     fontpath = os.path.join(walkroot, walkfilename)
                     if os.path.splitext(fontpath)[1] == '.ttf':
-                        return FreeTypeFont(fontpath, size, index, encoding, layout_engine)
-                    if not ext and first_font_with_a_different_extension is None:
+                        return FreeTypeFont(fontpath, size, index,
+                                            encoding, layout_engine)
+                    if not ext \
+                       and first_font_with_a_different_extension is None:
                         first_font_with_a_different_extension = fontpath
     if first_font_with_a_different_extension:
         return FreeTypeFont(first_font_with_a_different_extension, size,
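Note: these ImageFont hunks are all argument-wrapping; resolving a font by file name (walking the system font directories) behaves as before:

    from PIL import ImageFont

    # Assumes a DejaVuSans.ttf is discoverable on the system
    font = ImageFont.truetype("DejaVuSans.ttf", size=24)
    print(font.getsize("hello"))  # (width, height) in pixels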
@@ -42,7 +42,8 @@ def getmode(mode):
         for m, (basemode, basetype, bands) in Image._MODEINFO.items():
             modes[m] = ModeDescriptor(m, bands, basemode, basetype)
         # extra experimental modes
-        modes["RGBa"] = ModeDescriptor("RGBa", ("R", "G", "B", "a"), "RGB", "L")
+        modes["RGBa"] = ModeDescriptor("RGBa",
+                                       ("R", "G", "B", "a"), "RGB", "L")
         modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L")
         modes["La"] = ModeDescriptor("La", ("L", "a"), "L", "L")
         modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L")
@@ -151,11 +151,6 @@ class LutBuilder(object):
 
             patterns += self._pattern_permute(pattern, options, result)
 
-        # # Debugging
-        # for p, r in patterns:
-        #     print(p, r)
-        # print('--')
-
         # compile the patterns into regular expressions for speed
         for i, pattern in enumerate(patterns):
             p = pattern[0].replace('.', 'X').replace('X', '[01]')
@@ -136,32 +136,135 @@ def autocontrast(image, cutoff=0, ignore=None):
     return _lut(image, lut)
 
 
-def colorize(image, black, white):
+def colorize(image, black, white, mid=None, blackpoint=0,
+             whitepoint=255, midpoint=127):
     """
-    Colorize grayscale image. The **black** and **white**
-    arguments should be RGB tuples; this function calculates a color
-    wedge mapping all black pixels in the source image to the first
-    color, and all white pixels to the second color.
+    Colorize grayscale image.
+    This function calculates a color wedge which maps all black pixels in
+    the source image to the first color and all white pixels to the
+    second color. If **mid** is specified, it uses three-color mapping.
+    The **black** and **white** arguments should be RGB tuples or color names;
+    optionally you can use three-color mapping by also specifying **mid**.
+    Mapping positions for any of the colors can be specified
+    (e.g. **blackpoint**), where these parameters are the integer
+    value corresponding to where the corresponding color should be mapped.
+    These parameters must have logical order, such that
+    **blackpoint** <= **midpoint** <= **whitepoint** (if **mid** is specified).
 
     :param image: The image to colorize.
     :param black: The color to use for black input pixels.
     :param white: The color to use for white input pixels.
+    :param mid: The color to use for midtone input pixels.
+    :param blackpoint: an int value [0, 255] for the black mapping.
+    :param whitepoint: an int value [0, 255] for the white mapping.
+    :param midpoint: an int value [0, 255] for the midtone mapping.
     :return: An image.
     """
+
+    # Initial asserts
     assert image.mode == "L"
+    if mid is None:
+        assert 0 <= blackpoint <= whitepoint <= 255
+    else:
+        assert 0 <= blackpoint <= midpoint <= whitepoint <= 255
+
+    # Define colors from arguments
     black = _color(black, "RGB")
     white = _color(white, "RGB")
+    if mid is not None:
+        mid = _color(mid, "RGB")
+
+    # Empty lists for the mapping
     red = []
     green = []
     blue = []
-    for i in range(256):
-        red.append(black[0]+i*(white[0]-black[0])//255)
-        green.append(black[1]+i*(white[1]-black[1])//255)
-        blue.append(black[2]+i*(white[2]-black[2])//255)
+
+    # Create the low-end values
+    for i in range(0, blackpoint):
+        red.append(black[0])
+        green.append(black[1])
+        blue.append(black[2])
+
+    # Create the mapping (2-color)
+    if mid is None:
+
+        range_map = range(0, whitepoint - blackpoint)
+
+        for i in range_map:
+            red.append(black[0] + i * (white[0] - black[0]) // len(range_map))
+            green.append(black[1] + i * (white[1] - black[1]) // len(range_map))
+            blue.append(black[2] + i * (white[2] - black[2]) // len(range_map))
+
+    # Create the mapping (3-color)
+    else:
+
+        range_map1 = range(0, midpoint - blackpoint)
+        range_map2 = range(0, whitepoint - midpoint)
+
+        for i in range_map1:
+            red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1))
+            green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1))
+            blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1))
+        for i in range_map2:
+            red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2))
+            green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2))
+            blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2))
+
+    # Create the high-end values
+    for i in range(0, 256 - whitepoint):
+        red.append(white[0])
+        green.append(white[1])
+        blue.append(white[2])
+
+    # Return converted image
     image = image.convert("RGB")
     return _lut(image, red + green + blue)
 
 
+def pad(image, size, method=Image.NEAREST, color=None, centering=(0.5, 0.5)):
+    """
+    Returns a sized and padded version of the image, expanded to fill the
+    requested aspect ratio and size.
+
+    :param image: The image to size and crop.
+    :param size: The requested output size in pixels, given as a
+                 (width, height) tuple.
+    :param method: What resampling method to use. Default is
+                   :py:attr:`PIL.Image.NEAREST`.
+    :param color: The background color of the padded image.
+    :param centering: Control the position of the original image within the
+                      padded version.
+                          (0.5, 0.5) will keep the image centered
+                          (0, 0) will keep the image aligned to the top left
+                          (1, 1) will keep the image aligned to the bottom
+                          right
+    :return: An image.
+    """
+
+    im_ratio = image.width / image.height
+    dest_ratio = float(size[0]) / size[1]
+
+    if im_ratio == dest_ratio:
+        out = image.resize(size, resample=method)
+    else:
+        out = Image.new(image.mode, size, color)
+        if im_ratio > dest_ratio:
+            new_height = int(image.height / image.width * size[0])
+            if new_height != size[1]:
+                image = image.resize((size[0], new_height), resample=method)
+
+            y = int((size[1] - new_height) * max(0, min(centering[1], 1)))
+            out.paste(image, (0, y))
+        else:
+            new_width = int(image.width / image.height * size[1])
+            if new_width != size[0]:
+                image = image.resize((new_width, size[1]), resample=method)
+
+            x = int((size[0] - new_width) * max(0, min(centering[0], 1)))
+            out.paste(image, (x, 0))
+    return out
+
+
 def crop(image, border=0):
     """
     Remove border from image. The same amount of pixels are removed
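Note: `colorize()` gains optional three-color mapping with adjustable black/mid/white anchor points, and the new `pad()` letterboxes an image into a target size. A short sketch of both ("input.png" is a placeholder):

    from PIL import Image, ImageOps

    gray = Image.open("input.png").convert("L")
    # Shadows -> navy, midtones -> gray, highlights -> yellow
    tinted = ImageOps.colorize(gray, black="navy", white="yellow", mid="gray",
                               blackpoint=20, midpoint=128, whitepoint=235)
    # Fit into 640x480, centered, padding with black
    framed = ImageOps.pad(tinted, (640, 480), color="black")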
@@ -59,7 +59,7 @@ class ImagePalette(object):
 
     def getdata(self):
         """
-        Get palette contents in format suitable # for the low-level
+        Get palette contents in format suitable for the low-level
        ``im.putpalette`` primitive.
 
        .. warning:: This method is experimental.
@@ -23,16 +23,21 @@ import sys
 
 qt_versions = [
     ['5', 'PyQt5'],
+    ['side2', 'PySide2'],
     ['4', 'PyQt4'],
     ['side', 'PySide']
 ]
 # If a version has already been imported, attempt it first
-qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True)
+qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules,
+                 reverse=True)
 for qt_version, qt_module in qt_versions:
     try:
         if qt_module == 'PyQt5':
             from PyQt5.QtGui import QImage, qRgba, QPixmap
             from PyQt5.QtCore import QBuffer, QIODevice
+        elif qt_module == 'PySide2':
+            from PySide2.QtGui import QImage, qRgba, QPixmap
+            from PySide2.QtCore import QBuffer, QIODevice
         elif qt_module == 'PyQt4':
             from PyQt4.QtGui import QImage, qRgba, QPixmap
             from PyQt4.QtCore import QBuffer, QIODevice

@@ -119,7 +124,8 @@ def align8to32(bytes, width, mode):
 
     new_data = []
     for i in range(len(bytes) // bytes_per_line):
-        new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding)
+        new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line]
+                        + b'\x00' * extra_padding)
 
     return b''.join(new_data)
@@ -110,11 +110,11 @@ class Stat(object):
         v = []
         for i in self.bands:
             s = 0
-            l = self.count[i]//2
+            half = self.count[i]//2
             b = i * 256
             for j in range(256):
                 s = s + self.h[b+j]
-                if s > l:
+                if s > half:
                     break
             v.append(j)
         return v
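Note: renaming `l` to `half` clears the ambiguous-variable-name lint (flake8 E741); the per-band median is still the histogram bin where the running count passes half the pixels:

    from PIL import Image, ImageStat

    im = Image.open("input.png")  # placeholder file name
    print(ImageStat.Stat(im).median)  # one value per band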
@@ -32,13 +32,6 @@ if sys.version_info.major > 2:
 else:
     import Tkinter as tkinter
 
-# required for pypy, which always has cffi installed
-try:
-    from cffi import FFI
-    ffi = FFI()
-except ImportError:
-    pass
-
 from . import Image
 from io import BytesIO
 

@@ -192,10 +185,15 @@ class PhotoImage(object):
                 from . import _imagingtk
                 try:
                     if hasattr(tk, 'interp'):
-                        # Pypy is using a ffi cdata element
+                        # Required for PyPy, which always has CFFI installed
+                        from cffi import FFI
+                        ffi = FFI()
+
+                        # PyPy is using an FFI CDATA element
                         # (Pdb) self.tk.interp
                         # <cdata 'Tcl_Interp *' 0x3061b50>
-                        _imagingtk.tkinit(int(ffi.cast("uintptr_t", tk.interp)), 1)
+                        _imagingtk.tkinit(
+                            int(ffi.cast("uintptr_t", tk.interp)), 1)
                     else:
                         _imagingtk.tkinit(tk.interpaddr(), 1)
                 except AttributeError:
@@ -78,10 +78,10 @@ class ImtImageFile(ImageFile.ImageFile):
                 k, v = m.group(1, 2)
                 if k == "width":
                     xsize = int(v)
-                    self.size = xsize, ysize
+                    self._size = xsize, ysize
                 elif k == "height":
                     ysize = int(v)
-                    self.size = xsize, ysize
+                    self._size = xsize, ysize
                 elif k == "pixel" and v == "n8":
                     self.mode = "L"
 

@@ -103,8 +103,6 @@ class IptcImageFile(ImageFile.ImageFile):
             else:
                 self.info[tag] = tagdata
 
-            # print(tag, self.info[tag])
-
         # mode
         layers = i8(self.info[(3, 60)][0])
         component = i8(self.info[(3, 60)][1])

@@ -120,7 +118,7 @@ class IptcImageFile(ImageFile.ImageFile):
             self.mode = "CMYK"[id]
 
         # size
-        self.size = self.getint((3, 20)), self.getint((3, 30))
+        self._size = self.getint((3, 20)), self.getint((3, 30))
 
         # compression
         try:

@@ -159,13 +159,13 @@ class Jpeg2KImageFile(ImageFile.ImageFile):
         sig = self.fp.read(4)
         if sig == b'\xff\x4f\xff\x51':
             self.codec = "j2k"
-            self.size, self.mode = _parse_codestream(self.fp)
+            self._size, self.mode = _parse_codestream(self.fp)
         else:
             sig = sig + self.fp.read(8)
 
             if sig == b'\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a':
                 self.codec = "jp2"
-                self.size, self.mode = _parse_jp2_header(self.fp)
+                self._size, self.mode = _parse_jp2_header(self.fp)
             else:
                 raise SyntaxError('not a JPEG 2000 file')
 

@@ -198,8 +198,8 @@ class Jpeg2KImageFile(ImageFile.ImageFile):
         if self.reduce:
             power = 1 << self.reduce
             adjust = power >> 1
-            self.size = (int((self.size[0] + adjust) / power),
-                         int((self.size[1] + adjust) / power))
+            self._size = (int((self.size[0] + adjust) / power),
+                          int((self.size[1] + adjust) / power))
 
         if self.tile:
             # Update the reduce and layers settings

@@ -270,7 +270,8 @@ def _save(im, fp, filename):
 Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept)
 Image.register_save(Jpeg2KImageFile.format, _save)
 
-Image.register_extensions(Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"])
+Image.register_extensions(Jpeg2KImageFile.format,
+                          [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"])
 
 Image.register_mime(Jpeg2KImageFile.format, 'image/jp2')
 Image.register_mime(Jpeg2KImageFile.format, 'image/jpx')

@@ -159,7 +159,7 @@ def SOF(self, marker):
 
     n = i16(self.fp.read(2))-2
     s = ImageFile._safe_read(self.fp, n)
-    self.size = i16(s[3:]), i16(s[1:])
+    self._size = i16(s[3:]), i16(s[1:])
 
     self.bits = i8(s[0])
     if self.bits != 8:

@@ -334,7 +334,6 @@ class JpegImageFile(ImageFile.ImageFile):
 
             if i in MARKER:
                 name, description, handler = MARKER[i]
-                # print(hex(i), name, description)
                 if handler is not None:
                     handler(self, i)
                 if i == 0xFFDA:  # start of scan

@@ -391,7 +390,7 @@ class JpegImageFile(ImageFile.ImageFile):
             if scale >= s:
                 break
             e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1]
-            self.size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s)
+            self._size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s)
             scale = s
 
         self.tile = [(d, e, o, a)]

@@ -424,7 +423,7 @@ class JpegImageFile(ImageFile.ImageFile):
             pass
 
         self.mode = self.im.mode
-        self.size = self.im.size
+        self._size = self.im.size
 
         self.tile = []
 

@@ -793,12 +792,13 @@ def jpeg_factory(fp=None, filename=None):
     return im
 
 
-# -------------------------------------------------------------------q-
+# ---------------------------------------------------------------------
 # Registry stuff
 
 Image.register_open(JpegImageFile.format, jpeg_factory, _accept)
 Image.register_save(JpegImageFile.format, _save)
 
-Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"])
+Image.register_extensions(JpegImageFile.format,
+                          [".jfif", ".jpe", ".jpg", ".jpeg"])
 
 Image.register_mime(JpegImageFile.format, "image/jpeg")

@@ -59,7 +59,7 @@ class McIdasImageFile(ImageFile.ImageFile):
             raise SyntaxError("unsupported McIdas format")
 
         self.mode = mode
-        self.size = w[10], w[9]
+        self._size = w[10], w[9]
 
         offset = w[34] + w[15]
         stride = w[15] + w[10]*w[11]*w[14]

@@ -72,7 +72,7 @@ class MpegImageFile(ImageFile.ImageFile):
             raise SyntaxError("not an MPEG file")
 
         self.mode = "RGB"
-        self.size = s.read(12), s.read(12)
+        self._size = s.read(12), s.read(12)
 
 
 # --------------------------------------------------------------------

@@ -85,7 +85,7 @@ class MpoImageFile(JpegImagePlugin.JpegImageFile):
         return self.__frame
 
 
-# -------------------------------------------------------------------q-
+# ---------------------------------------------------------------------
 # Registry stuff
 
 # Note that since MPO shares a factory with JPEG, we do not need to do a

@@ -63,7 +63,7 @@ class MspImageFile(ImageFile.ImageFile):
             raise SyntaxError("bad MSP checksum")
 
         self.mode = "1"
-        self.size = i16(s[4:]), i16(s[6:])
+        self._size = i16(s[4:]), i16(s[6:])
 
         if s[:4] == b"DanM":
             self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))]

@@ -126,8 +126,9 @@ class MspDecoder(ImageFile.PyDecoder):
                     continue
                 row = self.fd.read(rowlen)
                 if len(row) != rowlen:
-                    raise IOError("Truncated MSP file, expected %d bytes on row %s",
-                                  (rowlen, x))
+                    raise IOError(
+                        "Truncated MSP file, expected %d bytes on row %s",
+                        (rowlen, x))
                 idx = 0
                 while idx < rowlen:
                     runtype = i8(row[idx])

@@ -48,14 +48,14 @@ class PcdImageFile(ImageFile.ImageFile):
             self.tile_post_rotate = -90
 
         self.mode = "RGB"
-        self.size = 768, 512  # FIXME: not correct for rotated images!
+        self._size = 768, 512  # FIXME: not correct for rotated images!
         self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)]
 
     def load_end(self):
         if self.tile_post_rotate:
             # Handle rotated PCDs
             self.im = self.im.rotate(self.tile_post_rotate)
-            self.size = self.im.size
+            self._size = self.im.size
 
 
 #

@@ -100,7 +100,7 @@ class PcxImageFile(ImageFile.ImageFile):
             raise IOError("unknown PCX mode")
 
         self.mode = mode
-        self.size = bbox[2]-bbox[0], bbox[3]-bbox[1]
+        self._size = bbox[2]-bbox[0], bbox[3]-bbox[1]
 
         bbox = (0, 0) + self.size
         logger.debug("size: %sx%s", *self.size)
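Note: the recurring `self.size = ...` -> `self._size = ...` edits across these plugins track the upstream Pillow change that made `Image.size` a read-only property: file plugins now write the backing `_size` attribute while callers keep reading `im.size` as before. Sketch of the pattern (hypothetical plugin, not from this diff):

    from PIL import ImageFile

    class ExampleImageFile(ImageFile.ImageFile):  # hypothetical plugin
        format = "EXAMPLE"
        format_description = "illustrative format"

        def _open(self):
            self.mode = "RGB"
            self._size = (640, 480)  # plugins set the backing attribute
            # ... parse the header, set self.tile, etc.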
@@ -22,6 +22,8 @@
 
 from . import Image, ImageFile, ImageSequence, PdfParser
 import io
+import os
+import time
 
 __version__ = "0.5"
 

@@ -45,32 +47,30 @@ def _save_all(im, fp, filename):
 # (Internal) Image save plugin for the PDF format.
 
 def _save(im, fp, filename, save_all=False):
-    resolution = im.encoderinfo.get("resolution", 72.0)
     is_appending = im.encoderinfo.get("append", False)
-    title = im.encoderinfo.get("title", None)
-    author = im.encoderinfo.get("author", None)
-    subject = im.encoderinfo.get("subject", None)
-    keywords = im.encoderinfo.get("keywords", None)
-    creator = im.encoderinfo.get("creator", None)
-    producer = im.encoderinfo.get("producer", None)
-
     if is_appending:
         existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b")
     else:
         existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b")
 
-    if title:
-        existing_pdf.info.Title = title
-    if author:
-        existing_pdf.info.Author = author
-    if subject:
-        existing_pdf.info.Subject = subject
-    if keywords:
-        existing_pdf.info.Keywords = keywords
-    if creator:
-        existing_pdf.info.Creator = creator
-    if producer:
-        existing_pdf.info.Producer = producer
+    resolution = im.encoderinfo.get("resolution", 72.0)
+
+    info = {
+        "title": None if is_appending else os.path.splitext(
+            os.path.basename(filename)
+        )[0],
+        "author": None,
+        "subject": None,
+        "keywords": None,
+        "creator": None,
+        "producer": None,
+        "creationDate": None if is_appending else time.gmtime(),
+        "modDate": None if is_appending else time.gmtime()
+    }
+    for k, default in info.items():
+        v = im.encoderinfo.get(k) if k in im.encoderinfo else default
+        if v:
+            existing_pdf.info[k[0].upper() + k[1:]] = v
 
     #
     # make sure image data is available

@@ -98,7 +98,8 @@ def _save(im, fp, filename, save_all=False):
         try:
             im_numberOfPages = im.n_frames
         except AttributeError:
-            # Image format does not have n_frames. It is a single frame image
+            # Image format does not have n_frames.
+            # It is a single frame image
             pass
         numberOfPages += im_numberOfPages
         for i in range(im_numberOfPages):

@@ -115,9 +116,9 @@ def _save(im, fp, filename, save_all=False):
     for imSequence in ims:
         im_pages = ImageSequence.Iterator(imSequence) if save_all else [imSequence]
         for im in im_pages:
-            # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits)
-            # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports
-            # Flatedecode (zip compression).
+            # FIXME: Should replace ASCIIHexDecode with RunLengthDecode
+            # (packbits) or LZWDecode (tiff/lzw compression). Note that
+            # PDF 1.2 also supports Flatedecode (zip compression).
 
             bits = 8
             params = None

@@ -135,7 +136,12 @@ def _save(im, fp, filename, save_all=False):
             elif im.mode == "P":
                 filter = "ASCIIHexDecode"
                 palette = im.im.getpalette("RGB")
-                colorspace = [PdfParser.PdfName("Indexed"), PdfParser.PdfName("DeviceRGB"), 255, PdfParser.PdfBinary(palette)]
+                colorspace = [
+                    PdfParser.PdfName("Indexed"),
+                    PdfParser.PdfName("DeviceRGB"),
+                    255,
+                    PdfParser.PdfBinary(palette)
+                ]
                 procset = "ImageI"  # indexed color
             elif im.mode == "RGB":
                 filter = "DCTDecode"

@@ -166,7 +172,8 @@ def _save(im, fp, filename, save_all=False):
             elif filter == "FlateDecode":
                 ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)])
             elif filter == "RunLengthDecode":
-                ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)])
+                ImageFile._save(im, op,
+                                [("packbits", (0, 0)+im.size, 0, im.mode)])
             else:
                 raise ValueError("unsupported PDF filter (%s)" % filter)
 

@@ -175,26 +182,37 @@ def _save(im, fp, filename, save_all=False):
 
             width, height = im.size
 
-            existing_pdf.write_obj(image_refs[pageNumber], stream=op.getvalue(),
-                                   Type=PdfParser.PdfName("XObject"),
-                                   Subtype=PdfParser.PdfName("Image"),
-                                   Width=width,  # * 72.0 / resolution,
-                                   Height=height,  # * 72.0 / resolution,
-                                   Filter=PdfParser.PdfName(filter),
-                                   BitsPerComponent=bits,
-                                   DecodeParams=params,
-                                   ColorSpace=colorspace)
+            existing_pdf.write_obj(image_refs[pageNumber],
+                                   stream=op.getvalue(),
+                                   Type=PdfParser.PdfName("XObject"),
+                                   Subtype=PdfParser.PdfName("Image"),
+                                   Width=width,  # * 72.0 / resolution,
+                                   Height=height,  # * 72.0 / resolution,
+                                   Filter=PdfParser.PdfName(filter),
+                                   BitsPerComponent=bits,
+                                   DecodeParams=params,
+                                   ColorSpace=colorspace)
 
             #
             # page
 
             existing_pdf.write_page(page_refs[pageNumber],
-                                    Resources=PdfParser.PdfDict(
-                                        ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)],
-                                        XObject=PdfParser.PdfDict(image=image_refs[pageNumber])),
-                                    MediaBox=[0, 0, int(width * 72.0 / resolution), int(height * 72.0 / resolution)],
-                                    Contents=contents_refs[pageNumber]
-                                    )
+                                    Resources=PdfParser.PdfDict(
+                                        ProcSet=[
+                                            PdfParser.PdfName("PDF"),
+                                            PdfParser.PdfName(procset)
+                                        ],
+                                        XObject=PdfParser.PdfDict(
+                                            image=image_refs[pageNumber]
+                                        )
+                                    ),
+                                    MediaBox=[
+                                        0,
+                                        0,
+                                        int(width * 72.0 / resolution),
+                                        int(height * 72.0 / resolution)
+                                    ],
+                                    Contents=contents_refs[pageNumber])
 
             #
             # page contents

@@ -204,7 +222,8 @@ def _save(im, fp, filename, save_all=False):
                 int(width * 72.0 / resolution),
                 int(height * 72.0 / resolution)))
 
-            existing_pdf.write_obj(contents_refs[pageNumber], stream=page_contents)
+            existing_pdf.write_obj(contents_refs[pageNumber],
+                                   stream=page_contents)
 
             pageNumber += 1
 
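Note: `_save()` now funnels all PDF metadata through a single `info` dict, so each key can be overridden via `encoderinfo`, and new (non-appended) files get a default title plus creation/modification dates. The lowercase keys are passed straight to `save()`:

    from PIL import Image

    im = Image.open("page1.png")  # placeholder file name
    im.save("out.pdf", save_all=True, resolution=100.0,
            title="Report", author="Example Author")  # illustrative metadata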
@@ -1,8 +1,10 @@
+import calendar
 import codecs
 import collections
 import mmap
 import os
 import re
+import time
 import zlib
 from ._util import py3
 

@@ -20,7 +22,8 @@ else:  # Python 2.x
         return s  # pragma: no cover
 
 
-# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set on page 656
+# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set
+# on page 656
 def encode_text(s):
     return codecs.BOM_UTF16_BE + s.encode("utf_16_be")
 

@@ -80,7 +83,8 @@ def decode_text(b):
 
 
 class PdfFormatError(RuntimeError):
-    """An error that probably indicates a syntactic or semantic error in the PDF file structure"""
+    """An error that probably indicates a syntactic or semantic error in the
+    PDF file structure"""
     pass
 

@@ -89,7 +93,8 @@ def check_format_condition(condition, error_message):
         raise PdfFormatError(error_message)
 
 
-class IndirectReference(collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"])):
+class IndirectReference(collections.namedtuple("IndirectReferenceTuple",
+                                               ["object_id", "generation"])):
     def __str__(self):
         return "%s %s R" % self
 

@@ -97,7 +102,9 @@ class IndirectReference(collections.namedtuple("IndirectReferenceTuple", ["objec
         return self.__str__().encode("us-ascii")
 
     def __eq__(self, other):
-        return other.__class__ is self.__class__ and other.object_id == self.object_id and other.generation == self.generation
+        return other.__class__ is self.__class__ and \
+               other.object_id == self.object_id and \
+               other.generation == self.generation
 
     def __ne__(self, other):
         return not (self == other)

@@ -143,19 +150,26 @@ class XrefTable:
         elif key in self.deleted_entries:
             generation = self.deleted_entries[key]
         else:
-            raise IndexError("object ID " + str(key) + " cannot be deleted because it doesn't exist")
+            raise IndexError("object ID " + str(key) +
+                             " cannot be deleted because it doesn't exist")
 
     def __contains__(self, key):
         return key in self.existing_entries or key in self.new_entries
 
     def __len__(self):
-        return len(set(self.existing_entries.keys()) | set(self.new_entries.keys()) | set(self.deleted_entries.keys()))
+        return len(set(self.existing_entries.keys()) |
+                   set(self.new_entries.keys()) |
+                   set(self.deleted_entries.keys()))
 
     def keys(self):
-        return (set(self.existing_entries.keys()) - set(self.deleted_entries.keys())) | set(self.new_entries.keys())
+        return (
+            set(self.existing_entries.keys()) -
+            set(self.deleted_entries.keys())
+        ) | set(self.new_entries.keys())
 
     def write(self, f):
-        keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys()))
+        keys = sorted(set(self.new_entries.keys()) |
+                      set(self.deleted_entries.keys()))
         deleted_keys = sorted(set(self.deleted_entries.keys()))
         startxref = f.tell()
         f.write(b"xref\n")

@@ -172,10 +186,12 @@ class XrefTable:
             else:
                 contiguous_keys = keys
                 keys = None
-            f.write(make_bytes("%d %d\n" % (contiguous_keys[0], len(contiguous_keys))))
+            f.write(make_bytes("%d %d\n" %
+                               (contiguous_keys[0], len(contiguous_keys))))
             for object_id in contiguous_keys:
                 if object_id in self.new_entries:
-                    f.write(make_bytes("%010d %05d n \n" % self.new_entries[object_id]))
+                    f.write(make_bytes("%010d %05d n \n" %
+                                       self.new_entries[object_id]))
                 else:
                     this_deleted_object_id = deleted_keys.pop(0)
                     check_format_condition(object_id == this_deleted_object_id,

@@ -186,7 +202,9 @@ class XrefTable:
                         next_in_linked_list = deleted_keys[0]
                     except IndexError:
                         next_in_linked_list = 0
-                    f.write(make_bytes("%010d %05d f \n" % (next_in_linked_list, self.deleted_entries[object_id])))
+                    f.write(make_bytes("%010d %05d f \n" %
+                                       (next_in_linked_list,
+                                        self.deleted_entries[object_id])))
         return startxref
 
 

@@ -203,7 +221,8 @@ class PdfName:
             return self.name.decode("us-ascii")
 
     def __eq__(self, other):
-        return (isinstance(other, PdfName) and other.name == self.name) or other == self.name
+        return (isinstance(other, PdfName) and other.name == self.name) or \
+               other == self.name
 
     def __hash__(self):
         return hash(self.name)

@@ -263,9 +282,26 @@ class PdfDict(UserDict):
         except KeyError:
             raise AttributeError(key)
         if isinstance(value, bytes):
-            return decode_text(value)
-        else:
-            return value
+            value = decode_text(value)
+        if key.endswith("Date"):
+            if value.startswith("D:"):
+                value = value[2:]
+
+            relationship = 'Z'
+            if len(value) > 17:
+                relationship = value[14]
+                offset = int(value[15:17]) * 60
+                if len(value) > 20:
+                    offset += int(value[18:20])
+
+            format = '%Y%m%d%H%M%S'[:len(value) - 2]
+            value = time.strptime(value[:len(format)+2], format)
+            if relationship in ['+', '-']:
+                offset *= 60
+                if relationship == '+':
+                    offset *= -1
+                value = time.gmtime(calendar.timegm(value) + offset)
+        return value
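Note: the new `__getattr__` branch decodes PDF date strings ("D:YYYYMMDDHHmmSS" plus an optional timezone suffix) into `time.struct_time`, normalising explicit offsets to UTC. A hedged illustration against the internal class changed above:

    import time
    from PIL import PdfParser

    d = PdfParser.PdfDict({b"CreationDate": b"D:20180923103000+02'00"})
    parsed = d.CreationDate  # struct_time, shifted to UTC (08:30 here)
    print(time.strftime("%Y-%m-%d %H:%M", parsed))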
||||
def __bytes__(self):
|
||||
out = bytearray(b"<<")
|
||||
|
@ -313,7 +349,9 @@ class PdfStream:
|
|||
expected_length = self.dictionary.Length
|
||||
return zlib.decompress(self.buf, bufsize=int(expected_length))
|
||||
else:
|
||||
raise NotImplementedError("stream filter %s unknown/unsupported" % repr(self.dictionary.Filter))
|
||||
raise NotImplementedError(
|
||||
"stream filter %s unknown/unsupported" %
|
||||
repr(self.dictionary.Filter))
|
||||
|
||||
|
||||
def pdf_repr(x):
|
||||
|
@ -323,18 +361,26 @@ def pdf_repr(x):
|
|||
return b"false"
|
||||
elif x is None:
|
||||
return b"null"
|
||||
elif isinstance(x, PdfName) or isinstance(x, PdfDict) or isinstance(x, PdfArray) or isinstance(x, PdfBinary):
|
||||
elif (isinstance(x, PdfName) or isinstance(x, PdfDict) or
|
||||
isinstance(x, PdfArray) or isinstance(x, PdfBinary)):
|
||||
return bytes(x)
|
||||
elif isinstance(x, int):
|
||||
return str(x).encode("us-ascii")
|
||||
elif isinstance(x, time.struct_time):
|
||||
return b'(D:'+time.strftime('%Y%m%d%H%M%SZ', x).encode("us-ascii")+b')'
|
||||
elif isinstance(x, dict):
|
||||
return bytes(PdfDict(x))
|
||||
elif isinstance(x, list):
|
||||
return bytes(PdfArray(x))
|
||||
elif (py3 and isinstance(x, str)) or (not py3 and isinstance(x, unicode)):
|
||||
elif ((py3 and isinstance(x, str)) or
|
||||
(not py3 and isinstance(x, unicode))):
|
||||
return pdf_repr(encode_text(x))
|
||||
elif isinstance(x, bytes):
|
||||
return b"(" + x.replace(b"\\", b"\\\\").replace(b"(", b"\\(").replace(b")", b"\\)") + b")" # XXX escape more chars? handle binary garbage
|
||||
# XXX escape more chars? handle binary garbage
|
||||
x = x.replace(b"\\", b"\\\\")
|
||||
x = x.replace(b"(", b"\\(")
|
||||
x = x.replace(b")", b"\\)")
|
||||
return b"(" + x + b")"
|
||||
else:
|
||||
return bytes(x)
|
||||
|
||||
|
@ -344,10 +390,13 @@ class PdfParser:
|
|||
Supports PDF up to 1.4
|
||||
"""
|
||||
|
||||
def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"):
|
||||
# type: (PdfParser, str, file, Union[bytes, bytearray], int, str) -> None
|
||||
def __init__(self, filename=None, f=None,
|
||||
buf=None, start_offset=0, mode="rb"):
|
||||
# type: (PdfParser, str, file, Union[bytes, bytearray], int, str)
|
||||
# -> None
|
||||
if buf and f:
|
||||
raise RuntimeError("specify buf or f or filename, but not both buf and f")
|
||||
raise RuntimeError(
|
||||
"specify buf or f or filename, but not both buf and f")
|
||||
self.filename = filename
|
||||
self.buf = buf
|
||||
self.f = f
|
||||
|
@ -422,12 +471,12 @@ class PdfParser:
|
|||
self.pages_ref = self.next_object_id(0)
|
||||
self.rewrite_pages()
|
||||
self.write_obj(self.root_ref,
|
||||
Type=PdfName(b"Catalog"),
|
||||
Pages=self.pages_ref)
|
||||
Type=PdfName(b"Catalog"),
|
||||
Pages=self.pages_ref)
|
||||
self.write_obj(self.pages_ref,
|
||||
Type=PdfName(b"Pages"),
|
||||
Count=len(self.pages),
|
||||
Kids=self.pages)
|
||||
Type=PdfName(b"Pages"),
|
||||
Count=len(self.pages),
|
||||
Kids=self.pages)
|
||||
return self.root_ref
|
||||
|
||||
def rewrite_pages(self):
|
||||
|
@ -473,7 +522,8 @@ class PdfParser:
|
|||
if self.info:
|
||||
trailer_dict[b"Info"] = self.info_ref
|
||||
self.last_xref_section_offset = start_xref
|
||||
self.f.write(b"trailer\n" + bytes(PdfDict(trailer_dict)) + make_bytes("\nstartxref\n%d\n%%%%EOF" % start_xref))
|
||||
self.f.write(b"trailer\n" + bytes(PdfDict(trailer_dict)) +
|
||||
make_bytes("\nstartxref\n%d\n%%%%EOF" % start_xref))
|
||||
|
||||
def write_page(self, ref, *objs, **dict_obj):
|
||||
if isinstance(ref, int):
|
||||
|
@ -535,13 +585,18 @@ class PdfParser:
|
|||
else:
|
||||
self.info = PdfDict(self.read_indirect(self.info_ref))
|
||||
check_format_condition(b"Type" in self.root, "/Type missing in Root")
|
||||
check_format_condition(self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog")
|
||||
check_format_condition(self.root[b"Type"] == b"Catalog",
|
||||
"/Type in Root is not /Catalog")
|
||||
check_format_condition(b"Pages" in self.root, "/Pages missing in Root")
|
||||
check_format_condition(isinstance(self.root[b"Pages"], IndirectReference), "/Pages in Root is not an indirect reference")
|
||||
check_format_condition(isinstance(self.root[b"Pages"],
|
||||
IndirectReference),
|
||||
"/Pages in Root is not an indirect reference")
|
||||
self.pages_ref = self.root[b"Pages"]
|
||||
self.page_tree_root = self.read_indirect(self.pages_ref)
|
||||
self.pages = self.linearize_page_tree(self.page_tree_root)
|
||||
# save the original list of page references in case the user modifies, adds or deletes some pages and we need to rewrite the pages and their list
|
||||
# save the original list of page references
|
||||
# in case the user modifies, adds or deletes some pages
|
||||
# and we need to rewrite the pages and their list
|
||||
self.orig_pages = self.pages[:]
|
||||
|
||||
def next_object_id(self, offset=None):
|
||||
|
@ -562,10 +617,14 @@ class PdfParser:
|
|||
whitespace_mandatory = whitespace + b"+"
|
||||
newline_only = br"[\r\n]+"
|
||||
newline = whitespace_optional + newline_only + whitespace_optional
|
||||
re_trailer_end = re.compile(whitespace_mandatory + br"trailer" + whitespace_optional + br"\<\<(.*\>\>)" + newline
|
||||
+ br"startxref" + newline + br"([0-9]+)" + newline + br"%%EOF" + whitespace_optional + br"$", re.DOTALL)
|
||||
re_trailer_prev = re.compile(whitespace_optional + br"trailer" + whitespace_optional + br"\<\<(.*?\>\>)" + newline
|
||||
+ br"startxref" + newline + br"([0-9]+)" + newline + br"%%EOF" + whitespace_optional, re.DOTALL)
|
||||
re_trailer_end = re.compile(
|
||||
whitespace_mandatory + br"trailer" + whitespace_optional +
|
||||
br"\<\<(.*\>\>)" + newline + br"startxref" + newline + br"([0-9]+)" +
|
||||
newline + br"%%EOF" + whitespace_optional + br"$", re.DOTALL)
|
||||
re_trailer_prev = re.compile(
|
||||
whitespace_optional + br"trailer" + whitespace_optional +
|
||||
br"\<\<(.*?\>\>)" + newline + br"startxref" + newline + br"([0-9]+)" +
|
||||
newline + br"%%EOF" + whitespace_optional, re.DOTALL)
|
||||
|
||||
def read_trailer(self):
|
||||
search_start_offset = len(self.buf) - 16384
|
||||
|
@ -589,19 +648,26 @@ class PdfParser:
|
|||
self.read_prev_trailer(self.trailer_dict[b"Prev"])
|
||||
|
||||
def read_prev_trailer(self, xref_section_offset):
|
||||
trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset)
|
||||
m = self.re_trailer_prev.search(self.buf[trailer_offset:trailer_offset+16384])
|
||||
trailer_offset = self.read_xref_table(
|
||||
xref_section_offset=xref_section_offset)
|
||||
m = self.re_trailer_prev.search(
|
||||
self.buf[trailer_offset:trailer_offset+16384])
|
||||
check_format_condition(m, "previous trailer not found")
|
||||
trailer_data = m.group(1)
|
||||
check_format_condition(int(m.group(2)) == xref_section_offset, "xref section offset in previous trailer doesn't match what was expected")
|
||||
check_format_condition(int(m.group(2)) == xref_section_offset,
|
||||
"xref section offset in previous trailer "
|
||||
"doesn't match what was expected")
|
||||
trailer_dict = self.interpret_trailer(trailer_data)
|
||||
if b"Prev" in trailer_dict:
|
||||
self.read_prev_trailer(trailer_dict[b"Prev"])
|
||||
|
||||
re_whitespace_optional = re.compile(whitespace_optional)
|
||||
re_name = re.compile(whitespace_optional + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + delimiter_or_ws + br")")
|
||||
re_name = re.compile(
|
||||
whitespace_optional + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" +
|
||||
delimiter_or_ws + br")")
|
||||
re_dict_start = re.compile(whitespace_optional + br"\<\<")
|
||||
re_dict_end = re.compile(whitespace_optional + br"\>\>" + whitespace_optional)
|
||||
re_dict_end = re.compile(
|
||||
whitespace_optional + br"\>\>" + whitespace_optional)
|
||||
|
||||
@classmethod
|
||||
def interpret_trailer(cls, trailer_data):
|
||||
|
@ -611,13 +677,21 @@ class PdfParser:
|
|||
m = cls.re_name.match(trailer_data, offset)
|
||||
if not m:
|
||||
m = cls.re_dict_end.match(trailer_data, offset)
|
||||
check_format_condition(m and m.end() == len(trailer_data), "name not found in trailer, remaining data: " + repr(trailer_data[offset:]))
|
||||
check_format_condition(
|
||||
m and m.end() == len(trailer_data),
|
||||
"name not found in trailer, remaining data: " +
|
||||
repr(trailer_data[offset:]))
|
||||
break
|
||||
key = cls.interpret_name(m.group(1))
|
||||
value, offset = cls.get_value(trailer_data, m.end())
|
||||
trailer[key] = value
|
||||
check_format_condition(b"Size" in trailer and isinstance(trailer[b"Size"], int), "/Size not in trailer or not an integer")
|
||||
check_format_condition(b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), "/Root not in trailer or not an indirect reference")
|
||||
check_format_condition(
|
||||
b"Size" in trailer and isinstance(trailer[b"Size"], int),
|
||||
"/Size not in trailer or not an integer")
|
||||
check_format_condition(
|
||||
b"Root" in trailer and
|
||||
isinstance(trailer[b"Root"], IndirectReference),
|
||||
"/Root not in trailer or not an indirect reference")
|
||||
return trailer
|
||||
|
||||
re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?")
|
||||
|
@@ -627,7 +701,8 @@ class PdfParser:
         name = b""
         for m in cls.re_hashes_in_name.finditer(raw):
             if m.group(3):
-                name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii"))
+                name += m.group(1) + \
+                    bytearray.fromhex(m.group(3).decode("us-ascii"))
             else:
                 name += m.group(1)
         if as_text:
@@ -635,21 +710,37 @@ class PdfParser:
         else:
             return bytes(name)

-    re_null = re.compile(whitespace_optional + br"null(?=" + delimiter_or_ws + br")")
-    re_true = re.compile(whitespace_optional + br"true(?=" + delimiter_or_ws + br")")
-    re_false = re.compile(whitespace_optional + br"false(?=" + delimiter_or_ws + br")")
-    re_int = re.compile(whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")")
-    re_real = re.compile(whitespace_optional + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + delimiter_or_ws + br")")
+    re_null = re.compile(
+        whitespace_optional + br"null(?=" + delimiter_or_ws + br")")
+    re_true = re.compile(
+        whitespace_optional + br"true(?=" + delimiter_or_ws + br")")
+    re_false = re.compile(
+        whitespace_optional + br"false(?=" + delimiter_or_ws + br")")
+    re_int = re.compile(
+        whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")")
+    re_real = re.compile(
+        whitespace_optional + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" +
+        delimiter_or_ws + br")")
     re_array_start = re.compile(whitespace_optional + br"\[")
     re_array_end = re.compile(whitespace_optional + br"]")
-    re_string_hex = re.compile(whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>")
+    re_string_hex = re.compile(
+        whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>")
     re_string_lit = re.compile(whitespace_optional + br"\(")
-    re_indirect_reference = re.compile(whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + br"([-+]?[0-9]+)" + whitespace_mandatory + br"R(?=" + delimiter_or_ws + br")")
-    re_indirect_def_start = re.compile(whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + br"([-+]?[0-9]+)" + whitespace_mandatory + br"obj(?=" + delimiter_or_ws + br")")
-    re_indirect_def_end = re.compile(whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")")
-    re_comment = re.compile(br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*")
+    re_indirect_reference = re.compile(
+        whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory +
+        br"([-+]?[0-9]+)" + whitespace_mandatory + br"R(?=" + delimiter_or_ws +
+        br")")
+    re_indirect_def_start = re.compile(
+        whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory +
+        br"([-+]?[0-9]+)" + whitespace_mandatory + br"obj(?=" +
+        delimiter_or_ws + br")")
+    re_indirect_def_end = re.compile(
+        whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")")
+    re_comment = re.compile(
+        br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*")
     re_stream_start = re.compile(whitespace_optional + br"stream\r?\n")
-    re_stream_end = re.compile(whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")")
+    re_stream_end = re.compile(
+        whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")")

     @classmethod
     def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1):
@@ -660,21 +751,34 @@ class PdfParser:
             offset = m.end()
         m = cls.re_indirect_def_start.match(data, offset)
         if m:
-            check_format_condition(int(m.group(1)) > 0, "indirect object definition: object ID must be greater than 0")
-            check_format_condition(int(m.group(2)) >= 0, "indirect object definition: generation must be non-negative")
-            check_format_condition(expect_indirect is None or expect_indirect == IndirectReference(int(m.group(1)), int(m.group(2))),
+            check_format_condition(
+                int(m.group(1)) > 0,
+                "indirect object definition: object ID must be greater than 0")
+            check_format_condition(
+                int(m.group(2)) >= 0,
+                "indirect object definition: generation must be non-negative")
+            check_format_condition(
+                expect_indirect is None or expect_indirect ==
+                IndirectReference(int(m.group(1)), int(m.group(2))),
                 "indirect object definition different than expected")
-            object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting-1)
+            object, offset = cls.get_value(
+                data, m.end(), max_nesting=max_nesting-1)
             if offset is None:
                 return object, None
             m = cls.re_indirect_def_end.match(data, offset)
-            check_format_condition(m, "indirect object definition end not found")
+            check_format_condition(
+                m, "indirect object definition end not found")
             return object, m.end()
-        check_format_condition(not expect_indirect, "indirect object definition not found")
+        check_format_condition(
+            not expect_indirect, "indirect object definition not found")
         m = cls.re_indirect_reference.match(data, offset)
         if m:
-            check_format_condition(int(m.group(1)) > 0, "indirect object reference: object ID must be greater than 0")
-            check_format_condition(int(m.group(2)) >= 0, "indirect object reference: generation must be non-negative")
+            check_format_condition(
+                int(m.group(1)) > 0,
+                "indirect object reference: object ID must be greater than 0")
+            check_format_condition(
+                int(m.group(2)) >= 0,
+                "indirect object reference: generation must be non-negative")
             return IndirectReference(int(m.group(1)), int(m.group(2))), m.end()
         m = cls.re_dict_start.match(data, offset)
         if m:
@@ -682,10 +786,12 @@ class PdfParser:
             result = {}
             m = cls.re_dict_end.match(data, offset)
             while not m:
-                key, offset = cls.get_value(data, offset, max_nesting=max_nesting-1)
+                key, offset = cls.get_value(
+                    data, offset, max_nesting=max_nesting-1)
                 if offset is None:
                     return result, None
-                value, offset = cls.get_value(data, offset, max_nesting=max_nesting-1)
+                value, offset = cls.get_value(
+                    data, offset, max_nesting=max_nesting-1)
                 result[key] = value
                 if offset is None:
                     return result, None
@@ -696,7 +802,9 @@ class PdfParser:
                 try:
                     stream_len = int(result[b"Length"])
                 except (TypeError, KeyError, ValueError):
-                    raise PdfFormatError("bad or missing Length in stream dict (%r)" % result.get(b"Length", None))
+                    raise PdfFormatError(
+                        "bad or missing Length in stream dict (%r)" %
+                        result.get(b"Length", None))
                 stream_data = data[m.end():m.end() + stream_len]
                 m = cls.re_stream_end.match(data, m.end() + stream_len)
                 check_format_condition(m, "stream end not found")
@@ -711,7 +819,8 @@ class PdfParser:
             result = []
             m = cls.re_array_end.match(data, offset)
             while not m:
-                value, offset = cls.get_value(data, offset, max_nesting=max_nesting-1)
+                value, offset = cls.get_value(
+                    data, offset, max_nesting=max_nesting-1)
                 result.append(value)
                 if offset is None:
                     return result, None
@@ -734,18 +843,25 @@ class PdfParser:
             return int(m.group(1)), m.end()
         m = cls.re_real.match(data, offset)
         if m:
-            return float(m.group(1)), m.end()  # XXX Decimal instead of float???
+            # XXX Decimal instead of float???
+            return float(m.group(1)), m.end()
         m = cls.re_string_hex.match(data, offset)
         if m:
-            hex_string = bytearray([b for b in m.group(1) if b in b"0123456789abcdefABCDEF"])  # filter out whitespace
+            # filter out whitespace
+            hex_string = bytearray([
+                b for b in m.group(1)
+                if b in b"0123456789abcdefABCDEF"
+            ])
             if len(hex_string) % 2 == 1:
-                hex_string.append(ord(b"0"))  # append a 0 if the length is not even - yes, at the end
+                # append a 0 if the length is not even - yes, at the end
+                hex_string.append(ord(b"0"))
             return bytearray.fromhex(hex_string.decode("us-ascii")), m.end()
         m = cls.re_string_lit.match(data, offset)
         if m:
             return cls.get_literal_string(data, m.end())
-        #return None, offset  # fallback (only for debugging)
-        raise PdfFormatError("unrecognized object: " + repr(data[offset:offset+32]))
+        # return None, offset  # fallback (only for debugging)
+        raise PdfFormatError(
+            "unrecognized object: " + repr(data[offset:offset+32]))

     re_lit_str_token = re.compile(br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))")
     escaped_chars = {
@@ -792,19 +908,24 @@ class PdfParser:
                 offset = m.end()
         raise PdfFormatError("unfinished literal string")

-    re_xref_section_start = re.compile(whitespace_optional + br"xref" + newline)
-    re_xref_subsection_start = re.compile(whitespace_optional + br"([0-9]+)" + whitespace_mandatory + br"([0-9]+)" + whitespace_optional + newline_only)
+    re_xref_section_start = re.compile(
+        whitespace_optional + br"xref" + newline)
+    re_xref_subsection_start = re.compile(
+        whitespace_optional + br"([0-9]+)" + whitespace_mandatory +
+        br"([0-9]+)" + whitespace_optional + newline_only)
     re_xref_entry = re.compile(br"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)")

     def read_xref_table(self, xref_section_offset):
         subsection_found = False
-        m = self.re_xref_section_start.match(self.buf, xref_section_offset + self.start_offset)
+        m = self.re_xref_section_start.match(
+            self.buf, xref_section_offset + self.start_offset)
         check_format_condition(m, "xref section start not found")
         offset = m.end()
         while True:
             m = self.re_xref_subsection_start.match(self.buf, offset)
             if not m:
-                check_format_condition(subsection_found, "xref subsection start not found")
+                check_format_condition(
+                    subsection_found, "xref subsection start not found")
                 break
             subsection_found = True
             offset = m.end()
@@ -818,22 +939,31 @@ class PdfParser:
                 generation = int(m.group(2))
                 if not is_free:
                     new_entry = (int(m.group(1)), generation)
-                    check_format_condition(i not in self.xref_table or self.xref_table[i] == new_entry, "xref entry duplicated (and not identical)")
+                    check_format_condition(
+                        i not in self.xref_table or
+                        self.xref_table[i] == new_entry,
+                        "xref entry duplicated (and not identical)")
                     self.xref_table[i] = new_entry
         return offset

     def read_indirect(self, ref, max_nesting=-1):
         offset, generation = self.xref_table[ref[0]]
-        check_format_condition(generation == ref[1], "expected to find generation %s for object ID %s in xref table, instead found generation %s at offset %s" \
+        check_format_condition(
+            generation == ref[1],
+            "expected to find generation %s for object ID %s in xref table, "
+            "instead found generation %s at offset %s"
             % (ref[1], ref[0], generation, offset))
-        value = self.get_value(self.buf, offset + self.start_offset, expect_indirect=IndirectReference(*ref), max_nesting=max_nesting)[0]
+        value = self.get_value(self.buf, offset + self.start_offset,
+                               expect_indirect=IndirectReference(*ref),
+                               max_nesting=max_nesting)[0]
         self.cached_objects[ref] = value
         return value

     def linearize_page_tree(self, node=None):
         if node is None:
             node = self.page_tree_root
-        check_format_condition(node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages")
+        check_format_condition(
+            node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages")
         pages = []
         for kid in node[b"Kids"]:
             kid_object = self.read_indirect(kid)

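The #-escape decoding that decode_name wraps above can be exercised on its own. A minimal standalone sketch; the regex is copied from the diff, the surrounding driver is illustrative:

    import re

    # PDF name objects escape arbitrary bytes as #xx hex pairs,
    # e.g. b"Adobe#20PDF" decodes to b"Adobe PDF".
    re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?")

    def decode_name(raw):
        name = b""
        for m in re_hashes_in_name.finditer(raw):
            if m.group(3):
                # group(3) is the two hex digits after '#'
                name += m.group(1) + \
                    bytearray.fromhex(m.group(3).decode("us-ascii"))
            else:
                name += m.group(1)
        return bytes(name)

    assert decode_name(b"Adobe#20PDF") == b"Adobe PDF"
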
@@ -50,7 +50,7 @@ class PixarImageFile(ImageFile.ImageFile):
         # read rest of header
         s = s + self.fp.read(508)

-        self.size = i16(s[418:420]), i16(s[416:418])
+        self._size = i16(s[418:420]), i16(s[416:418])

         # get channel/depth descriptions
         mode = i16(s[424:426]), i16(s[426:428])

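The self.size to self._size change here repeats across every plugin in this diff: plugins now write the private attribute, and the base class exposes size as a read-only property (with a deprecating setter for TIFF, shown later). A reduced sketch of the pattern; this is illustrative, not Pillow's actual base class:

    import warnings

    class ImageFileLike:
        def __init__(self, width, height):
            self._size = (width, height)

        @property
        def size(self):
            return self._size

        @size.setter
        def size(self, value):
            # assigning to .size still works, but warns ahead of removal
            warnings.warn("Setting the size of an image directly is "
                          "deprecated", DeprecationWarning)
            self._size = value
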
@@ -142,7 +142,8 @@ class ChunkStream(object):
     def crc(self, cid, data):
         "Read and verify checksum"

-        # Skip CRC checks for ancillary chunks if allowed to load truncated images
+        # Skip CRC checks for ancillary chunks if allowed to load truncated
+        # images
         # 5th byte of first char is 1 [specs, section 5.4]
         if ImageFile.LOAD_TRUNCATED_IMAGES and (i8(cid[0]) >> 5 & 1):
             self.crc_skip(cid, data)
@@ -301,8 +302,8 @@ class PngStream(ChunkStream):
     def check_text_memory(self, chunklen):
         self.text_memory += chunklen
         if self.text_memory > MAX_TEXT_MEMORY:
-            raise ValueError("Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" %
-                             self.text_memory)
+            raise ValueError("Too much memory used in text chunks: "
+                             "%s>MAX_TEXT_MEMORY" % self.text_memory)

     def chunk_iCCP(self, pos, length):
@@ -576,7 +577,7 @@ class PngImageFile(ImageFile.ImageFile):
         # (believe me, I've tried ;-)

         self.mode = self.png.im_mode
-        self.size = self.png.im_size
+        self._size = self.png.im_size
         self.info = self.png.im_info
         self.text = self.png.im_text  # experimental
         self.tile = self.png.im_tile

@@ -83,7 +83,8 @@ class PpmImageFile(ImageFile.ImageFile):
             if s not in b_whitespace:
                 break
             if s == b"":
-                raise ValueError("File does not extend beyond magic number")
+                raise ValueError(
+                    "File does not extend beyond magic number")
             if s != b"#":
                 break
             s = self.fp.readline()
@@ -106,7 +107,7 @@ class PpmImageFile(ImageFile.ImageFile):
                     self.mode = 'I'
                     rawmode = 'I;32B'

-        self.size = xsize, ysize
+        self._size = xsize, ysize
         self.tile = [("raw",
                       (0, 0, xsize, ysize),
                       self.fp.tell(),

@@ -71,7 +71,7 @@ class PsdImageFile(ImageFile.ImageFile):
             raise IOError("not enough channels")

         self.mode = mode
-        self.size = i32(s[18:]), i32(s[14:])
+        self._size = i32(s[18:]), i32(s[14:])

         #
         # color mode data

@@ -96,7 +96,7 @@ class SgiImageFile(ImageFile.ImageFile):
         if rawmode == "":
             raise ValueError("Unsupported SGI image mode")

-        self.size = xsize, ysize
+        self._size = xsize, ysize
         self.mode = rawmode.split(";")[0]

         # orientation -1 : scanlines begins at the bottom-left corner
@@ -222,6 +222,7 @@ Image.register_save(SgiImageFile.format, _save)
 Image.register_mime(SgiImageFile.format, "image/sgi")
 Image.register_mime(SgiImageFile.format, "image/rgb")

-Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"])
+Image.register_extensions(SgiImageFile.format,
+                          [".bw", ".rgb", ".rgba", ".sgi"])

 # End of file

@@ -74,7 +74,6 @@ def isSpiderHeader(t):
     labrec = int(h[13])   # no. records in file header
     labbyt = int(h[22])   # total no. of bytes in header
     lenbyt = int(h[23])   # record length in bytes
-    # print("labrec = %d, labbyt = %d, lenbyt = %d" % (labrec,labbyt,lenbyt))
     if labbyt != (labrec * lenbyt):
         return 0
     # looks like a valid header
@@ -121,7 +120,7 @@ class SpiderImageFile(ImageFile.ImageFile):
         if iform != 1:
             raise SyntaxError("not a Spider 2D image")

-        self.size = int(h[12]), int(h[2])  # size in pixels (width, height)
+        self._size = int(h[12]), int(h[2])  # size in pixels (width, height)
         self.istack = int(h[24])
         self.imgnumber = int(h[27])

@@ -59,10 +59,10 @@ class SunImageFile(ImageFile.ImageFile):

         offset = 32

-        self.size = i32(s[4:8]), i32(s[8:12])
+        self._size = i32(s[4:8]), i32(s[8:12])

         depth = i32(s[12:16])
-        data_length = i32(s[16:20])  # unreliable, ignore.
+        # data_length = i32(s[16:20])  # unreliable, ignore.
         file_type = i32(s[20:24])
         palette_type = i32(s[24:28])  # 0: None, 1: RGB, 2: Raw/arbitrary
         palette_length = i32(s[28:32])
@@ -94,7 +94,8 @@ class SunImageFile(ImageFile.ImageFile):
                 raise SyntaxError("Unsupported Palette Type")

             offset = offset + palette_length
-            self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length))
+            self.palette = ImagePalette.raw("RGB;L",
+                                            self.fp.read(palette_length))
             if self.mode == "L":
                 self.mode = "P"
                 rawmode = rawmode.replace('L', 'P')

@@ -20,6 +20,8 @@
 from . import Image, ImageFile, ImagePalette
 from ._binary import i8, i16le as i16, o8, o16le as o16

+import warnings
+
 __version__ = "0.3"

@@ -53,7 +55,7 @@ class TgaImageFile(ImageFile.ImageFile):
         # process header
         s = self.fp.read(18)

-        idlen = i8(s[0])
+        id_len = i8(s[0])

         colormaptype = i8(s[1])
         imagetype = i8(s[2])
@@ -62,7 +64,7 @@ class TgaImageFile(ImageFile.ImageFile):

         flags = i8(s[17])

-        self.size = i16(s[12:]), i16(s[14:])
+        self._size = i16(s[12:]), i16(s[14:])

         # validate header fields
         if colormaptype not in (0, 1) or\
@@ -100,8 +102,8 @@ class TgaImageFile(ImageFile.ImageFile):
         if imagetype & 8:
             self.info["compression"] = "tga_rle"

-        if idlen:
-            self.info["id_section"] = self.fp.read(idlen)
+        if id_len:
+            self.info["id_section"] = self.fp.read(id_len)

         if colormaptype:
             # read palette
@@ -151,11 +153,23 @@ def _save(im, fp, filename):
     except KeyError:
         raise IOError("cannot write mode %s as TGA" % im.mode)

-    rle = im.encoderinfo.get("rle", False)
-
+    if "rle" in im.encoderinfo:
+        rle = im.encoderinfo["rle"]
+    else:
+        compression = im.encoderinfo.get("compression",
+                                         im.info.get("compression"))
+        rle = compression == "tga_rle"
     if rle:
         imagetype += 8

+    id_section = im.encoderinfo.get("id_section",
+                                    im.info.get("id_section", ""))
+    id_len = len(id_section)
+    if id_len > 255:
+        id_len = 255
+        id_section = id_section[:255]
+        warnings.warn("id_section has been trimmed to 255 characters")
+
     if colormaptype:
         colormapfirst, colormaplength, colormapentry = 0, 256, 24
     else:
@@ -166,11 +180,12 @@ def _save(im, fp, filename):
     else:
         flags = 0

-    orientation = im.info.get("orientation", -1)
+    orientation = im.encoderinfo.get("orientation",
+                                     im.info.get("orientation", -1))
     if orientation > 0:
         flags = flags | 0x20

-    fp.write(b"\000" +
+    fp.write(o8(id_len) +
              o8(colormaptype) +
              o8(imagetype) +
              o16(colormapfirst) +
@@ -183,6 +198,9 @@ def _save(im, fp, filename):
              o8(bits) +
              o8(flags))

+    if id_section:
+        fp.write(id_section)
+
     if colormaptype:
         fp.write(im.im.getpalette("RGB", "BGR"))

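The reworked _save above reads rle, id_section and orientation from encoderinfo, so they can be passed straight through Image.save(). A hedged usage sketch, assuming Pillow >= 5.3 and a writable out.tga path:

    from PIL import Image

    im = Image.new("RGB", (64, 64), "red")
    im.save("out.tga", rle=True, id_section=b"demo", orientation=1)

    reloaded = Image.open("out.tga")
    print(reloaded.info.get("compression"))  # "tga_rle"
    print(reloaded.info.get("id_section"))   # b"demo"
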
@@ -45,7 +45,6 @@ from . import Image, ImageFile, ImagePalette, TiffTags
 from ._binary import i8, o8
 from ._util import py3

-import collections
 from fractions import Fraction
 from numbers import Number, Rational

@@ -208,8 +207,16 @@ OPEN_INFO = {
     (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
     (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
     (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
+    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
+    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
+    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
+    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
     (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
     (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
+    (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
+    (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
+    (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
+    (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
     (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
     (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"),  # Corel Draw 10
@@ -250,14 +257,10 @@ OPEN_INFO = {
     (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
     (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),

-    (II, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"),
-    (MM, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"),
-    (II, 6, (1,), 1, (8, 8, 8, 8), (0,)): ("YCbCr", "YCbCrX"),
-    (MM, 6, (1,), 1, (8, 8, 8, 8), (0,)): ("YCbCr", "YCbCrX"),
-    (II, 6, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("YCbCr", "YCbCrXXX"),
-    (MM, 6, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("YCbCr", "YCbCrXXX"),
-    (II, 6, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("YCbCr", "YCbCrXXX"),
-    (MM, 6, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("YCbCr", "YCbCrXXX"),
+    # JPEG compressed images handled by LibTiff and auto-converted to RGB
+    # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
+    (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
+    (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),

     (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
     (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
@@ -431,7 +434,8 @@ class ImageFileDirectory_v2(MutableMapping):
     * self.tagtype = {}

       * Key: numerical tiff tag number
-      * Value: integer corresponding to the data type from `~PIL.TiffTags.TYPES`
+      * Value: integer corresponding to the data type from
+        `~PIL.TiffTags.TYPES`

     .. versionadded:: 3.0.0
     """
@@ -567,6 +571,9 @@ class ImageFileDirectory_v2(MutableMapping):
         if self.tagtype[tag] == 7 and py3:
             values = [value.encode("ascii", 'replace') if isinstance(
                 value, str) else value]
+        elif self.tagtype[tag] == 5:
+            values = [float(v) if isinstance(v, int) else v
+                      for v in values]

         values = tuple(info.cvt_enum(value) for value in values)
@@ -577,8 +584,8 @@ class ImageFileDirectory_v2(MutableMapping):
         # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
         # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
         # Don't mess with the legacy api, since it's frozen.
-        if ((info.length == 1) or
-                (info.length is None and len(values) == 1 and not legacy_api)):
+        if (info.length == 1) or \
+           (info.length is None and len(values) == 1 and not legacy_api):
             # Don't mess with the legacy api, since it's frozen.
             if legacy_api and self.tagtype[tag] in [5, 10]:  # rationals
                 values = values,

@@ -1042,19 +1049,18 @@ class TiffImageFile(ImageFile.ImageFile):
         "Return the current frame number"
         return self.__frame

-    def _decoder(self, rawmode, layer, tile=None):
-        "Setup decoder contexts"
-
-        args = None
-        if rawmode == "RGB" and self._planar_configuration == 2:
-            rawmode = rawmode[layer]
-        compression = self._compression
-        if compression == "raw":
-            args = (rawmode, 0, 1)
-        elif compression == "packbits":
-            args = rawmode
-
-        return args
+    @property
+    def size(self):
+        return self._size
+
+    @size.setter
+    def size(self, value):
+        warnings.warn(
+            'Setting the size of a TIFF image directly is deprecated, and will'
+            ' be removed in a future version. Use the resize method instead.',
+            DeprecationWarning
+        )
+        self._size = value

     def load(self):
         if self.use_load_libtiff:

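With the property in place, assigning to a TIFF image's size still works but now warns. A short sketch of the observable behaviour, assuming Pillow >= 5.3 and an existing example.tif:

    import warnings
    from PIL import Image

    im = Image.open("example.tif")
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        im.size = (32, 32)            # deprecated setter fires a warning
        print(caught[0].category)     # <class 'DeprecationWarning'>

    im = im.resize((32, 32))          # the supported way to change size
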
@@ -1176,11 +1182,12 @@ class TiffImageFile(ImageFile.ImageFile):
             print("- photometric_interpretation:", photo)
             print("- planar_configuration:", self._planar_configuration)
             print("- fill_order:", fillorder)
+            print("- YCbCr subsampling:", self.tag.get(530))

         # size
         xsize = self.tag_v2.get(IMAGEWIDTH)
         ysize = self.tag_v2.get(IMAGELENGTH)
-        self.size = xsize, ysize
+        self._size = xsize, ysize

         if DEBUG:
             print("- size:", self.size)

@@ -1245,99 +1252,86 @@ class TiffImageFile(ImageFile.ImageFile):
             self.info["resolution"] = xres, yres

         # build tile descriptors
-        x = y = l = 0
+        x = y = layer = 0
         self.tile = []
-        self.use_load_libtiff = False
-        if STRIPOFFSETS in self.tag_v2:
-            # striped image
-            offsets = self.tag_v2[STRIPOFFSETS]
-            h = self.tag_v2.get(ROWSPERSTRIP, ysize)
-            w = self.size[0]
-            if READ_LIBTIFF or self._compression != 'raw':
-                # if DEBUG:
-                #     print("Activating g4 compression for whole file")
-
-                # Decoder expects entire file as one tile.
-                # There's a buffer size limit in load (64k)
-                # so large g4 images will fail if we use that
-                # function.
-                #
-                # Setup the one tile for the whole image, then
-                # use the _load_libtiff function.
-
-                self.use_load_libtiff = True
-
-                # libtiff handles the fillmode for us, so 1;IR should
-                # actually be 1;I. Including the R double reverses the
-                # bits, so stripes of the image are reversed. See
-                # https://github.com/python-pillow/Pillow/issues/279
-                if fillorder == 2:
-                    key = (
-                        self.tag_v2.prefix, photo, sampleFormat, 1,
-                        self.tag_v2.get(BITSPERSAMPLE, (1,)),
-                        self.tag_v2.get(EXTRASAMPLES, ())
-                    )
-                    if DEBUG:
-                        print("format key:", key)
-                    # this should always work, since all the
-                    # fillorder==2 modes have a corresponding
-                    # fillorder=1 mode
-                    self.mode, rawmode = OPEN_INFO[key]
-                # libtiff always returns the bytes in native order.
-                # we're expecting image byte order. So, if the rawmode
-                # contains I;16, we need to convert from native to image
-                # byte order.
-                if rawmode == 'I;16':
-                    rawmode = 'I;16N'
-                if ';16B' in rawmode:
-                    rawmode = rawmode.replace(';16B', ';16N')
-                if ';16L' in rawmode:
-                    rawmode = rawmode.replace(';16L', ';16N')
-
-                # Offset in the tile tuple is 0, we go from 0,0 to
-                # w,h, and we only do this once -- eds
-                a = (rawmode, self._compression, False)
-                self.tile.append(
-                    (self._compression,
-                     (0, 0, w, ysize),
-                     0, a))
-                a = None
-            else:
-                for i, offset in enumerate(offsets):
-                    a = self._decoder(rawmode, l, i)
-                    self.tile.append(
-                        (self._compression,
-                         (0, min(y, ysize), w, min(y+h, ysize)),
-                         offset, a))
-                    if DEBUG:
-                        print("tiles: ", self.tile)
-                    y = y + h
-                    if y >= self.size[1]:
-                        x = y = 0
-                        l += 1
-                    a = None
-        elif TILEOFFSETS in self.tag_v2:
-            # tiled image
-            w = self.tag_v2.get(322)
-            h = self.tag_v2.get(323)
-            a = None
-            for o in self.tag_v2[TILEOFFSETS]:
-                if not a:
-                    a = self._decoder(rawmode, l)
-                    # FIXME: this doesn't work if the image size
-                    # is not a multiple of the tile size...
-                self.tile.append(
-                    (self._compression,
-                     (x, y, x+w, y+h),
-                     o, a))
-                x = x + w
-                if x >= self.size[0]:
-                    x, y = 0, y + h
-                    if y >= self.size[1]:
-                        x = y = 0
-                        l += 1
-                        a = None
+        self.use_load_libtiff = READ_LIBTIFF or self._compression != 'raw'
+        if self.use_load_libtiff:
+            # Decoder expects entire file as one tile.
+            # There's a buffer size limit in load (64k)
+            # so large g4 images will fail if we use that
+            # function.
+            #
+            # Setup the one tile for the whole image, then
+            # use the _load_libtiff function.
+
+            # libtiff handles the fillmode for us, so 1;IR should
+            # actually be 1;I. Including the R double reverses the
+            # bits, so stripes of the image are reversed. See
+            # https://github.com/python-pillow/Pillow/issues/279
+            if fillorder == 2:
+                # Replace fillorder with fillorder=1
+                key = key[:3] + (1,) + key[4:]
+                if DEBUG:
+                    print("format key:", key)
+                # this should always work, since all the
+                # fillorder==2 modes have a corresponding
+                # fillorder=1 mode
+                self.mode, rawmode = OPEN_INFO[key]
+            # libtiff always returns the bytes in native order.
+            # we're expecting image byte order. So, if the rawmode
+            # contains I;16, we need to convert from native to image
+            # byte order.
+            if rawmode == 'I;16':
+                rawmode = 'I;16N'
+            if ';16B' in rawmode:
+                rawmode = rawmode.replace(';16B', ';16N')
+            if ';16L' in rawmode:
+                rawmode = rawmode.replace(';16L', ';16N')
+
+            # Offset in the tile tuple is 0, we go from 0,0 to
+            # w,h, and we only do this once -- eds
+            a = (rawmode, self._compression, False)
+            self.tile.append(
+                (self._compression,
+                 (0, 0, xsize, ysize),
+                 0, a))
+
+        elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
+            # striped image
+            if STRIPOFFSETS in self.tag_v2:
+                offsets = self.tag_v2[STRIPOFFSETS]
+                h = self.tag_v2.get(ROWSPERSTRIP, ysize)
+                w = self.size[0]
+            else:
+                # tiled image
+                offsets = self.tag_v2[TILEOFFSETS]
+                w = self.tag_v2.get(322)
+                h = self.tag_v2.get(323)
+
+            for offset in offsets:
+                if x + w > xsize:
+                    stride = w * sum(bps_tuple) / 8  # bytes per line
+                else:
+                    stride = 0
+
+                tile_rawmode = rawmode
+                if self._planar_configuration == 2:
+                    # each band on it's own layer
+                    tile_rawmode = rawmode[layer]
+                    # adjust stride width accordingly
+                    stride /= bps_count
+
+                a = (tile_rawmode, int(stride), 1)
+                self.tile.append(
+                    (self._compression,
+                     (x, y, min(x+w, xsize), min(y+h, ysize)),
+                     offset, a))
+                x = x + w
+                if x >= self.size[0]:
+                    x, y = 0, y + h
+                    if y >= self.size[1]:
+                        x = y = 0
+                        layer += 1
         else:
             if DEBUG:
                 print("- unsupported data organization")

@@ -1396,8 +1390,9 @@ def _save(im, fp, filename):

     ifd = ImageFileDirectory_v2(prefix=prefix)

-    compression = im.encoderinfo.get('compression',
-                                     im.info.get('compression', 'raw'))
+    compression = im.encoderinfo.get('compression', im.info.get('compression'))
+    if compression is None:
+        compression = 'raw'

     libtiff = WRITE_LIBTIFF or compression != 'raw'
@@ -1529,7 +1524,6 @@ def _save(im, fp, filename):
             rawmode = 'I;16N'

         a = (rawmode, compression, _fp, filename, atts)
-        # print(im.mode, compression, a, im.encoderconfig)
         e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig)
         e.setimage(im.im, (0, 0)+im.size)
         while True:

@@ -23,7 +23,8 @@ from collections import namedtuple
 class TagInfo(namedtuple("_TagInfo", "value name type length enum")):
     __slots__ = []

-    def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None):
+    def __new__(cls, value=None, name="unknown",
+                type=None, length=None, enum=None):
         return super(TagInfo, cls).__new__(
             cls, value, name, type, length, enum or {})
@@ -72,8 +73,8 @@ TAGS_V2 = {
     257: ("ImageLength", LONG, 1),
     258: ("BitsPerSample", SHORT, 0),
     259: ("Compression", SHORT, 1,
-          {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4,
-           "LZW": 5, "JPEG": 6, "PackBits": 32773}),
+          {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3,
+           "Group 4 Fax": 4, "LZW": 5, "JPEG": 6, "PackBits": 32773}),

     262: ("PhotometricInterpretation", SHORT, 1,
           {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RGB Palette": 3,
@@ -121,7 +122,7 @@ TAGS_V2 = {
     316: ("HostComputer", ASCII, 1),
     317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}),
     318: ("WhitePoint", RATIONAL, 2),
-    319: ("PrimaryChromaticities", SHORT, 6),
+    319: ("PrimaryChromaticities", RATIONAL, 6),

     320: ("ColorMap", SHORT, 0),
     321: ("HalftoneHints", SHORT, 2),
@@ -158,7 +159,7 @@ TAGS_V2 = {
     529: ("YCbCrCoefficients", RATIONAL, 3),
     530: ("YCbCrSubSampling", SHORT, 2),
     531: ("YCbCrPositioning", SHORT, 1),
-    532: ("ReferenceBlackWhite", LONG, 0),
+    532: ("ReferenceBlackWhite", RATIONAL, 6),

     700: ('XMP', BYTE, 1),
@@ -437,7 +438,7 @@ LIBTIFF_CORE.remove(301) # Array of short, crashes
 LIBTIFF_CORE.remove(532)  # Array of long, crashes

 LIBTIFF_CORE.remove(255)  # We don't have support for subfiletypes
-LIBTIFF_CORE.remove(322)  # We don't have support for tiled images in libtiff
+LIBTIFF_CORE.remove(322)  # We don't have support for writing tiled images with libtiff
 LIBTIFF_CORE.remove(323)  # Tiled images
 LIBTIFF_CORE.remove(333)  # Ink Names either

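The corrected tag types are visible through the public TAGS_V2 table. A quick check, assuming Pillow >= 5.3:

    from PIL.TiffTags import TAGS_V2, RATIONAL

    assert TAGS_V2[319].type == RATIONAL  # PrimaryChromaticities, was SHORT
    assert TAGS_V2[532].type == RATIONAL  # ReferenceBlackWhite, was LONG
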
@@ -1,10 +1,16 @@
-from . import Image, ImageFile, _webp
+from . import Image, ImageFile
+try:
+    from . import _webp
+    SUPPORTED = True
+except ImportError as e:
+    SUPPORTED = False
 from io import BytesIO


 _VALID_WEBP_MODES = {
     "RGBX": True,
     "RGBA": True,
+    "RGB": True,
 }

 _VALID_WEBP_LEGACY_MODES = {

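The try/except around the compiled module is a general pattern for optional native extensions: import once at module load, record availability, and gate the feature on the flag. In isolation, with stand-in names:

    try:
        import _webp  # any optional compiled extension
        SUPPORTED = True
    except ImportError:
        SUPPORTED = False

    def save_webp(im, fp):
        # fail with a clear message instead of a NameError later
        if not SUPPORTED:
            raise OSError("WEBP support is not installed")
        ...
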
@@ -24,7 +30,10 @@ def _accept(prefix):
     is_webp_file = prefix[8:12] == b"WEBP"
     is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER

-    return is_riff_file_format and is_webp_file and is_valid_vp8_mode
+    if is_riff_file_format and is_webp_file and is_valid_vp8_mode:
+        if not SUPPORTED:
+            return "image file could not be identified because WEBP support not installed"
+        return True


 class WebPImageFile(ImageFile.ImageFile):
@@ -41,7 +50,7 @@ class WebPImageFile(ImageFile.ImageFile):
             self.info["icc_profile"] = icc_profile
         if exif:
             self.info["exif"] = exif
-        self.size = width, height
+        self._size = width, height
         self.fp = BytesIO(data)
         self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
         self._n_frames = 1
@@ -54,7 +63,7 @@ class WebPImageFile(ImageFile.ImageFile):
         # Get info from decoder
         width, height, loop_count, bgcolor, frame_count, mode = \
             self._decoder.get_info()
-        self.size = width, height
+        self._size = width, height
         self.info["loop"] = loop_count
         bg_a, bg_r, bg_g, bg_b = \
             (bgcolor >> 24) & 0xFF, \
@@ -63,7 +72,8 @@ class WebPImageFile(ImageFile.ImageFile):
             bgcolor & 0xFF
         self.info["background"] = (bg_r, bg_g, bg_b, bg_a)
         self._n_frames = frame_count
-        self.mode = mode
+        self.mode = 'RGB' if mode == 'RGBX' else mode
+        self.rawmode = mode
         self.tile = []

         # Attempt to read ICC / EXIF / XMP chunks from file
@@ -153,8 +163,10 @@ class WebPImageFile(ImageFile.ImageFile):
             self.__loaded = self.__logical_frame

             # Set tile
+            if self.fp:
+                self.fp.close()
             self.fp = BytesIO(data)
-            self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
+            self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)]

         return super(WebPImageFile, self).load()
@@ -240,16 +252,23 @@ def _save_all(im, fp, filename):

         # Make sure image mode is supported
         frame = ims
+        rawmode = ims.mode
         if ims.mode not in _VALID_WEBP_MODES:
-            alpha = ims.mode == 'P' and 'A' in ims.im.getpalettemode()
-            frame = ims.convert('RGBA' if alpha else 'RGBX')
+            alpha = 'A' in ims.mode or 'a' in ims.mode \
+                or (ims.mode == 'P' and 'A' in ims.im.getpalettemode())
+            rawmode = 'RGBA' if alpha else 'RGB'
+            frame = ims.convert(rawmode)
+
+        if rawmode == 'RGB':
+            # For faster conversion, use RGBX
+            rawmode = 'RGBX'

         # Append the frame to the animation encoder
         enc.add(
-            frame.tobytes(),
+            frame.tobytes('raw', rawmode),
             timestamp,
             frame.size[0], frame.size[1],
-            frame.mode,
+            rawmode,
             lossless,
             quality,
             method
@@ -288,7 +307,8 @@ def _save(im, fp, filename):
     xmp = im.encoderinfo.get("xmp", "")

     if im.mode not in _VALID_WEBP_LEGACY_MODES:
-        alpha = im.mode == 'P' and 'A' in im.im.getpalettemode()
+        alpha = 'A' in im.mode or 'a' in im.mode \
+                or (im.mode == 'P' and 'A' in im.im.getpalettemode())
         im = im.convert('RGBA' if alpha else 'RGB')

     data = _webp.WebPEncode(
@@ -309,8 +329,9 @@ def _save(im, fp, filename):


 Image.register_open(WebPImageFile.format, WebPImageFile, _accept)
-Image.register_save(WebPImageFile.format, _save)
-if _webp.HAVE_WEBPANIM:
-    Image.register_save_all(WebPImageFile.format, _save_all)
+if SUPPORTED:
+    Image.register_save(WebPImageFile.format, _save)
+    if _webp.HAVE_WEBPANIM:
+        Image.register_save_all(WebPImageFile.format, _save_all)
 Image.register_extension(WebPImageFile.format, ".webp")
 Image.register_mime(WebPImageFile.format, "image/webp")

@@ -22,7 +22,8 @@
 from __future__ import print_function

 from . import Image, ImageFile
-from ._binary import i16le as word, si16le as short, i32le as dword, si32le as _long
+from ._binary import i16le as word, si16le as short, \
+    i32le as dword, si32le as _long
 from ._util import py3
@@ -108,8 +109,6 @@ class WmfStubImageFile(ImageFile.StubImageFile):

             self.info["dpi"] = 72

-            # print(self.mode, self.size, self.info)
-
             # sanity check (standard metafile header)
             if s[22:26] != b"\x01\x00\t\x00":
                 raise SyntaxError("Unsupported WMF file format")
@@ -144,7 +143,7 @@ class WmfStubImageFile(ImageFile.StubImageFile):
             raise SyntaxError("Unsupported file format")

         self.mode = "RGB"
-        self.size = size
+        self._size = size

         loader = self._load()
         if loader:

@@ -65,7 +65,7 @@ class XVThumbImageFile(ImageFile.ImageFile):
         s = s.strip().split()

         self.mode = "P"
-        self.size = int(s[0]), int(s[1])
+        self._size = int(s[0]), int(s[1])

         self.palette = ImagePalette.raw("RGB", PALETTE)

@@ -63,7 +63,7 @@ class XbmImageFile(ImageFile.ImageFile):
         )

         self.mode = "1"
-        self.size = xsize, ysize
+        self._size = xsize, ysize

         self.tile = [("xbm", (0, 0)+self.size, m.end(), None)]

@@ -51,7 +51,7 @@ class XpmImageFile(ImageFile.ImageFile):
             if m:
                 break

-        self.size = int(m.group(1)), int(m.group(2))
+        self._size = int(m.group(1)), int(m.group(2))

         pal = int(m.group(3))
         bpp = int(m.group(4))

@@ -1,4 +1,4 @@
-"""Pillow {} (Fork of the Python Imaging Library)
+"""Pillow (Fork of the Python Imaging Library)

 Pillow is the friendly PIL fork by Alex Clark and Contributors.
 https://github.com/python-pillow/Pillow/
@@ -24,8 +24,6 @@ PILLOW_VERSION = __version__ = _version.__version__

 del _version

-#__doc__ = __doc__.format(__version__)  # include version in docstring
-

 _plugins = ['BlpImagePlugin',
             'BmpImagePlugin',

Binary file not shown.
Binary file not shown.
@@ -1,4 +1,5 @@
-import os, sys
+import os
+import sys

 py3 = sys.version_info.major >= 3

@@ -1,2 +1,2 @@
 # Master version for Pillow
-__version__ = '5.2.0'
+__version__ = '5.3.0'

Binary file not shown.
@@ -14,7 +14,7 @@ __summary__ = ("cryptography is a package which provides cryptographic recipes"
                " and primitives to Python developers.")
 __uri__ = "https://github.com/pyca/cryptography"

-__version__ = "2.3.1"
+__version__ = "2.4.2"

 __author__ = "The cryptography developers"
 __email__ = "cryptography-dev@python.org"

@@ -0,0 +1,67 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+
+
+class ObjectIdentifier(object):
+    def __init__(self, dotted_string):
+        self._dotted_string = dotted_string
+
+        nodes = self._dotted_string.split(".")
+        intnodes = []
+
+        # There must be at least 2 nodes, the first node must be 0..2, and
+        # if less than 2, the second node cannot have a value outside the
+        # range 0..39.  All nodes must be integers.
+        for node in nodes:
+            try:
+                intnodes.append(int(node, 0))
+            except ValueError:
+                raise ValueError(
+                    "Malformed OID: %s (non-integer nodes)" % (
+                        self._dotted_string))
+
+        if len(nodes) < 2:
+            raise ValueError(
+                "Malformed OID: %s (insufficient number of nodes)" % (
+                    self._dotted_string))
+
+        if intnodes[0] > 2:
+            raise ValueError(
+                "Malformed OID: %s (first node outside valid range)" % (
+                    self._dotted_string))
+
+        if intnodes[0] < 2 and intnodes[1] >= 40:
+            raise ValueError(
+                "Malformed OID: %s (second node outside valid range)" % (
+                    self._dotted_string))
+
+    def __eq__(self, other):
+        if not isinstance(other, ObjectIdentifier):
+            return NotImplemented
+
+        return self.dotted_string == other.dotted_string
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __repr__(self):
+        return "<ObjectIdentifier(oid={0}, name={1})>".format(
+            self.dotted_string,
+            self._name
+        )
+
+    def __hash__(self):
+        return hash(self.dotted_string)
+
+    @property
+    def _name(self):
+        # Lazy import to avoid an import cycle
+        from cryptography.x509.oid import _OID_NAMES
+        return _OID_NAMES.get(self, "Unknown OID")
+
+    dotted_string = utils.read_only_property("_dotted_string")

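A hedged usage sketch of the relocated ObjectIdentifier; it stays importable from cryptography.x509, which is how most callers reach it:

    from cryptography.x509 import ObjectIdentifier

    oid = ObjectIdentifier("2.5.4.3")   # commonName
    print(oid.dotted_string)            # "2.5.4.3"

    try:
        ObjectIdentifier("1.40.1")      # arc 1 caps the second node at 39
    except ValueError as exc:
        print(exc)                      # second node outside valid range
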
@@ -11,6 +11,8 @@ import contextlib
 import itertools
 from contextlib import contextmanager

+import asn1crypto.core
+
 import six

 from cryptography import utils, x509
@@ -23,7 +25,9 @@ from cryptography.hazmat.backends.interfaces import (
 from cryptography.hazmat.backends.openssl import aead
 from cryptography.hazmat.backends.openssl.ciphers import _CipherContext
 from cryptography.hazmat.backends.openssl.cmac import _CMACContext
-from cryptography.hazmat.backends.openssl.decode_asn1 import _Integers
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+    _CRL_ENTRY_REASON_ENUM_TO_CODE, _Integers
+)
 from cryptography.hazmat.backends.openssl.dh import (
     _DHParameters, _DHPrivateKey, _DHPublicKey, _dh_params_dup
 )
@@ -36,10 +40,15 @@ from cryptography.hazmat.backends.openssl.ec import (
 from cryptography.hazmat.backends.openssl.encode_asn1 import (
     _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS,
     _CRL_EXTENSION_ENCODE_HANDLERS, _EXTENSION_ENCODE_HANDLERS,
+    _OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS,
+    _OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS,
     _encode_asn1_int_gc, _encode_asn1_str_gc, _encode_name_gc, _txt2obj_gc,
 )
 from cryptography.hazmat.backends.openssl.hashes import _HashContext
 from cryptography.hazmat.backends.openssl.hmac import _HMACContext
+from cryptography.hazmat.backends.openssl.ocsp import (
+    _OCSPRequest, _OCSPResponse
+)
 from cryptography.hazmat.backends.openssl.rsa import (
     _RSAPrivateKey, _RSAPublicKey
 )
@@ -63,6 +72,7 @@ from cryptography.hazmat.primitives.ciphers.modes import (
     CBC, CFB, CFB8, CTR, ECB, GCM, OFB, XTS
 )
 from cryptography.hazmat.primitives.kdf import scrypt
+from cryptography.x509 import ocsp


 _MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"])

@@ -171,7 +181,7 @@ class Backend(object):
     def create_hmac_ctx(self, key, algorithm):
         return _HMACContext(self, key, algorithm)

-    def _build_openssl_digest_name(self, algorithm):
+    def _evp_md_from_algorithm(self, algorithm):
         if algorithm.name == "blake2b" or algorithm.name == "blake2s":
             alg = "{0}{1}".format(
                 algorithm.name, algorithm.digest_size * 8
@@ -179,12 +189,17 @@ class Backend(object):
         else:
             alg = algorithm.name.encode("ascii")

-        return alg
+        evp_md = self._lib.EVP_get_digestbyname(alg)
+        return evp_md
+
+    def _evp_md_non_null_from_algorithm(self, algorithm):
+        evp_md = self._evp_md_from_algorithm(algorithm)
+        self.openssl_assert(evp_md != self._ffi.NULL)
+        return evp_md

     def hash_supported(self, algorithm):
-        name = self._build_openssl_digest_name(algorithm)
-        digest = self._lib.EVP_get_digestbyname(name)
-        return digest != self._ffi.NULL
+        evp_md = self._evp_md_from_algorithm(algorithm)
+        return evp_md != self._ffi.NULL

     def hmac_supported(self, algorithm):
         return self.hash_supported(algorithm)

@@ -276,9 +291,7 @@ class Backend(object):
     def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
                            key_material):
         buf = self._ffi.new("unsigned char[]", length)
-        evp_md = self._lib.EVP_get_digestbyname(
-            algorithm.name.encode("ascii"))
-        self.openssl_assert(evp_md != self._ffi.NULL)
+        evp_md = self._evp_md_non_null_from_algorithm(algorithm)
         res = self._lib.PKCS5_PBKDF2_HMAC(
             key_material,
             len(key_material),
@@ -675,10 +688,7 @@ class Backend(object):
         )

         # Resolve the signature algorithm.
-        evp_md = self._lib.EVP_get_digestbyname(
-            algorithm.name.encode('ascii')
-        )
-        self.openssl_assert(evp_md != self._ffi.NULL)
+        evp_md = self._evp_md_non_null_from_algorithm(algorithm)

         # Create an empty request.
         x509_req = self._lib.X509_REQ_new()
@@ -706,10 +716,15 @@ class Backend(object):
         sk_extension = self._lib.sk_X509_EXTENSION_new_null()
         self.openssl_assert(sk_extension != self._ffi.NULL)
         sk_extension = self._ffi.gc(
-            sk_extension, self._lib.sk_X509_EXTENSION_free
+            sk_extension,
+            lambda x: self._lib.sk_X509_EXTENSION_pop_free(
+                x, self._ffi.addressof(
+                    self._lib._original_lib, "X509_EXTENSION_free"
+                )
+            )
         )
-        # gc is not necessary for CSRs, as sk_X509_EXTENSION_free
-        # will release all the X509_EXTENSIONs.
+        # Don't GC individual extensions because the memory is owned by
+        # sk_extensions and will be freed along with it.
         self._create_x509_extensions(
             extensions=builder._extensions,
             handlers=_EXTENSION_ENCODE_HANDLERS,
@@ -752,10 +767,7 @@ class Backend(object):
         )

         # Resolve the signature algorithm.
-        evp_md = self._lib.EVP_get_digestbyname(
-            algorithm.name.encode('ascii')
-        )
-        self.openssl_assert(evp_md != self._ffi.NULL)
+        evp_md = self._evp_md_non_null_from_algorithm(algorithm)

         # Create an empty certificate.
         x509_cert = self._lib.X509_new()
@@ -783,20 +795,14 @@ class Backend(object):
         self.openssl_assert(res == 1)

         # Set the "not before" time.
-        res = self._lib.ASN1_TIME_set(
-            self._lib.X509_get_notBefore(x509_cert),
-            calendar.timegm(builder._not_valid_before.timetuple())
+        self._set_asn1_time(
+            self._lib.X509_get_notBefore(x509_cert), builder._not_valid_before
         )
-        if res == self._ffi.NULL:
-            self._raise_time_set_error()

         # Set the "not after" time.
-        res = self._lib.ASN1_TIME_set(
-            self._lib.X509_get_notAfter(x509_cert),
-            calendar.timegm(builder._not_valid_after.timetuple())
+        self._set_asn1_time(
+            self._lib.X509_get_notAfter(x509_cert), builder._not_valid_after
         )
-        if res == self._ffi.NULL:
-            self._raise_time_set_error()

         # Add extensions.
         self._create_x509_extensions(
@@ -829,18 +835,28 @@ class Backend(object):

         return _Certificate(self, x509_cert)

-    def _raise_time_set_error(self):
-        errors = self._consume_errors()
-        self.openssl_assert(
-            errors[0]._lib_reason_match(
-                self._lib.ERR_LIB_ASN1,
-                self._lib.ASN1_R_ERROR_GETTING_TIME
+    def _set_asn1_time(self, asn1_time, time):
+        timestamp = calendar.timegm(time.timetuple())
+        res = self._lib.ASN1_TIME_set(asn1_time, timestamp)
+        if res == self._ffi.NULL:
+            errors = self._consume_errors()
+            self.openssl_assert(
+                errors[0]._lib_reason_match(
+                    self._lib.ERR_LIB_ASN1,
+                    self._lib.ASN1_R_ERROR_GETTING_TIME
+                )
             )
-        )
-        raise ValueError(
-            "Invalid time. This error can occur if you set a time too far in "
-            "the future on Windows."
-        )
+            raise ValueError(
+                "Invalid time. This error can occur if you set a time too far "
+                "in the future on Windows."
+            )
+
+    def _create_asn1_time(self, time):
+        asn1_time = self._lib.ASN1_TIME_new()
+        self.openssl_assert(asn1_time != self._ffi.NULL)
+        asn1_time = self._ffi.gc(asn1_time, self._lib.ASN1_TIME_free)
+        self._set_asn1_time(asn1_time, time)
+        return asn1_time

     def create_x509_crl(self, builder, private_key, algorithm):
         if not isinstance(builder, x509.CertificateRevocationListBuilder):
@@ -856,10 +872,7 @@ class Backend(object):
                 "MD5 is not a supported hash algorithm for EC/DSA CRLs"
             )

-        evp_md = self._lib.EVP_get_digestbyname(
-            algorithm.name.encode('ascii')
-        )
-        self.openssl_assert(evp_md != self._ffi.NULL)
+        evp_md = self._evp_md_non_null_from_algorithm(algorithm)

         # Create an empty CRL.
         x509_crl = self._lib.X509_CRL_new()
@@ -876,20 +889,12 @@ class Backend(object):
         self.openssl_assert(res == 1)

         # Set the last update time.
-        last_update = self._lib.ASN1_TIME_set(
-            self._ffi.NULL, calendar.timegm(builder._last_update.timetuple())
-        )
-        self.openssl_assert(last_update != self._ffi.NULL)
-        last_update = self._ffi.gc(last_update, self._lib.ASN1_TIME_free)
+        last_update = self._create_asn1_time(builder._last_update)
         res = self._lib.X509_CRL_set_lastUpdate(x509_crl, last_update)
         self.openssl_assert(res == 1)

         # Set the next update time.
-        next_update = self._lib.ASN1_TIME_set(
-            self._ffi.NULL, calendar.timegm(builder._next_update.timetuple())
-        )
-        self.openssl_assert(next_update != self._ffi.NULL)
-        next_update = self._ffi.gc(next_update, self._lib.ASN1_TIME_free)
+        next_update = self._create_asn1_time(builder._next_update)
         res = self._lib.X509_CRL_set_nextUpdate(x509_crl, next_update)
         self.openssl_assert(res == 1)
@@ -951,13 +956,15 @@ class Backend(object):

     def _create_x509_extension(self, handlers, extension):
         if isinstance(extension.value, x509.UnrecognizedExtension):
-            value = _encode_asn1_str_gc(
-                self, extension.value.value, len(extension.value.value)
-            )
+            value = _encode_asn1_str_gc(self, extension.value.value)
             return self._create_raw_x509_extension(extension, value)
         elif isinstance(extension.value, x509.TLSFeature):
             asn1 = _Integers([x.value for x in extension.value]).dump()
-            value = _encode_asn1_str_gc(self, asn1, len(asn1))
+            value = _encode_asn1_str_gc(self, asn1)
+            return self._create_raw_x509_extension(extension, value)
+        elif isinstance(extension.value, x509.PrecertPoison):
+            asn1 = asn1crypto.core.Null().dump()
+            value = _encode_asn1_str_gc(self, asn1)
             return self._create_raw_x509_extension(extension, value)
         else:
             try:
@@ -988,12 +995,7 @@ class Backend(object):
             x509_revoked, serial_number
         )
         self.openssl_assert(res == 1)
-        rev_date = self._lib.ASN1_TIME_set(
-            self._ffi.NULL,
-            calendar.timegm(builder._revocation_date.timetuple())
-        )
-        self.openssl_assert(rev_date != self._ffi.NULL)
-        rev_date = self._ffi.gc(rev_date, self._lib.ASN1_TIME_free)
+        rev_date = self._create_asn1_time(builder._revocation_date)
         res = self._lib.X509_REVOKED_set_revocationDate(x509_revoked, rev_date)
         self.openssl_assert(res == 1)
         # add CRL entry extensions

@@ -1419,6 +1421,143 @@ class Backend(object):

         return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)

+    def load_der_ocsp_request(self, data):
+        mem_bio = self._bytes_to_bio(data)
+        request = self._lib.d2i_OCSP_REQUEST_bio(mem_bio.bio, self._ffi.NULL)
+        if request == self._ffi.NULL:
+            self._consume_errors()
+            raise ValueError("Unable to load OCSP request")
+
+        request = self._ffi.gc(request, self._lib.OCSP_REQUEST_free)
+        return _OCSPRequest(self, request)
+
+    def load_der_ocsp_response(self, data):
+        mem_bio = self._bytes_to_bio(data)
+        response = self._lib.d2i_OCSP_RESPONSE_bio(mem_bio.bio, self._ffi.NULL)
+        if response == self._ffi.NULL:
+            self._consume_errors()
+            raise ValueError("Unable to load OCSP response")
+
+        response = self._ffi.gc(response, self._lib.OCSP_RESPONSE_free)
+        return _OCSPResponse(self, response)
+
+    def create_ocsp_request(self, builder):
+        ocsp_req = self._lib.OCSP_REQUEST_new()
+        self.openssl_assert(ocsp_req != self._ffi.NULL)
+        ocsp_req = self._ffi.gc(ocsp_req, self._lib.OCSP_REQUEST_free)
+        cert, issuer, algorithm = builder._request
+        evp_md = self._evp_md_non_null_from_algorithm(algorithm)
+        certid = self._lib.OCSP_cert_to_id(
+            evp_md, cert._x509, issuer._x509
+        )
+        self.openssl_assert(certid != self._ffi.NULL)
+        onereq = self._lib.OCSP_request_add0_id(ocsp_req, certid)
+        self.openssl_assert(onereq != self._ffi.NULL)
+        self._create_x509_extensions(
+            extensions=builder._extensions,
+            handlers=_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS,
+            x509_obj=ocsp_req,
+            add_func=self._lib.OCSP_REQUEST_add_ext,
+            gc=True,
+        )
+        return _OCSPRequest(self, ocsp_req)
+
+    def _create_ocsp_basic_response(self, builder, private_key, algorithm):
+        basic = self._lib.OCSP_BASICRESP_new()
+        self.openssl_assert(basic != self._ffi.NULL)
+        basic = self._ffi.gc(basic, self._lib.OCSP_BASICRESP_free)
+        evp_md = self._evp_md_non_null_from_algorithm(
+            builder._response._algorithm
+        )
+        certid = self._lib.OCSP_cert_to_id(
+            evp_md, builder._response._cert._x509,
+            builder._response._issuer._x509
+        )
+        self.openssl_assert(certid != self._ffi.NULL)
+        certid = self._ffi.gc(certid, self._lib.OCSP_CERTID_free)
+        if builder._response._revocation_reason is None:
+            reason = -1
+        else:
+            reason = _CRL_ENTRY_REASON_ENUM_TO_CODE[
+                builder._response._revocation_reason
+            ]
+        if builder._response._revocation_time is None:
+            rev_time = self._ffi.NULL
+        else:
+            rev_time = self._create_asn1_time(
+                builder._response._revocation_time
+            )
+
+        next_update = self._ffi.NULL
+        if builder._response._next_update is not None:
+            next_update = self._create_asn1_time(
+                builder._response._next_update
+            )
+
+        this_update = self._create_asn1_time(builder._response._this_update)
+
+        res = self._lib.OCSP_basic_add1_status(
+            basic,
+            certid,
+            builder._response._cert_status.value,
+            reason,
+            rev_time,
+            this_update,
+            next_update
+        )
+        self.openssl_assert(res != self._ffi.NULL)
+        # okay, now sign the basic structure
+        evp_md = self._evp_md_non_null_from_algorithm(algorithm)
+        responder_cert, responder_encoding = builder._responder_id
+        flags = self._lib.OCSP_NOCERTS
+        if responder_encoding is ocsp.OCSPResponderEncoding.HASH:
+            flags |= self._lib.OCSP_RESPID_KEY
+
+        if builder._certs is not None:
+            for cert in builder._certs:
+                res = self._lib.OCSP_basic_add1_cert(basic, cert._x509)
+                self.openssl_assert(res == 1)
+
+        self._create_x509_extensions(
+            extensions=builder._extensions,
+            handlers=_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS,
+            x509_obj=basic,
+            add_func=self._lib.OCSP_BASICRESP_add_ext,
+            gc=True,
+        )
+
+        res = self._lib.OCSP_basic_sign(
+            basic, responder_cert._x509, private_key._evp_pkey,
+            evp_md, self._ffi.NULL, flags
+        )
+        if res != 1:
+            errors = self._consume_errors()
+            self.openssl_assert(
+                errors[0]._lib_reason_match(
+                    self._lib.ERR_LIB_X509,
+                    self._lib.X509_R_KEY_VALUES_MISMATCH
+                )
+            )
+            raise ValueError("responder_cert must be signed by private_key")
+
+        return basic
+
+    def create_ocsp_response(self, response_status, builder, private_key,
+                             algorithm):
+        if response_status is ocsp.OCSPResponseStatus.SUCCESSFUL:
+            basic = self._create_ocsp_basic_response(
+                builder, private_key, algorithm
+            )
+        else:
+            basic = self._ffi.NULL
+
+        ocsp_resp = self._lib.OCSP_response_create(
+            response_status.value, basic
+        )
+        self.openssl_assert(ocsp_resp != self._ffi.NULL)
+        ocsp_resp = self._ffi.gc(ocsp_resp, self._lib.OCSP_RESPONSE_free)
+        return _OCSPResponse(self, ocsp_resp)
+
     def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
         return (
             self.elliptic_curve_supported(curve) and

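These backend methods sit behind the public OCSP API added in cryptography 2.4. A hedged round-trip sketch, assuming cert and issuer are x509.Certificate objects loaded elsewhere:

    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.x509 import ocsp

    builder = ocsp.OCSPRequestBuilder()
    builder = builder.add_certificate(cert, issuer, hashes.SHA1())
    request = builder.build()              # backed by create_ocsp_request
    der = request.public_bytes(serialization.Encoding.DER)

    parsed = ocsp.load_der_ocsp_request(der)
    print(parsed.serial_number == cert.serial_number)  # True
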
@@ -7,18 +7,21 @@ from __future__ import absolute_import, division, print_function
 import datetime
 import ipaddress
 
-from asn1crypto.core import Integer, SequenceOf
+import asn1crypto.core
+
+import six
 
 from cryptography import x509
 from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM
 from cryptography.x509.name import _ASN1_TYPE_TO_ENUM
 from cryptography.x509.oid import (
-    CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID
+    CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID,
+    OCSPExtensionOID,
 )
 
 
-class _Integers(SequenceOf):
-    _child_spec = Integer
+class _Integers(asn1crypto.core.SequenceOf):
+    _child_spec = asn1crypto.core.Integer
 
 
 def _obj2txt(backend, obj):

@@ -202,8 +205,8 @@ class _X509ExtensionParser(object):
                     "Duplicate {0} extension found".format(oid), oid
                 )
 
-            # This OID is only supported in OpenSSL 1.1.0+ but we want
-            # to support it in all versions of OpenSSL so we decode it
+            # These OIDs are only supported in OpenSSL 1.1.0+ but we want
+            # to support them in all versions of OpenSSL so we decode them
             # ourselves.
            if oid == ExtensionOID.TLS_FEATURE:
                 data = backend._lib.X509_EXTENSION_get_data(ext)

@@ -214,6 +217,17 @@ class _X509ExtensionParser(object):
                 extensions.append(x509.Extension(oid, critical, value))
                 seen_oids.add(oid)
                 continue
+            elif oid == ExtensionOID.PRECERT_POISON:
+                data = backend._lib.X509_EXTENSION_get_data(ext)
+                parsed = asn1crypto.core.Null.load(
+                    _asn1_string_to_bytes(backend, data)
+                )
+                assert parsed == asn1crypto.core.Null()
+                extensions.append(x509.Extension(
+                    oid, critical, x509.PrecertPoison()
+                ))
+                seen_oids.add(oid)
+                continue
 
             try:
                 handler = self.handlers[oid]

@@ -498,44 +512,7 @@ def _decode_dist_points(backend, cdps):
         reasons = None
         cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
         if cdp.reasons != backend._ffi.NULL:
-            # We will check each bit from RFC 5280
-            # ReasonFlags ::= BIT STRING {
-            #      unused                  (0),
-            #      keyCompromise           (1),
-            #      cACompromise            (2),
-            #      affiliationChanged      (3),
-            #      superseded              (4),
-            #      cessationOfOperation    (5),
-            #      certificateHold         (6),
-            #      privilegeWithdrawn      (7),
-            #      aACompromise            (8) }
-            reasons = []
-            get_bit = backend._lib.ASN1_BIT_STRING_get_bit
-            if get_bit(cdp.reasons, 1):
-                reasons.append(x509.ReasonFlags.key_compromise)
-
-            if get_bit(cdp.reasons, 2):
-                reasons.append(x509.ReasonFlags.ca_compromise)
-
-            if get_bit(cdp.reasons, 3):
-                reasons.append(x509.ReasonFlags.affiliation_changed)
-
-            if get_bit(cdp.reasons, 4):
-                reasons.append(x509.ReasonFlags.superseded)
-
-            if get_bit(cdp.reasons, 5):
-                reasons.append(x509.ReasonFlags.cessation_of_operation)
-
-            if get_bit(cdp.reasons, 6):
-                reasons.append(x509.ReasonFlags.certificate_hold)
-
-            if get_bit(cdp.reasons, 7):
-                reasons.append(x509.ReasonFlags.privilege_withdrawn)
-
-            if get_bit(cdp.reasons, 8):
-                reasons.append(x509.ReasonFlags.aa_compromise)
-
-            reasons = frozenset(reasons)
+            reasons = _decode_reasons(backend, cdp.reasons)
 
         if cdp.CRLissuer != backend._ffi.NULL:
             crl_issuer = _decode_general_names(backend, cdp.CRLissuer)

@@ -543,32 +520,9 @@ def _decode_dist_points(backend, cdps):
         # Certificates may have a crl_issuer/reasons and no distribution
         # point so make sure it's not null.
         if cdp.distpoint != backend._ffi.NULL:
-            # Type 0 is fullName, there is no #define for it in the code.
-            if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
-                full_name = _decode_general_names(
-                    backend, cdp.distpoint.name.fullname
-                )
-            # OpenSSL code doesn't test for a specific type for
-            # relativename, everything that isn't fullname is considered
-            # relativename.  Per RFC 5280:
-            #
-            # DistributionPointName ::= CHOICE {
-            #     fullName [0] GeneralNames,
-            #     nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
-            else:
-                rns = cdp.distpoint.name.relativename
-                rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
-                attributes = set()
-                for i in range(rnum):
-                    rn = backend._lib.sk_X509_NAME_ENTRY_value(
-                        rns, i
-                    )
-                    backend.openssl_assert(rn != backend._ffi.NULL)
-                    attributes.add(
-                        _decode_x509_name_entry(backend, rn)
-                    )
-
-                relative_name = x509.RelativeDistinguishedName(attributes)
+            full_name, relative_name = _decode_distpoint(
+                backend, cdp.distpoint
+            )
 
         dist_points.append(
             x509.DistributionPoint(

@@ -579,6 +533,67 @@ def _decode_dist_points(backend, cdps):
     return dist_points
 
 
+# ReasonFlags ::= BIT STRING {
+#      unused                  (0),
+#      keyCompromise           (1),
+#      cACompromise            (2),
+#      affiliationChanged      (3),
+#      superseded              (4),
+#      cessationOfOperation    (5),
+#      certificateHold         (6),
+#      privilegeWithdrawn      (7),
+#      aACompromise            (8) }
+_REASON_BIT_MAPPING = {
+    1: x509.ReasonFlags.key_compromise,
+    2: x509.ReasonFlags.ca_compromise,
+    3: x509.ReasonFlags.affiliation_changed,
+    4: x509.ReasonFlags.superseded,
+    5: x509.ReasonFlags.cessation_of_operation,
+    6: x509.ReasonFlags.certificate_hold,
+    7: x509.ReasonFlags.privilege_withdrawn,
+    8: x509.ReasonFlags.aa_compromise,
+}
+
+
+def _decode_reasons(backend, reasons):
+    # We will check each bit from RFC 5280
+    enum_reasons = []
+    for bit_position, reason in six.iteritems(_REASON_BIT_MAPPING):
+        if backend._lib.ASN1_BIT_STRING_get_bit(reasons, bit_position):
+            enum_reasons.append(reason)
+
+    return frozenset(enum_reasons)
+
+
+def _decode_distpoint(backend, distpoint):
+    if distpoint.type == _DISTPOINT_TYPE_FULLNAME:
+        full_name = _decode_general_names(backend, distpoint.name.fullname)
+        return full_name, None
+
+    # OpenSSL code doesn't test for a specific type for
+    # relativename, everything that isn't fullname is considered
+    # relativename.  Per RFC 5280:
+    #
+    # DistributionPointName ::= CHOICE {
+    #     fullName [0] GeneralNames,
+    #     nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
+    rns = distpoint.name.relativename
+    rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
+    attributes = set()
+    for i in range(rnum):
+        rn = backend._lib.sk_X509_NAME_ENTRY_value(
+            rns, i
+        )
+        backend.openssl_assert(rn != backend._ffi.NULL)
+        attributes.add(
+            _decode_x509_name_entry(backend, rn)
+        )
+
+    relative_name = x509.RelativeDistinguishedName(attributes)
+
+    return None, relative_name
+
+
 def _decode_crl_distribution_points(backend, cdps):
     dist_points = _decode_dist_points(backend, cdps)
     return x509.CRLDistributionPoints(dist_points)

@@ -754,6 +769,12 @@ def _parse_asn1_generalized_time(backend, generalized_time):
     return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
 
 
+def _decode_nonce(backend, nonce):
+    nonce = backend._ffi.cast("ASN1_OCTET_STRING *", nonce)
+    nonce = backend._ffi.gc(nonce, backend._lib.ASN1_OCTET_STRING_free)
+    return x509.OCSPNonce(_asn1_string_to_bytes(backend, nonce))
+
+
 _EXTENSION_HANDLERS_NO_SCT = {
     ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
     ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,

@@ -795,6 +816,14 @@ _CRL_EXTENSION_HANDLERS = {
     ),
 }
 
+_OCSP_REQ_EXTENSION_HANDLERS = {
+    OCSPExtensionOID.NONCE: _decode_nonce,
+}
+
+_OCSP_BASICRESP_EXTENSION_HANDLERS = {
+    OCSPExtensionOID.NONCE: _decode_nonce,
+}
+
 _CERTIFICATE_EXTENSION_PARSER_NO_SCT = _X509ExtensionParser(
     ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
     get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),

@@ -824,3 +853,15 @@ _CRL_EXTENSION_PARSER = _X509ExtensionParser(
     get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i),
     handlers=_CRL_EXTENSION_HANDLERS,
 )
+
+_OCSP_REQ_EXT_PARSER = _X509ExtensionParser(
+    ext_count=lambda backend, x: backend._lib.OCSP_REQUEST_get_ext_count(x),
+    get_ext=lambda backend, x, i: backend._lib.OCSP_REQUEST_get_ext(x, i),
+    handlers=_OCSP_REQ_EXTENSION_HANDLERS,
+)
+
+_OCSP_BASICRESP_EXT_PARSER = _X509ExtensionParser(
+    ext_count=lambda backend, x: backend._lib.OCSP_BASICRESP_get_ext_count(x),
+    get_ext=lambda backend, x, i: backend._lib.OCSP_BASICRESP_get_ext(x, i),
+    handlers=_OCSP_BASICRESP_EXTENSION_HANDLERS,
+)
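The _REASON_BIT_MAPPING/_decode_reasons pair added above replaces eight copy-pasted bit checks with a single table walk. A plain-Python stand-in for the same idea, where a list of ints plays the role of ASN1_BIT_STRING_get_bit:

from cryptography import x509

REASON_BIT_MAPPING = {
    1: x509.ReasonFlags.key_compromise,
    2: x509.ReasonFlags.ca_compromise,
    3: x509.ReasonFlags.affiliation_changed,
    4: x509.ReasonFlags.superseded,
    5: x509.ReasonFlags.cessation_of_operation,
    6: x509.ReasonFlags.certificate_hold,
    7: x509.ReasonFlags.privilege_withdrawn,
    8: x509.ReasonFlags.aa_compromise,
}

def decode_reason_bits(bits):
    # bits[n] stands in for ASN1_BIT_STRING_get_bit(reasons, n).
    return frozenset(
        reason for position, reason in REASON_BIT_MAPPING.items()
        if position < len(bits) and bits[position]
    )

# keyCompromise (1) and superseded (4) set:
assert decode_reason_bits([0, 1, 0, 0, 1]) == frozenset([
    x509.ReasonFlags.key_compromise, x509.ReasonFlags.superseded,
])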
@@ -15,7 +15,9 @@ from cryptography.hazmat.backends.openssl.decode_asn1 import (
     _DISTPOINT_TYPE_RELATIVENAME
 )
 from cryptography.x509.name import _ASN1Type
-from cryptography.x509.oid import CRLEntryExtensionOID, ExtensionOID
+from cryptography.x509.oid import (
+    CRLEntryExtensionOID, ExtensionOID, OCSPExtensionOID,
+)
 
 
 def _encode_asn1_int(backend, x):

@@ -44,12 +46,12 @@ def _encode_asn1_int_gc(backend, x):
     return i
 
 
-def _encode_asn1_str(backend, data, length):
+def _encode_asn1_str(backend, data):
     """
     Create an ASN1_OCTET_STRING from a Python byte string.
     """
     s = backend._lib.ASN1_OCTET_STRING_new()
-    res = backend._lib.ASN1_OCTET_STRING_set(s, data, length)
+    res = backend._lib.ASN1_OCTET_STRING_set(s, data, len(data))
     backend.openssl_assert(res == 1)
     return s

@@ -68,8 +70,8 @@ def _encode_asn1_utf8_str(backend, string):
     return s
 
 
-def _encode_asn1_str_gc(backend, data, length):
-    s = _encode_asn1_str(backend, data, length)
+def _encode_asn1_str_gc(backend, data):
+    s = _encode_asn1_str(backend, data)
     s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free)
     return s

@@ -184,7 +186,6 @@ def _encode_certificate_policies(backend, certificate_policies):
                 pqi.d.cpsuri = _encode_asn1_str(
                     backend,
                     qualifier.encode("ascii"),
-                    len(qualifier.encode("ascii"))
                 )
             else:
                 assert isinstance(qualifier, x509.UserNotice)

@@ -245,11 +246,8 @@ def _txt2obj_gc(backend, name):
 
 
 def _encode_ocsp_nocheck(backend, ext):
-    """
-    The OCSP No Check extension is defined as a null ASN.1 value embedded in
-    an ASN.1 string.
-    """
-    return _encode_asn1_str_gc(backend, b"\x05\x00", 2)
+    # Doesn't need to be GC'd
+    return backend._lib.ASN1_NULL_new()
 
 
 def _encode_key_usage(backend, key_usage):

@@ -292,7 +290,6 @@ def _encode_authority_key_identifier(backend, authority_keyid):
         akid.keyid = _encode_asn1_str(
             backend,
             authority_keyid.key_identifier,
-            len(authority_keyid.key_identifier)
         )
 
     if authority_keyid.authority_cert_issuer is not None:

@@ -362,7 +359,7 @@ def _encode_alt_name(backend, san):
 
 
 def _encode_subject_key_identifier(backend, ski):
-    return _encode_asn1_str_gc(backend, ski.digest, len(ski.digest))
+    return _encode_asn1_str_gc(backend, ski.digest)
 
 
 def _encode_general_name(backend, name):

@@ -410,7 +407,7 @@ def _encode_general_name(backend, name):
             )
         else:
             packed = name.value.packed
-        ipaddr = _encode_asn1_str(backend, packed, len(packed))
+        ipaddr = _encode_asn1_str(backend, packed)
         gn.type = backend._lib.GEN_IPADD
         gn.d.iPAddress = ipaddr
     elif isinstance(name, x509.OtherName):

@@ -442,7 +439,7 @@ def _encode_general_name(backend, name):
         # ia5strings are supposed to be ITU T.50 but to allow round-tripping
        # of broken certs that encode utf8 we'll encode utf8 here too.
         data = name.value.encode("utf8")
-        asn1_str = _encode_asn1_str(backend, data, len(data))
+        asn1_str = _encode_asn1_str(backend, data)
         gn.type = backend._lib.GEN_EMAIL
         gn.d.rfc822Name = asn1_str
     elif isinstance(name, x509.UniformResourceIdentifier):

@@ -451,7 +448,7 @@ def _encode_general_name(backend, name):
         # ia5strings are supposed to be ITU T.50 but to allow round-tripping
         # of broken certs that encode utf8 we'll encode utf8 here too.
         data = name.value.encode("utf8")
-        asn1_str = _encode_asn1_str(backend, data, len(data))
+        asn1_str = _encode_asn1_str(backend, data)
         gn.type = backend._lib.GEN_URI
         gn.d.uniformResourceIdentifier = asn1_str
     else:

@@ -574,6 +571,10 @@ def _encode_general_subtree(backend, subtrees):
     return general_subtrees
 
 
+def _encode_nonce(backend, nonce):
+    return _encode_asn1_str_gc(backend, nonce.nonce)
+
+
 _EXTENSION_ENCODE_HANDLERS = {
     ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
     ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,

@@ -609,3 +610,11 @@ _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
     CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
     CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
 }
+
+_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS = {
+    OCSPExtensionOID.NONCE: _encode_nonce,
+}
+
+_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS = {
+    OCSPExtensionOID.NONCE: _encode_nonce,
+}
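Both the decode and encode sides route the OCSP nonce through OID-keyed handler tables. The dispatch pattern itself, reduced to plain Python with illustrative stand-in names (the OID below is id-pkix-ocsp-nonce):

def encode_nonce(value):
    # Stand-in for _encode_nonce; the real handler builds an
    # ASN1_OCTET_STRING via _encode_asn1_str_gc.
    return value

HANDLERS = {
    "1.3.6.1.5.5.7.48.1.2": encode_nonce,  # id-pkix-ocsp-nonce
}

def encode_extension(dotted_oid, value):
    # Look the extension up by OID, as the *_ENCODE_HANDLERS dicts do.
    try:
        handler = HANDLERS[dotted_oid]
    except KeyError:
        raise NotImplementedError(
            "extension {0} is not supported".format(dotted_oid)
        )
    return handler(value)

assert encode_extension("1.3.6.1.5.5.7.48.1.2", b"\x01\x02") == b"\x01\x02"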
@@ -22,12 +22,11 @@ class _HashContext(object):
             ctx = self._backend._ffi.gc(
                 ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
             )
-            name = self._backend._build_openssl_digest_name(algorithm)
-            evp_md = self._backend._lib.EVP_get_digestbyname(name)
+            evp_md = self._backend._evp_md_from_algorithm(algorithm)
             if evp_md == self._backend._ffi.NULL:
                 raise UnsupportedAlgorithm(
                     "{0} is not a supported hash on this backend.".format(
-                        name),
+                        algorithm.name),
                     _Reasons.UNSUPPORTED_HASH
                 )
             res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,

@@ -25,11 +25,11 @@ class _HMACContext(object):
             ctx = self._backend._ffi.gc(
                 ctx, self._backend._lib.Cryptography_HMAC_CTX_free
             )
-            name = self._backend._build_openssl_digest_name(algorithm)
-            evp_md = self._backend._lib.EVP_get_digestbyname(name)
+            evp_md = self._backend._evp_md_from_algorithm(algorithm)
             if evp_md == self._backend._ffi.NULL:
                 raise UnsupportedAlgorithm(
-                    "{0} is not a supported hash on this backend".format(name),
+                    "{0} is not a supported hash on this backend".format(
+                        algorithm.name),
                     _Reasons.UNSUPPORTED_HASH
                 )
             res = self._backend._lib.HMAC_Init_ex(
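Both hunks swap a per-call-site EVP_get_digestbyname lookup for one shared _evp_md_from_algorithm helper, so the algorithm-to-digest mapping lives in a single place. The same centralization sketched with hashlib instead of OpenSSL (getattr returning None mirrors the NULL check above):

import hashlib

from cryptography.hazmat.primitives import hashes

def evp_md_from_algorithm(algorithm):
    # Returns a digest constructor, or None when the algorithm is
    # unsupported -- the analogue of EVP_get_digestbyname's NULL.
    return getattr(hashlib, algorithm.name.replace("-", "_"), None)

digest = evp_md_from_algorithm(hashes.SHA256())
assert digest is not None
assert digest(b"abc").hexdigest().startswith("ba7816bf")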
@@ -0,0 +1,370 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import functools

from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends.openssl.decode_asn1 import (
    _CRL_ENTRY_REASON_CODE_TO_ENUM, _OCSP_BASICRESP_EXT_PARSER,
    _OCSP_REQ_EXT_PARSER, _asn1_integer_to_int,
    _asn1_string_to_bytes, _decode_x509_name, _obj2txt,
    _parse_asn1_generalized_time,
)
from cryptography.hazmat.backends.openssl.x509 import _Certificate
from cryptography.hazmat.primitives import serialization
from cryptography.x509.ocsp import (
    OCSPCertStatus, OCSPRequest, OCSPResponse, OCSPResponseStatus,
    _CERT_STATUS_TO_ENUM, _OIDS_TO_HASH, _RESPONSE_STATUS_TO_ENUM,
)


def _requires_successful_response(func):
    @functools.wraps(func)
    def wrapper(self, *args):
        if self.response_status != OCSPResponseStatus.SUCCESSFUL:
            raise ValueError(
                "OCSP response status is not successful so the property "
                "has no value"
            )
        else:
            return func(self, *args)

    return wrapper


def _issuer_key_hash(backend, cert_id):
    key_hash = backend._ffi.new("ASN1_OCTET_STRING **")
    res = backend._lib.OCSP_id_get0_info(
        backend._ffi.NULL, backend._ffi.NULL,
        key_hash, backend._ffi.NULL, cert_id
    )
    backend.openssl_assert(res == 1)
    backend.openssl_assert(key_hash[0] != backend._ffi.NULL)
    return _asn1_string_to_bytes(backend, key_hash[0])


def _issuer_name_hash(backend, cert_id):
    name_hash = backend._ffi.new("ASN1_OCTET_STRING **")
    res = backend._lib.OCSP_id_get0_info(
        name_hash, backend._ffi.NULL,
        backend._ffi.NULL, backend._ffi.NULL, cert_id
    )
    backend.openssl_assert(res == 1)
    backend.openssl_assert(name_hash[0] != backend._ffi.NULL)
    return _asn1_string_to_bytes(backend, name_hash[0])


def _serial_number(backend, cert_id):
    num = backend._ffi.new("ASN1_INTEGER **")
    res = backend._lib.OCSP_id_get0_info(
        backend._ffi.NULL, backend._ffi.NULL,
        backend._ffi.NULL, num, cert_id
    )
    backend.openssl_assert(res == 1)
    backend.openssl_assert(num[0] != backend._ffi.NULL)
    return _asn1_integer_to_int(backend, num[0])


def _hash_algorithm(backend, cert_id):
    asn1obj = backend._ffi.new("ASN1_OBJECT **")
    res = backend._lib.OCSP_id_get0_info(
        backend._ffi.NULL, asn1obj,
        backend._ffi.NULL, backend._ffi.NULL, cert_id
    )
    backend.openssl_assert(res == 1)
    backend.openssl_assert(asn1obj[0] != backend._ffi.NULL)
    oid = _obj2txt(backend, asn1obj[0])
    try:
        return _OIDS_TO_HASH[oid]
    except KeyError:
        raise UnsupportedAlgorithm(
            "Signature algorithm OID: {0} not recognized".format(oid)
        )


@utils.register_interface(OCSPResponse)
class _OCSPResponse(object):
    def __init__(self, backend, ocsp_response):
        self._backend = backend
        self._ocsp_response = ocsp_response
        status = self._backend._lib.OCSP_response_status(self._ocsp_response)
        self._backend.openssl_assert(status in _RESPONSE_STATUS_TO_ENUM)
        self._status = _RESPONSE_STATUS_TO_ENUM[status]
        if self._status is OCSPResponseStatus.SUCCESSFUL:
            basic = self._backend._lib.OCSP_response_get1_basic(
                self._ocsp_response
            )
            self._backend.openssl_assert(basic != self._backend._ffi.NULL)
            self._basic = self._backend._ffi.gc(
                basic, self._backend._lib.OCSP_BASICRESP_free
            )
            self._backend.openssl_assert(
                self._backend._lib.OCSP_resp_count(self._basic) == 1
            )
            self._single = self._backend._lib.OCSP_resp_get0(self._basic, 0)
            self._backend.openssl_assert(
                self._single != self._backend._ffi.NULL
            )
            self._cert_id = self._backend._lib.OCSP_SINGLERESP_get0_id(
                self._single
            )
            self._backend.openssl_assert(
                self._cert_id != self._backend._ffi.NULL
            )

    response_status = utils.read_only_property("_status")

    @property
    @_requires_successful_response
    def signature_algorithm_oid(self):
        alg = self._backend._lib.OCSP_resp_get0_tbs_sigalg(self._basic)
        self._backend.openssl_assert(alg != self._backend._ffi.NULL)
        oid = _obj2txt(self._backend, alg.algorithm)
        return x509.ObjectIdentifier(oid)

    @property
    @_requires_successful_response
    def signature(self):
        sig = self._backend._lib.OCSP_resp_get0_signature(self._basic)
        self._backend.openssl_assert(sig != self._backend._ffi.NULL)
        return _asn1_string_to_bytes(self._backend, sig)

    @property
    @_requires_successful_response
    def tbs_response_bytes(self):
        respdata = self._backend._lib.OCSP_resp_get0_respdata(self._basic)
        self._backend.openssl_assert(respdata != self._backend._ffi.NULL)
        pp = self._backend._ffi.new("unsigned char **")
        res = self._backend._lib.i2d_OCSP_RESPDATA(respdata, pp)
        self._backend.openssl_assert(pp[0] != self._backend._ffi.NULL)
        pp = self._backend._ffi.gc(
            pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
        )
        self._backend.openssl_assert(res > 0)
        return self._backend._ffi.buffer(pp[0], res)[:]

    @property
    @_requires_successful_response
    def certificates(self):
        sk_x509 = self._backend._lib.OCSP_resp_get0_certs(self._basic)
        num = self._backend._lib.sk_X509_num(sk_x509)
        certs = []
        for i in range(num):
            x509 = self._backend._lib.sk_X509_value(sk_x509, i)
            self._backend.openssl_assert(x509 != self._backend._ffi.NULL)
            cert = _Certificate(self._backend, x509)
            # We need to keep the OCSP response that the certificate came from
            # alive until the Certificate object itself goes out of scope, so
            # we give it a private reference.
            cert._ocsp_resp = self
            certs.append(cert)

        return certs

    @property
    @_requires_successful_response
    def responder_key_hash(self):
        _, asn1_string = self._responder_key_name()
        if asn1_string == self._backend._ffi.NULL:
            return None
        else:
            return _asn1_string_to_bytes(self._backend, asn1_string)

    @property
    @_requires_successful_response
    def responder_name(self):
        x509_name, _ = self._responder_key_name()
        if x509_name == self._backend._ffi.NULL:
            return None
        else:
            return _decode_x509_name(self._backend, x509_name)

    def _responder_key_name(self):
        asn1_string = self._backend._ffi.new("ASN1_OCTET_STRING **")
        x509_name = self._backend._ffi.new("X509_NAME **")
        res = self._backend._lib.OCSP_resp_get0_id(
            self._basic, asn1_string, x509_name
        )
        self._backend.openssl_assert(res == 1)
        return x509_name[0], asn1_string[0]

    @property
    @_requires_successful_response
    def produced_at(self):
        produced_at = self._backend._lib.OCSP_resp_get0_produced_at(
            self._basic
        )
        return _parse_asn1_generalized_time(self._backend, produced_at)

    @property
    @_requires_successful_response
    def certificate_status(self):
        status = self._backend._lib.OCSP_single_get0_status(
            self._single,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
        )
        self._backend.openssl_assert(status in _CERT_STATUS_TO_ENUM)
        return _CERT_STATUS_TO_ENUM[status]

    @property
    @_requires_successful_response
    def revocation_time(self):
        if self.certificate_status is not OCSPCertStatus.REVOKED:
            return None

        asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
        self._backend._lib.OCSP_single_get0_status(
            self._single,
            self._backend._ffi.NULL,
            asn1_time,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
        )
        self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
        return _parse_asn1_generalized_time(self._backend, asn1_time[0])

    @property
    @_requires_successful_response
    def revocation_reason(self):
        if self.certificate_status is not OCSPCertStatus.REVOKED:
            return None

        reason_ptr = self._backend._ffi.new("int *")
        self._backend._lib.OCSP_single_get0_status(
            self._single,
            reason_ptr,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
        )
        # If no reason is encoded OpenSSL returns -1
        if reason_ptr[0] == -1:
            return None
        else:
            self._backend.openssl_assert(
                reason_ptr[0] in _CRL_ENTRY_REASON_CODE_TO_ENUM
            )
            return _CRL_ENTRY_REASON_CODE_TO_ENUM[reason_ptr[0]]

    @property
    @_requires_successful_response
    def this_update(self):
        asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
        self._backend._lib.OCSP_single_get0_status(
            self._single,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            asn1_time,
            self._backend._ffi.NULL,
        )
        self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
        return _parse_asn1_generalized_time(self._backend, asn1_time[0])

    @property
    @_requires_successful_response
    def next_update(self):
        asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
        self._backend._lib.OCSP_single_get0_status(
            self._single,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            asn1_time,
        )
        if asn1_time[0] != self._backend._ffi.NULL:
            return _parse_asn1_generalized_time(self._backend, asn1_time[0])
        else:
            return None

    @property
    @_requires_successful_response
    def issuer_key_hash(self):
        return _issuer_key_hash(self._backend, self._cert_id)

    @property
    @_requires_successful_response
    def issuer_name_hash(self):
        return _issuer_name_hash(self._backend, self._cert_id)

    @property
    @_requires_successful_response
    def hash_algorithm(self):
        return _hash_algorithm(self._backend, self._cert_id)

    @property
    @_requires_successful_response
    def serial_number(self):
        return _serial_number(self._backend, self._cert_id)

    @utils.cached_property
    @_requires_successful_response
    def extensions(self):
        return _OCSP_BASICRESP_EXT_PARSER.parse(self._backend, self._basic)

    def public_bytes(self, encoding):
        if encoding is not serialization.Encoding.DER:
            raise ValueError(
                "The only allowed encoding value is Encoding.DER"
            )

        bio = self._backend._create_mem_bio_gc()
        res = self._backend._lib.i2d_OCSP_RESPONSE_bio(
            bio, self._ocsp_response
        )
        self._backend.openssl_assert(res > 0)
        return self._backend._read_mem_bio(bio)


@utils.register_interface(OCSPRequest)
class _OCSPRequest(object):
    def __init__(self, backend, ocsp_request):
        if backend._lib.OCSP_request_onereq_count(ocsp_request) > 1:
            raise NotImplementedError(
                'OCSP request contains more than one request'
            )
        self._backend = backend
        self._ocsp_request = ocsp_request
        self._request = self._backend._lib.OCSP_request_onereq_get0(
            self._ocsp_request, 0
        )
        self._backend.openssl_assert(self._request != self._backend._ffi.NULL)
        self._cert_id = self._backend._lib.OCSP_onereq_get0_id(self._request)
        self._backend.openssl_assert(self._cert_id != self._backend._ffi.NULL)

    @property
    def issuer_key_hash(self):
        return _issuer_key_hash(self._backend, self._cert_id)

    @property
    def issuer_name_hash(self):
        return _issuer_name_hash(self._backend, self._cert_id)

    @property
    def serial_number(self):
        return _serial_number(self._backend, self._cert_id)

    @property
    def hash_algorithm(self):
        return _hash_algorithm(self._backend, self._cert_id)

    @utils.cached_property
    def extensions(self):
        return _OCSP_REQ_EXT_PARSER.parse(self._backend, self._ocsp_request)

    def public_bytes(self, encoding):
        if encoding is not serialization.Encoding.DER:
            raise ValueError(
                "The only allowed encoding value is Encoding.DER"
            )

        bio = self._backend._create_mem_bio_gc()
        res = self._backend._lib.i2d_OCSP_REQUEST_bio(bio, self._ocsp_request)
        self._backend.openssl_assert(res > 0)
        return self._backend._read_mem_bio(bio)
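The _OCSPResponse/_OCSPRequest wrappers above back the public DER loaders. A minimal sketch of inspecting a response, assuming a hypothetical DER file resp.der:

from cryptography.x509 import ocsp

with open("resp.der", "rb") as f:
    resp = ocsp.load_der_ocsp_response(f.read())

print(resp.response_status)
if resp.response_status == ocsp.OCSPResponseStatus.SUCCESSFUL:
    # These properties go through @_requires_successful_response above.
    print(resp.certificate_status, resp.this_update, resp.next_update)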
@@ -92,14 +92,11 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
         isinstance(padding, OAEP) and
         backend._lib.Cryptography_HAS_RSA_OAEP_MD
     ):
-        mgf1_md = backend._lib.EVP_get_digestbyname(
-            padding._mgf._algorithm.name.encode("ascii"))
-        backend.openssl_assert(mgf1_md != backend._ffi.NULL)
+        mgf1_md = backend._evp_md_non_null_from_algorithm(
+            padding._mgf._algorithm)
         res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
         backend.openssl_assert(res > 0)
-        oaep_md = backend._lib.EVP_get_digestbyname(
-            padding._algorithm.name.encode("ascii"))
-        backend.openssl_assert(oaep_md != backend._ffi.NULL)
+        oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
         res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
         backend.openssl_assert(res > 0)

@@ -189,15 +186,21 @@ def _rsa_sig_determine_padding(backend, key, padding, algorithm):
 
 def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
     padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
-    evp_md = backend._lib.EVP_get_digestbyname(algorithm.name.encode("ascii"))
-    backend.openssl_assert(evp_md != backend._ffi.NULL)
+    evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
     pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
     backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
     pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
     res = init_func(pkey_ctx)
     backend.openssl_assert(res == 1)
     res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
-    backend.openssl_assert(res > 0)
+    if res == 0:
+        backend._consume_errors()
+        raise UnsupportedAlgorithm(
+            "{0} is not supported by this backend for RSA signing.".format(
+                algorithm.name
+            ),
+            _Reasons.UNSUPPORTED_HASH
+        )
     res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
     backend.openssl_assert(res > 0)
     if isinstance(padding, PSS):

@@ -206,10 +209,8 @@ def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
         )
         backend.openssl_assert(res > 0)
 
-        mgf1_md = backend._lib.EVP_get_digestbyname(
-            padding._mgf._algorithm.name.encode("ascii")
-        )
-        backend.openssl_assert(mgf1_md != backend._ffi.NULL)
+        mgf1_md = backend._evp_md_non_null_from_algorithm(
+            padding._mgf._algorithm)
         res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
         backend.openssl_assert(res > 0)
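The rewritten _rsa_sig_setup now reports an unsupported digest as UnsupportedAlgorithm instead of tripping an assert. A short, runnable sign/verify round trip that exercises this path with PSS padding:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,
)
signature = key.sign(b"payload", pss, hashes.SHA256())
# verify() raises InvalidSignature on mismatch; silence means success.
key.public_key().verify(signature, b"payload", pss, hashes.SHA256())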
@@ -238,11 +238,21 @@ class _CertificateRevocationList(object):
         h.update(der)
         return h.finalize()
 
+    @utils.cached_property
+    def _sorted_crl(self):
+        # X509_CRL_get0_by_serial sorts in place, which breaks a variety of
+        # things we don't want to break (like iteration and the signature).
+        # Let's dupe it and sort that instead.
+        dup = self._backend._lib.X509_CRL_dup(self._x509_crl)
+        self._backend.openssl_assert(dup != self._backend._ffi.NULL)
+        dup = self._backend._ffi.gc(dup, self._backend._lib.X509_CRL_free)
+        return dup
+
     def get_revoked_certificate_by_serial_number(self, serial_number):
         revoked = self._backend._ffi.new("X509_REVOKED **")
         asn1_int = _encode_asn1_int_gc(self._backend, serial_number)
         res = self._backend._lib.X509_CRL_get0_by_serial(
-            self._x509_crl, revoked, asn1_int
+            self._sorted_crl, revoked, asn1_int
         )
         if res == 0:
             return None

@@ -251,7 +261,7 @@ class _CertificateRevocationList(object):
             revoked[0] != self._backend._ffi.NULL
         )
         return _RevokedCertificate(
-            self._backend, self._x509_crl, revoked[0]
+            self._backend, self._sorted_crl, revoked[0]
         )
 
     @property

@@ -429,6 +439,14 @@ class _CertificateSigningRequest(object):
     @utils.cached_property
     def extensions(self):
         x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
+        x509_exts = self._backend._ffi.gc(
+            x509_exts,
+            lambda x: self._backend._lib.sk_X509_EXTENSION_pop_free(
+                x, self._backend._ffi.addressof(
+                    self._backend._lib._original_lib, "X509_EXTENSION_free"
+                )
+            )
+        )
         return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
 
     def public_bytes(self, encoding):

@@ -516,3 +534,23 @@ class _SignedCertificateTimestamp(object):
         # we only have precerts.
         assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT
         return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
+
+    @property
+    def _signature(self):
+        ptrptr = self._backend._ffi.new("unsigned char **")
+        res = self._backend._lib.SCT_get0_signature(self._sct, ptrptr)
+        self._backend.openssl_assert(res > 0)
+        self._backend.openssl_assert(ptrptr[0] != self._backend._ffi.NULL)
+        return self._backend._ffi.buffer(ptrptr[0], res)[:]
+
+    def __hash__(self):
+        return hash(self._signature)
+
+    def __eq__(self, other):
+        if not isinstance(other, _SignedCertificateTimestamp):
+            return NotImplemented
+
+        return self._signature == other._signature
+
+    def __ne__(self, other):
+        return not self == other
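The new _sorted_crl property exists because X509_CRL_get0_by_serial sorts its argument in place; the fix is to search a cached duplicate instead. The same copy-then-search idea in plain Python:

import bisect

class RevocationList(object):
    def __init__(self, serials):
        # Preserve insertion order; callers may rely on iterating it.
        self._serials = list(serials)
        self._sorted = None

    def _sorted_serials(self):
        # Cache a sorted *copy* rather than sorting self._serials in
        # place -- the same trick as _sorted_crl above.
        if self._sorted is None:
            self._sorted = sorted(self._serials)
        return self._sorted

    def __contains__(self, serial):
        s = self._sorted_serials()
        i = bisect.bisect_left(s, serial)
        return i < len(s) and s[i] == serial

crl = RevocationList([30, 10, 20])
assert 20 in crl and 15 not in crl
assert crl._serials == [30, 10, 20]  # original order intact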
@@ -98,6 +98,18 @@ def cryptography_has_102_verification_params():
         "X509_VERIFY_PARAM_set1_ip",
         "X509_VERIFY_PARAM_set1_ip_asc",
         "X509_VERIFY_PARAM_set_hostflags",
+        "SSL_get0_param",
+        "X509_CHECK_FLAG_ALWAYS_CHECK_SUBJECT",
+        "X509_CHECK_FLAG_NO_WILDCARDS",
+        "X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS",
+        "X509_CHECK_FLAG_MULTI_LABEL_WILDCARDS",
+        "X509_CHECK_FLAG_SINGLE_LABEL_SUBDOMAINS"
     ]
 
 
+def cryptography_has_110_verification_params():
+    return [
+        "X509_CHECK_FLAG_NEVER_CHECK_SUBJECT"
+    ]
+
+

@@ -143,6 +155,7 @@ def cryptography_has_locking_callbacks():
         "CRYPTO_READ",
         "CRYPTO_LOCK_SSL",
         "CRYPTO_lock",
+        "Cryptography_setup_ssl_threads",
     ]

@@ -181,11 +194,19 @@ def cryptography_has_sct():
         "SCT_get_version",
         "SCT_get_log_entry_type",
         "SCT_get0_log_id",
         "SCT_get0_signature",
         "SCT_get_timestamp",
+        "SCT_set_source",
         "sk_SCT_num",
         "sk_SCT_value",
         "SCT_LIST_free",
+        "sk_SCT_push",
+        "sk_SCT_new_null",
+        "SCT_new",
+        "SCT_set1_log_id",
+        "SCT_set_timestamp",
+        "SCT_set_version",
+        "SCT_set_log_entry_type",
     ]

@@ -203,6 +224,19 @@ def cryptography_has_x25519():
     ]
 
 
+def cryptography_has_ed25519():
+    return [
+        "NID_ED25519",
+    ]
+
+
+def cryptography_has_oneshot_evp_digest_sign_verify():
+    return [
+        "EVP_DigestSign",
+        "EVP_DigestVerify",
+    ]
+
+
 def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
     return [
         "EVP_PKEY_get1_tls_encodedpoint",

@@ -246,6 +280,27 @@ def cryptography_has_openssl_cleanup():
     ]
 
 
+def cryptography_has_cipher_details():
+    return [
+        "SSL_CIPHER_is_aead",
+        "SSL_CIPHER_get_cipher_nid",
+        "SSL_CIPHER_get_digest_nid",
+        "SSL_CIPHER_get_kx_nid",
+        "SSL_CIPHER_get_auth_nid",
+    ]
+
+
+def cryptography_has_tlsv13():
+    return [
+        "SSL_OP_NO_TLSv1_3",
+        "SSL_VERIFY_POST_HANDSHAKE",
+        "SSL_CTX_set_ciphersuites",
+        "SSL_verify_client_post_handshake",
+        "SSL_CTX_set_post_handshake_auth",
+        "SSL_set_post_handshake_auth",
+    ]
+
+
 # This is a mapping of
 # {condition: function-returning-names-dependent-on-that-condition} so we can
 # loop over them and delete unsupported names at runtime. It will be removed

@@ -270,6 +325,9 @@ CONDITIONAL_NAMES = {
     "Cryptography_HAS_102_VERIFICATION_PARAMS": (
         cryptography_has_102_verification_params
     ),
+    "Cryptography_HAS_110_VERIFICATION_PARAMS": (
+        cryptography_has_110_verification_params
+    ),
     "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": (
         cryptography_has_x509_v_flag_trusted_first
     ),

@@ -291,6 +349,10 @@ CONDITIONAL_NAMES = {
         cryptography_has_x509_store_ctx_get_issuer
     ),
     "Cryptography_HAS_X25519": cryptography_has_x25519,
+    "Cryptography_HAS_ED25519": cryptography_has_ed25519,
+    "Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": (
+        cryptography_has_oneshot_evp_digest_sign_verify
+    ),
     "Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": (
         cryptography_has_evp_pkey_get_set_tls_encodedpoint
     ),

@@ -299,4 +361,6 @@ CONDITIONAL_NAMES = {
     "Cryptography_HAS_PSK": cryptography_has_psk,
     "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
     "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup,
+    "Cryptography_HAS_CIPHER_DETAILS": cryptography_has_cipher_details,
+    "Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13,
 }
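Each cryptography_has_* function lists the names that exist only when the matching feature flag is true; the binding walks CONDITIONAL_NAMES and strips absent symbols at import time. That pruning loop, reduced to a self-contained sketch with a fake lib object:

class FakeLib(object):
    Cryptography_HAS_TLSv1_3 = 0        # feature flag: not built in
    SSL_OP_NO_TLSv1_3 = 0x20000000      # name that must disappear

CONDITIONAL = {
    "Cryptography_HAS_TLSv1_3": lambda: ["SSL_OP_NO_TLSv1_3"],
}

def prune_unsupported(lib, conditional):
    # Delete every name whose guarding feature flag is false.
    for flag, names_fn in conditional.items():
        if not getattr(lib, flag):
            for name in names_fn():
                delattr(lib, name)

prune_unsupported(FakeLib, CONDITIONAL)
assert not hasattr(FakeLib, "SSL_OP_NO_TLSv1_3")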
@@ -7,6 +7,7 @@ from __future__ import absolute_import, division, print_function
 import collections
 import threading
 import types
+import warnings
 
 from cryptography import utils
 from cryptography.exceptions import InternalError

@@ -140,7 +141,8 @@ class Binding(object):
         # the setup for this.
         __import__("_ssl")
 
-        if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
+        if (not cls.lib.Cryptography_HAS_LOCKING_CALLBACKS or
+                cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL):
             return
 
         # If nothing else has setup a locking callback already, we set up

@@ -149,9 +151,24 @@ class Binding(object):
         _openssl_assert(cls.lib, res == 1)
 
 
+def _verify_openssl_version(lib):
+    if (
+        lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
+        not lib.CRYPTOGRAPHY_IS_LIBRESSL
+    ):
+        warnings.warn(
+            "OpenSSL version 1.0.1 is no longer supported by the OpenSSL "
+            "project, please upgrade. A future version of cryptography will "
+            "drop support for it.",
+            utils.CryptographyDeprecationWarning
+        )
+
+
 # OpenSSL is not thread safe until the locks are initialized. We call this
 # method in module scope so that it executes with the import lock. On
 # Pythons < 3.4 this import lock is a global lock, which can prevent a race
 # condition registering the OpenSSL locks. On Python 3.4+ the import lock
 # is per module so this approach will not work.
 Binding.init_static_locks()
+
+_verify_openssl_version(Binding.lib)
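_verify_openssl_version turns a too-old OpenSSL into a deprecation warning at import time. The shape of that check, sketched against OpenSSL's packed version number (treating 0x10002000 as the 1.0.2 boundary is an assumption of this sketch):

import warnings

def verify_openssl_version(version_number):
    # OpenSSL packs its version as 0xMNNFFPPS; in that scheme the
    # 1.0.2 series starts at roughly 0x10002000.
    if version_number < 0x10002000:
        warnings.warn(
            "This OpenSSL release is past end-of-life, please upgrade. "
            "A future version will drop support for it.",
            DeprecationWarning,
        )

verify_openssl_version(0x1000114f)  # a 1.0.1 release: warns
verify_openssl_version(0x1010008f)  # a 1.1.0 release: silent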
@@ -9,6 +9,16 @@ import abc
 import six
 
 from cryptography import utils
+from cryptography.hazmat._oid import ObjectIdentifier
+
+
+class EllipticCurveOID(object):
+    SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
+    SECP224R1 = ObjectIdentifier("1.3.132.0.33")
+    SECP256K1 = ObjectIdentifier("1.3.132.0.10")
+    SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
+    SECP384R1 = ObjectIdentifier("1.3.132.0.34")
+    SECP521R1 = ObjectIdentifier("1.3.132.0.35")
 
 
 @six.add_metaclass(abc.ABCMeta)

@@ -68,7 +78,7 @@ class EllipticCurvePrivateKey(object):
         Bit size of a secret scalar for the curve.
         """
 
-    @abc.abstractproperty
+    @abc.abstractmethod
     def sign(self, data, signature_algorithm):
         """
         Signs the data
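With sign now an abc.abstractmethod (it had mistakenly been declared abstractproperty), concrete EC keys must implement it as a method. A runnable round trip over SECP256R1, whose OID appears in the new EllipticCurveOID table:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

key = ec.generate_private_key(ec.SECP256R1(), default_backend())
signature = key.sign(b"payload", ec.ECDSA(hashes.SHA256()))
# Raises InvalidSignature if the signature does not match.
key.public_key().verify(signature, b"payload", ec.ECDSA(hashes.SHA256()))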