remove pip
This commit is contained in:
parent
fdf17c51c1
commit
433d75ffa1
247
bin/Activate.ps1
247
bin/Activate.ps1
@ -1,247 +0,0 @@
|
|||||||
<#
|
|
||||||
.Synopsis
|
|
||||||
Activate a Python virtual environment for the current PowerShell session.
|
|
||||||
|
|
||||||
.Description
|
|
||||||
Pushes the python executable for a virtual environment to the front of the
|
|
||||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
|
||||||
in a Python virtual environment. Makes use of the command line switches as
|
|
||||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
|
||||||
|
|
||||||
.Parameter VenvDir
|
|
||||||
Path to the directory that contains the virtual environment to activate. The
|
|
||||||
default value for this is the parent of the directory that the Activate.ps1
|
|
||||||
script is located within.
|
|
||||||
|
|
||||||
.Parameter Prompt
|
|
||||||
The prompt prefix to display when this virtual environment is activated. By
|
|
||||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
|
||||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1
|
|
||||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1 -Verbose
|
|
||||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
|
||||||
and shows extra information about the activation as it executes.
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
|
||||||
Activates the Python virtual environment located in the specified location.
|
|
||||||
|
|
||||||
.Example
|
|
||||||
Activate.ps1 -Prompt "MyPython"
|
|
||||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
|
||||||
and prefixes the current prompt with the specified string (surrounded in
|
|
||||||
parentheses) while the virtual environment is active.
|
|
||||||
|
|
||||||
.Notes
|
|
||||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
|
||||||
execution policy for the user. You can do this by issuing the following PowerShell
|
|
||||||
command:
|
|
||||||
|
|
||||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
|
||||||
|
|
||||||
For more information on Execution Policies:
|
|
||||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
|
||||||
|
|
||||||
#>
|
|
||||||
Param(
|
|
||||||
[Parameter(Mandatory = $false)]
|
|
||||||
[String]
|
|
||||||
$VenvDir,
|
|
||||||
[Parameter(Mandatory = $false)]
|
|
||||||
[String]
|
|
||||||
$Prompt
|
|
||||||
)
|
|
||||||
|
|
||||||
<# Function declarations --------------------------------------------------- #>
|
|
||||||
|
|
||||||
<#
|
|
||||||
.Synopsis
|
|
||||||
Remove all shell session elements added by the Activate script, including the
|
|
||||||
addition of the virtual environment's Python executable from the beginning of
|
|
||||||
the PATH variable.
|
|
||||||
|
|
||||||
.Parameter NonDestructive
|
|
||||||
If present, do not remove this function from the global namespace for the
|
|
||||||
session.
|
|
||||||
|
|
||||||
#>
|
|
||||||
function global:deactivate ([switch]$NonDestructive) {
|
|
||||||
# Revert to original values
|
|
||||||
|
|
||||||
# The prior prompt:
|
|
||||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
|
||||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
|
||||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
|
||||||
}
|
|
||||||
|
|
||||||
# The prior PYTHONHOME:
|
|
||||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
|
||||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
|
||||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
|
||||||
}
|
|
||||||
|
|
||||||
# The prior PATH:
|
|
||||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
|
||||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
|
||||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
|
||||||
}
|
|
||||||
|
|
||||||
# Just remove the VIRTUAL_ENV altogether:
|
|
||||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
|
||||||
Remove-Item -Path env:VIRTUAL_ENV
|
|
||||||
}
|
|
||||||
|
|
||||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
|
||||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
|
||||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
|
||||||
}
|
|
||||||
|
|
||||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
|
||||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
|
||||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
|
||||||
}
|
|
||||||
|
|
||||||
# Leave deactivate function in the global namespace if requested:
|
|
||||||
if (-not $NonDestructive) {
|
|
||||||
Remove-Item -Path function:deactivate
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
<#
|
|
||||||
.Description
|
|
||||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
|
||||||
given folder, and returns them in a map.
|
|
||||||
|
|
||||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
|
||||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
|
||||||
then it is considered a `key = value` line. The left hand string is the key,
|
|
||||||
the right hand is the value.
|
|
||||||
|
|
||||||
If the value starts with a `'` or a `"` then the first and last character is
|
|
||||||
stripped from the value before being captured.
|
|
||||||
|
|
||||||
.Parameter ConfigDir
|
|
||||||
Path to the directory that contains the `pyvenv.cfg` file.
|
|
||||||
#>
|
|
||||||
function Get-PyVenvConfig(
|
|
||||||
[String]
|
|
||||||
$ConfigDir
|
|
||||||
) {
|
|
||||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
|
||||||
|
|
||||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
|
||||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
|
||||||
|
|
||||||
# An empty map will be returned if no config file is found.
|
|
||||||
$pyvenvConfig = @{ }
|
|
||||||
|
|
||||||
if ($pyvenvConfigPath) {
|
|
||||||
|
|
||||||
Write-Verbose "File exists, parse `key = value` lines"
|
|
||||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
|
||||||
|
|
||||||
$pyvenvConfigContent | ForEach-Object {
|
|
||||||
$keyval = $PSItem -split "\s*=\s*", 2
|
|
||||||
if ($keyval[0] -and $keyval[1]) {
|
|
||||||
$val = $keyval[1]
|
|
||||||
|
|
||||||
# Remove extraneous quotations around a string value.
|
|
||||||
if ("'""".Contains($val.Substring(0, 1))) {
|
|
||||||
$val = $val.Substring(1, $val.Length - 2)
|
|
||||||
}
|
|
||||||
|
|
||||||
$pyvenvConfig[$keyval[0]] = $val
|
|
||||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return $pyvenvConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
<# Begin Activate script --------------------------------------------------- #>
|
|
||||||
|
|
||||||
# Determine the containing directory of this script
|
|
||||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
|
||||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
|
||||||
|
|
||||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
|
||||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
|
||||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
|
||||||
|
|
||||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
|
||||||
# First, get the location of the virtual environment, it might not be
|
|
||||||
# VenvExecDir if specified on the command line.
|
|
||||||
if ($VenvDir) {
|
|
||||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
|
||||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
|
||||||
Write-Verbose "VenvDir=$VenvDir"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
|
||||||
# as `prompt`.
|
|
||||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
|
||||||
|
|
||||||
# Next, set the prompt from the command line, or the config file, or
|
|
||||||
# just use the name of the virtual environment folder.
|
|
||||||
if ($Prompt) {
|
|
||||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
|
||||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
|
||||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
|
||||||
$Prompt = $pyvenvCfg['prompt'];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
|
||||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
|
||||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Write-Verbose "Prompt = '$Prompt'"
|
|
||||||
Write-Verbose "VenvDir='$VenvDir'"
|
|
||||||
|
|
||||||
# Deactivate any currently active virtual environment, but leave the
|
|
||||||
# deactivate function in place.
|
|
||||||
deactivate -nondestructive
|
|
||||||
|
|
||||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
|
||||||
# that there is an activated venv.
|
|
||||||
$env:VIRTUAL_ENV = $VenvDir
|
|
||||||
|
|
||||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
|
||||||
|
|
||||||
Write-Verbose "Setting prompt to '$Prompt'"
|
|
||||||
|
|
||||||
# Set the prompt to include the env name
|
|
||||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
|
||||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
|
||||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
|
||||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
|
||||||
|
|
||||||
function global:prompt {
|
|
||||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
|
||||||
_OLD_VIRTUAL_PROMPT
|
|
||||||
}
|
|
||||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
|
||||||
}
|
|
||||||
|
|
||||||
# Clear PYTHONHOME
|
|
||||||
if (Test-Path -Path Env:PYTHONHOME) {
|
|
||||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
|
||||||
Remove-Item -Path Env:PYTHONHOME
|
|
||||||
}
|
|
||||||
|
|
||||||
# Add the venv to the PATH
|
|
||||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
|
||||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
|
Binary file not shown.
69
bin/activate
69
bin/activate
@ -1,69 +0,0 @@
|
|||||||
# This file must be used with "source bin/activate" *from bash*
|
|
||||||
# you cannot run it directly
|
|
||||||
|
|
||||||
deactivate () {
|
|
||||||
# reset old environment variables
|
|
||||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
|
||||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
|
||||||
export PATH
|
|
||||||
unset _OLD_VIRTUAL_PATH
|
|
||||||
fi
|
|
||||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
|
||||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
|
||||||
export PYTHONHOME
|
|
||||||
unset _OLD_VIRTUAL_PYTHONHOME
|
|
||||||
fi
|
|
||||||
|
|
||||||
# This should detect bash and zsh, which have a hash command that must
|
|
||||||
# be called to get it to forget past commands. Without forgetting
|
|
||||||
# past commands the $PATH changes we made may not be respected
|
|
||||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
|
||||||
hash -r 2> /dev/null
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
|
||||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
|
||||||
export PS1
|
|
||||||
unset _OLD_VIRTUAL_PS1
|
|
||||||
fi
|
|
||||||
|
|
||||||
unset VIRTUAL_ENV
|
|
||||||
unset VIRTUAL_ENV_PROMPT
|
|
||||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
|
||||||
# Self destruct!
|
|
||||||
unset -f deactivate
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# unset irrelevant variables
|
|
||||||
deactivate nondestructive
|
|
||||||
|
|
||||||
VIRTUAL_ENV="/var/www/impuls"
|
|
||||||
export VIRTUAL_ENV
|
|
||||||
|
|
||||||
_OLD_VIRTUAL_PATH="$PATH"
|
|
||||||
PATH="$VIRTUAL_ENV/bin:$PATH"
|
|
||||||
export PATH
|
|
||||||
|
|
||||||
# unset PYTHONHOME if set
|
|
||||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
|
||||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
|
||||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
|
||||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
|
||||||
unset PYTHONHOME
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
|
||||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
|
||||||
PS1="(impuls) ${PS1:-}"
|
|
||||||
export PS1
|
|
||||||
VIRTUAL_ENV_PROMPT="(impuls) "
|
|
||||||
export VIRTUAL_ENV_PROMPT
|
|
||||||
fi
|
|
||||||
|
|
||||||
# This should detect bash and zsh, which have a hash command that must
|
|
||||||
# be called to get it to forget past commands. Without forgetting
|
|
||||||
# past commands the $PATH changes we made may not be respected
|
|
||||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
|
||||||
hash -r 2> /dev/null
|
|
||||||
fi
|
|
@ -1,26 +0,0 @@
|
|||||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
|
||||||
# You cannot run it directly.
|
|
||||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
|
||||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
|
||||||
|
|
||||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
|
||||||
|
|
||||||
# Unset irrelevant variables.
|
|
||||||
deactivate nondestructive
|
|
||||||
|
|
||||||
setenv VIRTUAL_ENV "/var/www/impuls"
|
|
||||||
|
|
||||||
set _OLD_VIRTUAL_PATH="$PATH"
|
|
||||||
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
|
|
||||||
|
|
||||||
|
|
||||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
|
||||||
|
|
||||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
|
||||||
set prompt = "(impuls) $prompt"
|
|
||||||
setenv VIRTUAL_ENV_PROMPT "(impuls) "
|
|
||||||
endif
|
|
||||||
|
|
||||||
alias pydoc python -m pydoc
|
|
||||||
|
|
||||||
rehash
|
|
@ -1,69 +0,0 @@
|
|||||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
|
||||||
# (https://fishshell.com/); you cannot run it directly.
|
|
||||||
|
|
||||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
|
||||||
# reset old environment variables
|
|
||||||
if test -n "$_OLD_VIRTUAL_PATH"
|
|
||||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
|
||||||
set -e _OLD_VIRTUAL_PATH
|
|
||||||
end
|
|
||||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
|
||||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
|
||||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
|
||||||
end
|
|
||||||
|
|
||||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
|
||||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
|
||||||
# prevents error when using nested fish instances (Issue #93858)
|
|
||||||
if functions -q _old_fish_prompt
|
|
||||||
functions -e fish_prompt
|
|
||||||
functions -c _old_fish_prompt fish_prompt
|
|
||||||
functions -e _old_fish_prompt
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
set -e VIRTUAL_ENV
|
|
||||||
set -e VIRTUAL_ENV_PROMPT
|
|
||||||
if test "$argv[1]" != "nondestructive"
|
|
||||||
# Self-destruct!
|
|
||||||
functions -e deactivate
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# Unset irrelevant variables.
|
|
||||||
deactivate nondestructive
|
|
||||||
|
|
||||||
set -gx VIRTUAL_ENV "/var/www/impuls"
|
|
||||||
|
|
||||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
|
||||||
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
|
||||||
|
|
||||||
# Unset PYTHONHOME if set.
|
|
||||||
if set -q PYTHONHOME
|
|
||||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
|
||||||
set -e PYTHONHOME
|
|
||||||
end
|
|
||||||
|
|
||||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
|
||||||
# fish uses a function instead of an env var to generate the prompt.
|
|
||||||
|
|
||||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
|
||||||
functions -c fish_prompt _old_fish_prompt
|
|
||||||
|
|
||||||
# With the original prompt function renamed, we can override with our own.
|
|
||||||
function fish_prompt
|
|
||||||
# Save the return status of the last command.
|
|
||||||
set -l old_status $status
|
|
||||||
|
|
||||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
|
||||||
printf "%s%s%s" (set_color 4B8BBE) "(impuls) " (set_color normal)
|
|
||||||
|
|
||||||
# Restore the return status of the previous command.
|
|
||||||
echo "exit $old_status" | .
|
|
||||||
# Output the original/"old" prompt.
|
|
||||||
_old_fish_prompt
|
|
||||||
end
|
|
||||||
|
|
||||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
|
||||||
set -gx VIRTUAL_ENV_PROMPT "(impuls) "
|
|
||||||
end
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from cinderclient.shell import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from oslo_log.cmds.convert_json import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from django.core.management import execute_from_command_line
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(execute_from_command_line())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from freezerclient.shell import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from glanceclient.shell import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
54
bin/jp.py
54
bin/jp.py
@ -1,54 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import argparse
|
|
||||||
from pprint import pformat
|
|
||||||
|
|
||||||
import jmespath
|
|
||||||
from jmespath import exceptions
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = argparse.ArgumentParser()
|
|
||||||
parser.add_argument('expression')
|
|
||||||
parser.add_argument('-f', '--filename',
|
|
||||||
help=('The filename containing the input data. '
|
|
||||||
'If a filename is not given then data is '
|
|
||||||
'read from stdin.'))
|
|
||||||
parser.add_argument('--ast', action='store_true',
|
|
||||||
help=('Pretty print the AST, do not search the data.'))
|
|
||||||
args = parser.parse_args()
|
|
||||||
expression = args.expression
|
|
||||||
if args.ast:
|
|
||||||
# Only print the AST
|
|
||||||
expression = jmespath.compile(args.expression)
|
|
||||||
sys.stdout.write(pformat(expression.parsed))
|
|
||||||
sys.stdout.write('\n')
|
|
||||||
return 0
|
|
||||||
if args.filename:
|
|
||||||
with open(args.filename, 'r') as f:
|
|
||||||
data = json.load(f)
|
|
||||||
else:
|
|
||||||
data = sys.stdin.read()
|
|
||||||
data = json.loads(data)
|
|
||||||
try:
|
|
||||||
sys.stdout.write(json.dumps(
|
|
||||||
jmespath.search(expression, data), indent=4, ensure_ascii=False))
|
|
||||||
sys.stdout.write('\n')
|
|
||||||
except exceptions.ArityError as e:
|
|
||||||
sys.stderr.write("invalid-arity: %s\n" % e)
|
|
||||||
return 1
|
|
||||||
except exceptions.JMESPathTypeError as e:
|
|
||||||
sys.stderr.write("invalid-type: %s\n" % e)
|
|
||||||
return 1
|
|
||||||
except exceptions.UnknownFunctionError as e:
|
|
||||||
sys.stderr.write("unknown-function: %s\n" % e)
|
|
||||||
return 1
|
|
||||||
except exceptions.ParseError as e:
|
|
||||||
sys.stderr.write("syntax-error: %s\n" % e)
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.exit(main())
|
|
41
bin/jsondiff
41
bin/jsondiff
@ -1,41 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import jsonpatch
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description='Diff two JSON files')
|
|
||||||
parser.add_argument('FILE1', type=argparse.FileType('r'))
|
|
||||||
parser.add_argument('FILE2', type=argparse.FileType('r'))
|
|
||||||
parser.add_argument('--indent', type=int, default=None,
|
|
||||||
help='Indent output by n spaces')
|
|
||||||
parser.add_argument('-u', '--preserve-unicode', action='store_true',
|
|
||||||
help='Output Unicode character as-is without using Code Point')
|
|
||||||
parser.add_argument('-v', '--version', action='version',
|
|
||||||
version='%(prog)s ' + jsonpatch.__version__)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
try:
|
|
||||||
diff_files()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def diff_files():
|
|
||||||
""" Diffs two JSON files and prints a patch """
|
|
||||||
args = parser.parse_args()
|
|
||||||
doc1 = json.load(args.FILE1)
|
|
||||||
doc2 = json.load(args.FILE2)
|
|
||||||
patch = jsonpatch.make_patch(doc1, doc2)
|
|
||||||
if patch.patch:
|
|
||||||
print(json.dumps(patch.patch, indent=args.indent, ensure_ascii=not(args.preserve_unicode)))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
107
bin/jsonpatch
107
bin/jsonpatch
@ -1,107 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os.path
|
|
||||||
import json
|
|
||||||
import jsonpatch
|
|
||||||
import tempfile
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description='Apply a JSON patch on a JSON file')
|
|
||||||
parser.add_argument('ORIGINAL', type=argparse.FileType('r'),
|
|
||||||
help='Original file')
|
|
||||||
parser.add_argument('PATCH', type=argparse.FileType('r'),
|
|
||||||
nargs='?', default=sys.stdin,
|
|
||||||
help='Patch file (read from stdin if omitted)')
|
|
||||||
parser.add_argument('--indent', type=int, default=None,
|
|
||||||
help='Indent output by n spaces')
|
|
||||||
parser.add_argument('-b', '--backup', action='store_true',
|
|
||||||
help='Back up ORIGINAL if modifying in-place')
|
|
||||||
parser.add_argument('-i', '--in-place', action='store_true',
|
|
||||||
help='Modify ORIGINAL in-place instead of to stdout')
|
|
||||||
parser.add_argument('-v', '--version', action='version',
|
|
||||||
version='%(prog)s ' + jsonpatch.__version__)
|
|
||||||
parser.add_argument('-u', '--preserve-unicode', action='store_true',
|
|
||||||
help='Output Unicode character as-is without using Code Point')
|
|
||||||
|
|
||||||
def main():
|
|
||||||
try:
|
|
||||||
patch_files()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def patch_files():
|
|
||||||
""" Diffs two JSON files and prints a patch """
|
|
||||||
args = parser.parse_args()
|
|
||||||
doc = json.load(args.ORIGINAL)
|
|
||||||
patch = json.load(args.PATCH)
|
|
||||||
result = jsonpatch.apply_patch(doc, patch)
|
|
||||||
|
|
||||||
if args.in_place:
|
|
||||||
dirname = os.path.abspath(os.path.dirname(args.ORIGINAL.name))
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Attempt to replace the file atomically. We do this by
|
|
||||||
# creating a temporary file in the same directory as the
|
|
||||||
# original file so we can atomically move the new file over
|
|
||||||
# the original later. (This is done in the same directory
|
|
||||||
# because atomic renames do not work across mount points.)
|
|
||||||
|
|
||||||
fd, pathname = tempfile.mkstemp(dir=dirname)
|
|
||||||
fp = os.fdopen(fd, 'w')
|
|
||||||
atomic = True
|
|
||||||
|
|
||||||
except OSError:
|
|
||||||
# We failed to create the temporary file for an atomic
|
|
||||||
# replace, so fall back to non-atomic mode by backing up
|
|
||||||
# the original (if desired) and writing a new file.
|
|
||||||
|
|
||||||
if args.backup:
|
|
||||||
os.rename(args.ORIGINAL.name, args.ORIGINAL.name + '.orig')
|
|
||||||
fp = open(args.ORIGINAL.name, 'w')
|
|
||||||
atomic = False
|
|
||||||
|
|
||||||
else:
|
|
||||||
# Since we're not replacing the original file in-place, write
|
|
||||||
# the modified JSON to stdout instead.
|
|
||||||
|
|
||||||
fp = sys.stdout
|
|
||||||
|
|
||||||
# By this point we have some sort of file object we can write the
|
|
||||||
# modified JSON to.
|
|
||||||
|
|
||||||
json.dump(result, fp, indent=args.indent, ensure_ascii=not(args.preserve_unicode))
|
|
||||||
fp.write('\n')
|
|
||||||
|
|
||||||
if args.in_place:
|
|
||||||
# Close the new file. If we aren't replacing atomically, this
|
|
||||||
# is our last step, since everything else is already in place.
|
|
||||||
|
|
||||||
fp.close()
|
|
||||||
|
|
||||||
if atomic:
|
|
||||||
try:
|
|
||||||
# Complete the atomic replace by linking the original
|
|
||||||
# to a backup (if desired), fixing up the permissions
|
|
||||||
# on the temporary file, and moving it into place.
|
|
||||||
|
|
||||||
if args.backup:
|
|
||||||
os.link(args.ORIGINAL.name, args.ORIGINAL.name + '.orig')
|
|
||||||
os.chmod(pathname, os.stat(args.ORIGINAL.name).st_mode)
|
|
||||||
os.rename(pathname, args.ORIGINAL.name)
|
|
||||||
|
|
||||||
except OSError:
|
|
||||||
# In the event we could not actually do the atomic
|
|
||||||
# replace, unlink the original to move it out of the
|
|
||||||
# way and finally move the temporary file into place.
|
|
||||||
|
|
||||||
os.unlink(args.ORIGINAL.name)
|
|
||||||
os.rename(pathname, args.ORIGINAL.name)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
@ -1,69 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os.path
|
|
||||||
import json
|
|
||||||
import jsonpointer
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description='Resolve a JSON pointer on JSON files')
|
|
||||||
|
|
||||||
# Accept pointer as argument or as file
|
|
||||||
ptr_group = parser.add_mutually_exclusive_group(required=True)
|
|
||||||
|
|
||||||
ptr_group.add_argument('-f', '--pointer-file', type=argparse.FileType('r'),
|
|
||||||
nargs='?',
|
|
||||||
help='File containing a JSON pointer expression')
|
|
||||||
|
|
||||||
ptr_group.add_argument('POINTER', type=str, nargs='?',
|
|
||||||
help='A JSON pointer expression')
|
|
||||||
|
|
||||||
parser.add_argument('FILE', type=argparse.FileType('r'), nargs='+',
|
|
||||||
help='Files for which the pointer should be resolved')
|
|
||||||
parser.add_argument('--indent', type=int, default=None,
|
|
||||||
help='Indent output by n spaces')
|
|
||||||
parser.add_argument('-v', '--version', action='version',
|
|
||||||
version='%(prog)s ' + jsonpointer.__version__)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
try:
|
|
||||||
resolve_files()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_pointer(args):
|
|
||||||
if args.POINTER:
|
|
||||||
ptr = args.POINTER
|
|
||||||
elif args.pointer_file:
|
|
||||||
ptr = args.pointer_file.read().strip()
|
|
||||||
else:
|
|
||||||
parser.print_usage()
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
return ptr
|
|
||||||
|
|
||||||
|
|
||||||
def resolve_files():
|
|
||||||
""" Resolve a JSON pointer on JSON files """
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
ptr = parse_pointer(args)
|
|
||||||
|
|
||||||
for f in args.FILE:
|
|
||||||
doc = json.load(f)
|
|
||||||
try:
|
|
||||||
result = jsonpointer.resolve_pointer(doc, ptr)
|
|
||||||
print(json.dumps(result, indent=args.indent))
|
|
||||||
except jsonpointer.JsonPointerException as e:
|
|
||||||
print('Could not resolve pointer: %s' % str(e), file=sys.stderr)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from jsonschema.cli import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from oslo_concurrency.lockutils import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from markdown.__main__ import run
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(run())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from netaddr.cli import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from charset_normalizer.cli.normalizer import cli_detect
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(cli_detect())
|
|
8
bin/nova
8
bin/nova
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from novaclient.shell import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from openstack.cloud.cmd.inventory import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from oslo_config.generator import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from oslo_config.validator import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
8
bin/pbr
8
bin/pbr
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from pbr.cmd.main import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
8
bin/pip
8
bin/pip
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from pip._internal.cli.main import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
8
bin/pip3
8
bin/pip3
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from pip._internal.cli.main import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from pip._internal.cli.main import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from oslo_privsep.daemon import helper_main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(helper_main())
|
|
@ -1 +0,0 @@
|
|||||||
python3
|
|
@ -1 +0,0 @@
|
|||||||
/usr/lib/python-exec/python3.11/python3
|
|
@ -1 +0,0 @@
|
|||||||
python3
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from sqlparse.__main__ import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from swiftclient.shell import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
#!/var/www/impuls/bin/python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from yappi import main
|
|
||||||
if __name__ == '__main__':
|
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
|
||||||
sys.exit(main())
|
|
@ -1,8 +0,0 @@
|
|||||||
# os-brick command filters
|
|
||||||
# This file should be owned by (and only-writeable by) the root user
|
|
||||||
|
|
||||||
[Filters]
|
|
||||||
# privileged/__init__.py: priv_context.PrivContext(default)
|
|
||||||
# This line ties the superuser privs with the config files, context name,
|
|
||||||
# and (implicitly) the actual python code invoked.
|
|
||||||
privsep-rootwrap: RegExpFilter, privsep-helper, root, privsep-helper, --config-file, /etc/(?!\.\.).*, --privsep_context, os_brick.privileged.default, --privsep_sock_path, /tmp/.*
|
|
@ -1,164 +0,0 @@
|
|||||||
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
|
|
||||||
|
|
||||||
/* Greenlet object interface */
|
|
||||||
|
|
||||||
#ifndef Py_GREENLETOBJECT_H
|
|
||||||
#define Py_GREENLETOBJECT_H
|
|
||||||
|
|
||||||
|
|
||||||
#include <Python.h>
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
|
||||||
extern "C" {
|
|
||||||
#endif
|
|
||||||
|
|
||||||
/* This is deprecated and undocumented. It does not change. */
|
|
||||||
#define GREENLET_VERSION "1.0.0"
|
|
||||||
|
|
||||||
#ifndef GREENLET_MODULE
|
|
||||||
#define implementation_ptr_t void*
|
|
||||||
#endif
|
|
||||||
|
|
||||||
typedef struct _greenlet {
|
|
||||||
PyObject_HEAD
|
|
||||||
PyObject* weakreflist;
|
|
||||||
PyObject* dict;
|
|
||||||
implementation_ptr_t pimpl;
|
|
||||||
} PyGreenlet;
|
|
||||||
|
|
||||||
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
|
|
||||||
|
|
||||||
|
|
||||||
/* C API functions */
|
|
||||||
|
|
||||||
/* Total number of symbols that are exported */
|
|
||||||
#define PyGreenlet_API_pointers 12
|
|
||||||
|
|
||||||
#define PyGreenlet_Type_NUM 0
|
|
||||||
#define PyExc_GreenletError_NUM 1
|
|
||||||
#define PyExc_GreenletExit_NUM 2
|
|
||||||
|
|
||||||
#define PyGreenlet_New_NUM 3
|
|
||||||
#define PyGreenlet_GetCurrent_NUM 4
|
|
||||||
#define PyGreenlet_Throw_NUM 5
|
|
||||||
#define PyGreenlet_Switch_NUM 6
|
|
||||||
#define PyGreenlet_SetParent_NUM 7
|
|
||||||
|
|
||||||
#define PyGreenlet_MAIN_NUM 8
|
|
||||||
#define PyGreenlet_STARTED_NUM 9
|
|
||||||
#define PyGreenlet_ACTIVE_NUM 10
|
|
||||||
#define PyGreenlet_GET_PARENT_NUM 11
|
|
||||||
|
|
||||||
#ifndef GREENLET_MODULE
|
|
||||||
/* This section is used by modules that uses the greenlet C API */
|
|
||||||
static void** _PyGreenlet_API = NULL;
|
|
||||||
|
|
||||||
# define PyGreenlet_Type \
|
|
||||||
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
|
|
||||||
|
|
||||||
# define PyExc_GreenletError \
|
|
||||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
|
|
||||||
|
|
||||||
# define PyExc_GreenletExit \
|
|
||||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
|
|
||||||
|
|
||||||
/*
|
|
||||||
* PyGreenlet_New(PyObject *args)
|
|
||||||
*
|
|
||||||
* greenlet.greenlet(run, parent=None)
|
|
||||||
*/
|
|
||||||
# define PyGreenlet_New \
|
|
||||||
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_New_NUM])
|
|
||||||
|
|
||||||
/*
|
|
||||||
* PyGreenlet_GetCurrent(void)
|
|
||||||
*
|
|
||||||
* greenlet.getcurrent()
|
|
||||||
*/
|
|
||||||
# define PyGreenlet_GetCurrent \
|
|
||||||
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
|
|
||||||
|
|
||||||
/*
|
|
||||||
* PyGreenlet_Throw(
|
|
||||||
* PyGreenlet *greenlet,
|
|
||||||
* PyObject *typ,
|
|
||||||
* PyObject *val,
|
|
||||||
* PyObject *tb)
|
|
||||||
*
|
|
||||||
* g.throw(...)
|
|
||||||
*/
|
|
||||||
# define PyGreenlet_Throw \
|
|
||||||
(*(PyObject * (*)(PyGreenlet * self, \
|
|
||||||
PyObject * typ, \
|
|
||||||
PyObject * val, \
|
|
||||||
PyObject * tb)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_Throw_NUM])
|
|
||||||
|
|
||||||
/*
|
|
||||||
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
|
|
||||||
*
|
|
||||||
* g.switch(*args, **kwargs)
|
|
||||||
*/
|
|
||||||
# define PyGreenlet_Switch \
|
|
||||||
(*(PyObject * \
|
|
||||||
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_Switch_NUM])
|
|
||||||
|
|
||||||
/*
|
|
||||||
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
|
|
||||||
*
|
|
||||||
* g.parent = new_parent
|
|
||||||
*/
|
|
||||||
# define PyGreenlet_SetParent \
|
|
||||||
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
|
|
||||||
|
|
||||||
/*
|
|
||||||
* PyGreenlet_GetParent(PyObject* greenlet)
|
|
||||||
*
|
|
||||||
* return greenlet.parent;
|
|
||||||
*
|
|
||||||
* This could return NULL even if there is no exception active.
|
|
||||||
* If it does not return NULL, you are responsible for decrementing the
|
|
||||||
* reference count.
|
|
||||||
*/
|
|
||||||
# define PyGreenlet_GetParent \
|
|
||||||
(*(PyGreenlet* (*)(PyGreenlet*)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
|
|
||||||
|
|
||||||
/*
|
|
||||||
* deprecated, undocumented alias.
|
|
||||||
*/
|
|
||||||
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
|
|
||||||
|
|
||||||
# define PyGreenlet_MAIN \
|
|
||||||
(*(int (*)(PyGreenlet*)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
|
|
||||||
|
|
||||||
# define PyGreenlet_STARTED \
|
|
||||||
(*(int (*)(PyGreenlet*)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
|
|
||||||
|
|
||||||
# define PyGreenlet_ACTIVE \
|
|
||||||
(*(int (*)(PyGreenlet*)) \
|
|
||||||
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/* Macro that imports greenlet and initializes C API */
|
|
||||||
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
|
|
||||||
keep the older definition to be sure older code that might have a copy of
|
|
||||||
the header still works. */
|
|
||||||
# define PyGreenlet_Import() \
|
|
||||||
{ \
|
|
||||||
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
|
|
||||||
}
|
|
||||||
|
|
||||||
#endif /* GREENLET_MODULE */
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
#endif /* !Py_GREENLETOBJECT_H */
|
|
@ -1 +0,0 @@
|
|||||||
pip
|
|
@ -1,19 +0,0 @@
|
|||||||
This is the MIT license: http://www.opensource.org/licenses/mit-license.php
|
|
||||||
|
|
||||||
Copyright (c) Alex Grönholm
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this
|
|
||||||
software and associated documentation files (the "Software"), to deal in the Software
|
|
||||||
without restriction, including without limitation the rights to use, copy, modify, merge,
|
|
||||||
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
|
|
||||||
to whom the Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or
|
|
||||||
substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
|
|
||||||
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
|
|
||||||
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
|
||||||
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
|
||||||
DEALINGS IN THE SOFTWARE.
|
|
@ -1,138 +0,0 @@
|
|||||||
Metadata-Version: 2.1
|
|
||||||
Name: APScheduler
|
|
||||||
Version: 3.10.1
|
|
||||||
Summary: In-process task scheduler with Cron-like capabilities
|
|
||||||
Home-page: https://github.com/agronholm/apscheduler
|
|
||||||
Author: Alex Grönholm
|
|
||||||
Author-email: apscheduler@nextday.fi
|
|
||||||
License: MIT
|
|
||||||
Keywords: scheduling cron
|
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: License :: OSI Approved :: MIT License
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
|
||||||
Classifier: Programming Language :: Python :: 3.7
|
|
||||||
Classifier: Programming Language :: Python :: 3.8
|
|
||||||
Classifier: Programming Language :: Python :: 3.9
|
|
||||||
Classifier: Programming Language :: Python :: 3.10
|
|
||||||
Classifier: Programming Language :: Python :: 3.11
|
|
||||||
Requires-Python: >=3.6
|
|
||||||
License-File: LICENSE.txt
|
|
||||||
Requires-Dist: setuptools (>=0.7)
|
|
||||||
Requires-Dist: six (>=1.4.0)
|
|
||||||
Requires-Dist: pytz
|
|
||||||
Requires-Dist: tzlocal (!=3.*,>=2.0)
|
|
||||||
Provides-Extra: doc
|
|
||||||
Requires-Dist: sphinx ; extra == 'doc'
|
|
||||||
Requires-Dist: sphinx-rtd-theme ; extra == 'doc'
|
|
||||||
Provides-Extra: gevent
|
|
||||||
Requires-Dist: gevent ; extra == 'gevent'
|
|
||||||
Provides-Extra: mongodb
|
|
||||||
Requires-Dist: pymongo (>=3.0) ; extra == 'mongodb'
|
|
||||||
Provides-Extra: redis
|
|
||||||
Requires-Dist: redis (>=3.0) ; extra == 'redis'
|
|
||||||
Provides-Extra: rethinkdb
|
|
||||||
Requires-Dist: rethinkdb (>=2.4.0) ; extra == 'rethinkdb'
|
|
||||||
Provides-Extra: sqlalchemy
|
|
||||||
Requires-Dist: sqlalchemy (>=1.4) ; extra == 'sqlalchemy'
|
|
||||||
Provides-Extra: testing
|
|
||||||
Requires-Dist: pytest ; extra == 'testing'
|
|
||||||
Requires-Dist: pytest-asyncio ; extra == 'testing'
|
|
||||||
Requires-Dist: pytest-cov ; extra == 'testing'
|
|
||||||
Requires-Dist: pytest-tornado5 ; extra == 'testing'
|
|
||||||
Provides-Extra: tornado
|
|
||||||
Requires-Dist: tornado (>=4.3) ; extra == 'tornado'
|
|
||||||
Provides-Extra: twisted
|
|
||||||
Requires-Dist: twisted ; extra == 'twisted'
|
|
||||||
Provides-Extra: zookeeper
|
|
||||||
Requires-Dist: kazoo ; extra == 'zookeeper'
|
|
||||||
|
|
||||||
.. image:: https://github.com/agronholm/apscheduler/workflows/Python%20codeqa/test/badge.svg?branch=3.x
|
|
||||||
:target: https://github.com/agronholm/apscheduler/actions?query=workflow%3A%22Python+codeqa%2Ftest%22+branch%3A3.x
|
|
||||||
:alt: Build Status
|
|
||||||
.. image:: https://coveralls.io/repos/github/agronholm/apscheduler/badge.svg?branch=3.x
|
|
||||||
:target: https://coveralls.io/github/agronholm/apscheduler?branch=3.x
|
|
||||||
:alt: Code Coverage
|
|
||||||
.. image:: https://readthedocs.org/projects/apscheduler/badge/?version=3.x
|
|
||||||
:target: https://apscheduler.readthedocs.io/en/master/?badge=3.x
|
|
||||||
:alt: Documentation
|
|
||||||
|
|
||||||
Advanced Python Scheduler (APScheduler) is a Python library that lets you schedule your Python code
|
|
||||||
to be executed later, either just once or periodically. You can add new jobs or remove old ones on
|
|
||||||
the fly as you please. If you store your jobs in a database, they will also survive scheduler
|
|
||||||
restarts and maintain their state. When the scheduler is restarted, it will then run all the jobs
|
|
||||||
it should have run while it was offline [#f1]_.
|
|
||||||
|
|
||||||
Among other things, APScheduler can be used as a cross-platform, application specific replacement
|
|
||||||
to platform specific schedulers, such as the cron daemon or the Windows task scheduler. Please
|
|
||||||
note, however, that APScheduler is **not** a daemon or service itself, nor does it come with any
|
|
||||||
command line tools. It is primarily meant to be run inside existing applications. That said,
|
|
||||||
APScheduler does provide some building blocks for you to build a scheduler service or to run a
|
|
||||||
dedicated scheduler process.
|
|
||||||
|
|
||||||
APScheduler has three built-in scheduling systems you can use:
|
|
||||||
|
|
||||||
* Cron-style scheduling (with optional start/end times)
|
|
||||||
* Interval-based execution (runs jobs on even intervals, with optional start/end times)
|
|
||||||
* One-off delayed execution (runs jobs once, on a set date/time)
|
|
||||||
|
|
||||||
You can mix and match scheduling systems and the backends where the jobs are stored any way you
|
|
||||||
like. Supported backends for storing jobs include:
|
|
||||||
|
|
||||||
* Memory
|
|
||||||
* `SQLAlchemy <http://www.sqlalchemy.org/>`_ (any RDBMS supported by SQLAlchemy works)
|
|
||||||
* `MongoDB <http://www.mongodb.org/>`_
|
|
||||||
* `Redis <http://redis.io/>`_
|
|
||||||
* `RethinkDB <https://www.rethinkdb.com/>`_
|
|
||||||
* `ZooKeeper <https://zookeeper.apache.org/>`_
|
|
||||||
|
|
||||||
APScheduler also integrates with several common Python frameworks, like:
|
|
||||||
|
|
||||||
* `asyncio <http://docs.python.org/3.4/library/asyncio.html>`_ (:pep:`3156`)
|
|
||||||
* `gevent <http://www.gevent.org/>`_
|
|
||||||
* `Tornado <http://www.tornadoweb.org/>`_
|
|
||||||
* `Twisted <http://twistedmatrix.com/>`_
|
|
||||||
* `Qt <http://qt-project.org/>`_ (using either
|
|
||||||
`PyQt <http://www.riverbankcomputing.com/software/pyqt/intro>`_ ,
|
|
||||||
`PySide6 <https://wiki.qt.io/Qt_for_Python>`_ ,
|
|
||||||
`PySide2 <https://wiki.qt.io/Qt_for_Python>`_ or
|
|
||||||
`PySide <http://qt-project.org/wiki/PySide>`_)
|
|
||||||
|
|
||||||
There are third party solutions for integrating APScheduler with other frameworks:
|
|
||||||
|
|
||||||
* `Django <https://github.com/jarekwg/django-apscheduler>`_
|
|
||||||
* `Flask <https://github.com/viniciuschiele/flask-apscheduler>`_
|
|
||||||
|
|
||||||
|
|
||||||
.. [#f1] The cutoff period for this is also configurable.
|
|
||||||
|
|
||||||
|
|
||||||
Documentation
|
|
||||||
-------------
|
|
||||||
|
|
||||||
Documentation can be found `here <https://apscheduler.readthedocs.io/>`_.
|
|
||||||
|
|
||||||
|
|
||||||
Source
|
|
||||||
------
|
|
||||||
|
|
||||||
The source can be browsed at `Github <https://github.com/agronholm/apscheduler/tree/3.x>`_.
|
|
||||||
|
|
||||||
|
|
||||||
Reporting bugs
|
|
||||||
--------------
|
|
||||||
|
|
||||||
A `bug tracker <https://github.com/agronholm/apscheduler/issues>`_ is provided by Github.
|
|
||||||
|
|
||||||
|
|
||||||
Getting help
|
|
||||||
------------
|
|
||||||
|
|
||||||
If you have problems or other questions, you can either:
|
|
||||||
|
|
||||||
* Ask in the `apscheduler <https://gitter.im/apscheduler/Lobby>`_ room on Gitter
|
|
||||||
* Ask on the `APScheduler GitHub discussion forum <https://github.com/agronholm/apscheduler/discussions>`_, or
|
|
||||||
* Ask on `StackOverflow <http://stackoverflow.com/questions/tagged/apscheduler>`_ and tag your
|
|
||||||
question with the ``apscheduler`` tag
|
|
@ -1,84 +0,0 @@
|
|||||||
APScheduler-3.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
|
||||||
APScheduler-3.10.1.dist-info/LICENSE.txt,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130
|
|
||||||
APScheduler-3.10.1.dist-info/METADATA,sha256=nShBYOJMsJ9iwrKP_x4rAVN87_NmBOwpdPPn85sY9G4,5676
|
|
||||||
APScheduler-3.10.1.dist-info/RECORD,,
|
|
||||||
APScheduler-3.10.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
APScheduler-3.10.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
|
|
||||||
APScheduler-3.10.1.dist-info/entry_points.txt,sha256=KMxTUp2QykDNL6w-WBU5xrk8ebroCPEBN0eZtyL3x2w,1147
|
|
||||||
APScheduler-3.10.1.dist-info/top_level.txt,sha256=O3oMCWxG-AHkecUoO6Ze7-yYjWrttL95uHO8-RFdYvE,12
|
|
||||||
apscheduler/__init__.py,sha256=qFEK2ysRBcLiYmm3deyJJ1avUOugaM_nCGHMD42WMBw,380
|
|
||||||
apscheduler/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
apscheduler/__pycache__/events.cpython-311.pyc,,
|
|
||||||
apscheduler/__pycache__/job.cpython-311.pyc,,
|
|
||||||
apscheduler/__pycache__/util.cpython-311.pyc,,
|
|
||||||
apscheduler/events.py,sha256=KRMTDQUS6d2uVnrQvPoz3ZPV5V9XKsCAZLsgx913FFo,3593
|
|
||||||
apscheduler/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
apscheduler/executors/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/asyncio.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/base.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/base_py3.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/debug.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/gevent.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/pool.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/tornado.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/__pycache__/twisted.cpython-311.pyc,,
|
|
||||||
apscheduler/executors/asyncio.py,sha256=9m4wvRHSSYplllxAQyxWkPVcFdyFG5aZbHt5nfWKIAc,1859
|
|
||||||
apscheduler/executors/base.py,sha256=hogiMc_t-huw6BMod0HEeY2FhRNmAAUyNNuBHvIX31M,5336
|
|
||||||
apscheduler/executors/base_py3.py,sha256=8WOpTeX1NA-spdbEQ1oJMh5T2O_t2UdsaSnAh-iEWe0,1831
|
|
||||||
apscheduler/executors/debug.py,sha256=15_ogSBzl8RRCfBYDnkIV2uMH8cLk1KImYmBa_NVGpc,573
|
|
||||||
apscheduler/executors/gevent.py,sha256=aulrNmoefyBgrOkH9awRhFiXIDnSCnZ4U0o0_JXIXgc,777
|
|
||||||
apscheduler/executors/pool.py,sha256=h4cYgKMRhjpNHmkhlogHLbmT4O_q6HePXVLmiJIHC3c,2484
|
|
||||||
apscheduler/executors/tornado.py,sha256=DU75VaQ9R6nBuy8lbPUvDKUgsuJcZqwAvURC5vg3r6w,1780
|
|
||||||
apscheduler/executors/twisted.py,sha256=bRoU0C4BoVcS6_BjKD5wfUs0IJpGkmLsRAcMH2rJJss,778
|
|
||||||
apscheduler/job.py,sha256=JCRERBpfWLuomPiNNHX-jrluEwfHkdscEmz4i0Y8rao,11216
|
|
||||||
apscheduler/jobstores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
apscheduler/jobstores/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/__pycache__/base.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/__pycache__/memory.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/__pycache__/mongodb.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/__pycache__/redis.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/__pycache__/rethinkdb.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/__pycache__/sqlalchemy.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/__pycache__/zookeeper.cpython-311.pyc,,
|
|
||||||
apscheduler/jobstores/base.py,sha256=DXzSW9XscueHZHMvy1qFiG-vYqUl_MMv0n0uBSZWXGo,4523
|
|
||||||
apscheduler/jobstores/memory.py,sha256=ZxWiKsqfsCHFvac-6X9BztuhnuSxlOYi1dhT6g-pjQo,3655
|
|
||||||
apscheduler/jobstores/mongodb.py,sha256=r9t2neNuzfPuf_omDm0KdkLGPZXLksiH-U3j13MIBlM,5347
|
|
||||||
apscheduler/jobstores/redis.py,sha256=kjQDIzPXz-Yq976U9HK3aMkcCI_QRLKgTADQWKewtik,5483
|
|
||||||
apscheduler/jobstores/rethinkdb.py,sha256=k1rSLYJqejuhQxJY3pXwHAQYcpZ1QFJsoQ8n0oEu5MM,5863
|
|
||||||
apscheduler/jobstores/sqlalchemy.py,sha256=LIA9iSGMvuPTVqGHdztgQs4YFmYN1xqXvpJauYNK470,6529
|
|
||||||
apscheduler/jobstores/zookeeper.py,sha256=avGLXaJGjHD0F7uG6rLJ2gg_TXNqXDEM4PqOu56f-Xg,6363
|
|
||||||
apscheduler/schedulers/__init__.py,sha256=jM63xA_K7GSToBenhsz-SCcqfhk1pdEVb6ajwoO5Kqg,406
|
|
||||||
apscheduler/schedulers/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/asyncio.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/background.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/base.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/blocking.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/gevent.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/qt.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/tornado.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/__pycache__/twisted.cpython-311.pyc,,
|
|
||||||
apscheduler/schedulers/asyncio.py,sha256=iJO6QUo1oW16giOU_nW8WMu2b9NTWT4Tg2gY586G08w,1994
|
|
||||||
apscheduler/schedulers/background.py,sha256=751p-f5Di6pY4x6UXlZggpxQ5k2ObJ_Q5wSeWmKHS8o,1566
|
|
||||||
apscheduler/schedulers/base.py,sha256=M8WWEKjG-VfyL_UF1Wgbjk01yxa45t_GXfKyvtY0RMs,43228
|
|
||||||
apscheduler/schedulers/blocking.py,sha256=8nubfJ4PoUnAkEY6WRQG4COzG4SxGyW9PjuVPhDAbsk,985
|
|
||||||
apscheduler/schedulers/gevent.py,sha256=csPBvV75FGcboXXsdex6fCD7J54QgBddYNdWj62ZO9g,1031
|
|
||||||
apscheduler/schedulers/qt.py,sha256=aooX3slyDwLglojae5t2tz6NlqfceZYCeXAIS0LQVCk,1613
|
|
||||||
apscheduler/schedulers/tornado.py,sha256=D9Vaq3Ee9EFiXa1jDy9tedI048gR_YT_LAFUWqO_uEw,1926
|
|
||||||
apscheduler/schedulers/twisted.py,sha256=D5EBjjMRtMBxy0_aAURcULAI8Ky2IvCTr9tK9sO1rYk,1844
|
|
||||||
apscheduler/triggers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
apscheduler/triggers/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/__pycache__/base.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/__pycache__/combining.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/__pycache__/date.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/__pycache__/interval.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/base.py,sha256=BvBJdOnIeVClXPXeInzYK25cN64jAc4a9IiEQucSiVk,1355
|
|
||||||
apscheduler/triggers/combining.py,sha256=klaSoBp1kyrPX5D3gBpNTlsGKjks5QeKPW5JN_MVs30,3449
|
|
||||||
apscheduler/triggers/cron/__init__.py,sha256=D39BQ63qWyk6XZcSuWth46ELQ3VIFpYjUHh7Kj65Z9M,9251
|
|
||||||
apscheduler/triggers/cron/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/cron/__pycache__/expressions.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/cron/__pycache__/fields.cpython-311.pyc,,
|
|
||||||
apscheduler/triggers/cron/expressions.py,sha256=hu1kq0mKvivIw7U0D0Nnrbuk3q01dCuhZ7SHRPw6qhI,9184
|
|
||||||
apscheduler/triggers/cron/fields.py,sha256=NWPClh1NgSOpTlJ3sm1TXM_ViC2qJGKWkd_vg0xsw7o,3510
|
|
||||||
apscheduler/triggers/date.py,sha256=RrfB1PNO9G9e91p1BOf-y_TseVHQQR-KJPhNdPpAHcU,1705
|
|
||||||
apscheduler/triggers/interval.py,sha256=ABjcZFaGYAAgdAaUQIuLr9_dLszIifu88qaXrJmdxQ4,4377
|
|
||||||
apscheduler/util.py,sha256=zaDgtfj1TzSZp7TGyC_57Gq96hrcrhzP41pwZ0xbBxA,13846
|
|
@ -1,5 +0,0 @@
|
|||||||
Wheel-Version: 1.0
|
|
||||||
Generator: bdist_wheel (0.38.4)
|
|
||||||
Root-Is-Purelib: true
|
|
||||||
Tag: py3-none-any
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
|||||||
[apscheduler.executors]
|
|
||||||
asyncio = apscheduler.executors.asyncio:AsyncIOExecutor [asyncio]
|
|
||||||
debug = apscheduler.executors.debug:DebugExecutor
|
|
||||||
gevent = apscheduler.executors.gevent:GeventExecutor [gevent]
|
|
||||||
processpool = apscheduler.executors.pool:ProcessPoolExecutor
|
|
||||||
threadpool = apscheduler.executors.pool:ThreadPoolExecutor
|
|
||||||
tornado = apscheduler.executors.tornado:TornadoExecutor [tornado]
|
|
||||||
twisted = apscheduler.executors.twisted:TwistedExecutor [twisted]
|
|
||||||
|
|
||||||
[apscheduler.jobstores]
|
|
||||||
memory = apscheduler.jobstores.memory:MemoryJobStore
|
|
||||||
mongodb = apscheduler.jobstores.mongodb:MongoDBJobStore [mongodb]
|
|
||||||
redis = apscheduler.jobstores.redis:RedisJobStore [redis]
|
|
||||||
rethinkdb = apscheduler.jobstores.rethinkdb:RethinkDBJobStore [rethinkdb]
|
|
||||||
sqlalchemy = apscheduler.jobstores.sqlalchemy:SQLAlchemyJobStore [sqlalchemy]
|
|
||||||
zookeeper = apscheduler.jobstores.zookeeper:ZooKeeperJobStore [zookeeper]
|
|
||||||
|
|
||||||
[apscheduler.triggers]
|
|
||||||
and = apscheduler.triggers.combining:AndTrigger
|
|
||||||
cron = apscheduler.triggers.cron:CronTrigger
|
|
||||||
date = apscheduler.triggers.date:DateTrigger
|
|
||||||
interval = apscheduler.triggers.interval:IntervalTrigger
|
|
||||||
or = apscheduler.triggers.combining:OrTrigger
|
|
@ -1 +0,0 @@
|
|||||||
apscheduler
|
|
File diff suppressed because it is too large
Load Diff
@ -1 +0,0 @@
|
|||||||
pip
|
|
@ -1,27 +0,0 @@
|
|||||||
Copyright (c) Django Software Foundation and individual contributors.
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer in the
|
|
||||||
documentation and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
3. Neither the name of Django nor the names of its contributors may be used
|
|
||||||
to endorse or promote products derived from this software without
|
|
||||||
specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
|
||||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
||||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
||||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
|
||||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -1,290 +0,0 @@
|
|||||||
Django is licensed under the three-clause BSD license; see the file
|
|
||||||
LICENSE for details.
|
|
||||||
|
|
||||||
Django includes code from the Python standard library, which is licensed under
|
|
||||||
the Python license, a permissive open source license. The copyright and license
|
|
||||||
is included below for compliance with Python's terms.
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
||||||
Copyright (c) 2001-present Python Software Foundation; All Rights Reserved
|
|
||||||
|
|
||||||
A. HISTORY OF THE SOFTWARE
|
|
||||||
==========================
|
|
||||||
|
|
||||||
Python was created in the early 1990s by Guido van Rossum at Stichting
|
|
||||||
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
|
||||||
as a successor of a language called ABC. Guido remains Python's
|
|
||||||
principal author, although it includes many contributions from others.
|
|
||||||
|
|
||||||
In 1995, Guido continued his work on Python at the Corporation for
|
|
||||||
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
|
||||||
in Reston, Virginia where he released several versions of the
|
|
||||||
software.
|
|
||||||
|
|
||||||
In May 2000, Guido and the Python core development team moved to
|
|
||||||
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
|
||||||
year, the PythonLabs team moved to Digital Creations, which became
|
|
||||||
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
|
|
||||||
https://www.python.org/psf/) was formed, a non-profit organization
|
|
||||||
created specifically to own Python-related Intellectual Property.
|
|
||||||
Zope Corporation was a sponsoring member of the PSF.
|
|
||||||
|
|
||||||
All Python releases are Open Source (see http://www.opensource.org for
|
|
||||||
the Open Source Definition). Historically, most, but not all, Python
|
|
||||||
releases have also been GPL-compatible; the table below summarizes
|
|
||||||
the various releases.
|
|
||||||
|
|
||||||
Release Derived Year Owner GPL-
|
|
||||||
from compatible? (1)
|
|
||||||
|
|
||||||
0.9.0 thru 1.2 1991-1995 CWI yes
|
|
||||||
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
|
||||||
1.6 1.5.2 2000 CNRI no
|
|
||||||
2.0 1.6 2000 BeOpen.com no
|
|
||||||
1.6.1 1.6 2001 CNRI yes (2)
|
|
||||||
2.1 2.0+1.6.1 2001 PSF no
|
|
||||||
2.0.1 2.0+1.6.1 2001 PSF yes
|
|
||||||
2.1.1 2.1+2.0.1 2001 PSF yes
|
|
||||||
2.1.2 2.1.1 2002 PSF yes
|
|
||||||
2.1.3 2.1.2 2002 PSF yes
|
|
||||||
2.2 and above 2.1.1 2001-now PSF yes
|
|
||||||
|
|
||||||
Footnotes:
|
|
||||||
|
|
||||||
(1) GPL-compatible doesn't mean that we're distributing Python under
|
|
||||||
the GPL. All Python licenses, unlike the GPL, let you distribute
|
|
||||||
a modified version without making your changes open source. The
|
|
||||||
GPL-compatible licenses make it possible to combine Python with
|
|
||||||
other software that is released under the GPL; the others don't.
|
|
||||||
|
|
||||||
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
|
||||||
because its license has a choice of law clause. According to
|
|
||||||
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
|
||||||
is "not incompatible" with the GPL.
|
|
||||||
|
|
||||||
Thanks to the many outside volunteers who have worked under Guido's
|
|
||||||
direction to make these releases possible.
|
|
||||||
|
|
||||||
|
|
||||||
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
|
||||||
===============================================================
|
|
||||||
|
|
||||||
Python software and documentation are licensed under the
|
|
||||||
Python Software Foundation License Version 2.
|
|
||||||
|
|
||||||
Starting with Python 3.8.6, examples, recipes, and other code in
|
|
||||||
the documentation are dual licensed under the PSF License Version 2
|
|
||||||
and the Zero-Clause BSD license.
|
|
||||||
|
|
||||||
Some software incorporated into Python is under different licenses.
|
|
||||||
The licenses are listed with code falling under that license.
|
|
||||||
|
|
||||||
|
|
||||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
|
||||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
|
||||||
otherwise using this software ("Python") in source or binary form and
|
|
||||||
its associated documentation.
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
|
||||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
|
||||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
|
||||||
distribute, and otherwise use Python alone or in any derivative version,
|
|
||||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
|
||||||
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
|
||||||
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation;
|
|
||||||
All Rights Reserved" are retained in Python alone or in any derivative version
|
|
||||||
prepared by Licensee.
|
|
||||||
|
|
||||||
3. In the event Licensee prepares a derivative work that is based on
|
|
||||||
or incorporates Python or any part thereof, and wants to make
|
|
||||||
the derivative work available to others as provided herein, then
|
|
||||||
Licensee hereby agrees to include in any such work a brief summary of
|
|
||||||
the changes made to Python.
|
|
||||||
|
|
||||||
4. PSF is making Python available to Licensee on an "AS IS"
|
|
||||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
|
||||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
|
||||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
|
||||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
6. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
7. Nothing in this License Agreement shall be deemed to create any
|
|
||||||
relationship of agency, partnership, or joint venture between PSF and
|
|
||||||
Licensee. This License Agreement does not grant permission to use PSF
|
|
||||||
trademarks or trade name in a trademark sense to endorse or promote
|
|
||||||
products or services of Licensee, or any third party.
|
|
||||||
|
|
||||||
8. By copying, installing or otherwise using Python, Licensee
|
|
||||||
agrees to be bound by the terms and conditions of this License
|
|
||||||
Agreement.
|
|
||||||
|
|
||||||
|
|
||||||
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
|
||||||
-------------------------------------------
|
|
||||||
|
|
||||||
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
|
||||||
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
|
||||||
Individual or Organization ("Licensee") accessing and otherwise using
|
|
||||||
this software in source or binary form and its associated
|
|
||||||
documentation ("the Software").
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this BeOpen Python License
|
|
||||||
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
|
||||||
royalty-free, world-wide license to reproduce, analyze, test, perform
|
|
||||||
and/or display publicly, prepare derivative works, distribute, and
|
|
||||||
otherwise use the Software alone or in any derivative version,
|
|
||||||
provided, however, that the BeOpen Python License is retained in the
|
|
||||||
Software, alone or in any derivative version prepared by Licensee.
|
|
||||||
|
|
||||||
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
|
||||||
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
|
||||||
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
|
||||||
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
|
||||||
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
5. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
6. This License Agreement shall be governed by and interpreted in all
|
|
||||||
respects by the law of the State of California, excluding conflict of
|
|
||||||
law provisions. Nothing in this License Agreement shall be deemed to
|
|
||||||
create any relationship of agency, partnership, or joint venture
|
|
||||||
between BeOpen and Licensee. This License Agreement does not grant
|
|
||||||
permission to use BeOpen trademarks or trade names in a trademark
|
|
||||||
sense to endorse or promote products or services of Licensee, or any
|
|
||||||
third party. As an exception, the "BeOpen Python" logos available at
|
|
||||||
http://www.pythonlabs.com/logos.html may be used according to the
|
|
||||||
permissions granted on that web page.
|
|
||||||
|
|
||||||
7. By copying, installing or otherwise using the software, Licensee
|
|
||||||
agrees to be bound by the terms and conditions of this License
|
|
||||||
Agreement.
|
|
||||||
|
|
||||||
|
|
||||||
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
|
||||||
---------------------------------------
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between the Corporation for National
|
|
||||||
Research Initiatives, having an office at 1895 Preston White Drive,
|
|
||||||
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
|
||||||
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
|
||||||
source or binary form and its associated documentation.
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this License Agreement, CNRI
|
|
||||||
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
|
||||||
license to reproduce, analyze, test, perform and/or display publicly,
|
|
||||||
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
|
||||||
alone or in any derivative version, provided, however, that CNRI's
|
|
||||||
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
|
||||||
1995-2001 Corporation for National Research Initiatives; All Rights
|
|
||||||
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
|
||||||
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
|
||||||
Agreement, Licensee may substitute the following text (omitting the
|
|
||||||
quotes): "Python 1.6.1 is made available subject to the terms and
|
|
||||||
conditions in CNRI's License Agreement. This Agreement together with
|
|
||||||
Python 1.6.1 may be located on the internet using the following
|
|
||||||
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
|
||||||
Agreement may also be obtained from a proxy server on the internet
|
|
||||||
using the following URL: http://hdl.handle.net/1895.22/1013".
|
|
||||||
|
|
||||||
3. In the event Licensee prepares a derivative work that is based on
|
|
||||||
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
|
||||||
the derivative work available to others as provided herein, then
|
|
||||||
Licensee hereby agrees to include in any such work a brief summary of
|
|
||||||
the changes made to Python 1.6.1.
|
|
||||||
|
|
||||||
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
|
||||||
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
|
||||||
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
|
||||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
|
||||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
6. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
7. This License Agreement shall be governed by the federal
|
|
||||||
intellectual property law of the United States, including without
|
|
||||||
limitation the federal copyright law, and, to the extent such
|
|
||||||
U.S. federal law does not apply, by the law of the Commonwealth of
|
|
||||||
Virginia, excluding Virginia's conflict of law provisions.
|
|
||||||
Notwithstanding the foregoing, with regard to derivative works based
|
|
||||||
on Python 1.6.1 that incorporate non-separable material that was
|
|
||||||
previously distributed under the GNU General Public License (GPL), the
|
|
||||||
law of the Commonwealth of Virginia shall govern this License
|
|
||||||
Agreement only as to issues arising under or with respect to
|
|
||||||
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
|
||||||
License Agreement shall be deemed to create any relationship of
|
|
||||||
agency, partnership, or joint venture between CNRI and Licensee. This
|
|
||||||
License Agreement does not grant permission to use CNRI trademarks or
|
|
||||||
trade name in a trademark sense to endorse or promote products or
|
|
||||||
services of Licensee, or any third party.
|
|
||||||
|
|
||||||
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
|
||||||
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
|
||||||
bound by the terms and conditions of this License Agreement.
|
|
||||||
|
|
||||||
ACCEPT
|
|
||||||
|
|
||||||
|
|
||||||
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
|
||||||
--------------------------------------------------
|
|
||||||
|
|
||||||
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
|
||||||
The Netherlands. All rights reserved.
|
|
||||||
|
|
||||||
Permission to use, copy, modify, and distribute this software and its
|
|
||||||
documentation for any purpose and without fee is hereby granted,
|
|
||||||
provided that the above copyright notice appear in all copies and that
|
|
||||||
both that copyright notice and this permission notice appear in
|
|
||||||
supporting documentation, and that the name of Stichting Mathematisch
|
|
||||||
Centrum or CWI not be used in advertising or publicity pertaining to
|
|
||||||
distribution of the software without specific, written prior
|
|
||||||
permission.
|
|
||||||
|
|
||||||
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
|
||||||
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
|
||||||
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
|
||||||
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|
||||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
|
||||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
|
||||||
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
||||||
|
|
||||||
ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
||||||
Permission to use, copy, modify, and/or distribute this software for any
|
|
||||||
purpose with or without fee is hereby granted.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
|
||||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
|
||||||
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
|
||||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
|
||||||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
|
||||||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
|
||||||
PERFORMANCE OF THIS SOFTWARE.
|
|
@ -1,101 +0,0 @@
|
|||||||
Metadata-Version: 2.1
|
|
||||||
Name: Django
|
|
||||||
Version: 4.2.1
|
|
||||||
Summary: A high-level Python web framework that encourages rapid development and clean, pragmatic design.
|
|
||||||
Home-page: https://www.djangoproject.com/
|
|
||||||
Author: Django Software Foundation
|
|
||||||
Author-email: foundation@djangoproject.com
|
|
||||||
License: BSD-3-Clause
|
|
||||||
Project-URL: Documentation, https://docs.djangoproject.com/
|
|
||||||
Project-URL: Release notes, https://docs.djangoproject.com/en/stable/releases/
|
|
||||||
Project-URL: Funding, https://www.djangoproject.com/fundraising/
|
|
||||||
Project-URL: Source, https://github.com/django/django
|
|
||||||
Project-URL: Tracker, https://code.djangoproject.com/
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
|
||||||
Classifier: Environment :: Web Environment
|
|
||||||
Classifier: Framework :: Django
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: License :: OSI Approved :: BSD License
|
|
||||||
Classifier: Operating System :: OS Independent
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3 :: Only
|
|
||||||
Classifier: Programming Language :: Python :: 3.8
|
|
||||||
Classifier: Programming Language :: Python :: 3.9
|
|
||||||
Classifier: Programming Language :: Python :: 3.10
|
|
||||||
Classifier: Programming Language :: Python :: 3.11
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
|
|
||||||
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
|
||||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
||||||
Requires-Python: >=3.8
|
|
||||||
License-File: LICENSE
|
|
||||||
License-File: LICENSE.python
|
|
||||||
License-File: AUTHORS
|
|
||||||
Requires-Dist: asgiref (<4,>=3.6.0)
|
|
||||||
Requires-Dist: sqlparse (>=0.3.1)
|
|
||||||
Requires-Dist: backports.zoneinfo ; python_version < "3.9"
|
|
||||||
Requires-Dist: tzdata ; sys_platform == "win32"
|
|
||||||
Provides-Extra: argon2
|
|
||||||
Requires-Dist: argon2-cffi (>=19.1.0) ; extra == 'argon2'
|
|
||||||
Provides-Extra: bcrypt
|
|
||||||
Requires-Dist: bcrypt ; extra == 'bcrypt'
|
|
||||||
|
|
||||||
======
|
|
||||||
Django
|
|
||||||
======
|
|
||||||
|
|
||||||
Django is a high-level Python web framework that encourages rapid development
|
|
||||||
and clean, pragmatic design. Thanks for checking it out.
|
|
||||||
|
|
||||||
All documentation is in the "``docs``" directory and online at
|
|
||||||
https://docs.djangoproject.com/en/stable/. If you're just getting started,
|
|
||||||
here's how we recommend you read the docs:
|
|
||||||
|
|
||||||
* First, read ``docs/intro/install.txt`` for instructions on installing Django.
|
|
||||||
|
|
||||||
* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``,
|
|
||||||
``docs/intro/tutorial02.txt``, etc.).
|
|
||||||
|
|
||||||
* If you want to set up an actual deployment server, read
|
|
||||||
``docs/howto/deployment/index.txt`` for instructions.
|
|
||||||
|
|
||||||
* You'll probably want to read through the topical guides (in ``docs/topics``)
|
|
||||||
next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific
|
|
||||||
problems, and check out the reference (``docs/ref``) for gory details.
|
|
||||||
|
|
||||||
* See ``docs/README`` for instructions on building an HTML version of the docs.
|
|
||||||
|
|
||||||
Docs are updated rigorously. If you find any problems in the docs, or think
|
|
||||||
they should be clarified in any way, please take 30 seconds to fill out a
|
|
||||||
ticket here: https://code.djangoproject.com/newticket
|
|
||||||
|
|
||||||
To get more help:
|
|
||||||
|
|
||||||
* Join the ``#django`` channel on ``irc.libera.chat``. Lots of helpful people
|
|
||||||
hang out there. See https://web.libera.chat if you're new to IRC.
|
|
||||||
|
|
||||||
* Join the django-users mailing list, or read the archives, at
|
|
||||||
https://groups.google.com/group/django-users.
|
|
||||||
|
|
||||||
To contribute to Django:
|
|
||||||
|
|
||||||
* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for
|
|
||||||
information about getting involved.
|
|
||||||
|
|
||||||
To run Django's test suite:
|
|
||||||
|
|
||||||
* Follow the instructions in the "Unit tests" section of
|
|
||||||
``docs/internals/contributing/writing-code/unit-tests.txt``, published online at
|
|
||||||
https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests
|
|
||||||
|
|
||||||
Supporting the Development of Django
|
|
||||||
====================================
|
|
||||||
|
|
||||||
Django's development depends on your contributions.
|
|
||||||
|
|
||||||
If you depend on Django, remember to support the Django Software Foundation: https://www.djangoproject.com/fundraising/
|
|
||||||
|
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
@ -1,5 +0,0 @@
|
|||||||
Wheel-Version: 1.0
|
|
||||||
Generator: bdist_wheel (0.37.1)
|
|
||||||
Root-Is-Purelib: true
|
|
||||||
Tag: py3-none-any
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
|||||||
[console_scripts]
|
|
||||||
django-admin = django.core.management:execute_from_command_line
|
|
||||||
|
|
@ -1 +0,0 @@
|
|||||||
django
|
|
@ -1 +0,0 @@
|
|||||||
pip
|
|
@ -1,29 +0,0 @@
|
|||||||
Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
|
|
||||||
Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
|
|
||||||
Copyright 2004 Manfred Stienstra (the original version)
|
|
||||||
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer.
|
|
||||||
* Redistributions in binary form must reproduce the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer in the
|
|
||||||
documentation and/or other materials provided with the distribution.
|
|
||||||
* Neither the name of the Python Markdown Project nor the
|
|
||||||
names of its contributors may be used to endorse or promote products
|
|
||||||
derived from this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE PYTHON MARKDOWN PROJECT ''AS IS'' AND ANY
|
|
||||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL ANY CONTRIBUTORS TO THE PYTHON MARKDOWN PROJECT
|
|
||||||
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
POSSIBILITY OF SUCH DAMAGE.
|
|
@ -1,57 +0,0 @@
|
|||||||
Metadata-Version: 2.1
|
|
||||||
Name: Markdown
|
|
||||||
Version: 3.2.1
|
|
||||||
Summary: Python implementation of Markdown.
|
|
||||||
Home-page: https://Python-Markdown.github.io/
|
|
||||||
Author: Manfred Stienstra, Yuri takhteyev and Waylan limberg
|
|
||||||
Author-email: waylan.limberg@icloud.com
|
|
||||||
Maintainer: Waylan Limberg
|
|
||||||
Maintainer-email: waylan.limberg@icloud.com
|
|
||||||
License: BSD License
|
|
||||||
Download-URL: http://pypi.python.org/packages/source/M/Markdown/Markdown-3.2.1-py2.py3-none-any.whl
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
|
||||||
Classifier: License :: OSI Approved :: BSD License
|
|
||||||
Classifier: Operating System :: OS Independent
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.5
|
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
|
||||||
Classifier: Programming Language :: Python :: 3.7
|
|
||||||
Classifier: Programming Language :: Python :: 3.8
|
|
||||||
Classifier: Programming Language :: Python :: 3 :: Only
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
||||||
Classifier: Topic :: Communications :: Email :: Filters
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP :: Site Management
|
|
||||||
Classifier: Topic :: Software Development :: Documentation
|
|
||||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
||||||
Classifier: Topic :: Text Processing :: Filters
|
|
||||||
Classifier: Topic :: Text Processing :: Markup :: HTML
|
|
||||||
Requires-Python: >=3.5
|
|
||||||
Requires-Dist: setuptools (>=36)
|
|
||||||
Provides-Extra: testing
|
|
||||||
Requires-Dist: coverage ; extra == 'testing'
|
|
||||||
Requires-Dist: pyyaml ; extra == 'testing'
|
|
||||||
|
|
||||||
|
|
||||||
This is a Python implementation of John Gruber's Markdown_.
|
|
||||||
It is almost completely compliant with the reference implementation,
|
|
||||||
though there are a few known issues. See Features_ for information
|
|
||||||
on what exactly is supported and what is not. Additional features are
|
|
||||||
supported by the `Available Extensions`_.
|
|
||||||
|
|
||||||
.. _Markdown: https://daringfireball.net/projects/markdown/
|
|
||||||
.. _Features: https://Python-Markdown.github.io#features
|
|
||||||
.. _`Available Extensions`: https://Python-Markdown.github.io/extensions/
|
|
||||||
|
|
||||||
Support
|
|
||||||
=======
|
|
||||||
|
|
||||||
You may report bugs, ask for help, and discuss various other issues on
|
|
||||||
the `bug tracker`_.
|
|
||||||
|
|
||||||
.. _`bug tracker`: https://github.com/Python-Markdown/markdown/issues
|
|
||||||
|
|
||||||
|
|
@ -1,75 +0,0 @@
|
|||||||
../../../bin/markdown_py,sha256=hPYy_uEXY0uTAbNeHZ-oOKRwPxaj-viX2rqnVZOUhcc,225
|
|
||||||
Markdown-3.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
|
||||||
Markdown-3.2.1.dist-info/LICENSE.md,sha256=bxGTy2NHGOZcOlN9biXr1hSCDsDvaTz8EiSBEmONZNo,1645
|
|
||||||
Markdown-3.2.1.dist-info/METADATA,sha256=PK6UzXb9yL09qZJH7SCqZd6-mj8keovCmxQLt89NlEQ,2383
|
|
||||||
Markdown-3.2.1.dist-info/RECORD,,
|
|
||||||
Markdown-3.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
Markdown-3.2.1.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110
|
|
||||||
Markdown-3.2.1.dist-info/entry_points.txt,sha256=j4jiKg-iwZGImvi8OzotZePWoFbJJ4GrfzDqH03u3SQ,1103
|
|
||||||
Markdown-3.2.1.dist-info/top_level.txt,sha256=IAxs8x618RXoH1uCqeLLxXsDefJvE_mIibr_M4sOlyk,9
|
|
||||||
markdown/__init__.py,sha256=002-LuHviYzROW2rg_gBGai81nMouUNO9UFj5nSsTSk,2065
|
|
||||||
markdown/__main__.py,sha256=MpVK3zlwQ-4AzDzZmIScPB90PpunMGVgS5KBmJuHYTw,5802
|
|
||||||
markdown/__meta__.py,sha256=xhmwLb0Eb6kfiapdM21pCb80lyVEl8hxv8Re_X6wsI0,1837
|
|
||||||
markdown/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/__main__.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/__meta__.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/blockparser.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/blockprocessors.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/core.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/inlinepatterns.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/pep562.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/postprocessors.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/preprocessors.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/serializers.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/test_tools.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/treeprocessors.cpython-311.pyc,,
|
|
||||||
markdown/__pycache__/util.cpython-311.pyc,,
|
|
||||||
markdown/blockparser.py,sha256=JpBhOokOoBUGCXolftOc5m1hPcR2y9s9hVd9WSuhHzo,4285
|
|
||||||
markdown/blockprocessors.py,sha256=l4gmkAN9b2L340EX0gm24EyWS7UzBviPqX6wYrcgEco,23736
|
|
||||||
markdown/core.py,sha256=JLR5hIMwWSeIHRQhTzAymB3QUD3gHCdITFvmuuCpIcA,15360
|
|
||||||
markdown/extensions/__init__.py,sha256=6kUSgoqDT4gGUVsqf7F9oQD_jA0RJCbX5EK3JVo8iQE,3517
|
|
||||||
markdown/extensions/__pycache__/__init__.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/abbr.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/admonition.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/attr_list.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/codehilite.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/def_list.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/extra.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/fenced_code.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/footnotes.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/legacy_attrs.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/legacy_em.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/md_in_html.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/meta.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/nl2br.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/sane_lists.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/smarty.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/tables.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/toc.cpython-311.pyc,,
|
|
||||||
markdown/extensions/__pycache__/wikilinks.cpython-311.pyc,,
|
|
||||||
markdown/extensions/abbr.py,sha256=pqp2HnOR2giT-iYKyqtsp2_eUOWBR0j_hUfjvUV5c88,2916
|
|
||||||
markdown/extensions/admonition.py,sha256=HWHHjuYZPAPOg5X8hbpDuSbw8gB6k0odw8GuTT1v_N4,3124
|
|
||||||
markdown/extensions/attr_list.py,sha256=m9a1H-S33rV2twtlFYuoxSiCAf22ndU5tziSzNF2dNg,6003
|
|
||||||
markdown/extensions/codehilite.py,sha256=rVZVOIjp2KEIZsnz90mX6E2_xnwVPQZpVVQVJMuMVU0,9834
|
|
||||||
markdown/extensions/def_list.py,sha256=iqRXAEl2XnyF415afCxihAgOmEUOK1hIuBPIK1k7Tzo,3521
|
|
||||||
markdown/extensions/extra.py,sha256=udRN8OvSWcq3UwkPygvsFl1RlCVtCJ-ARVg2IwVH6VY,1831
|
|
||||||
markdown/extensions/fenced_code.py,sha256=dww9rDu2kQtkoTpjn9BBgeGCTNdE1bMPJ2wgR6695iM,3897
|
|
||||||
markdown/extensions/footnotes.py,sha256=a9sb8RoKqFU8p8ZhpTObrn_Uek0hbyPFVGYpRaEDXaw,15339
|
|
||||||
markdown/extensions/legacy_attrs.py,sha256=2EaVQkxQoNnP8_lMPvGRBdNda8L4weUQroiyEuVdS-w,2547
|
|
||||||
markdown/extensions/legacy_em.py,sha256=9ZMGCTrFh01eiOpnFjS0jVkqgYXiTzCGn-eNvYcvObg,1579
|
|
||||||
markdown/extensions/md_in_html.py,sha256=ohSiGcgR5yBqusuTs0opbTO_5fq442fqPK-klFd_qaM,4040
|
|
||||||
markdown/extensions/meta.py,sha256=EUfkzM7l7UpH__Or9K3pl8ldVddwndlCZWA3d712RAE,2331
|
|
||||||
markdown/extensions/nl2br.py,sha256=wAqTNOuf2L1NzlEvEqoID70n9y-aiYaGLkuyQk3CD0w,783
|
|
||||||
markdown/extensions/sane_lists.py,sha256=ZQmCf-247KBexVG0fc62nDvokGkV6W1uavYbieNKSG4,1505
|
|
||||||
markdown/extensions/smarty.py,sha256=0padzkVCNACainKw-Xj1S5UfT0125VCTfNejmrCZItA,10238
|
|
||||||
markdown/extensions/tables.py,sha256=bicFx_wqhnEx6Y_8MJqA56rh71pt5fOe94oiWbvcobY,7685
|
|
||||||
markdown/extensions/toc.py,sha256=E-d3R4etcM_R2sQyTpKkejRv2NHrHPCvaXK9hUqfK58,13224
|
|
||||||
markdown/extensions/wikilinks.py,sha256=GkgT9BY7b1-qW--dIwFAhC9V20RoeF13b7CFdw_V21Q,2812
|
|
||||||
markdown/inlinepatterns.py,sha256=EnYq9aU_Hi1gu5e8dcbUxUu0mRz-pHFV79uGQCYbD5I,29378
|
|
||||||
markdown/pep562.py,sha256=5UkqT7sb-cQufgbOl_jF-RYUVVHS7VThzlMzR9vrd3I,8917
|
|
||||||
markdown/postprocessors.py,sha256=25g6qqpJ4kuiq4RBrGz8RA6GMb7ArUi1AN2VDVnR35U,3738
|
|
||||||
markdown/preprocessors.py,sha256=dsmMVPP2afKAZ0s59_mFidM_mCiNfgdBJ9aVDWu_viE,15323
|
|
||||||
markdown/serializers.py,sha256=_wQl-iJrPSUEQ4Q1owWYqN9qceVh6TOlAOH_i44BKAQ,6540
|
|
||||||
markdown/test_tools.py,sha256=zFHFzmtzjfMRroyyli3LY4SP8yLfLf4S7SsU3z7Z1SQ,6823
|
|
||||||
markdown/treeprocessors.py,sha256=NBaYc9TEGP7TBaN6YRROIqE5Lj-AMoAqp0jN-coGW3Q,15401
|
|
||||||
markdown/util.py,sha256=0ySktJgYplEV7g6TOOs8fatAS4Fi-6F7iv4D9Vw3g0c,15201
|
|
@ -1,6 +0,0 @@
|
|||||||
Wheel-Version: 1.0
|
|
||||||
Generator: bdist_wheel (0.33.4)
|
|
||||||
Root-Is-Purelib: true
|
|
||||||
Tag: py2-none-any
|
|
||||||
Tag: py3-none-any
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
|||||||
[console_scripts]
|
|
||||||
markdown_py = markdown.__main__:run
|
|
||||||
|
|
||||||
[markdown.extensions]
|
|
||||||
abbr = markdown.extensions.abbr:AbbrExtension
|
|
||||||
admonition = markdown.extensions.admonition:AdmonitionExtension
|
|
||||||
attr_list = markdown.extensions.attr_list:AttrListExtension
|
|
||||||
codehilite = markdown.extensions.codehilite:CodeHiliteExtension
|
|
||||||
def_list = markdown.extensions.def_list:DefListExtension
|
|
||||||
extra = markdown.extensions.extra:ExtraExtension
|
|
||||||
fenced_code = markdown.extensions.fenced_code:FencedCodeExtension
|
|
||||||
footnotes = markdown.extensions.footnotes:FootnoteExtension
|
|
||||||
legacy_attrs = markdown.extensions.legacy_attrs:LegacyAttrExtension
|
|
||||||
legacy_em = markdown.extensions.legacy_em:LegacyEmExtension
|
|
||||||
md_in_html = markdown.extensions.md_in_html:MarkdownInHtmlExtension
|
|
||||||
meta = markdown.extensions.meta:MetaExtension
|
|
||||||
nl2br = markdown.extensions.nl2br:Nl2BrExtension
|
|
||||||
sane_lists = markdown.extensions.sane_lists:SaneListExtension
|
|
||||||
smarty = markdown.extensions.smarty:SmartyExtension
|
|
||||||
tables = markdown.extensions.tables:TableExtension
|
|
||||||
toc = markdown.extensions.toc:TocExtension
|
|
||||||
wikilinks = markdown.extensions.wikilinks:WikiLinkExtension
|
|
||||||
|
|
@ -1 +0,0 @@
|
|||||||
markdown
|
|
File diff suppressed because it is too large
Load Diff
@ -1,32 +0,0 @@
|
|||||||
# Copyright (C) AB Strakt
|
|
||||||
# See LICENSE for details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
pyOpenSSL - A simple wrapper around the OpenSSL library
|
|
||||||
"""
|
|
||||||
|
|
||||||
from OpenSSL import SSL, crypto
|
|
||||||
from OpenSSL.version import (
|
|
||||||
__author__,
|
|
||||||
__copyright__,
|
|
||||||
__email__,
|
|
||||||
__license__,
|
|
||||||
__summary__,
|
|
||||||
__title__,
|
|
||||||
__uri__,
|
|
||||||
__version__,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"SSL",
|
|
||||||
"crypto",
|
|
||||||
"__author__",
|
|
||||||
"__copyright__",
|
|
||||||
"__email__",
|
|
||||||
"__license__",
|
|
||||||
"__summary__",
|
|
||||||
"__title__",
|
|
||||||
"__uri__",
|
|
||||||
"__version__",
|
|
||||||
]
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,124 +0,0 @@
|
|||||||
import os
|
|
||||||
import sys
|
|
||||||
import warnings
|
|
||||||
from typing import Any, Callable, NoReturn, Type, Union
|
|
||||||
|
|
||||||
from cryptography.hazmat.bindings.openssl.binding import Binding
|
|
||||||
|
|
||||||
StrOrBytesPath = Union[str, bytes, os.PathLike]
|
|
||||||
|
|
||||||
binding = Binding()
|
|
||||||
ffi = binding.ffi
|
|
||||||
lib = binding.lib
|
|
||||||
|
|
||||||
|
|
||||||
# This is a special CFFI allocator that does not bother to zero its memory
|
|
||||||
# after allocation. This has vastly better performance on large allocations and
|
|
||||||
# so should be used whenever we don't need the memory zeroed out.
|
|
||||||
no_zero_allocator = ffi.new_allocator(should_clear_after_alloc=False)
|
|
||||||
|
|
||||||
|
|
||||||
def text(charp: Any) -> str:
|
|
||||||
"""
|
|
||||||
Get a native string type representing of the given CFFI ``char*`` object.
|
|
||||||
|
|
||||||
:param charp: A C-style string represented using CFFI.
|
|
||||||
|
|
||||||
:return: :class:`str`
|
|
||||||
"""
|
|
||||||
if not charp:
|
|
||||||
return ""
|
|
||||||
return ffi.string(charp).decode("utf-8")
|
|
||||||
|
|
||||||
|
|
||||||
def exception_from_error_queue(exception_type: Type[Exception]) -> NoReturn:
|
|
||||||
"""
|
|
||||||
Convert an OpenSSL library failure into a Python exception.
|
|
||||||
|
|
||||||
When a call to the native OpenSSL library fails, this is usually signalled
|
|
||||||
by the return value, and an error code is stored in an error queue
|
|
||||||
associated with the current thread. The err library provides functions to
|
|
||||||
obtain these error codes and textual error messages.
|
|
||||||
"""
|
|
||||||
errors = []
|
|
||||||
|
|
||||||
while True:
|
|
||||||
error = lib.ERR_get_error()
|
|
||||||
if error == 0:
|
|
||||||
break
|
|
||||||
errors.append(
|
|
||||||
(
|
|
||||||
text(lib.ERR_lib_error_string(error)),
|
|
||||||
text(lib.ERR_func_error_string(error)),
|
|
||||||
text(lib.ERR_reason_error_string(error)),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
raise exception_type(errors)
|
|
||||||
|
|
||||||
|
|
||||||
def make_assert(error: Type[Exception]) -> Callable[[bool], Any]:
|
|
||||||
"""
|
|
||||||
Create an assert function that uses :func:`exception_from_error_queue` to
|
|
||||||
raise an exception wrapped by *error*.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def openssl_assert(ok: bool) -> None:
|
|
||||||
"""
|
|
||||||
If *ok* is not True, retrieve the error from OpenSSL and raise it.
|
|
||||||
"""
|
|
||||||
if ok is not True:
|
|
||||||
exception_from_error_queue(error)
|
|
||||||
|
|
||||||
return openssl_assert
|
|
||||||
|
|
||||||
|
|
||||||
def path_bytes(s: StrOrBytesPath) -> bytes:
|
|
||||||
"""
|
|
||||||
Convert a Python path to a :py:class:`bytes` for the path which can be
|
|
||||||
passed into an OpenSSL API accepting a filename.
|
|
||||||
|
|
||||||
:param s: A path (valid for os.fspath).
|
|
||||||
|
|
||||||
:return: An instance of :py:class:`bytes`.
|
|
||||||
"""
|
|
||||||
b = os.fspath(s)
|
|
||||||
|
|
||||||
if isinstance(b, str):
|
|
||||||
return b.encode(sys.getfilesystemencoding())
|
|
||||||
else:
|
|
||||||
return b
|
|
||||||
|
|
||||||
|
|
||||||
def byte_string(s: str) -> bytes:
|
|
||||||
return s.encode("charmap")
|
|
||||||
|
|
||||||
|
|
||||||
# A marker object to observe whether some optional arguments are passed any
|
|
||||||
# value or not.
|
|
||||||
UNSPECIFIED = object()
|
|
||||||
|
|
||||||
_TEXT_WARNING = "str for {0} is no longer accepted, use bytes"
|
|
||||||
|
|
||||||
|
|
||||||
def text_to_bytes_and_warn(label: str, obj: Any) -> Any:
|
|
||||||
"""
|
|
||||||
If ``obj`` is text, emit a warning that it should be bytes instead and try
|
|
||||||
to convert it to bytes automatically.
|
|
||||||
|
|
||||||
:param str label: The name of the parameter from which ``obj`` was taken
|
|
||||||
(so a developer can easily find the source of the problem and correct
|
|
||||||
it).
|
|
||||||
|
|
||||||
:return: If ``obj`` is the text string type, a ``bytes`` object giving the
|
|
||||||
UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is
|
|
||||||
returned.
|
|
||||||
"""
|
|
||||||
if isinstance(obj, str):
|
|
||||||
warnings.warn(
|
|
||||||
_TEXT_WARNING.format(label),
|
|
||||||
category=DeprecationWarning,
|
|
||||||
stacklevel=3,
|
|
||||||
)
|
|
||||||
return obj.encode("utf-8")
|
|
||||||
return obj
|
|
File diff suppressed because it is too large
Load Diff
@ -1,44 +0,0 @@
|
|||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import ssl
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import cffi
|
|
||||||
|
|
||||||
import cryptography
|
|
||||||
|
|
||||||
import OpenSSL.SSL
|
|
||||||
|
|
||||||
from . import version
|
|
||||||
|
|
||||||
|
|
||||||
_env_info = """\
|
|
||||||
pyOpenSSL: {pyopenssl}
|
|
||||||
cryptography: {cryptography}
|
|
||||||
cffi: {cffi}
|
|
||||||
cryptography's compiled against OpenSSL: {crypto_openssl_compile}
|
|
||||||
cryptography's linked OpenSSL: {crypto_openssl_link}
|
|
||||||
Python's OpenSSL: {python_openssl}
|
|
||||||
Python executable: {python}
|
|
||||||
Python version: {python_version}
|
|
||||||
Platform: {platform}
|
|
||||||
sys.path: {sys_path}""".format(
|
|
||||||
pyopenssl=version.__version__,
|
|
||||||
crypto_openssl_compile=OpenSSL._util.ffi.string(
|
|
||||||
OpenSSL._util.lib.OPENSSL_VERSION_TEXT,
|
|
||||||
).decode("ascii"),
|
|
||||||
crypto_openssl_link=OpenSSL.SSL.SSLeay_version(
|
|
||||||
OpenSSL.SSL.SSLEAY_VERSION
|
|
||||||
).decode("ascii"),
|
|
||||||
python_openssl=getattr(ssl, "OPENSSL_VERSION", "n/a"),
|
|
||||||
cryptography=cryptography.__version__,
|
|
||||||
cffi=cffi.__version__,
|
|
||||||
python=sys.executable,
|
|
||||||
python_version=sys.version,
|
|
||||||
platform=sys.platform,
|
|
||||||
sys_path=sys.path,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
print(_env_info)
|
|
@ -1,40 +0,0 @@
|
|||||||
"""
|
|
||||||
PRNG management routines, thin wrappers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from OpenSSL._util import lib as _lib
|
|
||||||
|
|
||||||
|
|
||||||
def add(buffer: bytes, entropy: int) -> None:
|
|
||||||
"""
|
|
||||||
Mix bytes from *string* into the PRNG state.
|
|
||||||
|
|
||||||
The *entropy* argument is (the lower bound of) an estimate of how much
|
|
||||||
randomness is contained in *string*, measured in bytes.
|
|
||||||
|
|
||||||
For more information, see e.g. :rfc:`1750`.
|
|
||||||
|
|
||||||
This function is only relevant if you are forking Python processes and
|
|
||||||
need to reseed the CSPRNG after fork.
|
|
||||||
|
|
||||||
:param buffer: Buffer with random data.
|
|
||||||
:param entropy: The entropy (in bytes) measurement of the buffer.
|
|
||||||
|
|
||||||
:return: :obj:`None`
|
|
||||||
"""
|
|
||||||
if not isinstance(buffer, bytes):
|
|
||||||
raise TypeError("buffer must be a byte string")
|
|
||||||
|
|
||||||
if not isinstance(entropy, int):
|
|
||||||
raise TypeError("entropy must be an integer")
|
|
||||||
|
|
||||||
_lib.RAND_add(buffer, len(buffer), entropy)
|
|
||||||
|
|
||||||
|
|
||||||
def status() -> int:
|
|
||||||
"""
|
|
||||||
Check whether the PRNG has been seeded with enough data.
|
|
||||||
|
|
||||||
:return: 1 if the PRNG is seeded enough, 0 otherwise.
|
|
||||||
"""
|
|
||||||
return _lib.RAND_status()
|
|
@ -1,28 +0,0 @@
|
|||||||
# Copyright (C) AB Strakt
|
|
||||||
# Copyright (C) Jean-Paul Calderone
|
|
||||||
# See LICENSE for details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
pyOpenSSL - A simple wrapper around the OpenSSL library
|
|
||||||
"""
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"__author__",
|
|
||||||
"__copyright__",
|
|
||||||
"__email__",
|
|
||||||
"__license__",
|
|
||||||
"__summary__",
|
|
||||||
"__title__",
|
|
||||||
"__uri__",
|
|
||||||
"__version__",
|
|
||||||
]
|
|
||||||
|
|
||||||
__version__ = "23.2.0"
|
|
||||||
|
|
||||||
__title__ = "pyOpenSSL"
|
|
||||||
__uri__ = "https://pyopenssl.org/"
|
|
||||||
__summary__ = "Python wrapper module around the OpenSSL library"
|
|
||||||
__author__ = "The pyOpenSSL developers"
|
|
||||||
__email__ = "cryptography-dev@python.org"
|
|
||||||
__license__ = "Apache License, Version 2.0"
|
|
||||||
__copyright__ = "Copyright 2001-2023 {0}".format(__author__)
|
|
@ -1,122 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# bitmap distribution font (bdf) file parser
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 1996-05-16 fl created (as bdf2pil)
|
|
||||||
# 1997-08-25 fl converted to FontFile driver
|
|
||||||
# 2001-05-25 fl removed bogus __init__ call
|
|
||||||
# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev)
|
|
||||||
# 2003-04-22 fl more robustification (from Graham Dumpleton)
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2003 by Secret Labs AB.
|
|
||||||
# Copyright (c) 1997-2003 by Fredrik Lundh.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
"""
|
|
||||||
Parse X Bitmap Distribution Format (BDF)
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
from . import FontFile, Image
|
|
||||||
|
|
||||||
bdf_slant = {
|
|
||||||
"R": "Roman",
|
|
||||||
"I": "Italic",
|
|
||||||
"O": "Oblique",
|
|
||||||
"RI": "Reverse Italic",
|
|
||||||
"RO": "Reverse Oblique",
|
|
||||||
"OT": "Other",
|
|
||||||
}
|
|
||||||
|
|
||||||
bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"}
|
|
||||||
|
|
||||||
|
|
||||||
def bdf_char(f):
|
|
||||||
# skip to STARTCHAR
|
|
||||||
while True:
|
|
||||||
s = f.readline()
|
|
||||||
if not s:
|
|
||||||
return None
|
|
||||||
if s[:9] == b"STARTCHAR":
|
|
||||||
break
|
|
||||||
id = s[9:].strip().decode("ascii")
|
|
||||||
|
|
||||||
# load symbol properties
|
|
||||||
props = {}
|
|
||||||
while True:
|
|
||||||
s = f.readline()
|
|
||||||
if not s or s[:6] == b"BITMAP":
|
|
||||||
break
|
|
||||||
i = s.find(b" ")
|
|
||||||
props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
|
|
||||||
|
|
||||||
# load bitmap
|
|
||||||
bitmap = []
|
|
||||||
while True:
|
|
||||||
s = f.readline()
|
|
||||||
if not s or s[:7] == b"ENDCHAR":
|
|
||||||
break
|
|
||||||
bitmap.append(s[:-1])
|
|
||||||
bitmap = b"".join(bitmap)
|
|
||||||
|
|
||||||
# The word BBX
|
|
||||||
# followed by the width in x (BBw), height in y (BBh),
|
|
||||||
# and x and y displacement (BBxoff0, BByoff0)
|
|
||||||
# of the lower left corner from the origin of the character.
|
|
||||||
width, height, x_disp, y_disp = [int(p) for p in props["BBX"].split()]
|
|
||||||
|
|
||||||
# The word DWIDTH
|
|
||||||
# followed by the width in x and y of the character in device pixels.
|
|
||||||
dwx, dwy = [int(p) for p in props["DWIDTH"].split()]
|
|
||||||
|
|
||||||
bbox = (
|
|
||||||
(dwx, dwy),
|
|
||||||
(x_disp, -y_disp - height, width + x_disp, -y_disp),
|
|
||||||
(0, 0, width, height),
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
im = Image.frombytes("1", (width, height), bitmap, "hex", "1")
|
|
||||||
except ValueError:
|
|
||||||
# deal with zero-width characters
|
|
||||||
im = Image.new("1", (width, height))
|
|
||||||
|
|
||||||
return id, int(props["ENCODING"]), bbox, im
|
|
||||||
|
|
||||||
|
|
||||||
class BdfFontFile(FontFile.FontFile):
|
|
||||||
"""Font file plugin for the X11 BDF format."""
|
|
||||||
|
|
||||||
def __init__(self, fp):
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
s = fp.readline()
|
|
||||||
if s[:13] != b"STARTFONT 2.1":
|
|
||||||
msg = "not a valid BDF file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
props = {}
|
|
||||||
comments = []
|
|
||||||
|
|
||||||
while True:
|
|
||||||
s = fp.readline()
|
|
||||||
if not s or s[:13] == b"ENDPROPERTIES":
|
|
||||||
break
|
|
||||||
i = s.find(b" ")
|
|
||||||
props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
|
|
||||||
if s[:i] in [b"COMMENT", b"COPYRIGHT"]:
|
|
||||||
if s.find(b"LogicalFontDescription") < 0:
|
|
||||||
comments.append(s[i + 1 : -1].decode("ascii"))
|
|
||||||
|
|
||||||
while True:
|
|
||||||
c = bdf_char(fp)
|
|
||||||
if not c:
|
|
||||||
break
|
|
||||||
id, ch, (xy, dst, src), im = c
|
|
||||||
if 0 <= ch < len(self.glyph):
|
|
||||||
self.glyph[ch] = xy, dst, src, im
|
|
@ -1,488 +0,0 @@
|
|||||||
"""
|
|
||||||
Blizzard Mipmap Format (.blp)
|
|
||||||
Jerome Leclanche <jerome@leclan.ch>
|
|
||||||
|
|
||||||
The contents of this file are hereby released in the public domain (CC0)
|
|
||||||
Full text of the CC0 license:
|
|
||||||
https://creativecommons.org/publicdomain/zero/1.0/
|
|
||||||
|
|
||||||
BLP1 files, used mostly in Warcraft III, are not fully supported.
|
|
||||||
All types of BLP2 files used in World of Warcraft are supported.
|
|
||||||
|
|
||||||
The BLP file structure consists of a header, up to 16 mipmaps of the
|
|
||||||
texture
|
|
||||||
|
|
||||||
Texture sizes must be powers of two, though the two dimensions do
|
|
||||||
not have to be equal; 512x256 is valid, but 512x200 is not.
|
|
||||||
The first mipmap (mipmap #0) is the full size image; each subsequent
|
|
||||||
mipmap halves both dimensions. The final mipmap should be 1x1.
|
|
||||||
|
|
||||||
BLP files come in many different flavours:
|
|
||||||
* JPEG-compressed (type == 0) - only supported for BLP1.
|
|
||||||
* RAW images (type == 1, encoding == 1). Each mipmap is stored as an
|
|
||||||
array of 8-bit values, one per pixel, left to right, top to bottom.
|
|
||||||
Each value is an index to the palette.
|
|
||||||
* DXT-compressed (type == 1, encoding == 2):
|
|
||||||
- DXT1 compression is used if alpha_encoding == 0.
|
|
||||||
- An additional alpha bit is used if alpha_depth == 1.
|
|
||||||
- DXT3 compression is used if alpha_encoding == 1.
|
|
||||||
- DXT5 compression is used if alpha_encoding == 7.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import struct
|
|
||||||
from enum import IntEnum
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
from ._deprecate import deprecate
|
|
||||||
|
|
||||||
|
|
||||||
class Format(IntEnum):
|
|
||||||
JPEG = 0
|
|
||||||
|
|
||||||
|
|
||||||
class Encoding(IntEnum):
|
|
||||||
UNCOMPRESSED = 1
|
|
||||||
DXT = 2
|
|
||||||
UNCOMPRESSED_RAW_BGRA = 3
|
|
||||||
|
|
||||||
|
|
||||||
class AlphaEncoding(IntEnum):
|
|
||||||
DXT1 = 0
|
|
||||||
DXT3 = 1
|
|
||||||
DXT5 = 7
|
|
||||||
|
|
||||||
|
|
||||||
def __getattr__(name):
|
|
||||||
for enum, prefix in {
|
|
||||||
Format: "BLP_FORMAT_",
|
|
||||||
Encoding: "BLP_ENCODING_",
|
|
||||||
AlphaEncoding: "BLP_ALPHA_ENCODING_",
|
|
||||||
}.items():
|
|
||||||
if name.startswith(prefix):
|
|
||||||
name = name[len(prefix) :]
|
|
||||||
if name in enum.__members__:
|
|
||||||
deprecate(f"{prefix}{name}", 10, f"{enum.__name__}.{name}")
|
|
||||||
return enum[name]
|
|
||||||
msg = f"module '{__name__}' has no attribute '{name}'"
|
|
||||||
raise AttributeError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def unpack_565(i):
|
|
||||||
return ((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3
|
|
||||||
|
|
||||||
|
|
||||||
def decode_dxt1(data, alpha=False):
|
|
||||||
"""
|
|
||||||
input: one "row" of data (i.e. will produce 4*width pixels)
|
|
||||||
"""
|
|
||||||
|
|
||||||
blocks = len(data) // 8 # number of blocks in row
|
|
||||||
ret = (bytearray(), bytearray(), bytearray(), bytearray())
|
|
||||||
|
|
||||||
for block in range(blocks):
|
|
||||||
# Decode next 8-byte block.
|
|
||||||
idx = block * 8
|
|
||||||
color0, color1, bits = struct.unpack_from("<HHI", data, idx)
|
|
||||||
|
|
||||||
r0, g0, b0 = unpack_565(color0)
|
|
||||||
r1, g1, b1 = unpack_565(color1)
|
|
||||||
|
|
||||||
# Decode this block into 4x4 pixels
|
|
||||||
# Accumulate the results onto our 4 row accumulators
|
|
||||||
for j in range(4):
|
|
||||||
for i in range(4):
|
|
||||||
# get next control op and generate a pixel
|
|
||||||
|
|
||||||
control = bits & 3
|
|
||||||
bits = bits >> 2
|
|
||||||
|
|
||||||
a = 0xFF
|
|
||||||
if control == 0:
|
|
||||||
r, g, b = r0, g0, b0
|
|
||||||
elif control == 1:
|
|
||||||
r, g, b = r1, g1, b1
|
|
||||||
elif control == 2:
|
|
||||||
if color0 > color1:
|
|
||||||
r = (2 * r0 + r1) // 3
|
|
||||||
g = (2 * g0 + g1) // 3
|
|
||||||
b = (2 * b0 + b1) // 3
|
|
||||||
else:
|
|
||||||
r = (r0 + r1) // 2
|
|
||||||
g = (g0 + g1) // 2
|
|
||||||
b = (b0 + b1) // 2
|
|
||||||
elif control == 3:
|
|
||||||
if color0 > color1:
|
|
||||||
r = (2 * r1 + r0) // 3
|
|
||||||
g = (2 * g1 + g0) // 3
|
|
||||||
b = (2 * b1 + b0) // 3
|
|
||||||
else:
|
|
||||||
r, g, b, a = 0, 0, 0, 0
|
|
||||||
|
|
||||||
if alpha:
|
|
||||||
ret[j].extend([r, g, b, a])
|
|
||||||
else:
|
|
||||||
ret[j].extend([r, g, b])
|
|
||||||
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
def decode_dxt3(data):
|
|
||||||
"""
|
|
||||||
input: one "row" of data (i.e. will produce 4*width pixels)
|
|
||||||
"""
|
|
||||||
|
|
||||||
blocks = len(data) // 16 # number of blocks in row
|
|
||||||
ret = (bytearray(), bytearray(), bytearray(), bytearray())
|
|
||||||
|
|
||||||
for block in range(blocks):
|
|
||||||
idx = block * 16
|
|
||||||
block = data[idx : idx + 16]
|
|
||||||
# Decode next 16-byte block.
|
|
||||||
bits = struct.unpack_from("<8B", block)
|
|
||||||
color0, color1 = struct.unpack_from("<HH", block, 8)
|
|
||||||
|
|
||||||
(code,) = struct.unpack_from("<I", block, 12)
|
|
||||||
|
|
||||||
r0, g0, b0 = unpack_565(color0)
|
|
||||||
r1, g1, b1 = unpack_565(color1)
|
|
||||||
|
|
||||||
for j in range(4):
|
|
||||||
high = False # Do we want the higher bits?
|
|
||||||
for i in range(4):
|
|
||||||
alphacode_index = (4 * j + i) // 2
|
|
||||||
a = bits[alphacode_index]
|
|
||||||
if high:
|
|
||||||
high = False
|
|
||||||
a >>= 4
|
|
||||||
else:
|
|
||||||
high = True
|
|
||||||
a &= 0xF
|
|
||||||
a *= 17 # We get a value between 0 and 15
|
|
||||||
|
|
||||||
color_code = (code >> 2 * (4 * j + i)) & 0x03
|
|
||||||
|
|
||||||
if color_code == 0:
|
|
||||||
r, g, b = r0, g0, b0
|
|
||||||
elif color_code == 1:
|
|
||||||
r, g, b = r1, g1, b1
|
|
||||||
elif color_code == 2:
|
|
||||||
r = (2 * r0 + r1) // 3
|
|
||||||
g = (2 * g0 + g1) // 3
|
|
||||||
b = (2 * b0 + b1) // 3
|
|
||||||
elif color_code == 3:
|
|
||||||
r = (2 * r1 + r0) // 3
|
|
||||||
g = (2 * g1 + g0) // 3
|
|
||||||
b = (2 * b1 + b0) // 3
|
|
||||||
|
|
||||||
ret[j].extend([r, g, b, a])
|
|
||||||
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
def decode_dxt5(data):
|
|
||||||
"""
|
|
||||||
input: one "row" of data (i.e. will produce 4 * width pixels)
|
|
||||||
"""
|
|
||||||
|
|
||||||
blocks = len(data) // 16 # number of blocks in row
|
|
||||||
ret = (bytearray(), bytearray(), bytearray(), bytearray())
|
|
||||||
|
|
||||||
for block in range(blocks):
|
|
||||||
idx = block * 16
|
|
||||||
block = data[idx : idx + 16]
|
|
||||||
# Decode next 16-byte block.
|
|
||||||
a0, a1 = struct.unpack_from("<BB", block)
|
|
||||||
|
|
||||||
bits = struct.unpack_from("<6B", block, 2)
|
|
||||||
alphacode1 = bits[2] | (bits[3] << 8) | (bits[4] << 16) | (bits[5] << 24)
|
|
||||||
alphacode2 = bits[0] | (bits[1] << 8)
|
|
||||||
|
|
||||||
color0, color1 = struct.unpack_from("<HH", block, 8)
|
|
||||||
|
|
||||||
(code,) = struct.unpack_from("<I", block, 12)
|
|
||||||
|
|
||||||
r0, g0, b0 = unpack_565(color0)
|
|
||||||
r1, g1, b1 = unpack_565(color1)
|
|
||||||
|
|
||||||
for j in range(4):
|
|
||||||
for i in range(4):
|
|
||||||
# get next control op and generate a pixel
|
|
||||||
alphacode_index = 3 * (4 * j + i)
|
|
||||||
|
|
||||||
if alphacode_index <= 12:
|
|
||||||
alphacode = (alphacode2 >> alphacode_index) & 0x07
|
|
||||||
elif alphacode_index == 15:
|
|
||||||
alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06)
|
|
||||||
else: # alphacode_index >= 18 and alphacode_index <= 45
|
|
||||||
alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07
|
|
||||||
|
|
||||||
if alphacode == 0:
|
|
||||||
a = a0
|
|
||||||
elif alphacode == 1:
|
|
||||||
a = a1
|
|
||||||
elif a0 > a1:
|
|
||||||
a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7
|
|
||||||
elif alphacode == 6:
|
|
||||||
a = 0
|
|
||||||
elif alphacode == 7:
|
|
||||||
a = 255
|
|
||||||
else:
|
|
||||||
a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5
|
|
||||||
|
|
||||||
color_code = (code >> 2 * (4 * j + i)) & 0x03
|
|
||||||
|
|
||||||
if color_code == 0:
|
|
||||||
r, g, b = r0, g0, b0
|
|
||||||
elif color_code == 1:
|
|
||||||
r, g, b = r1, g1, b1
|
|
||||||
elif color_code == 2:
|
|
||||||
r = (2 * r0 + r1) // 3
|
|
||||||
g = (2 * g0 + g1) // 3
|
|
||||||
b = (2 * b0 + b1) // 3
|
|
||||||
elif color_code == 3:
|
|
||||||
r = (2 * r1 + r0) // 3
|
|
||||||
g = (2 * g1 + g0) // 3
|
|
||||||
b = (2 * b1 + b0) // 3
|
|
||||||
|
|
||||||
ret[j].extend([r, g, b, a])
|
|
||||||
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class BLPFormatError(NotImplementedError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] in (b"BLP1", b"BLP2")
|
|
||||||
|
|
||||||
|
|
||||||
class BlpImageFile(ImageFile.ImageFile):
|
|
||||||
"""
|
|
||||||
Blizzard Mipmap Format
|
|
||||||
"""
|
|
||||||
|
|
||||||
format = "BLP"
|
|
||||||
format_description = "Blizzard Mipmap Format"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
self.magic = self.fp.read(4)
|
|
||||||
|
|
||||||
self.fp.seek(5, os.SEEK_CUR)
|
|
||||||
(self._blp_alpha_depth,) = struct.unpack("<b", self.fp.read(1))
|
|
||||||
|
|
||||||
self.fp.seek(2, os.SEEK_CUR)
|
|
||||||
self._size = struct.unpack("<II", self.fp.read(8))
|
|
||||||
|
|
||||||
if self.magic in (b"BLP1", b"BLP2"):
|
|
||||||
decoder = self.magic.decode()
|
|
||||||
else:
|
|
||||||
msg = f"Bad BLP magic {repr(self.magic)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
self.mode = "RGBA" if self._blp_alpha_depth else "RGB"
|
|
||||||
self.tile = [(decoder, (0, 0) + self.size, 0, (self.mode, 0, 1))]
|
|
||||||
|
|
||||||
|
|
||||||
class _BLPBaseDecoder(ImageFile.PyDecoder):
|
|
||||||
_pulls_fd = True
|
|
||||||
|
|
||||||
def decode(self, buffer):
|
|
||||||
try:
|
|
||||||
self._read_blp_header()
|
|
||||||
self._load()
|
|
||||||
except struct.error as e:
|
|
||||||
msg = "Truncated BLP file"
|
|
||||||
raise OSError(msg) from e
|
|
||||||
return -1, 0
|
|
||||||
|
|
||||||
def _read_blp_header(self):
|
|
||||||
self.fd.seek(4)
|
|
||||||
(self._blp_compression,) = struct.unpack("<i", self._safe_read(4))
|
|
||||||
|
|
||||||
(self._blp_encoding,) = struct.unpack("<b", self._safe_read(1))
|
|
||||||
(self._blp_alpha_depth,) = struct.unpack("<b", self._safe_read(1))
|
|
||||||
(self._blp_alpha_encoding,) = struct.unpack("<b", self._safe_read(1))
|
|
||||||
self.fd.seek(1, os.SEEK_CUR) # mips
|
|
||||||
|
|
||||||
self.size = struct.unpack("<II", self._safe_read(8))
|
|
||||||
|
|
||||||
if isinstance(self, BLP1Decoder):
|
|
||||||
# Only present for BLP1
|
|
||||||
(self._blp_encoding,) = struct.unpack("<i", self._safe_read(4))
|
|
||||||
self.fd.seek(4, os.SEEK_CUR) # subtype
|
|
||||||
|
|
||||||
self._blp_offsets = struct.unpack("<16I", self._safe_read(16 * 4))
|
|
||||||
self._blp_lengths = struct.unpack("<16I", self._safe_read(16 * 4))
|
|
||||||
|
|
||||||
def _safe_read(self, length):
|
|
||||||
return ImageFile._safe_read(self.fd, length)
|
|
||||||
|
|
||||||
def _read_palette(self):
|
|
||||||
ret = []
|
|
||||||
for i in range(256):
|
|
||||||
try:
|
|
||||||
b, g, r, a = struct.unpack("<4B", self._safe_read(4))
|
|
||||||
except struct.error:
|
|
||||||
break
|
|
||||||
ret.append((b, g, r, a))
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def _read_bgra(self, palette):
|
|
||||||
data = bytearray()
|
|
||||||
_data = BytesIO(self._safe_read(self._blp_lengths[0]))
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
(offset,) = struct.unpack("<B", _data.read(1))
|
|
||||||
except struct.error:
|
|
||||||
break
|
|
||||||
b, g, r, a = palette[offset]
|
|
||||||
d = (r, g, b)
|
|
||||||
if self._blp_alpha_depth:
|
|
||||||
d += (a,)
|
|
||||||
data.extend(d)
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
class BLP1Decoder(_BLPBaseDecoder):
|
|
||||||
def _load(self):
|
|
||||||
if self._blp_compression == Format.JPEG:
|
|
||||||
self._decode_jpeg_stream()
|
|
||||||
|
|
||||||
elif self._blp_compression == 1:
|
|
||||||
if self._blp_encoding in (4, 5):
|
|
||||||
palette = self._read_palette()
|
|
||||||
data = self._read_bgra(palette)
|
|
||||||
self.set_as_raw(bytes(data))
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported BLP encoding {repr(self._blp_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported BLP compression {repr(self._blp_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
def _decode_jpeg_stream(self):
|
|
||||||
from .JpegImagePlugin import JpegImageFile
|
|
||||||
|
|
||||||
(jpeg_header_size,) = struct.unpack("<I", self._safe_read(4))
|
|
||||||
jpeg_header = self._safe_read(jpeg_header_size)
|
|
||||||
self._safe_read(self._blp_offsets[0] - self.fd.tell()) # What IS this?
|
|
||||||
data = self._safe_read(self._blp_lengths[0])
|
|
||||||
data = jpeg_header + data
|
|
||||||
data = BytesIO(data)
|
|
||||||
image = JpegImageFile(data)
|
|
||||||
Image._decompression_bomb_check(image.size)
|
|
||||||
if image.mode == "CMYK":
|
|
||||||
decoder_name, extents, offset, args = image.tile[0]
|
|
||||||
image.tile = [(decoder_name, extents, offset, (args[0], "CMYK"))]
|
|
||||||
r, g, b = image.convert("RGB").split()
|
|
||||||
image = Image.merge("RGB", (b, g, r))
|
|
||||||
self.set_as_raw(image.tobytes())
|
|
||||||
|
|
||||||
|
|
||||||
class BLP2Decoder(_BLPBaseDecoder):
|
|
||||||
def _load(self):
|
|
||||||
palette = self._read_palette()
|
|
||||||
|
|
||||||
self.fd.seek(self._blp_offsets[0])
|
|
||||||
|
|
||||||
if self._blp_compression == 1:
|
|
||||||
# Uncompressed or DirectX compression
|
|
||||||
|
|
||||||
if self._blp_encoding == Encoding.UNCOMPRESSED:
|
|
||||||
data = self._read_bgra(palette)
|
|
||||||
|
|
||||||
elif self._blp_encoding == Encoding.DXT:
|
|
||||||
data = bytearray()
|
|
||||||
if self._blp_alpha_encoding == AlphaEncoding.DXT1:
|
|
||||||
linesize = (self.size[0] + 3) // 4 * 8
|
|
||||||
for yb in range((self.size[1] + 3) // 4):
|
|
||||||
for d in decode_dxt1(
|
|
||||||
self._safe_read(linesize), alpha=bool(self._blp_alpha_depth)
|
|
||||||
):
|
|
||||||
data += d
|
|
||||||
|
|
||||||
elif self._blp_alpha_encoding == AlphaEncoding.DXT3:
|
|
||||||
linesize = (self.size[0] + 3) // 4 * 16
|
|
||||||
for yb in range((self.size[1] + 3) // 4):
|
|
||||||
for d in decode_dxt3(self._safe_read(linesize)):
|
|
||||||
data += d
|
|
||||||
|
|
||||||
elif self._blp_alpha_encoding == AlphaEncoding.DXT5:
|
|
||||||
linesize = (self.size[0] + 3) // 4 * 16
|
|
||||||
for yb in range((self.size[1] + 3) // 4):
|
|
||||||
for d in decode_dxt5(self._safe_read(linesize)):
|
|
||||||
data += d
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported alpha encoding {repr(self._blp_alpha_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
else:
|
|
||||||
msg = f"Unknown BLP encoding {repr(self._blp_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
else:
|
|
||||||
msg = f"Unknown BLP compression {repr(self._blp_compression)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
self.set_as_raw(bytes(data))
|
|
||||||
|
|
||||||
|
|
||||||
class BLPEncoder(ImageFile.PyEncoder):
|
|
||||||
_pushes_fd = True
|
|
||||||
|
|
||||||
def _write_palette(self):
|
|
||||||
data = b""
|
|
||||||
palette = self.im.getpalette("RGBA", "RGBA")
|
|
||||||
for i in range(256):
|
|
||||||
r, g, b, a = palette[i * 4 : (i + 1) * 4]
|
|
||||||
data += struct.pack("<4B", b, g, r, a)
|
|
||||||
return data
|
|
||||||
|
|
||||||
def encode(self, bufsize):
|
|
||||||
palette_data = self._write_palette()
|
|
||||||
|
|
||||||
offset = 20 + 16 * 4 * 2 + len(palette_data)
|
|
||||||
data = struct.pack("<16I", offset, *((0,) * 15))
|
|
||||||
|
|
||||||
w, h = self.im.size
|
|
||||||
data += struct.pack("<16I", w * h, *((0,) * 15))
|
|
||||||
|
|
||||||
data += palette_data
|
|
||||||
|
|
||||||
for y in range(h):
|
|
||||||
for x in range(w):
|
|
||||||
data += struct.pack("<B", self.im.getpixel((x, y)))
|
|
||||||
|
|
||||||
return len(data), 0, data
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename, save_all=False):
|
|
||||||
if im.mode != "P":
|
|
||||||
msg = "Unsupported BLP image mode"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
magic = b"BLP1" if im.encoderinfo.get("blp_version") == "BLP1" else b"BLP2"
|
|
||||||
fp.write(magic)
|
|
||||||
|
|
||||||
fp.write(struct.pack("<i", 1)) # Uncompressed or DirectX compression
|
|
||||||
fp.write(struct.pack("<b", Encoding.UNCOMPRESSED))
|
|
||||||
fp.write(struct.pack("<b", 1 if im.palette.mode == "RGBA" else 0))
|
|
||||||
fp.write(struct.pack("<b", 0)) # alpha encoding
|
|
||||||
fp.write(struct.pack("<b", 0)) # mips
|
|
||||||
fp.write(struct.pack("<II", *im.size))
|
|
||||||
if magic == b"BLP1":
|
|
||||||
fp.write(struct.pack("<i", 5))
|
|
||||||
fp.write(struct.pack("<i", 0))
|
|
||||||
|
|
||||||
ImageFile._save(im, fp, [("BLP", (0, 0) + im.size, 0, im.mode)])
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(BlpImageFile.format, BlpImageFile, _accept)
|
|
||||||
Image.register_extension(BlpImageFile.format, ".blp")
|
|
||||||
Image.register_decoder("BLP1", BLP1Decoder)
|
|
||||||
Image.register_decoder("BLP2", BLP2Decoder)
|
|
||||||
|
|
||||||
Image.register_save(BlpImageFile.format, _save)
|
|
||||||
Image.register_encoder("BLP", BLPEncoder)
|
|
@ -1,471 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# BMP file handler
|
|
||||||
#
|
|
||||||
# Windows (and OS/2) native bitmap storage format.
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 1995-09-01 fl Created
|
|
||||||
# 1996-04-30 fl Added save
|
|
||||||
# 1997-08-27 fl Fixed save of 1-bit images
|
|
||||||
# 1998-03-06 fl Load P images as L where possible
|
|
||||||
# 1998-07-03 fl Load P images as 1 where possible
|
|
||||||
# 1998-12-29 fl Handle small palettes
|
|
||||||
# 2002-12-30 fl Fixed load of 1-bit palette images
|
|
||||||
# 2003-04-21 fl Fixed load of 1-bit monochrome images
|
|
||||||
# 2003-04-23 fl Added limited support for BI_BITFIELDS compression
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2003 by Secret Labs AB
|
|
||||||
# Copyright (c) 1995-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from . import Image, ImageFile, ImagePalette
|
|
||||||
from ._binary import i16le as i16
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from ._binary import o8
|
|
||||||
from ._binary import o16le as o16
|
|
||||||
from ._binary import o32le as o32
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Read BMP file
|
|
||||||
|
|
||||||
# Map of BMP bit depth -> (PIL image mode, raw decoder mode).
# Depths not listed here (e.g. 2-bit) are rejected by _bitmap().
BIT2MODE = {
    # bits => mode, rawmode
    1: ("P", "P;1"),
    4: ("P", "P;4"),
    8: ("P", "P"),
    16: ("RGB", "BGR;15"),
    24: ("RGB", "BGR"),
    32: ("RGB", "BGRX"),
}
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:2] == b"BM"
|
|
||||||
|
|
||||||
|
|
||||||
def _dib_accept(prefix):
    """Return True if *prefix* begins with a known DIB info-header size
    (OS/2 v1=12, v3/OS/2 v2=40/64, v4=108, v5=124)."""
    return i32(prefix) in (12, 40, 64, 108, 124)
|
|
||||||
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# Image plugin for the Windows BMP format.
|
|
||||||
# =============================================================================
|
|
||||||
class BmpImageFile(ImageFile.ImageFile):
    """Image plugin for the Windows Bitmap format (BMP)"""

    # ------------------------------------------------------------- Description
    format_description = "Windows Bitmap"
    format = "BMP"

    # -------------------------------------------------- BMP Compression values
    COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5}
    # Inject each compression constant as a class attribute so the parser
    # below can say self.RAW, self.BITFIELDS, etc.
    for k, v in COMPRESSIONS.items():
        vars()[k] = v

    def _bitmap(self, header=0, offset=0):
        """Read relevant info about the BMP

        Parses the DIB info header starting at *header* (or the current
        position), fills self._size/self.mode/self.palette, and sets
        self.tile so the raw or RLE decoder can read the pixel data at
        *offset* (0 means "wherever the header parse left the file").

        :raises OSError: On unsupported header size, bit depth,
            bitfields layout, compression, or palette size.
        """
        read, seek = self.fp.read, self.fp.seek
        if header:
            seek(header)
        # read bmp header size @offset 14 (this is part of the header size)
        file_info = {"header_size": i32(read(4)), "direction": -1}

        # -------------------- If requested, read header at a specific position
        # read the rest of the bmp header, without its size
        header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4)

        # -------------------------------------------------- IBM OS/2 Bitmap v1
        # ----- This format has different offsets because of width/height types
        if file_info["header_size"] == 12:
            # OS/2 v1 stores width/height as 16-bit values.
            file_info["width"] = i16(header_data, 0)
            file_info["height"] = i16(header_data, 2)
            file_info["planes"] = i16(header_data, 4)
            file_info["bits"] = i16(header_data, 6)
            file_info["compression"] = self.RAW
            file_info["palette_padding"] = 3

        # --------------------------------------------- Windows Bitmap v2 to v5
        # v3, OS/2 v2, v4, v5
        elif file_info["header_size"] in (40, 64, 108, 124):
            # A negative (two's-complement) height means rows are stored
            # top-to-bottom; header_data[7] is the height's sign byte.
            file_info["y_flip"] = header_data[7] == 0xFF
            file_info["direction"] = 1 if file_info["y_flip"] else -1
            file_info["width"] = i32(header_data, 0)
            file_info["height"] = (
                i32(header_data, 4)
                if not file_info["y_flip"]
                else 2**32 - i32(header_data, 4)
            )
            file_info["planes"] = i16(header_data, 8)
            file_info["bits"] = i16(header_data, 10)
            file_info["compression"] = i32(header_data, 12)
            # byte size of pixel data
            file_info["data_size"] = i32(header_data, 16)
            file_info["pixels_per_meter"] = (
                i32(header_data, 20),
                i32(header_data, 24),
            )
            file_info["colors"] = i32(header_data, 28)
            file_info["palette_padding"] = 4
            # 1 meter == 39.3701 inches
            self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"])
            if file_info["compression"] == self.BITFIELDS:
                if len(header_data) >= 52:
                    # v4/v5 headers embed all four channel masks.
                    for idx, mask in enumerate(
                        ["r_mask", "g_mask", "b_mask", "a_mask"]
                    ):
                        file_info[mask] = i32(header_data, 36 + idx * 4)
                else:
                    # 40 byte headers only have the three components in the
                    # bitfields masks, ref:
                    # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
                    # See also
                    # https://github.com/python-pillow/Pillow/issues/1293
                    # There is a 4th component in the RGBQuad, in the alpha
                    # location, but it is listed as a reserved component,
                    # and it is not generally an alpha channel
                    file_info["a_mask"] = 0x0
                    for mask in ["r_mask", "g_mask", "b_mask"]:
                        file_info[mask] = i32(read(4))
                file_info["rgb_mask"] = (
                    file_info["r_mask"],
                    file_info["g_mask"],
                    file_info["b_mask"],
                )
                file_info["rgba_mask"] = (
                    file_info["r_mask"],
                    file_info["g_mask"],
                    file_info["b_mask"],
                    file_info["a_mask"],
                )
        else:
            msg = f"Unsupported BMP header type ({file_info['header_size']})"
            raise OSError(msg)

        # ------------------ Special case : header is reported 40, which
        # ---------------------- is shorter than real size for bpp >= 16
        self._size = file_info["width"], file_info["height"]

        # ------- If color count was not found in the header, compute from bits
        file_info["colors"] = (
            file_info["colors"]
            if file_info.get("colors", 0)
            else (1 << file_info["bits"])
        )
        # Skip past the palette when the caller's offset sits right after
        # the header but a palette is present (bits <= 8).
        if offset == 14 + file_info["header_size"] and file_info["bits"] <= 8:
            offset += 4 * file_info["colors"]

        # ---------------------- Check bit depth for unusual unsupported values
        self.mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None))
        if self.mode is None:
            msg = f"Unsupported BMP pixel depth ({file_info['bits']})"
            raise OSError(msg)

        # ---------------- Process BMP with Bitfields compression (not palette)
        decoder_name = "raw"
        if file_info["compression"] == self.BITFIELDS:
            # Only these exact mask layouts are supported; each maps to a
            # raw decoder mode in MASK_MODES below.
            SUPPORTED = {
                32: [
                    (0xFF0000, 0xFF00, 0xFF, 0x0),
                    (0xFF000000, 0xFF0000, 0xFF00, 0x0),
                    (0xFF000000, 0xFF0000, 0xFF00, 0xFF),
                    (0xFF, 0xFF00, 0xFF0000, 0xFF000000),
                    (0xFF0000, 0xFF00, 0xFF, 0xFF000000),
                    (0x0, 0x0, 0x0, 0x0),
                ],
                24: [(0xFF0000, 0xFF00, 0xFF)],
                16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)],
            }
            MASK_MODES = {
                (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX",
                (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR",
                (32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)): "ABGR",
                (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA",
                (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA",
                (32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
                (24, (0xFF0000, 0xFF00, 0xFF)): "BGR",
                (16, (0xF800, 0x7E0, 0x1F)): "BGR;16",
                (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15",
            }
            if file_info["bits"] in SUPPORTED:
                if (
                    file_info["bits"] == 32
                    and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]]
                ):
                    raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])]
                    self.mode = "RGBA" if "A" in raw_mode else self.mode
                elif (
                    file_info["bits"] in (24, 16)
                    and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]]
                ):
                    raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])]
                else:
                    msg = "Unsupported BMP bitfields layout"
                    raise OSError(msg)
            else:
                msg = "Unsupported BMP bitfields layout"
                raise OSError(msg)
        elif file_info["compression"] == self.RAW:
            if file_info["bits"] == 32 and header == 22:  # 32-bit .cur offset
                raw_mode, self.mode = "BGRA", "RGBA"
        elif file_info["compression"] in (self.RLE8, self.RLE4):
            decoder_name = "bmp_rle"
        else:
            msg = f"Unsupported BMP compression ({file_info['compression']})"
            raise OSError(msg)

        # --------------- Once the header is processed, process the palette/LUT
        if self.mode == "P":  # Paletted for 1, 4 and 8 bit images
            # ---------------------------------------------------- 1-bit images
            if not (0 < file_info["colors"] <= 65536):
                msg = f"Unsupported BMP Palette size ({file_info['colors']})"
                raise OSError(msg)
            else:
                padding = file_info["palette_padding"]
                palette = read(padding * file_info["colors"])
                greyscale = True
                # For a 2-color image, greyscale means exactly black+white.
                indices = (
                    (0, 255)
                    if file_info["colors"] == 2
                    else list(range(file_info["colors"]))
                )

                # ----------------- Check if greyscale and ignore palette if so
                for ind, val in enumerate(indices):
                    rgb = palette[ind * padding : ind * padding + 3]
                    if rgb != o8(val) * 3:
                        greyscale = False

                # ------- If all colors are grey, white or black, ditch palette
                if greyscale:
                    self.mode = "1" if file_info["colors"] == 2 else "L"
                    raw_mode = self.mode
                else:
                    self.mode = "P"
                    self.palette = ImagePalette.raw(
                        "BGRX" if padding == 4 else "BGR", palette
                    )

        # ---------------------------- Finally set the tile data for the plugin
        self.info["compression"] = file_info["compression"]
        args = [raw_mode]
        if decoder_name == "bmp_rle":
            args.append(file_info["compression"] == self.RLE4)
        else:
            # Row stride rounded up to a 4-byte boundary.
            args.append(((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3))
        args.append(file_info["direction"])
        self.tile = [
            (
                decoder_name,
                (0, 0, file_info["width"], file_info["height"]),
                offset or self.fp.tell(),
                tuple(args),
            )
        ]

    def _open(self):
        """Open file, check magic number and read header"""
        # read 14 bytes: magic number, filesize, reserved, header final offset
        head_data = self.fp.read(14)
        # choke if the file does not have the required magic bytes
        if not _accept(head_data):
            msg = "Not a BMP file"
            raise SyntaxError(msg)
        # read the start position of the BMP image data (u32)
        offset = i32(head_data, 10)
        # load bitmap information (offset=raster info)
        self._bitmap(offset=offset)
|
|
||||||
|
|
||||||
|
|
||||||
class BmpRleDecoder(ImageFile.PyDecoder):
    """Decoder for RLE8- and RLE4-compressed BMP pixel data.

    args: (rawmode, is_rle4, direction) — args[1] selects RLE4 decoding,
    args[-1] is the row direction handed to the raw unpacker.
    """

    _pulls_fd = True

    def decode(self, buffer):
        """Decompress the RLE stream from self.fd into raw palette indices.

        :param buffer: Unused; data is pulled from self.fd directly.
        :returns: (-1, 0) — all data consumed, no error.
        """
        rle4 = self.args[1]
        data = bytearray()
        x = 0  # current column, used to clamp over-long encoded runs
        while len(data) < self.state.xsize * self.state.ysize:
            # Each record is a (count, value) byte pair.
            pixels = self.fd.read(1)
            byte = self.fd.read(1)
            if not pixels or not byte:
                break
            num_pixels = pixels[0]
            if num_pixels:
                # encoded mode: repeat the pixel value(s) num_pixels times
                if x + num_pixels > self.state.xsize:
                    # Too much data for row
                    num_pixels = max(0, self.state.xsize - x)
                if rle4:
                    # The value byte packs two alternating 4-bit pixels.
                    first_pixel = o8(byte[0] >> 4)
                    second_pixel = o8(byte[0] & 0x0F)
                    for index in range(num_pixels):
                        if index % 2 == 0:
                            data += first_pixel
                        else:
                            data += second_pixel
                else:
                    data += byte * num_pixels
                x += num_pixels
            else:
                # count == 0: escape codes
                if byte[0] == 0:
                    # end of line: pad the row out with zero pixels
                    while len(data) % self.state.xsize != 0:
                        data += b"\x00"
                    x = 0
                elif byte[0] == 1:
                    # end of bitmap
                    break
                elif byte[0] == 2:
                    # delta: move right/up, filling the gap with zeros
                    bytes_read = self.fd.read(2)
                    if len(bytes_read) < 2:
                        break
                    # Fix: unpack the two bytes already read. The previous
                    # code called self.fd.read(2) a second time here, which
                    # consumed four bytes per delta record and desynchronized
                    # the rest of the RLE stream.
                    right, up = bytes_read
                    data += b"\x00" * (right + up * self.state.xsize)
                    x = len(data) % self.state.xsize
                else:
                    # absolute mode: byte[0] literal pixels follow
                    if rle4:
                        # 2 pixels per byte
                        byte_count = byte[0] // 2
                        bytes_read = self.fd.read(byte_count)
                        for byte_read in bytes_read:
                            data += o8(byte_read >> 4)
                            data += o8(byte_read & 0x0F)
                    else:
                        byte_count = byte[0]
                        bytes_read = self.fd.read(byte_count)
                        data += bytes_read
                    if len(bytes_read) < byte_count:
                        break
                    x += byte[0]

                # align to 16-bit word boundary
                if self.fd.tell() % 2 != 0:
                    self.fd.seek(1, os.SEEK_CUR)
        rawmode = "L" if self.mode == "L" else "P"
        self.set_as_raw(bytes(data), (rawmode, 0, self.args[-1]))
        return -1, 0
|
|
||||||
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# Image plugin for the DIB format (BMP alias)
|
|
||||||
# =============================================================================
|
|
||||||
class DibImageFile(BmpImageFile):
    # A DIB is a BMP without the 14-byte BITMAPFILEHEADER, so _open can
    # parse the info header directly from the start of the file.
    format = "DIB"
    format_description = "Windows Bitmap"

    def _open(self):
        """Read the DIB info header; there is no file header to skip."""
        self._bitmap()
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Write BMP file
|
|
||||||
|
|
||||||
|
|
||||||
# Map of PIL mode -> (raw encoder mode, bits per pixel, palette color count).
# A color count of 0 means no palette is written.
SAVE = {
    "1": ("1", 1, 2),
    "L": ("L", 8, 256),
    "P": ("P", 8, 256),
    "RGB": ("BGR", 24, 0),
    "RGBA": ("BGRA", 32, 0),
}
|
|
||||||
|
|
||||||
|
|
||||||
def _dib_save(im, fp, filename):
    """Save *im* as a headerless DIB (a BMP without the file header)."""
    _save(im, fp, filename, bitmap_header=False)
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename, bitmap_header=True):
    """Write *im* to *fp* as an uncompressed Windows bitmap.

    :param im: Image to save (modes "1", "L", "P", "RGB", "RGBA").
    :param fp: Writable binary file object.
    :param filename: Target filename (unused, kept for the save protocol).
    :param bitmap_header: When True write the 14-byte BITMAPFILEHEADER;
        False produces a bare DIB.
    :raises OSError: If the image mode cannot be written as BMP.
    :raises ValueError: If the result would exceed the 4 GiB BMP limit.
    """
    try:
        rawmode, bits, colors = SAVE[im.mode]
    except KeyError as e:
        raise OSError(f"cannot write mode {im.mode} as BMP") from e

    info = im.encoderinfo

    dpi = info.get("dpi", (96, 96))
    # 1 meter == 39.3701 inches; round half up to pixels-per-meter.
    ppm = tuple(int(value * 39.3701 + 0.5) for value in dpi)

    # Row stride padded to a 4-byte boundary.
    stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3)
    header = 40  # or 64 for OS/2 version 2
    image = stride * im.size[1]

    # Build the color table (BGRX entries) for paletted/grey modes.
    if im.mode == "1":
        palette = b"".join(o8(i) * 4 for i in (0, 255))
    elif im.mode == "L":
        palette = b"".join(o8(i) * 4 for i in range(256))
    elif im.mode == "P":
        palette = im.im.getpalette("RGB", "BGRX")
        colors = len(palette) // 4
    else:
        palette = None

    # bitmap file header (magic, size, reserved, pixel-data offset)
    if bitmap_header:
        offset = 14 + header + colors * 4
        file_size = offset + image
        if file_size > 2**32 - 1:
            raise ValueError("File size is too large for the BMP format")
        fp.write(b"BM" + o32(file_size) + o32(0) + o32(offset))

    # bitmap info header
    fp.write(
        o32(header)  # info header size
        + o32(im.size[0])  # width
        + o32(im.size[1])  # height
        + o16(1)  # planes
        + o16(bits)  # depth
        + o32(0)  # compression (0=uncompressed)
        + o32(image)  # size of bitmap
        + o32(ppm[0])  # horizontal resolution
        + o32(ppm[1])  # vertical resolution
        + o32(colors)  # colors used
        + o32(colors)  # colors important
    )

    fp.write(b"\0" * (header - 40))  # padding (for OS/2 format)

    if palette:
        fp.write(palette)

    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))])
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Registry
|
|
||||||
|
|
||||||
|
|
||||||
# Registry: BMP open/save plus the shared RLE decoder, and the DIB alias.
Image.register_open(BmpImageFile.format, BmpImageFile, _accept)
Image.register_save(BmpImageFile.format, _save)

Image.register_extension(BmpImageFile.format, ".bmp")

Image.register_mime(BmpImageFile.format, "image/bmp")

Image.register_decoder("bmp_rle", BmpRleDecoder)

Image.register_open(DibImageFile.format, DibImageFile, _dib_accept)
Image.register_save(DibImageFile.format, _dib_save)

Image.register_extension(DibImageFile.format, ".dib")

Image.register_mime(DibImageFile.format, "image/bmp")
|
|
@ -1,73 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# BUFR stub adapter
|
|
||||||
#
|
|
||||||
# Copyright (c) 1996-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
|
|
||||||
# Module-level BUFR handler; None until an application installs one.
_handler = None


def register_handler(handler):
    """
    Install application-specific BUFR image handler.

    :param handler: Handler object. It is expected to provide ``open`` and,
        for saving, ``save`` methods — see BufrStubImageFile and _save.
    """
    global _handler
    _handler = handler
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Image adapter
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC"
|
|
||||||
|
|
||||||
|
|
||||||
class BufrStubImageFile(ImageFile.StubImageFile):
    # Stub plugin: decoding is delegated entirely to the handler installed
    # via register_handler(); without one, only the magic check happens.
    format = "BUFR"
    format_description = "BUFR"

    def _open(self):
        """Validate the BUFR/ZCZC magic, then let the handler take over."""
        offset = self.fp.tell()

        if not _accept(self.fp.read(4)):
            msg = "Not a BUFR file"
            raise SyntaxError(msg)

        # rewind so the handler sees the record from its start
        self.fp.seek(offset)

        # make something up: placeholder mode/size until a handler loads data
        self.mode = "F"
        self._size = 1, 1

        loader = self._load()
        if loader:
            loader.open(self)

    def _load(self):
        # The module-level handler installed via register_handler(), or None.
        return _handler
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
    """Save *im* through the installed BUFR handler.

    :raises OSError: If no handler with a ``save`` method is registered.
    """
    handler = _handler
    if handler is None or not hasattr(handler, "save"):
        raise OSError("BUFR save handler not installed")
    handler.save(im, fp, filename)
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Registry
|
|
||||||
|
|
||||||
# Registry: hook the BUFR stub into the Image plugin machinery.
Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept)
Image.register_save(BufrStubImageFile.format, _save)

Image.register_extension(BufrStubImageFile.format, ".bufr")
|
|
@ -1,120 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# a class to read from a container file
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1995-06-18 fl Created
|
|
||||||
# 1995-09-07 fl Added readline(), readlines()
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2001 by Secret Labs AB
|
|
||||||
# Copyright (c) 1995 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
import io
|
|
||||||
|
|
||||||
|
|
||||||
class ContainerIO:
    """
    A file-like object giving read-only access to a fixed-size region of
    an existing file (for example one member of a TAR archive).

    Whether reads return ``bytes`` or ``str`` follows the wrapped file's
    mode (binary if ``"b"`` appears in ``file.mode``).
    """

    def __init__(self, file, offset, length):
        """
        Create file object.

        :param file: Existing file.
        :param offset: Start of region, in bytes.
        :param length: Size of region, in bytes.
        """
        self.fh = file  # underlying file object
        self.pos = 0  # current position, relative to the region start
        self.offset = offset  # absolute start of the region
        self.length = length  # region size in bytes
        self.fh.seek(offset)

    def isatty(self):
        """Always false: a container region is never a terminal."""
        return False

    def seek(self, offset, mode=io.SEEK_SET):
        """
        Move file pointer.

        :param offset: Offset in bytes.
        :param mode: Starting position. Use 0 for beginning of region, 1
           for current offset, and 2 for end of region. The pointer is
           clamped so it cannot leave the region.
        """
        if mode == io.SEEK_CUR:
            target = self.pos + offset
        elif mode == io.SEEK_END:
            target = self.length + offset
        else:
            target = offset
        # clamp into [0, length] and mirror the move onto the real file
        self.pos = min(max(target, 0), self.length)
        self.fh.seek(self.offset + self.pos)

    def tell(self):
        """
        Get current file pointer.

        :returns: Offset from start of region, in bytes.
        """
        return self.pos

    def read(self, n=0):
        """
        Read data.

        :param n: Number of bytes to read. If omitted or zero,
            read until end of region.
        :returns: An 8-bit string.
        """
        remaining = self.length - self.pos
        count = min(n, remaining) if n else remaining
        if not count:  # EOF
            return b"" if "b" in self.fh.mode else ""
        self.pos += count
        return self.fh.read(count)

    def readline(self):
        """
        Read a line of text, up to and including the newline (if any).

        :returns: An 8-bit string.
        """
        binary = "b" in self.fh.mode
        line = b"" if binary else ""
        newline = b"\n" if binary else "\n"
        while True:
            ch = self.read(1)
            if not ch:
                break
            line += ch
            if ch == newline:
                break
        return line

    def readlines(self):
        """
        Read all remaining lines of text.

        :returns: A list of 8-bit strings.
        """
        result = []
        while True:
            line = self.readline()
            if not line:
                break
            result.append(line)
        return result
|
|
@ -1,75 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# Windows Cursor support for PIL
|
|
||||||
#
|
|
||||||
# notes:
|
|
||||||
# uses BmpImagePlugin.py to read the bitmap data.
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 96-05-27 fl Created
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997.
|
|
||||||
# Copyright (c) Fredrik Lundh 1996.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from . import BmpImagePlugin, Image
|
|
||||||
from ._binary import i16le as i16
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == b"\0\0\2\0"
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for Windows Cursor files.
|
|
||||||
|
|
||||||
|
|
||||||
class CurImageFile(BmpImagePlugin.BmpImageFile):
    # CUR is an ICO-style container whose images are stored as BMPs,
    # so this plugin reuses BmpImageFile._bitmap for the pixel data.
    format = "CUR"
    format_description = "Windows Cursor"

    def _open(self):
        """Pick the largest cursor in the directory and load it as a bitmap."""
        offset = self.fp.tell()

        # check magic
        s = self.fp.read(6)
        if not _accept(s):
            msg = "not a CUR file"
            raise SyntaxError(msg)

        # pick the largest cursor in the file
        m = b""
        for i in range(i16(s, 4)):  # i16(s, 4) is the directory entry count
            s = self.fp.read(16)
            if not m:
                m = s
            elif s[0] > m[0] and s[1] > m[1]:
                # directory entry bytes 0/1 are width/height
                m = s
        if not m:
            msg = "No cursors were found"
            raise TypeError(msg)

        # load as bitmap (entry bytes 12-15 hold the image data offset)
        self._bitmap(i32(m, 12) + offset)

        # patch up the bitmap height: the stored bitmap stacks the XOR
        # image and the AND mask, so the real image is half as tall
        self._size = self.size[0], self.size[1] // 2
        d, e, o, a = self.tile[0]
        self.tile[0] = d, (0, 0) + self.size, o, a

        return
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
# Registry: CUR is read-only — no save handler is registered.
Image.register_open(CurImageFile.format, CurImageFile, _accept)

Image.register_extension(CurImageFile.format, ".cur")
|
|
@ -1,79 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# DCX file handling
|
|
||||||
#
|
|
||||||
# DCX is a container file format defined by Intel, commonly used
|
|
||||||
# for fax applications. Each DCX file consists of a directory
|
|
||||||
# (a list of file offsets) followed by a set of (usually 1-bit)
|
|
||||||
# PCX files.
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1995-09-09 fl Created
|
|
||||||
# 1996-03-20 fl Properly derived from PcxImageFile.
|
|
||||||
# 1998-07-15 fl Renamed offset attribute to avoid name clash
|
|
||||||
# 2002-07-30 fl Fixed file handling
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-98 by Secret Labs AB.
|
|
||||||
# Copyright (c) 1995-96 by Fredrik Lundh.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
from . import Image
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from .PcxImagePlugin import PcxImageFile
|
|
||||||
|
|
||||||
# Little-endian magic number at the start of every DCX file.
MAGIC = 0x3ADE68B1  # QUIZ: what's this value, then?


def _accept(prefix):
    # True when the prefix is long enough and carries the DCX magic.
    return len(prefix) >= 4 and i32(prefix) == MAGIC
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for the Intel DCX format.
|
|
||||||
|
|
||||||
|
|
||||||
class DcxImageFile(PcxImageFile):
    # A DCX file is a directory of offsets followed by PCX frames; each
    # frame is decoded by the inherited PcxImageFile machinery.
    format = "DCX"
    format_description = "Intel DCX"
    # Keep the file pointer open after load so seek() can revisit frames.
    _close_exclusive_fp_after_loading = False

    def _open(self):
        """Read the DCX magic and the directory of frame offsets."""
        # Header
        s = self.fp.read(4)
        if not _accept(s):
            msg = "not a DCX file"
            raise SyntaxError(msg)

        # Component directory: up to 1024 u32 offsets, zero-terminated
        self._offset = []
        for i in range(1024):
            offset = i32(self.fp.read(4))
            if not offset:
                break
            self._offset.append(offset)

        self._fp = self.fp  # keep the original fp; seek() restores it
        self.frame = None
        self.n_frames = len(self._offset)
        self.is_animated = self.n_frames > 1
        self.seek(0)

    def seek(self, frame):
        """Position the file on *frame* and re-open it as a PCX image."""
        if not self._seek_check(frame):
            return
        self.frame = frame
        self.fp = self._fp
        self.fp.seek(self._offset[frame])
        PcxImageFile._open(self)

    def tell(self):
        """Return the current frame index."""
        return self.frame
|
|
||||||
|
|
||||||
|
|
||||||
# Registry: DCX is read-only — no save handler is registered.
Image.register_open(DcxImageFile.format, DcxImageFile, _accept)

Image.register_extension(DcxImageFile.format, ".dcx")
|
|
@ -1,291 +0,0 @@
|
|||||||
"""
|
|
||||||
A Pillow loader for .dds files (S3TC-compressed aka DXTC)
|
|
||||||
Jerome Leclanche <jerome@leclan.ch>
|
|
||||||
|
|
||||||
Documentation:
|
|
||||||
https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt
|
|
||||||
|
|
||||||
The contents of this file are hereby released in the public domain (CC0)
|
|
||||||
Full text of the CC0 license:
|
|
||||||
https://creativecommons.org/publicdomain/zero/1.0/
|
|
||||||
"""
|
|
||||||
|
|
||||||
import struct
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
from ._binary import o32le as o32
|
|
||||||
|
|
||||||
# Magic ("DDS ")
DDS_MAGIC = 0x20534444

# DDS flags (dwFlags in the DDS_HEADER)
DDSD_CAPS = 0x1
DDSD_HEIGHT = 0x2
DDSD_WIDTH = 0x4
DDSD_PITCH = 0x8
DDSD_PIXELFORMAT = 0x1000
DDSD_MIPMAPCOUNT = 0x20000
DDSD_LINEARSIZE = 0x80000
DDSD_DEPTH = 0x800000

# DDS caps (dwCaps)
DDSCAPS_COMPLEX = 0x8
DDSCAPS_TEXTURE = 0x1000
DDSCAPS_MIPMAP = 0x400000

# DDS caps2 (dwCaps2): cube-map faces and volume textures
DDSCAPS2_CUBEMAP = 0x200
DDSCAPS2_CUBEMAP_POSITIVEX = 0x400
DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800
DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000
DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000
DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000
DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000
DDSCAPS2_VOLUME = 0x200000

# Pixel Format flags (dwPFFlags)
DDPF_ALPHAPIXELS = 0x1
DDPF_ALPHA = 0x2
DDPF_FOURCC = 0x4
DDPF_PALETTEINDEXED8 = 0x20
DDPF_RGB = 0x40
DDPF_LUMINANCE = 0x20000


# dds.h — convenience combinations of the flags above

DDS_FOURCC = DDPF_FOURCC
DDS_RGB = DDPF_RGB
DDS_RGBA = DDPF_RGB | DDPF_ALPHAPIXELS
DDS_LUMINANCE = DDPF_LUMINANCE
DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS
DDS_ALPHA = DDPF_ALPHA
DDS_PAL8 = DDPF_PALETTEINDEXED8

DDS_HEADER_FLAGS_TEXTURE = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT
DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT
DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH
DDS_HEADER_FLAGS_PITCH = DDSD_PITCH
DDS_HEADER_FLAGS_LINEARSIZE = DDSD_LINEARSIZE

DDS_HEIGHT = DDSD_HEIGHT
DDS_WIDTH = DDSD_WIDTH

DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS_TEXTURE
DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS_COMPLEX | DDSCAPS_MIPMAP
DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS_COMPLEX

DDS_CUBEMAP_POSITIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX
DDS_CUBEMAP_NEGATIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEX
DDS_CUBEMAP_POSITIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEY
DDS_CUBEMAP_NEGATIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEY
DDS_CUBEMAP_POSITIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEZ
DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEZ


# FourCC codes are the little-endian u32 of the ASCII tag.
# DXT1
DXT1_FOURCC = 0x31545844

# DXT3
DXT3_FOURCC = 0x33545844

# DXT5
DXT5_FOURCC = 0x35545844


# dxgiformat.h — DXGI_FORMAT values used by the DX10 extension header

DXGI_FORMAT_R8G8B8A8_TYPELESS = 27
DXGI_FORMAT_R8G8B8A8_UNORM = 28
DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = 29
DXGI_FORMAT_BC5_TYPELESS = 82
DXGI_FORMAT_BC5_UNORM = 83
DXGI_FORMAT_BC5_SNORM = 84
DXGI_FORMAT_BC6H_UF16 = 95
DXGI_FORMAT_BC6H_SF16 = 96
DXGI_FORMAT_BC7_TYPELESS = 97
DXGI_FORMAT_BC7_UNORM = 98
DXGI_FORMAT_BC7_UNORM_SRGB = 99
|
|
||||||
|
|
||||||
|
|
||||||
class DdsImageFile(ImageFile.ImageFile):
    """Image plugin for DirectDraw Surface (.dds) files."""

    format = "DDS"
    format_description = "DirectDraw Surface"

    def _open(self):
        """Parse the DDS header and configure mode, size and decoder tile.

        Raises:
            SyntaxError: the file does not start with the "DDS " magic.
            OSError: the header size is not 124 bytes or the header is
                truncated.
            NotImplementedError: the pixel format / DXGI format is not
                supported by this plugin.
        """
        if not _accept(self.fp.read(4)):
            msg = "not a DDS file"
            raise SyntaxError(msg)
        # Header size field; the DDS_HEADER structure is always 124 bytes.
        (header_size,) = struct.unpack("<I", self.fp.read(4))
        if header_size != 124:
            msg = f"Unsupported header size {repr(header_size)}"
            raise OSError(msg)
        # Read the remainder of the header (124 - the 4 size bytes).
        header_bytes = self.fp.read(header_size - 4)
        if len(header_bytes) != 120:
            msg = f"Incomplete header: {len(header_bytes)} bytes"
            raise OSError(msg)
        header = BytesIO(header_bytes)

        flags, height, width = struct.unpack("<3I", header.read(12))
        self._size = (width, height)
        self.mode = "RGBA"  # default; refined below from the pixel format

        pitch, depth, mipmaps = struct.unpack("<3I", header.read(12))
        struct.unpack("<11I", header.read(44))  # reserved

        # pixel format (DDS_PIXELFORMAT structure)
        pfsize, pfflags = struct.unpack("<2I", header.read(8))
        fourcc = header.read(4)
        (bitcount,) = struct.unpack("<I", header.read(4))
        masks = struct.unpack("<4I", header.read(16))
        if pfflags & DDPF_LUMINANCE:
            # Texture contains uncompressed L or LA data
            if pfflags & DDPF_ALPHAPIXELS:
                self.mode = "LA"
            else:
                self.mode = "L"

            self.tile = [("raw", (0, 0) + self.size, 0, (self.mode, 0, 1))]
        elif pfflags & DDPF_RGB:
            # Texture contains uncompressed RGB data.
            # Map each channel bitmask to its channel letter, then assemble
            # the rawmode string; it is reversed below because the file
            # stores channels in the opposite order.
            masks = {mask: ["R", "G", "B", "A"][i] for i, mask in enumerate(masks)}
            rawmode = ""
            if pfflags & DDPF_ALPHAPIXELS:
                rawmode += masks[0xFF000000]
            else:
                self.mode = "RGB"
            rawmode += masks[0xFF0000] + masks[0xFF00] + masks[0xFF]

            self.tile = [("raw", (0, 0) + self.size, 0, (rawmode[::-1], 0, 1))]
        else:
            # Block-compressed data; n selects the BCn decoder variant.
            data_start = header_size + 4
            n = 0
            if fourcc == b"DXT1":
                self.pixel_format = "DXT1"
                n = 1
            elif fourcc == b"DXT3":
                self.pixel_format = "DXT3"
                n = 2
            elif fourcc == b"DXT5":
                self.pixel_format = "DXT5"
                n = 3
            elif fourcc == b"ATI1":
                self.pixel_format = "BC4"
                n = 4
                self.mode = "L"
            elif fourcc == b"ATI2":
                self.pixel_format = "BC5"
                n = 5
                self.mode = "RGB"
            elif fourcc == b"BC5S":
                self.pixel_format = "BC5S"
                n = 5
                self.mode = "RGB"
            elif fourcc == b"DX10":
                # DX10 extension header (20 bytes) follows the main header.
                data_start += 20
                # ignoring flags which pertain to volume textures and cubemaps
                (dxgi_format,) = struct.unpack("<I", self.fp.read(4))
                self.fp.read(16)
                if dxgi_format in (DXGI_FORMAT_BC5_TYPELESS, DXGI_FORMAT_BC5_UNORM):
                    self.pixel_format = "BC5"
                    n = 5
                    self.mode = "RGB"
                elif dxgi_format == DXGI_FORMAT_BC5_SNORM:
                    self.pixel_format = "BC5S"
                    n = 5
                    self.mode = "RGB"
                elif dxgi_format == DXGI_FORMAT_BC6H_UF16:
                    self.pixel_format = "BC6H"
                    n = 6
                    self.mode = "RGB"
                elif dxgi_format == DXGI_FORMAT_BC6H_SF16:
                    self.pixel_format = "BC6HS"
                    n = 6
                    self.mode = "RGB"
                elif dxgi_format in (DXGI_FORMAT_BC7_TYPELESS, DXGI_FORMAT_BC7_UNORM):
                    self.pixel_format = "BC7"
                    n = 7
                elif dxgi_format == DXGI_FORMAT_BC7_UNORM_SRGB:
                    self.pixel_format = "BC7"
                    # sRGB-encoded data; record gamma so consumers can linearize.
                    self.info["gamma"] = 1 / 2.2
                    n = 7
                elif dxgi_format in (
                    DXGI_FORMAT_R8G8B8A8_TYPELESS,
                    DXGI_FORMAT_R8G8B8A8_UNORM,
                    DXGI_FORMAT_R8G8B8A8_UNORM_SRGB,
                ):
                    # Uncompressed RGBA inside a DX10 container; no BCn tile.
                    self.tile = [("raw", (0, 0) + self.size, 0, ("RGBA", 0, 1))]
                    if dxgi_format == DXGI_FORMAT_R8G8B8A8_UNORM_SRGB:
                        self.info["gamma"] = 1 / 2.2
                    return
                else:
                    msg = f"Unimplemented DXGI format {dxgi_format}"
                    raise NotImplementedError(msg)
            else:
                msg = f"Unimplemented pixel format {repr(fourcc)}"
                raise NotImplementedError(msg)

            self.tile = [
                ("bcn", (0, 0) + self.size, data_start, (n, self.pixel_format))
            ]

    def load_seek(self, pos):
        # All data is read from data_start onwards by the tile decoder;
        # seeking is a no-op.
        pass
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
    """Write *im* to *fp* as an uncompressed DDS file.

    Only "RGB", "RGBA", "L" and "LA" images are supported; any other
    mode raises OSError. The 124-byte DDS_HEADER is emitted field by
    field with o32 (little-endian 32-bit) values, followed by raw pixel
    data.
    """
    if im.mode not in ("RGB", "RGBA", "L", "LA"):
        msg = f"cannot write mode {im.mode} as DDS"
        raise OSError(msg)

    rawmode = im.mode
    # Default channel bitmasks for R, G, B (8 bits each).
    masks = [0xFF0000, 0xFF00, 0xFF]
    if im.mode in ("L", "LA"):
        pixel_flags = DDPF_LUMINANCE
    else:
        pixel_flags = DDPF_RGB
        # Raw packer expects channels in reversed order for RGB(A).
        rawmode = rawmode[::-1]
    if im.mode in ("LA", "RGBA"):
        pixel_flags |= DDPF_ALPHAPIXELS
        masks.append(0xFF000000)

    bitcount = len(masks) * 8
    # The header always stores four mask slots; pad unused ones with 0.
    while len(masks) < 4:
        masks.append(0)

    fp.write(
        o32(DDS_MAGIC)
        + o32(124)  # header size
        + o32(
            DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PITCH | DDSD_PIXELFORMAT
        )  # flags
        + o32(im.height)
        + o32(im.width)
        + o32((im.width * bitcount + 7) // 8)  # pitch
        + o32(0)  # depth
        + o32(0)  # mipmaps
        + o32(0) * 11  # reserved
        + o32(32)  # pfsize
        + o32(pixel_flags)  # pfflags
        + o32(0)  # fourcc
        + o32(bitcount)  # bitcount
        + b"".join(o32(mask) for mask in masks)  # rgbabitmask
        + o32(DDSCAPS_TEXTURE)  # dwCaps
        + o32(0)  # dwCaps2
        + o32(0)  # dwCaps3
        + o32(0)  # dwCaps4
        + o32(0)  # dwReserved2
    )
    if im.mode == "RGBA":
        # Reorder channels to the ARGB layout the masks above describe.
        r, g, b, a = im.split()
        im = Image.merge("RGBA", (a, r, g, b))
    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == b"DDS "
|
|
||||||
|
|
||||||
|
|
||||||
# Register the DDS reader, writer and file extension with the Image core.
Image.register_open(DdsImageFile.format, DdsImageFile, _accept)
Image.register_save(DdsImageFile.format, _save)
Image.register_extension(DdsImageFile.format, ".dds")
|
|
@ -1,460 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# EPS file handling
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1995-09-01 fl Created (0.1)
|
|
||||||
# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2)
|
|
||||||
# 1996-08-22 fl Don't choke on floating point BoundingBox values
|
|
||||||
# 1996-08-23 fl Handle files from Macintosh (0.3)
|
|
||||||
# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
|
|
||||||
# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5)
|
|
||||||
# 2014-05-07 e Handling of EPS with binary preview and fixed resolution
|
|
||||||
# resizing
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2003 by Secret Labs AB.
|
|
||||||
# Copyright (c) 1995-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from ._deprecate import deprecate
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
# DSC comment matchers: `split` captures "%%Key: value" pairs, while
# `field` matches bare "%%Key" / "%!Key" comment fields with no value.
split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$")
field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$")

# On Windows, locate a Ghostscript console binary on PATH at import time.
# None  -> not running on Windows
# str   -> name of the binary found on PATH
# False -> Windows, but no Ghostscript binary found
gs_windows_binary = None
if sys.platform.startswith("win"):
    import shutil

    for binary in ("gswin32c", "gswin64c", "gs"):
        if shutil.which(binary) is not None:
            gs_windows_binary = binary
            break
    else:
        gs_windows_binary = False
|
|
||||||
|
|
||||||
|
|
||||||
def has_ghostscript():
    """Return True if a usable Ghostscript executable is available.

    On Windows this relies on the module-level ``gs_windows_binary``
    lookup performed at import time; on other platforms it probes by
    running ``gs --version``.

    NOTE(review): only OSError (e.g. gs missing) is caught below; a
    ``gs`` binary that exits non-zero would raise CalledProcessError
    out of this function — confirm that is intended.
    """
    if gs_windows_binary:
        return True
    if not sys.platform.startswith("win"):
        try:
            subprocess.check_call(["gs", "--version"], stdout=subprocess.DEVNULL)
            return True
        except OSError:
            # No Ghostscript
            pass
    return False
|
|
||||||
|
|
||||||
|
|
||||||
def Ghostscript(tile, size, fp, scale=1, transparency=False):
    """Render an image using Ghostscript.

    Copies the EPS data from *fp* into a temporary file, invokes the
    ``gs`` executable to rasterize it at a resolution derived from the
    bounding box and requested *size* (multiplied by *scale*), and
    returns the core image object of the rendered result.

    Raises OSError on Windows when no Ghostscript binary was found;
    propagates CalledProcessError if gs fails.
    """

    # Unpack decoder tile
    decoder, tile, offset, data = tile[0]
    length, bbox = data

    # Hack to support hi-res rendering
    scale = int(scale) or 1
    # orig_size = size
    # orig_bbox = bbox
    size = (size[0] * scale, size[1] * scale)
    # resolution is dependent on bbox and size
    res = (
        72.0 * size[0] / (bbox[2] - bbox[0]),
        72.0 * size[1] / (bbox[3] - bbox[1]),
    )

    # Temporary file to receive Ghostscript's raster output.
    out_fd, outfile = tempfile.mkstemp()
    os.close(out_fd)

    infile_temp = None
    if hasattr(fp, "name") and os.path.exists(fp.name):
        # Input already exists on disk; let gs read it directly.
        infile = fp.name
    else:
        in_fd, infile_temp = tempfile.mkstemp()
        os.close(in_fd)
        infile = infile_temp

    # Ignore length and offset!
    # Ghostscript can read it
    # Copy whole file to read in Ghostscript
    with open(infile_temp, "wb") as f:
        # fetch length of fp
        fp.seek(0, io.SEEK_END)
        fsize = fp.tell()
        # ensure start position
        # go back
        fp.seek(0)
        lengthfile = fsize
        # Stream the source in 100 KiB chunks.
        while lengthfile > 0:
            s = fp.read(min(lengthfile, 100 * 1024))
            if not s:
                break
            lengthfile -= len(s)
            f.write(s)

    # pngalpha preserves transparency; ppmraw is the opaque default.
    device = "pngalpha" if transparency else "ppmraw"

    # Build Ghostscript command
    command = [
        "gs",
        "-q",  # quiet mode
        "-g%dx%d" % size,  # set output geometry (pixels)
        "-r%fx%f" % res,  # set input DPI (dots per inch)
        "-dBATCH",  # exit after processing
        "-dNOPAUSE",  # don't pause between pages
        "-dSAFER",  # safe mode
        f"-sDEVICE={device}",
        f"-sOutputFile={outfile}",  # output file
        # adjust for image origin
        "-c",
        f"{-bbox[0]} {-bbox[1]} translate",
        "-f",
        infile,  # input file
        # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272)
        "-c",
        "showpage",
    ]

    if gs_windows_binary is not None:
        # We are on Windows: fail clearly if no binary was found at import.
        if not gs_windows_binary:
            msg = "Unable to locate Ghostscript on paths"
            raise OSError(msg)
        command[0] = gs_windows_binary

    # push data through Ghostscript
    try:
        startupinfo = None
        if sys.platform.startswith("win"):
            # Suppress the console window that would otherwise flash up.
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        subprocess.check_call(command, startupinfo=startupinfo)
        out_im = Image.open(outfile)
        out_im.load()
    finally:
        # Always clean up the temporary files, even if gs failed.
        try:
            os.unlink(outfile)
            if infile_temp:
                os.unlink(infile_temp)
        except OSError:
            pass

    # Detach the pixel data from the temporary image before closing it.
    im = out_im.im.copy()
    out_im.close()
    return im
|
|
||||||
|
|
||||||
|
|
||||||
class PSFile:
    """
    Wrapper for bytesio object that treats either CR or LF as end of line.
    This class is no longer used internally, but kept for backwards compatibility.
    """

    def __init__(self, fp):
        # Emits a DeprecationWarning scheduled for removal in Pillow 11.
        deprecate(
            "PSFile",
            11,
            action="If you need the functionality of this class "
            "you will need to implement it yourself.",
        )
        self.fp = fp
        # One-byte lookahead buffer used by readline() to detect two-byte
        # line endings; None means "no byte buffered".
        self.char = None

    def seek(self, offset, whence=io.SEEK_SET):
        """Seek the underlying file and discard any buffered lookahead."""
        self.char = None
        self.fp.seek(offset, whence)

    def readline(self):
        """Read one line, treating any of \\r, \\n, \\r\\n, \\n\\r as EOL.

        Returns the line (without the terminator) decoded as latin-1.
        """
        # Start with any byte left over from the previous call.
        s = [self.char or b""]
        self.char = None

        c = self.fp.read(1)
        while (c not in b"\r\n") and len(c):
            s.append(c)
            c = self.fp.read(1)

        # Peek one byte past the terminator.
        self.char = self.fp.read(1)
        # line endings can be 1 or 2 of \r \n, in either order
        if self.char in b"\r\n":
            self.char = None

        return b"".join(s).decode("latin-1")
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
    """Return True if *prefix* starts a plain or binary-preview EPS file."""
    # Plain PostScript starts with the "%!PS" signature.
    if prefix[:4] == b"%!PS":
        return True
    # A DOS-EPS binary header starts with the magic 0xC5D0D3C6 (little-endian).
    return len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for Encapsulated PostScript. This plugin supports only
|
|
||||||
# a few variants of this format.
|
|
||||||
|
|
||||||
|
|
||||||
class EpsImageFile(ImageFile.ImageFile):
    """EPS File Parser for the Python Imaging Library"""

    format = "EPS"
    format_description = "Encapsulated Postscript"

    # Photoshop "%ImageData" mode id -> PIL mode.
    mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"}

    def _open(self):
        """Parse the EPS header comments and determine mode and size.

        Reads the DSC comment block line by line, recording "%%Key: value"
        pairs in ``self.info``; the "%%BoundingBox" comment determines the
        image size. A Photoshop "%ImageData" descriptor, if present,
        overrides mode and size.

        Raises:
            SyntaxError: missing "%!PS-Adobe" / "%%BoundingBox" comments,
                or an over-long comment line.
            OSError: malformed header or undeterminable bounding box.
        """
        (length, offset) = self._find_offset(self.fp)

        # go to offset - start of "%!PS"
        self.fp.seek(offset)

        self.mode = "RGB"
        self._size = None

        # Reusable 255-byte line buffer; DSC comment lines may not be longer.
        byte_arr = bytearray(255)
        bytes_mv = memoryview(byte_arr)
        bytes_read = 0
        reading_comments = True

        def check_required_header_comments():
            # Both comments are mandatory for a valid EPS header.
            if "PS-Adobe" not in self.info:
                msg = 'EPS header missing "%!PS-Adobe" comment'
                raise SyntaxError(msg)
            if "BoundingBox" not in self.info:
                msg = 'EPS header missing "%%BoundingBox" comment'
                raise SyntaxError(msg)

        while True:
            byte = self.fp.read(1)
            if byte == b"":
                # if we didn't read a byte we must be at the end of the file
                if bytes_read == 0:
                    break
            elif byte in b"\r\n":
                # if we read a line ending character, ignore it and parse what
                # we have already read. if we haven't read any other characters,
                # continue reading
                if bytes_read == 0:
                    continue
            else:
                # ASCII/hexadecimal lines in an EPS file must not exceed
                # 255 characters, not including line ending characters
                if bytes_read >= 255:
                    # only enforce this for lines starting with a "%",
                    # otherwise assume it's binary data
                    if byte_arr[0] == ord("%"):
                        msg = "not an EPS file"
                        raise SyntaxError(msg)
                    else:
                        if reading_comments:
                            check_required_header_comments()
                            reading_comments = False
                        # reset bytes_read so we can keep reading
                        # data until the end of the line
                        bytes_read = 0
                byte_arr[bytes_read] = byte[0]
                bytes_read += 1
                continue

            if reading_comments:
                # Load EPS header

                # if this line doesn't start with a "%",
                # or does start with "%%EndComments",
                # then we've reached the end of the header/comments
                if byte_arr[0] != ord("%") or bytes_mv[:13] == b"%%EndComments":
                    check_required_header_comments()
                    reading_comments = False
                    continue

                s = str(bytes_mv[:bytes_read], "latin-1")

                try:
                    m = split.match(s)
                except re.error as e:
                    msg = "not an EPS file"
                    raise SyntaxError(msg) from e

                if m:
                    k, v = m.group(1, 2)
                    self.info[k] = v
                    if k == "BoundingBox":
                        try:
                            # Note: The DSC spec says that BoundingBox
                            # fields should be integers, but some drivers
                            # put floating point values there anyway.
                            box = [int(float(i)) for i in v.split()]
                            self._size = box[2] - box[0], box[3] - box[1]
                            self.tile = [
                                ("eps", (0, 0) + self.size, offset, (length, box))
                            ]
                        except Exception:
                            # Malformed box values: leave size undetermined.
                            pass
                else:
                    m = field.match(s)
                    if m:
                        k = m.group(1)
                        if k[:8] == "PS-Adobe":
                            self.info["PS-Adobe"] = k[9:]
                        else:
                            self.info[k] = ""
                    elif s[0] == "%":
                        # handle non-DSC PostScript comments that some
                        # tools mistakenly put in the Comments section
                        pass
                    else:
                        msg = "bad EPS header"
                        raise OSError(msg)
            elif bytes_mv[:11] == b"%ImageData:":
                # Check for an "ImageData" descriptor
                # https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577413_pgfId-1035096

                # Values:
                # columns
                # rows
                # bit depth (1 or 8)
                # mode (1: L, 2: LAB, 3: RGB, 4: CMYK)
                # number of padding channels
                # block size (number of bytes per row per channel)
                # binary/ascii (1: binary, 2: ascii)
                # data start identifier (the image data follows after a single line
                #   consisting only of this quoted value)
                image_data_values = byte_arr[11:bytes_read].split(None, 7)
                columns, rows, bit_depth, mode_id = [
                    int(value) for value in image_data_values[:4]
                ]

                if bit_depth == 1:
                    self.mode = "1"
                elif bit_depth == 8:
                    try:
                        self.mode = self.mode_map[mode_id]
                    except KeyError:
                        # BUG FIX: a dict lookup raises KeyError, not the
                        # ValueError the old code caught, so an unknown
                        # mode id used to escape as an unhandled exception
                        # instead of falling back to the DSC comments.
                        break
                else:
                    break

                self._size = columns, rows
                return

            bytes_read = 0

        check_required_header_comments()

        if not self._size:
            self._size = 1, 1  # errors if this isn't set. why (1,1)?
            msg = "cannot determine EPS bounding box"
            raise OSError(msg)

    def _find_offset(self, fp):
        """Return (length, offset) of the PostScript payload in *fp*.

        A plain EPS ("%!PS") starts at offset 0 and runs to EOF; a DOS-EPS
        binary header stores the PostScript section's offset and length in
        the 8 bytes following the magic. Raises SyntaxError otherwise.
        """
        s = fp.read(4)

        if s == b"%!PS":
            # for HEAD without binary preview
            fp.seek(0, io.SEEK_END)
            length = fp.tell()
            offset = 0
        elif i32(s) == 0xC6D3D0C5:
            # FIX for: Some EPS file not handled correctly / issue #302
            # EPS can contain binary data
            # or start directly with latin coding
            # more info see:
            # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf
            s = fp.read(8)
            offset = i32(s)
            length = i32(s, 4)
        else:
            msg = "not an EPS file"
            raise SyntaxError(msg)

        return length, offset

    def load(self, scale=1, transparency=False):
        """Rasterize the EPS via Ghostscript and return the loaded image."""
        # Load EPS via Ghostscript
        if self.tile:
            self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency)
            self.mode = self.im.mode
            self._size = self.im.size
            self.tile = []
        return Image.Image.load(self)

    def load_seek(self, *args, **kwargs):
        # we can't incrementally load, so force ImageFile.parser to
        # use our custom load method by defining this method.
        pass
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename, eps=1):
    """EPS Writer for the Python Imaging Library.

    Writes *im* to *fp* as hex-encoded PostScript image data; when *eps*
    is true an EPS DSC header is emitted first. Only "L", "RGB" and
    "CMYK" modes are supported; others raise ValueError.
    """

    # make sure image data is available
    im.load()

    # determine PostScript image mode: (bits, channels, image operator)
    if im.mode == "L":
        operator = (8, 1, b"image")
    elif im.mode == "RGB":
        operator = (8, 3, b"false 3 colorimage")
    elif im.mode == "CMYK":
        operator = (8, 4, b"false 4 colorimage")
    else:
        msg = "image mode is not supported"
        raise ValueError(msg)

    if eps:
        # write EPS header
        fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n")
        fp.write(b"%%Creator: PIL 0.1 EpsEncode\n")
        # fp.write("%%CreationDate: %s"...)
        fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size)
        fp.write(b"%%Pages: 1\n")
        fp.write(b"%%EndComments\n")
        fp.write(b"%%Page: 1 1\n")
        fp.write(b"%%ImageData: %d %d " % im.size)
        fp.write(b'%d %d 0 1 1 "%s"\n' % operator)

    # image header: set up a buffer, scale and the image matrix, then the
    # procedure that feeds hex data from the current file to the operator.
    fp.write(b"gsave\n")
    fp.write(b"10 dict begin\n")
    fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1]))
    fp.write(b"%d %d scale\n" % im.size)
    fp.write(b"%d %d 8\n" % im.size)  # <= bits
    # Flip vertically: PostScript's origin is bottom-left, PIL's is top-left.
    fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
    fp.write(b"{ currentfile buf readhexstring pop } bind\n")
    fp.write(operator[2] + b"\n")
    if hasattr(fp, "flush"):
        fp.flush()

    # The "eps" encoder writes the hex-encoded pixel data.
    ImageFile._save(im, fp, [("eps", (0, 0) + im.size, 0, None)])

    fp.write(b"\n%%%%EndBinary\n")
    fp.write(b"grestore end\n")
    if hasattr(fp, "flush"):
        fp.flush()
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
# Register the EPS reader, writer, extensions and MIME type.
Image.register_open(EpsImageFile.format, EpsImageFile, _accept)

Image.register_save(EpsImageFile.format, _save)

Image.register_extensions(EpsImageFile.format, [".ps", ".eps"])

Image.register_mime(EpsImageFile.format, "application/postscript")
|
|
@ -1,380 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# EXIF tags
|
|
||||||
#
|
|
||||||
# Copyright (c) 2003 by Secret Labs AB
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
"""
|
|
||||||
This module provides constants and clear-text names for various
|
|
||||||
well-known EXIF tags.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from enum import IntEnum
|
|
||||||
|
|
||||||
|
|
||||||
class Base(IntEnum):
    """Well-known EXIF/TIFF tag ids, named per the EXIF specification.

    Values are the numeric tag ids as they appear in an IFD.
    """

    # possibly incomplete
    InteropIndex = 0x0001
    ProcessingSoftware = 0x000B
    NewSubfileType = 0x00FE
    SubfileType = 0x00FF
    ImageWidth = 0x0100
    ImageLength = 0x0101
    BitsPerSample = 0x0102
    Compression = 0x0103
    PhotometricInterpretation = 0x0106
    Thresholding = 0x0107
    CellWidth = 0x0108
    CellLength = 0x0109
    FillOrder = 0x010A
    DocumentName = 0x010D
    ImageDescription = 0x010E
    Make = 0x010F
    Model = 0x0110
    StripOffsets = 0x0111
    Orientation = 0x0112
    SamplesPerPixel = 0x0115
    RowsPerStrip = 0x0116
    StripByteCounts = 0x0117
    MinSampleValue = 0x0118
    MaxSampleValue = 0x0119
    XResolution = 0x011A
    YResolution = 0x011B
    PlanarConfiguration = 0x011C
    PageName = 0x011D
    FreeOffsets = 0x0120
    FreeByteCounts = 0x0121
    GrayResponseUnit = 0x0122
    GrayResponseCurve = 0x0123
    T4Options = 0x0124
    T6Options = 0x0125
    ResolutionUnit = 0x0128
    PageNumber = 0x0129
    TransferFunction = 0x012D
    Software = 0x0131
    DateTime = 0x0132
    Artist = 0x013B
    HostComputer = 0x013C
    Predictor = 0x013D
    WhitePoint = 0x013E
    PrimaryChromaticities = 0x013F
    ColorMap = 0x0140
    HalftoneHints = 0x0141
    TileWidth = 0x0142
    TileLength = 0x0143
    TileOffsets = 0x0144
    TileByteCounts = 0x0145
    SubIFDs = 0x014A
    InkSet = 0x014C
    InkNames = 0x014D
    NumberOfInks = 0x014E
    DotRange = 0x0150
    TargetPrinter = 0x0151
    ExtraSamples = 0x0152
    SampleFormat = 0x0153
    SMinSampleValue = 0x0154
    SMaxSampleValue = 0x0155
    TransferRange = 0x0156
    ClipPath = 0x0157
    XClipPathUnits = 0x0158
    YClipPathUnits = 0x0159
    Indexed = 0x015A
    JPEGTables = 0x015B
    OPIProxy = 0x015F
    JPEGProc = 0x0200
    JpegIFOffset = 0x0201
    JpegIFByteCount = 0x0202
    JpegRestartInterval = 0x0203
    JpegLosslessPredictors = 0x0205
    JpegPointTransforms = 0x0206
    JpegQTables = 0x0207
    JpegDCTables = 0x0208
    JpegACTables = 0x0209
    YCbCrCoefficients = 0x0211
    YCbCrSubSampling = 0x0212
    YCbCrPositioning = 0x0213
    ReferenceBlackWhite = 0x0214
    XMLPacket = 0x02BC
    RelatedImageFileFormat = 0x1000
    RelatedImageWidth = 0x1001
    RelatedImageLength = 0x1002
    Rating = 0x4746
    RatingPercent = 0x4749
    ImageID = 0x800D
    CFARepeatPatternDim = 0x828D
    BatteryLevel = 0x828F
    Copyright = 0x8298
    ExposureTime = 0x829A
    FNumber = 0x829D
    IPTCNAA = 0x83BB
    ImageResources = 0x8649
    ExifOffset = 0x8769
    InterColorProfile = 0x8773
    ExposureProgram = 0x8822
    SpectralSensitivity = 0x8824
    GPSInfo = 0x8825
    ISOSpeedRatings = 0x8827
    OECF = 0x8828
    Interlace = 0x8829
    TimeZoneOffset = 0x882A
    SelfTimerMode = 0x882B
    SensitivityType = 0x8830
    StandardOutputSensitivity = 0x8831
    RecommendedExposureIndex = 0x8832
    ISOSpeed = 0x8833
    ISOSpeedLatitudeyyy = 0x8834
    ISOSpeedLatitudezzz = 0x8835
    ExifVersion = 0x9000
    DateTimeOriginal = 0x9003
    DateTimeDigitized = 0x9004
    OffsetTime = 0x9010
    OffsetTimeOriginal = 0x9011
    OffsetTimeDigitized = 0x9012
    ComponentsConfiguration = 0x9101
    CompressedBitsPerPixel = 0x9102
    ShutterSpeedValue = 0x9201
    ApertureValue = 0x9202
    BrightnessValue = 0x9203
    ExposureBiasValue = 0x9204
    MaxApertureValue = 0x9205
    SubjectDistance = 0x9206
    MeteringMode = 0x9207
    LightSource = 0x9208
    Flash = 0x9209
    FocalLength = 0x920A
    Noise = 0x920D
    ImageNumber = 0x9211
    SecurityClassification = 0x9212
    ImageHistory = 0x9213
    TIFFEPStandardID = 0x9216
    MakerNote = 0x927C
    UserComment = 0x9286
    SubsecTime = 0x9290
    SubsecTimeOriginal = 0x9291
    SubsecTimeDigitized = 0x9292
    AmbientTemperature = 0x9400
    Humidity = 0x9401
    Pressure = 0x9402
    WaterDepth = 0x9403
    Acceleration = 0x9404
    CameraElevationAngle = 0x9405
    XPTitle = 0x9C9B
    XPComment = 0x9C9C
    XPAuthor = 0x9C9D
    XPKeywords = 0x9C9E
    XPSubject = 0x9C9F
    FlashPixVersion = 0xA000
    ColorSpace = 0xA001
    ExifImageWidth = 0xA002
    ExifImageHeight = 0xA003
    RelatedSoundFile = 0xA004
    ExifInteroperabilityOffset = 0xA005
    FlashEnergy = 0xA20B
    SpatialFrequencyResponse = 0xA20C
    FocalPlaneXResolution = 0xA20E
    FocalPlaneYResolution = 0xA20F
    FocalPlaneResolutionUnit = 0xA210
    SubjectLocation = 0xA214
    ExposureIndex = 0xA215
    SensingMethod = 0xA217
    FileSource = 0xA300
    SceneType = 0xA301
    CFAPattern = 0xA302
    CustomRendered = 0xA401
    ExposureMode = 0xA402
    WhiteBalance = 0xA403
    DigitalZoomRatio = 0xA404
    FocalLengthIn35mmFilm = 0xA405
    SceneCaptureType = 0xA406
    GainControl = 0xA407
    Contrast = 0xA408
    Saturation = 0xA409
    Sharpness = 0xA40A
    DeviceSettingDescription = 0xA40B
    SubjectDistanceRange = 0xA40C
    ImageUniqueID = 0xA420
    CameraOwnerName = 0xA430
    BodySerialNumber = 0xA431
    LensSpecification = 0xA432
    LensMake = 0xA433
    LensModel = 0xA434
    LensSerialNumber = 0xA435
    CompositeImage = 0xA460
    CompositeImageCount = 0xA461
    CompositeImageExposureTimes = 0xA462
    Gamma = 0xA500
    PrintImageMatching = 0xC4A5
    # DNG-specific tags follow.
    DNGVersion = 0xC612
    DNGBackwardVersion = 0xC613
    UniqueCameraModel = 0xC614
    LocalizedCameraModel = 0xC615
    CFAPlaneColor = 0xC616
    CFALayout = 0xC617
    LinearizationTable = 0xC618
    BlackLevelRepeatDim = 0xC619
    BlackLevel = 0xC61A
    BlackLevelDeltaH = 0xC61B
    BlackLevelDeltaV = 0xC61C
    WhiteLevel = 0xC61D
    DefaultScale = 0xC61E
    DefaultCropOrigin = 0xC61F
    DefaultCropSize = 0xC620
    ColorMatrix1 = 0xC621
    ColorMatrix2 = 0xC622
    CameraCalibration1 = 0xC623
    CameraCalibration2 = 0xC624
    ReductionMatrix1 = 0xC625
    ReductionMatrix2 = 0xC626
    AnalogBalance = 0xC627
    AsShotNeutral = 0xC628
    AsShotWhiteXY = 0xC629
    BaselineExposure = 0xC62A
    BaselineNoise = 0xC62B
    BaselineSharpness = 0xC62C
    BayerGreenSplit = 0xC62D
    LinearResponseLimit = 0xC62E
    CameraSerialNumber = 0xC62F
    LensInfo = 0xC630
    ChromaBlurRadius = 0xC631
    AntiAliasStrength = 0xC632
    ShadowScale = 0xC633
    DNGPrivateData = 0xC634
    MakerNoteSafety = 0xC635
    CalibrationIlluminant1 = 0xC65A
    CalibrationIlluminant2 = 0xC65B
    BestQualityScale = 0xC65C
    RawDataUniqueID = 0xC65D
    OriginalRawFileName = 0xC68B
    OriginalRawFileData = 0xC68C
    ActiveArea = 0xC68D
    MaskedAreas = 0xC68E
    AsShotICCProfile = 0xC68F
    AsShotPreProfileMatrix = 0xC690
    CurrentICCProfile = 0xC691
    CurrentPreProfileMatrix = 0xC692
    ColorimetricReference = 0xC6BF
    CameraCalibrationSignature = 0xC6F3
    ProfileCalibrationSignature = 0xC6F4
    AsShotProfileName = 0xC6F6
    NoiseReductionApplied = 0xC6F7
    ProfileName = 0xC6F8
    ProfileHueSatMapDims = 0xC6F9
    ProfileHueSatMapData1 = 0xC6FA
    ProfileHueSatMapData2 = 0xC6FB
    ProfileToneCurve = 0xC6FC
    ProfileEmbedPolicy = 0xC6FD
    ProfileCopyright = 0xC6FE
    ForwardMatrix1 = 0xC714
    ForwardMatrix2 = 0xC715
    PreviewApplicationName = 0xC716
    PreviewApplicationVersion = 0xC717
    PreviewSettingsName = 0xC718
    PreviewSettingsDigest = 0xC719
    PreviewColorSpace = 0xC71A
    PreviewDateTime = 0xC71B
    RawImageDigest = 0xC71C
    OriginalRawFileDigest = 0xC71D
    SubTileBlockSize = 0xC71E
    RowInterleaveFactor = 0xC71F
    ProfileLookTableDims = 0xC725
    ProfileLookTableData = 0xC726
    OpcodeList1 = 0xC740
    OpcodeList2 = 0xC741
    OpcodeList3 = 0xC74E
    NoiseProfile = 0xC761
|
|
||||||
|
|
||||||
|
|
||||||
"""Maps EXIF tags to tag names."""
|
|
||||||
TAGS = {
|
|
||||||
**{i.value: i.name for i in Base},
|
|
||||||
0x920C: "SpatialFrequencyResponse",
|
|
||||||
0x9214: "SubjectLocation",
|
|
||||||
0x9215: "ExposureIndex",
|
|
||||||
0x828E: "CFAPattern",
|
|
||||||
0x920B: "FlashEnergy",
|
|
||||||
0x9216: "TIFF/EPStandardID",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class GPS(IntEnum):
|
|
||||||
GPSVersionID = 0
|
|
||||||
GPSLatitudeRef = 1
|
|
||||||
GPSLatitude = 2
|
|
||||||
GPSLongitudeRef = 3
|
|
||||||
GPSLongitude = 4
|
|
||||||
GPSAltitudeRef = 5
|
|
||||||
GPSAltitude = 6
|
|
||||||
GPSTimeStamp = 7
|
|
||||||
GPSSatellites = 8
|
|
||||||
GPSStatus = 9
|
|
||||||
GPSMeasureMode = 10
|
|
||||||
GPSDOP = 11
|
|
||||||
GPSSpeedRef = 12
|
|
||||||
GPSSpeed = 13
|
|
||||||
GPSTrackRef = 14
|
|
||||||
GPSTrack = 15
|
|
||||||
GPSImgDirectionRef = 16
|
|
||||||
GPSImgDirection = 17
|
|
||||||
GPSMapDatum = 18
|
|
||||||
GPSDestLatitudeRef = 19
|
|
||||||
GPSDestLatitude = 20
|
|
||||||
GPSDestLongitudeRef = 21
|
|
||||||
GPSDestLongitude = 22
|
|
||||||
GPSDestBearingRef = 23
|
|
||||||
GPSDestBearing = 24
|
|
||||||
GPSDestDistanceRef = 25
|
|
||||||
GPSDestDistance = 26
|
|
||||||
GPSProcessingMethod = 27
|
|
||||||
GPSAreaInformation = 28
|
|
||||||
GPSDateStamp = 29
|
|
||||||
GPSDifferential = 30
|
|
||||||
GPSHPositioningError = 31
|
|
||||||
|
|
||||||
|
|
||||||
"""Maps EXIF GPS tags to tag names."""
|
|
||||||
GPSTAGS = {i.value: i.name for i in GPS}
|
|
||||||
|
|
||||||
|
|
||||||
class Interop(IntEnum):
|
|
||||||
InteropIndex = 1
|
|
||||||
InteropVersion = 2
|
|
||||||
RelatedImageFileFormat = 4096
|
|
||||||
RelatedImageWidth = 4097
|
|
||||||
RleatedImageHeight = 4098
|
|
||||||
|
|
||||||
|
|
||||||
class IFD(IntEnum):
|
|
||||||
Exif = 34665
|
|
||||||
GPSInfo = 34853
|
|
||||||
Makernote = 37500
|
|
||||||
Interop = 40965
|
|
||||||
IFD1 = -1
|
|
||||||
|
|
||||||
|
|
||||||
class LightSource(IntEnum):
|
|
||||||
Unknown = 0
|
|
||||||
Daylight = 1
|
|
||||||
Fluorescent = 2
|
|
||||||
Tungsten = 3
|
|
||||||
Flash = 4
|
|
||||||
Fine = 9
|
|
||||||
Cloudy = 10
|
|
||||||
Shade = 11
|
|
||||||
DaylightFluorescent = 12
|
|
||||||
DayWhiteFluorescent = 13
|
|
||||||
CoolWhiteFluorescent = 14
|
|
||||||
WhiteFluorescent = 15
|
|
||||||
StandardLightA = 17
|
|
||||||
StandardLightB = 18
|
|
||||||
StandardLightC = 19
|
|
||||||
D55 = 20
|
|
||||||
D65 = 21
|
|
||||||
D75 = 22
|
|
||||||
D50 = 23
|
|
||||||
ISO = 24
|
|
||||||
Other = 255
|
|
@ -1,73 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# FITS file handling
|
|
||||||
#
|
|
||||||
# Copyright (c) 1998-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
import math
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:6] == b"SIMPLE"
|
|
||||||
|
|
||||||
|
|
||||||
class FitsImageFile(ImageFile.ImageFile):
|
|
||||||
format = "FITS"
|
|
||||||
format_description = "FITS"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
headers = {}
|
|
||||||
while True:
|
|
||||||
header = self.fp.read(80)
|
|
||||||
if not header:
|
|
||||||
msg = "Truncated FITS file"
|
|
||||||
raise OSError(msg)
|
|
||||||
keyword = header[:8].strip()
|
|
||||||
if keyword == b"END":
|
|
||||||
break
|
|
||||||
value = header[8:].split(b"/")[0].strip()
|
|
||||||
if value.startswith(b"="):
|
|
||||||
value = value[1:].strip()
|
|
||||||
if not headers and (not _accept(keyword) or value != b"T"):
|
|
||||||
msg = "Not a FITS file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
headers[keyword] = value
|
|
||||||
|
|
||||||
naxis = int(headers[b"NAXIS"])
|
|
||||||
if naxis == 0:
|
|
||||||
msg = "No image data"
|
|
||||||
raise ValueError(msg)
|
|
||||||
elif naxis == 1:
|
|
||||||
self._size = 1, int(headers[b"NAXIS1"])
|
|
||||||
else:
|
|
||||||
self._size = int(headers[b"NAXIS1"]), int(headers[b"NAXIS2"])
|
|
||||||
|
|
||||||
number_of_bits = int(headers[b"BITPIX"])
|
|
||||||
if number_of_bits == 8:
|
|
||||||
self.mode = "L"
|
|
||||||
elif number_of_bits == 16:
|
|
||||||
self.mode = "I"
|
|
||||||
# rawmode = "I;16S"
|
|
||||||
elif number_of_bits == 32:
|
|
||||||
self.mode = "I"
|
|
||||||
elif number_of_bits in (-32, -64):
|
|
||||||
self.mode = "F"
|
|
||||||
# rawmode = "F" if number_of_bits == -32 else "F;64F"
|
|
||||||
|
|
||||||
offset = math.ceil(self.fp.tell() / 2880) * 2880
|
|
||||||
self.tile = [("raw", (0, 0) + self.size, offset, (self.mode, 0, -1))]
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Registry
|
|
||||||
|
|
||||||
Image.register_open(FitsImageFile.format, FitsImageFile, _accept)
|
|
||||||
|
|
||||||
Image.register_extensions(FitsImageFile.format, [".fit", ".fits"])
|
|
@ -1,76 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# FITS stub adapter
|
|
||||||
#
|
|
||||||
# Copyright (c) 1998-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
from . import FitsImagePlugin, Image, ImageFile
|
|
||||||
from ._deprecate import deprecate
|
|
||||||
|
|
||||||
_handler = None
|
|
||||||
|
|
||||||
|
|
||||||
def register_handler(handler):
|
|
||||||
"""
|
|
||||||
Install application-specific FITS image handler.
|
|
||||||
|
|
||||||
:param handler: Handler object.
|
|
||||||
"""
|
|
||||||
global _handler
|
|
||||||
_handler = handler
|
|
||||||
|
|
||||||
deprecate(
|
|
||||||
"FitsStubImagePlugin",
|
|
||||||
10,
|
|
||||||
action="FITS images can now be read without "
|
|
||||||
"a handler through FitsImagePlugin instead",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Override FitsImagePlugin with this handler
|
|
||||||
# for backwards compatibility
|
|
||||||
try:
|
|
||||||
Image.ID.remove(FITSStubImageFile.format)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
Image.register_open(
|
|
||||||
FITSStubImageFile.format, FITSStubImageFile, FitsImagePlugin._accept
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class FITSStubImageFile(ImageFile.StubImageFile):
|
|
||||||
format = FitsImagePlugin.FitsImageFile.format
|
|
||||||
format_description = FitsImagePlugin.FitsImageFile.format_description
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
offset = self.fp.tell()
|
|
||||||
|
|
||||||
im = FitsImagePlugin.FitsImageFile(self.fp)
|
|
||||||
self._size = im.size
|
|
||||||
self.mode = im.mode
|
|
||||||
self.tile = []
|
|
||||||
|
|
||||||
self.fp.seek(offset)
|
|
||||||
|
|
||||||
loader = self._load()
|
|
||||||
if loader:
|
|
||||||
loader.open(self)
|
|
||||||
|
|
||||||
def _load(self):
|
|
||||||
return _handler
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
|
|
||||||
msg = "FITS save handler not installed"
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Registry
|
|
||||||
|
|
||||||
Image.register_save(FITSStubImageFile.format, _save)
|
|
@ -1,171 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# FLI/FLC file handling.
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 95-09-01 fl Created
|
|
||||||
# 97-01-03 fl Fixed parser, setup decoder tile
|
|
||||||
# 98-07-15 fl Renamed offset attribute to avoid name clash
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997-98.
|
|
||||||
# Copyright (c) Fredrik Lundh 1995-97.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from . import Image, ImageFile, ImagePalette
|
|
||||||
from ._binary import i16le as i16
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from ._binary import o8
|
|
||||||
|
|
||||||
#
|
|
||||||
# decoder
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return (
|
|
||||||
len(prefix) >= 6
|
|
||||||
and i16(prefix, 4) in [0xAF11, 0xAF12]
|
|
||||||
and i16(prefix, 14) in [0, 3] # flags
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for the FLI/FLC animation format. Use the <b>seek</b>
|
|
||||||
# method to load individual frames.
|
|
||||||
|
|
||||||
|
|
||||||
class FliImageFile(ImageFile.ImageFile):
|
|
||||||
format = "FLI"
|
|
||||||
format_description = "Autodesk FLI/FLC Animation"
|
|
||||||
_close_exclusive_fp_after_loading = False
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
# HEAD
|
|
||||||
s = self.fp.read(128)
|
|
||||||
if not (_accept(s) and s[20:22] == b"\x00\x00"):
|
|
||||||
msg = "not an FLI/FLC file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
# frames
|
|
||||||
self.n_frames = i16(s, 6)
|
|
||||||
self.is_animated = self.n_frames > 1
|
|
||||||
|
|
||||||
# image characteristics
|
|
||||||
self.mode = "P"
|
|
||||||
self._size = i16(s, 8), i16(s, 10)
|
|
||||||
|
|
||||||
# animation speed
|
|
||||||
duration = i32(s, 16)
|
|
||||||
magic = i16(s, 4)
|
|
||||||
if magic == 0xAF11:
|
|
||||||
duration = (duration * 1000) // 70
|
|
||||||
self.info["duration"] = duration
|
|
||||||
|
|
||||||
# look for palette
|
|
||||||
palette = [(a, a, a) for a in range(256)]
|
|
||||||
|
|
||||||
s = self.fp.read(16)
|
|
||||||
|
|
||||||
self.__offset = 128
|
|
||||||
|
|
||||||
if i16(s, 4) == 0xF100:
|
|
||||||
# prefix chunk; ignore it
|
|
||||||
self.__offset = self.__offset + i32(s)
|
|
||||||
s = self.fp.read(16)
|
|
||||||
|
|
||||||
if i16(s, 4) == 0xF1FA:
|
|
||||||
# look for palette chunk
|
|
||||||
number_of_subchunks = i16(s, 6)
|
|
||||||
chunk_size = None
|
|
||||||
for _ in range(number_of_subchunks):
|
|
||||||
if chunk_size is not None:
|
|
||||||
self.fp.seek(chunk_size - 6, os.SEEK_CUR)
|
|
||||||
s = self.fp.read(6)
|
|
||||||
chunk_type = i16(s, 4)
|
|
||||||
if chunk_type in (4, 11):
|
|
||||||
self._palette(palette, 2 if chunk_type == 11 else 0)
|
|
||||||
break
|
|
||||||
chunk_size = i32(s)
|
|
||||||
if not chunk_size:
|
|
||||||
break
|
|
||||||
|
|
||||||
palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette]
|
|
||||||
self.palette = ImagePalette.raw("RGB", b"".join(palette))
|
|
||||||
|
|
||||||
# set things up to decode first frame
|
|
||||||
self.__frame = -1
|
|
||||||
self._fp = self.fp
|
|
||||||
self.__rewind = self.fp.tell()
|
|
||||||
self.seek(0)
|
|
||||||
|
|
||||||
def _palette(self, palette, shift):
|
|
||||||
# load palette
|
|
||||||
|
|
||||||
i = 0
|
|
||||||
for e in range(i16(self.fp.read(2))):
|
|
||||||
s = self.fp.read(2)
|
|
||||||
i = i + s[0]
|
|
||||||
n = s[1]
|
|
||||||
if n == 0:
|
|
||||||
n = 256
|
|
||||||
s = self.fp.read(n * 3)
|
|
||||||
for n in range(0, len(s), 3):
|
|
||||||
r = s[n] << shift
|
|
||||||
g = s[n + 1] << shift
|
|
||||||
b = s[n + 2] << shift
|
|
||||||
palette[i] = (r, g, b)
|
|
||||||
i += 1
|
|
||||||
|
|
||||||
def seek(self, frame):
|
|
||||||
if not self._seek_check(frame):
|
|
||||||
return
|
|
||||||
if frame < self.__frame:
|
|
||||||
self._seek(0)
|
|
||||||
|
|
||||||
for f in range(self.__frame + 1, frame + 1):
|
|
||||||
self._seek(f)
|
|
||||||
|
|
||||||
def _seek(self, frame):
|
|
||||||
if frame == 0:
|
|
||||||
self.__frame = -1
|
|
||||||
self._fp.seek(self.__rewind)
|
|
||||||
self.__offset = 128
|
|
||||||
else:
|
|
||||||
# ensure that the previous frame was loaded
|
|
||||||
self.load()
|
|
||||||
|
|
||||||
if frame != self.__frame + 1:
|
|
||||||
msg = f"cannot seek to frame {frame}"
|
|
||||||
raise ValueError(msg)
|
|
||||||
self.__frame = frame
|
|
||||||
|
|
||||||
# move to next frame
|
|
||||||
self.fp = self._fp
|
|
||||||
self.fp.seek(self.__offset)
|
|
||||||
|
|
||||||
s = self.fp.read(4)
|
|
||||||
if not s:
|
|
||||||
raise EOFError
|
|
||||||
|
|
||||||
framesize = i32(s)
|
|
||||||
|
|
||||||
self.decodermaxblock = framesize
|
|
||||||
self.tile = [("fli", (0, 0) + self.size, self.__offset, None)]
|
|
||||||
|
|
||||||
self.__offset += framesize
|
|
||||||
|
|
||||||
def tell(self):
|
|
||||||
return self.__frame
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# registry
|
|
||||||
|
|
||||||
Image.register_open(FliImageFile.format, FliImageFile, _accept)
|
|
||||||
|
|
||||||
Image.register_extensions(FliImageFile.format, [".fli", ".flc"])
|
|
@ -1,110 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# base class for raster font file parsers
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 1997-06-05 fl created
|
|
||||||
# 1997-08-19 fl restrict image width
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-1998 by Secret Labs AB
|
|
||||||
# Copyright (c) 1997-1998 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from . import Image, _binary
|
|
||||||
|
|
||||||
WIDTH = 800
|
|
||||||
|
|
||||||
|
|
||||||
def puti16(fp, values):
|
|
||||||
"""Write network order (big-endian) 16-bit sequence"""
|
|
||||||
for v in values:
|
|
||||||
if v < 0:
|
|
||||||
v += 65536
|
|
||||||
fp.write(_binary.o16be(v))
|
|
||||||
|
|
||||||
|
|
||||||
class FontFile:
|
|
||||||
"""Base class for raster font file handlers."""
|
|
||||||
|
|
||||||
bitmap = None
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.info = {}
|
|
||||||
self.glyph = [None] * 256
|
|
||||||
|
|
||||||
def __getitem__(self, ix):
|
|
||||||
return self.glyph[ix]
|
|
||||||
|
|
||||||
def compile(self):
|
|
||||||
"""Create metrics and bitmap"""
|
|
||||||
|
|
||||||
if self.bitmap:
|
|
||||||
return
|
|
||||||
|
|
||||||
# create bitmap large enough to hold all data
|
|
||||||
h = w = maxwidth = 0
|
|
||||||
lines = 1
|
|
||||||
for glyph in self:
|
|
||||||
if glyph:
|
|
||||||
d, dst, src, im = glyph
|
|
||||||
h = max(h, src[3] - src[1])
|
|
||||||
w = w + (src[2] - src[0])
|
|
||||||
if w > WIDTH:
|
|
||||||
lines += 1
|
|
||||||
w = src[2] - src[0]
|
|
||||||
maxwidth = max(maxwidth, w)
|
|
||||||
|
|
||||||
xsize = maxwidth
|
|
||||||
ysize = lines * h
|
|
||||||
|
|
||||||
if xsize == 0 and ysize == 0:
|
|
||||||
return ""
|
|
||||||
|
|
||||||
self.ysize = h
|
|
||||||
|
|
||||||
# paste glyphs into bitmap
|
|
||||||
self.bitmap = Image.new("1", (xsize, ysize))
|
|
||||||
self.metrics = [None] * 256
|
|
||||||
x = y = 0
|
|
||||||
for i in range(256):
|
|
||||||
glyph = self[i]
|
|
||||||
if glyph:
|
|
||||||
d, dst, src, im = glyph
|
|
||||||
xx = src[2] - src[0]
|
|
||||||
# yy = src[3] - src[1]
|
|
||||||
x0, y0 = x, y
|
|
||||||
x = x + xx
|
|
||||||
if x > WIDTH:
|
|
||||||
x, y = 0, y + h
|
|
||||||
x0, y0 = x, y
|
|
||||||
x = xx
|
|
||||||
s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0
|
|
||||||
self.bitmap.paste(im.crop(src), s)
|
|
||||||
self.metrics[i] = d, dst, s
|
|
||||||
|
|
||||||
def save(self, filename):
|
|
||||||
"""Save font"""
|
|
||||||
|
|
||||||
self.compile()
|
|
||||||
|
|
||||||
# font data
|
|
||||||
self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG")
|
|
||||||
|
|
||||||
# font metrics
|
|
||||||
with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp:
|
|
||||||
fp.write(b"PILfont\n")
|
|
||||||
fp.write(f";;;;;;{self.ysize};\n".encode("ascii")) # HACK!!!
|
|
||||||
fp.write(b"DATA\n")
|
|
||||||
for id in range(256):
|
|
||||||
m = self.metrics[id]
|
|
||||||
if not m:
|
|
||||||
puti16(fp, [0] * 10)
|
|
||||||
else:
|
|
||||||
puti16(fp, m[0] + m[1] + m[2])
|
|
@ -1,253 +0,0 @@
|
|||||||
#
|
|
||||||
# THIS IS WORK IN PROGRESS
|
|
||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# FlashPix support for PIL
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 97-01-25 fl Created (reads uncompressed RGB images only)
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997.
|
|
||||||
# Copyright (c) Fredrik Lundh 1997.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
import olefile
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
|
|
||||||
# we map from colour field tuples to (mode, rawmode) descriptors
|
|
||||||
MODES = {
|
|
||||||
# opacity
|
|
||||||
(0x00007FFE,): ("A", "L"),
|
|
||||||
# monochrome
|
|
||||||
(0x00010000,): ("L", "L"),
|
|
||||||
(0x00018000, 0x00017FFE): ("RGBA", "LA"),
|
|
||||||
# photo YCC
|
|
||||||
(0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
|
|
||||||
(0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"),
|
|
||||||
# standard RGB (NIFRGB)
|
|
||||||
(0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
|
|
||||||
(0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:8] == olefile.MAGIC
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for the FlashPix images.
|
|
||||||
|
|
||||||
|
|
||||||
class FpxImageFile(ImageFile.ImageFile):
|
|
||||||
format = "FPX"
|
|
||||||
format_description = "FlashPix"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
#
|
|
||||||
# read the OLE directory and see if this is a likely
|
|
||||||
# to be a FlashPix file
|
|
||||||
|
|
||||||
try:
|
|
||||||
self.ole = olefile.OleFileIO(self.fp)
|
|
||||||
except OSError as e:
|
|
||||||
msg = "not an FPX file; invalid OLE file"
|
|
||||||
raise SyntaxError(msg) from e
|
|
||||||
|
|
||||||
if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
|
|
||||||
msg = "not an FPX file; bad root CLSID"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
self._open_index(1)
|
|
||||||
|
|
||||||
def _open_index(self, index=1):
|
|
||||||
#
|
|
||||||
# get the Image Contents Property Set
|
|
||||||
|
|
||||||
prop = self.ole.getproperties(
|
|
||||||
[f"Data Object Store {index:06d}", "\005Image Contents"]
|
|
||||||
)
|
|
||||||
|
|
||||||
# size (highest resolution)
|
|
||||||
|
|
||||||
self._size = prop[0x1000002], prop[0x1000003]
|
|
||||||
|
|
||||||
size = max(self.size)
|
|
||||||
i = 1
|
|
||||||
while size > 64:
|
|
||||||
size = size / 2
|
|
||||||
i += 1
|
|
||||||
self.maxid = i - 1
|
|
||||||
|
|
||||||
# mode. instead of using a single field for this, flashpix
|
|
||||||
# requires you to specify the mode for each channel in each
|
|
||||||
# resolution subimage, and leaves it to the decoder to make
|
|
||||||
# sure that they all match. for now, we'll cheat and assume
|
|
||||||
# that this is always the case.
|
|
||||||
|
|
||||||
id = self.maxid << 16
|
|
||||||
|
|
||||||
s = prop[0x2000002 | id]
|
|
||||||
|
|
||||||
colors = []
|
|
||||||
bands = i32(s, 4)
|
|
||||||
if bands > 4:
|
|
||||||
msg = "Invalid number of bands"
|
|
||||||
raise OSError(msg)
|
|
||||||
for i in range(bands):
|
|
||||||
# note: for now, we ignore the "uncalibrated" flag
|
|
||||||
colors.append(i32(s, 8 + i * 4) & 0x7FFFFFFF)
|
|
||||||
|
|
||||||
self.mode, self.rawmode = MODES[tuple(colors)]
|
|
||||||
|
|
||||||
# load JPEG tables, if any
|
|
||||||
self.jpeg = {}
|
|
||||||
for i in range(256):
|
|
||||||
id = 0x3000001 | (i << 16)
|
|
||||||
if id in prop:
|
|
||||||
self.jpeg[i] = prop[id]
|
|
||||||
|
|
||||||
self._open_subimage(1, self.maxid)
|
|
||||||
|
|
||||||
def _open_subimage(self, index=1, subimage=0):
|
|
||||||
#
|
|
||||||
# setup tile descriptors for a given subimage
|
|
||||||
|
|
||||||
stream = [
|
|
||||||
f"Data Object Store {index:06d}",
|
|
||||||
f"Resolution {subimage:04d}",
|
|
||||||
"Subimage 0000 Header",
|
|
||||||
]
|
|
||||||
|
|
||||||
fp = self.ole.openstream(stream)
|
|
||||||
|
|
||||||
# skip prefix
|
|
||||||
fp.read(28)
|
|
||||||
|
|
||||||
# header stream
|
|
||||||
s = fp.read(36)
|
|
||||||
|
|
||||||
size = i32(s, 4), i32(s, 8)
|
|
||||||
# tilecount = i32(s, 12)
|
|
||||||
tilesize = i32(s, 16), i32(s, 20)
|
|
||||||
# channels = i32(s, 24)
|
|
||||||
offset = i32(s, 28)
|
|
||||||
length = i32(s, 32)
|
|
||||||
|
|
||||||
if size != self.size:
|
|
||||||
msg = "subimage mismatch"
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
# get tile descriptors
|
|
||||||
fp.seek(28 + offset)
|
|
||||||
s = fp.read(i32(s, 12) * length)
|
|
||||||
|
|
||||||
x = y = 0
|
|
||||||
xsize, ysize = size
|
|
||||||
xtile, ytile = tilesize
|
|
||||||
self.tile = []
|
|
||||||
|
|
||||||
for i in range(0, len(s), length):
|
|
||||||
x1 = min(xsize, x + xtile)
|
|
||||||
y1 = min(ysize, y + ytile)
|
|
||||||
|
|
||||||
compression = i32(s, i + 8)
|
|
||||||
|
|
||||||
if compression == 0:
|
|
||||||
self.tile.append(
|
|
||||||
(
|
|
||||||
"raw",
|
|
||||||
(x, y, x1, y1),
|
|
||||||
i32(s, i) + 28,
|
|
||||||
(self.rawmode,),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
elif compression == 1:
|
|
||||||
# FIXME: the fill decoder is not implemented
|
|
||||||
self.tile.append(
|
|
||||||
(
|
|
||||||
"fill",
|
|
||||||
(x, y, x1, y1),
|
|
||||||
i32(s, i) + 28,
|
|
||||||
(self.rawmode, s[12:16]),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
elif compression == 2:
|
|
||||||
internal_color_conversion = s[14]
|
|
||||||
jpeg_tables = s[15]
|
|
||||||
rawmode = self.rawmode
|
|
||||||
|
|
||||||
if internal_color_conversion:
|
|
||||||
# The image is stored as usual (usually YCbCr).
|
|
||||||
if rawmode == "RGBA":
|
|
||||||
# For "RGBA", data is stored as YCbCrA based on
|
|
||||||
# negative RGB. The following trick works around
|
|
||||||
# this problem :
|
|
||||||
jpegmode, rawmode = "YCbCrK", "CMYK"
|
|
||||||
else:
|
|
||||||
jpegmode = None # let the decoder decide
|
|
||||||
|
|
||||||
else:
|
|
||||||
# The image is stored as defined by rawmode
|
|
||||||
jpegmode = rawmode
|
|
||||||
|
|
||||||
self.tile.append(
|
|
||||||
(
|
|
||||||
"jpeg",
|
|
||||||
(x, y, x1, y1),
|
|
||||||
i32(s, i) + 28,
|
|
||||||
(rawmode, jpegmode),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# FIXME: jpeg tables are tile dependent; the prefix
|
|
||||||
# data must be placed in the tile descriptor itself!
|
|
||||||
|
|
||||||
if jpeg_tables:
|
|
||||||
self.tile_prefix = self.jpeg[jpeg_tables]
|
|
||||||
|
|
||||||
else:
|
|
||||||
msg = "unknown/invalid compression"
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
x = x + xtile
|
|
||||||
if x >= xsize:
|
|
||||||
x, y = 0, y + ytile
|
|
||||||
if y >= ysize:
|
|
||||||
break # isn't really required
|
|
||||||
|
|
||||||
self.stream = stream
|
|
||||||
self.fp = None
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
if not self.fp:
|
|
||||||
self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"])
|
|
||||||
|
|
||||||
return ImageFile.ImageFile.load(self)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
self.ole.close()
|
|
||||||
super().close()
|
|
||||||
|
|
||||||
def __exit__(self, *args):
|
|
||||||
self.ole.close()
|
|
||||||
super().__exit__()
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(FpxImageFile.format, FpxImageFile, _accept)
|
|
||||||
|
|
||||||
Image.register_extension(FpxImageFile.format, ".fpx")
|
|
@ -1,125 +0,0 @@
|
|||||||
"""
|
|
||||||
A Pillow loader for .ftc and .ftu files (FTEX)
|
|
||||||
Jerome Leclanche <jerome@leclan.ch>
|
|
||||||
|
|
||||||
The contents of this file are hereby released in the public domain (CC0)
|
|
||||||
Full text of the CC0 license:
|
|
||||||
https://creativecommons.org/publicdomain/zero/1.0/
|
|
||||||
|
|
||||||
Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001
|
|
||||||
|
|
||||||
The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
|
|
||||||
packed custom format called FTEX. This file format uses file extensions FTC
|
|
||||||
and FTU.
|
|
||||||
* FTC files are compressed textures (using standard texture compression).
|
|
||||||
* FTU files are not compressed.
|
|
||||||
Texture File Format
|
|
||||||
The FTC and FTU texture files both use the same format. This
|
|
||||||
has the following structure:
|
|
||||||
{header}
|
|
||||||
{format_directory}
|
|
||||||
{data}
|
|
||||||
Where:
|
|
||||||
{header} = {
|
|
||||||
u32:magic,
|
|
||||||
u32:version,
|
|
||||||
u32:width,
|
|
||||||
u32:height,
|
|
||||||
u32:mipmap_count,
|
|
||||||
u32:format_count
|
|
||||||
}
|
|
||||||
|
|
||||||
* The "magic" number is "FTEX".
|
|
||||||
* "width" and "height" are the dimensions of the texture.
|
|
||||||
* "mipmap_count" is the number of mipmaps in the texture.
|
|
||||||
* "format_count" is the number of texture formats (different versions of the
|
|
||||||
same texture) in this file.
|
|
||||||
|
|
||||||
{format_directory} = format_count * { u32:format, u32:where }
|
|
||||||
|
|
||||||
The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB
|
|
||||||
uncompressed textures.
|
|
||||||
The texture data for a format starts at the position "where" in the file.
|
|
||||||
|
|
||||||
Each set of texture data in the file has the following structure:
|
|
||||||
{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
|
|
||||||
* "mipmap_size" is the number of bytes in that mip level. For compressed
|
|
||||||
textures this is the size of the texture data compressed with DXT1. For 24 bit
|
|
||||||
uncompressed textures, this is 3 * width * height. Following this are the image
|
|
||||||
bytes for that mipmap level.
|
|
||||||
|
|
||||||
Note: All data is stored in little-Endian (Intel) byte order.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import struct
|
|
||||||
from enum import IntEnum
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
from ._deprecate import deprecate
|
|
||||||
|
|
||||||
MAGIC = b"FTEX"
|
|
||||||
|
|
||||||
|
|
||||||
class Format(IntEnum):
|
|
||||||
DXT1 = 0
|
|
||||||
UNCOMPRESSED = 1
|
|
||||||
|
|
||||||
|
|
||||||
def __getattr__(name):
|
|
||||||
for enum, prefix in {Format: "FORMAT_"}.items():
|
|
||||||
if name.startswith(prefix):
|
|
||||||
name = name[len(prefix) :]
|
|
||||||
if name in enum.__members__:
|
|
||||||
deprecate(f"{prefix}{name}", 10, f"{enum.__name__}.{name}")
|
|
||||||
return enum[name]
|
|
||||||
msg = f"module '{__name__}' has no attribute '{name}'"
|
|
||||||
raise AttributeError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
class FtexImageFile(ImageFile.ImageFile):
|
|
||||||
format = "FTEX"
|
|
||||||
format_description = "Texture File Format (IW2:EOC)"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
if not _accept(self.fp.read(4)):
|
|
||||||
msg = "not an FTEX file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
struct.unpack("<i", self.fp.read(4)) # version
|
|
||||||
self._size = struct.unpack("<2i", self.fp.read(8))
|
|
||||||
mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))
|
|
||||||
|
|
||||||
self.mode = "RGB"
|
|
||||||
|
|
||||||
# Only support single-format files.
|
|
||||||
# I don't know of any multi-format file.
|
|
||||||
assert format_count == 1
|
|
||||||
|
|
||||||
format, where = struct.unpack("<2i", self.fp.read(8))
|
|
||||||
self.fp.seek(where)
|
|
||||||
(mipmap_size,) = struct.unpack("<i", self.fp.read(4))
|
|
||||||
|
|
||||||
data = self.fp.read(mipmap_size)
|
|
||||||
|
|
||||||
if format == Format.DXT1:
|
|
||||||
self.mode = "RGBA"
|
|
||||||
self.tile = [("bcn", (0, 0) + self.size, 0, 1)]
|
|
||||||
elif format == Format.UNCOMPRESSED:
|
|
||||||
self.tile = [("raw", (0, 0) + self.size, 0, ("RGB", 0, 1))]
|
|
||||||
else:
|
|
||||||
msg = f"Invalid texture compression format: {repr(format)}"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
self.fp.close()
|
|
||||||
self.fp = BytesIO(data)
|
|
||||||
|
|
||||||
def load_seek(self, pos):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == MAGIC
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(FtexImageFile.format, FtexImageFile, _accept)
|
|
||||||
Image.register_extensions(FtexImageFile.format, [".ftc", ".ftu"])
|
|
@ -1,102 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
#
|
|
||||||
# load a GIMP brush file
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 96-03-14 fl Created
|
|
||||||
# 16-01-08 es Version 2
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997.
|
|
||||||
# Copyright (c) Fredrik Lundh 1996.
|
|
||||||
# Copyright (c) Eric Soroos 2016.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# See https://github.com/GNOME/gimp/blob/mainline/devel-docs/gbr.txt for
|
|
||||||
# format documentation.
|
|
||||||
#
|
|
||||||
# This code Interprets version 1 and 2 .gbr files.
|
|
||||||
# Version 1 files are obsolete, and should not be used for new
|
|
||||||
# brushes.
|
|
||||||
# Version 2 files are saved by GIMP v2.8 (at least)
|
|
||||||
# Version 3 files have a format specifier of 18 for 16bit floats in
|
|
||||||
# the color depth field. This is currently unsupported by Pillow.
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
from ._binary import i32be as i32
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return len(prefix) >= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2)
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for the GIMP brush format.
|
|
||||||
|
|
||||||
|
|
||||||
class GbrImageFile(ImageFile.ImageFile):
|
|
||||||
format = "GBR"
|
|
||||||
format_description = "GIMP brush file"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
header_size = i32(self.fp.read(4))
|
|
||||||
if header_size < 20:
|
|
||||||
msg = "not a GIMP brush"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
version = i32(self.fp.read(4))
|
|
||||||
if version not in (1, 2):
|
|
||||||
msg = f"Unsupported GIMP brush version: {version}"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
width = i32(self.fp.read(4))
|
|
||||||
height = i32(self.fp.read(4))
|
|
||||||
color_depth = i32(self.fp.read(4))
|
|
||||||
if width <= 0 or height <= 0:
|
|
||||||
msg = "not a GIMP brush"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
if color_depth not in (1, 4):
|
|
||||||
msg = f"Unsupported GIMP brush color depth: {color_depth}"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
if version == 1:
|
|
||||||
comment_length = header_size - 20
|
|
||||||
else:
|
|
||||||
comment_length = header_size - 28
|
|
||||||
magic_number = self.fp.read(4)
|
|
||||||
if magic_number != b"GIMP":
|
|
||||||
msg = "not a GIMP brush, bad magic number"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
self.info["spacing"] = i32(self.fp.read(4))
|
|
||||||
|
|
||||||
comment = self.fp.read(comment_length)[:-1]
|
|
||||||
|
|
||||||
if color_depth == 1:
|
|
||||||
self.mode = "L"
|
|
||||||
else:
|
|
||||||
self.mode = "RGBA"
|
|
||||||
|
|
||||||
self._size = width, height
|
|
||||||
|
|
||||||
self.info["comment"] = comment
|
|
||||||
|
|
||||||
# Image might not be small
|
|
||||||
Image._decompression_bomb_check(self.size)
|
|
||||||
|
|
||||||
# Data is an uncompressed block of w * h * bytes/pixel
|
|
||||||
self._data_size = width * height * color_depth
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
if not self.im:
|
|
||||||
self.im = Image.core.new(self.mode, self.size)
|
|
||||||
self.frombytes(self.fp.read(self._data_size))
|
|
||||||
return Image.Image.load(self)
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# registry
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(GbrImageFile.format, GbrImageFile, _accept)
|
|
||||||
Image.register_extension(GbrImageFile.format, ".gbr")
|
|
@ -1,97 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# GD file handling
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1996-04-12 fl Created
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997 by Secret Labs AB.
|
|
||||||
# Copyright (c) 1996 by Fredrik Lundh.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
"""
|
|
||||||
.. note::
|
|
||||||
This format cannot be automatically recognized, so the
|
|
||||||
class is not registered for use with :py:func:`PIL.Image.open()`. To open a
|
|
||||||
gd file, use the :py:func:`PIL.GdImageFile.open()` function instead.
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This
|
|
||||||
implementation is provided for convenience and demonstrational
|
|
||||||
purposes only.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
from . import ImageFile, ImagePalette, UnidentifiedImageError
|
|
||||||
from ._binary import i16be as i16
|
|
||||||
from ._binary import i32be as i32
|
|
||||||
|
|
||||||
|
|
||||||
class GdImageFile(ImageFile.ImageFile):
    """
    Image plugin for the GD uncompressed format. Note that this format
    is not supported by the standard :py:func:`PIL.Image.open()` function. To use
    this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and
    use the :py:func:`PIL.GdImageFile.open()` function.
    """

    format = "GD"
    format_description = "GD uncompressed images"

    def _open(self):
        # Header
        s = self.fp.read(1037)

        # 0xFFFE / 0xFFFF magic words mark GD 2.x files.
        if not i16(s) in [65534, 65535]:
            msg = "Not a valid GD 2.x .gd file"
            raise SyntaxError(msg)

        self.mode = "L"  # FIXME: "P"
        self._size = i16(s, 2), i16(s, 4)

        # The truecolor flag at byte 6 shifts every later header field
        # by two bytes.
        true_color = s[6]
        true_color_offset = 2 if true_color else 0

        # transparency index
        tindex = i32(s, 7 + true_color_offset)
        if tindex < 256:
            self.info["transparency"] = tindex

        # 256 four-byte entries immediately after the transparency field.
        self.palette = ImagePalette.raw(
            "XBGR", s[7 + true_color_offset + 4 : 7 + true_color_offset + 4 + 256 * 4]
        )

        # Raw pixel data starts right after the palette.
        self.tile = [
            (
                "raw",
                (0, 0) + self.size,
                7 + true_color_offset + 4 + 256 * 4,
                ("L", 0, 1),
            )
        ]
|
|
||||||
|
|
||||||
|
|
||||||
def open(fp, mode="r"):
    """
    Load texture from a GD image file.

    :param fp: GD file name, or an opened file handle.
    :param mode: Optional mode. In this version, if the mode argument
        is given, it must be "r".
    :returns: An image instance.
    :raises ValueError: If a mode other than "r" is requested.
    :raises OSError: If the image could not be read.
    """
    if mode != "r":
        raise ValueError("bad mode")

    try:
        return GdImageFile(fp)
    except SyntaxError as parse_error:
        # Re-raise as the generic "cannot identify" error, keeping the
        # original parse failure as the cause.
        raise UnidentifiedImageError("cannot identify this image file") from parse_error
|
|
File diff suppressed because it is too large
Load Diff
@ -1,137 +0,0 @@
|
|||||||
#
|
|
||||||
# Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# stuff to read (and render) GIMP gradient files
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 97-08-23 fl Created
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997.
|
|
||||||
# Copyright (c) Fredrik Lundh 1997.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
"""
|
|
||||||
Stuff to translate curve segments to palette values (derived from
|
|
||||||
the corresponding code in GIMP, written by Federico Mena Quintero.
|
|
||||||
See the GIMP distribution for more information.)
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
from math import log, pi, sin, sqrt
|
|
||||||
|
|
||||||
from ._binary import o8
|
|
||||||
|
|
||||||
EPSILON = 1e-10
""""""  # Enable auto-doc for data member


def linear(middle, pos):
    """Piecewise-linear segment: ramps 0 -> 0.5 over [0, middle] and
    0.5 -> 1 over [middle, 1]; degenerate halves snap to 0.0 / 1.0."""
    if pos <= middle:
        if middle < EPSILON:
            return 0.0
        return 0.5 * pos / middle
    remainder = pos - middle
    span = 1.0 - middle
    if span < EPSILON:
        return 1.0
    return 0.5 + 0.5 * remainder / span
|
|
||||||
|
|
||||||
|
|
||||||
def curved(middle, pos):
    """Power-curve segment: chooses the exponent so the curve passes
    through 0.5 at ``middle`` (EPSILON guards log(0))."""
    exponent = log(0.5) / log(max(middle, EPSILON))
    return pos**exponent
|
|
||||||
|
|
||||||
|
|
||||||
def sine(middle, pos):
    """Sinusoidal ease of the linear ramp, mapped from [-pi/2, pi/2]
    back into [0, 1]."""
    t = linear(middle, pos)
    return (sin(-pi / 2.0 + pi * t) + 1.0) / 2.0
|
|
||||||
|
|
||||||
|
|
||||||
def sphere_increasing(middle, pos):
    """Quarter-circle (convex) ease of the linear ramp."""
    t = linear(middle, pos) - 1.0
    return sqrt(1.0 - t**2)
|
|
||||||
|
|
||||||
|
|
||||||
def sphere_decreasing(middle, pos):
    """Quarter-circle (concave) ease of the linear ramp."""
    t = linear(middle, pos)
    return 1.0 - sqrt(1.0 - t**2)
|
|
||||||
|
|
||||||
|
|
||||||
# Segment interpolation functions, indexed by the segment-type integer
# read from GIMP .ggr files (see GimpGradientFile.__init__).
SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing]
""""""  # Enable auto-doc for data member
|
|
||||||
|
|
||||||
|
|
||||||
class GradientFile:
    # Parsed gradient: a list of (x0, x1, xm, rgb0, rgb1, segment) tuples,
    # filled in by a subclass parser (e.g. GimpGradientFile).
    gradient = None

    def getpalette(self, entries=256):
        """Render the gradient into a flat RGBA palette.

        :param entries: Number of palette entries to generate.
        :returns: Tuple ``(palette_bytes, "RGBA")`` with ``entries * 4``
            bytes of interleaved R, G, B, A values.
        """
        palette = []

        ix = 0
        x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]

        for i in range(entries):
            # Sample position in [0, 1].
            x = i / (entries - 1)

            # Advance to the curve segment that contains x (segments are
            # assumed ordered by increasing x).
            while x1 < x:
                ix += 1
                x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]

            w = x1 - x0

            if w < EPSILON:
                # Degenerate (zero-width) segment: sample at its midpoint.
                scale = segment(0.5, 0.5)
            else:
                # Normalized midpoint and position within this segment.
                scale = segment((xm - x0) / w, (x - x0) / w)

            # expand to RGBA
            r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5))
            g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5))
            b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5))
            a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5))

            # add to palette
            palette.append(r + g + b + a)

        return b"".join(palette), "RGBA"
|
|
||||||
|
|
||||||
|
|
||||||
class GimpGradientFile(GradientFile):
    """File handler for GIMP's gradient format.

    Parses a ``.ggr`` text file into ``self.gradient`` in the tuple
    layout consumed by :meth:`GradientFile.getpalette`.

    :raises SyntaxError: If the magic line is missing.
    :raises OSError: If any segment uses a non-RGB colour space.
    """

    def __init__(self, fp):
        if fp.readline()[:13] != b"GIMP Gradient":
            msg = "not a GIMP gradient file"
            raise SyntaxError(msg)

        line = fp.readline()

        # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do
        if line.startswith(b"Name: "):
            line = fp.readline().strip()

        # Number of segment lines that follow.
        count = int(line)

        gradient = []

        for i in range(count):
            # Each segment line: 11 floats followed by integer fields.
            s = fp.readline().split()
            w = [float(x) for x in s[:11]]

            x0, x1 = w[0], w[2]  # segment endpoints
            xm = w[1]  # segment midpoint
            rgb0 = w[3:7]  # left RGBA
            rgb1 = w[7:11]  # right RGBA

            # Field 11 selects the interpolation function.
            segment = SEGMENTS[int(s[11])]
            cspace = int(s[12])

            if cspace != 0:
                msg = "cannot handle HSV colour space"
                raise OSError(msg)

            gradient.append((x0, x1, xm, rgb0, rgb1, segment))

        self.gradient = gradient
|
|
@ -1,56 +0,0 @@
|
|||||||
#
|
|
||||||
# Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# stuff to read GIMP palette files
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1997-08-23 fl Created
|
|
||||||
# 2004-09-07 fl Support GIMP 2.0 palette files.
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997-2004. All rights reserved.
|
|
||||||
# Copyright (c) Fredrik Lundh 1997-2004.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
from ._binary import o8
|
|
||||||
|
|
||||||
|
|
||||||
class GimpPaletteFile:
    """File handler for GIMP's palette format."""

    # Raw mode string reported alongside the palette bytes.
    rawmode = "RGB"

    def __init__(self, fp):
        # Start from a greyscale ramp so unfilled slots stay defined.
        self.palette = [o8(i) * 3 for i in range(256)]

        if fp.readline()[:12] != b"GIMP Palette":
            msg = "not a GIMP palette file"
            raise SyntaxError(msg)

        for i in range(256):
            s = fp.readline()
            if not s:
                break

            # skip fields and comment lines
            # NOTE: a skipped line still consumes slot i, leaving the
            # greyscale default at that index.
            if re.match(rb"\w+:|#", s):
                continue
            if len(s) > 100:
                msg = "bad palette file"
                raise SyntaxError(msg)

            # First three whitespace-separated fields are R, G, B.
            v = tuple(map(int, s.split()[:3]))
            if len(v) != 3:
                msg = "bad palette entry"
                raise ValueError(msg)

            self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2])

        # Flatten the 256 three-byte entries into one bytes object.
        self.palette = b"".join(self.palette)

    def getpalette(self):
        """Return ``(palette_bytes, rawmode)`` for ImagePalette consumption."""
        return self.palette, self.rawmode
|
|
@ -1,73 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# GRIB stub adapter
|
|
||||||
#
|
|
||||||
# Copyright (c) 1996-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
|
|
||||||
_handler = None
|
|
||||||
|
|
||||||
|
|
||||||
def register_handler(handler):
    """
    Install application-specific GRIB image handler.

    :param handler: Handler object. It is stored in the module-level
        ``_handler``; the stub plugin calls its ``open(image)`` when
        loading and ``save(im, fp, filename)`` when saving.
    """
    # Rebind the module-level handler used by GribStubImageFile and _save.
    global _handler
    _handler = handler
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Image adapter
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == b"GRIB" and prefix[7] == 1
|
|
||||||
|
|
||||||
|
|
||||||
class GribStubImageFile(ImageFile.StubImageFile):
    # Stub plugin: recognition only; decoding is delegated to an
    # application-installed handler (see register_handler).
    format = "GRIB"
    format_description = "GRIB"

    def _open(self):
        # Remember the position so the handler sees the whole stream.
        offset = self.fp.tell()

        if not _accept(self.fp.read(8)):
            msg = "Not a GRIB file"
            raise SyntaxError(msg)

        self.fp.seek(offset)

        # make something up
        self.mode = "F"
        self._size = 1, 1

        loader = self._load()
        if loader:
            loader.open(self)

    def _load(self):
        # The installed handler, or None when none is registered.
        return _handler
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
    """Delegate saving to the application-installed GRIB handler.

    :raises OSError: If no handler with a ``save`` method is installed.
    """
    handler = _handler
    if handler is None or not hasattr(handler, "save"):
        raise OSError("GRIB save handler not installed")
    handler.save(im, fp, filename)
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
# Registry

# Register the stub so Image.open() recognises GRIB edition-1 files.
Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept)
Image.register_save(GribStubImageFile.format, _save)

Image.register_extension(GribStubImageFile.format, ".grib")
|
|
@ -1,73 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# HDF5 stub adapter
|
|
||||||
#
|
|
||||||
# Copyright (c) 2000-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
|
|
||||||
_handler = None
|
|
||||||
|
|
||||||
|
|
||||||
def register_handler(handler):
    """
    Install application-specific HDF5 image handler.

    :param handler: Handler object. It is stored in the module-level
        ``_handler``; the stub plugin calls its ``open(image)`` when
        loading and ``save(im, fp, filename)`` when saving.
    """
    # Rebind the module-level handler used by HDF5StubImageFile and _save.
    global _handler
    _handler = handler
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Image adapter
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:8] == b"\x89HDF\r\n\x1a\n"
|
|
||||||
|
|
||||||
|
|
||||||
class HDF5StubImageFile(ImageFile.StubImageFile):
    # Stub plugin: recognition only; decoding is delegated to an
    # application-installed handler (see register_handler).
    format = "HDF5"
    format_description = "HDF5"

    def _open(self):
        # Remember the position so the handler sees the whole stream.
        offset = self.fp.tell()

        if not _accept(self.fp.read(8)):
            msg = "Not an HDF file"
            raise SyntaxError(msg)

        self.fp.seek(offset)

        # make something up
        self.mode = "F"
        self._size = 1, 1

        loader = self._load()
        if loader:
            loader.open(self)

    def _load(self):
        # The installed handler, or None when none is registered.
        return _handler
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
    """Delegate saving to the application-installed HDF5 handler.

    :raises OSError: If no handler with a ``save`` method is installed.
    """
    handler = _handler
    if handler is None or not hasattr(handler, "save"):
        raise OSError("HDF5 save handler not installed")
    handler.save(im, fp, filename)
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
# Registry

# Register the stub so Image.open() recognises HDF5 containers.
Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept)
Image.register_save(HDF5StubImageFile.format, _save)

Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"])
|
|
@ -1,399 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# macOS icns file decoder, based on icns.py by Bob Ippolito.
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies.
|
|
||||||
# 2020-04-04 Allow saving on all operating systems.
|
|
||||||
#
|
|
||||||
# Copyright (c) 2004 by Bob Ippolito.
|
|
||||||
# Copyright (c) 2004 by Secret Labs.
|
|
||||||
# Copyright (c) 2004 by Fredrik Lundh.
|
|
||||||
# Copyright (c) 2014 by Alastair Houghton.
|
|
||||||
# Copyright (c) 2020 by Pan Jing.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
import struct
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from PIL import Image, ImageFile, PngImagePlugin, features
|
|
||||||
|
|
||||||
enable_jpeg2k = features.check_codec("jpg_2000")
|
|
||||||
if enable_jpeg2k:
|
|
||||||
from PIL import Jpeg2KImagePlugin
|
|
||||||
|
|
||||||
# File magic and the fixed size of every (type, length) resource header.
MAGIC = b"icns"
HEADERSIZE = 8


def nextheader(fobj):
    """Read one icns resource header: a 4-byte type code and a
    big-endian uint32 length."""
    raw = fobj.read(HEADERSIZE)
    return struct.unpack(">4sI", raw)
|
|
||||||
|
|
||||||
|
|
||||||
def read_32t(fobj, start_length, size):
    """Read an 'it32' icon resource: verify the 4-byte zero prefix, then
    delegate the remainder to :func:`read_32`.

    :raises SyntaxError: If the zero prefix is missing.
    """
    # The 128x128 icon seems to have an extra header for some reason.
    (start, length) = start_length
    fobj.seek(start)
    sig = fobj.read(4)
    if sig != b"\x00\x00\x00\x00":
        msg = "Unknown signature, expecting 0x00000000"
        raise SyntaxError(msg)
    return read_32(fobj, (start + 4, length - 4), size)
|
|
||||||
|
|
||||||
|
|
||||||
def read_32(fobj, start_length, size):
    """
    Read a 32bit RGB icon resource.  Seems to be either uncompressed or
    an RLE packbits-like scheme.

    :param fobj: Seekable file object containing the resource.
    :param start_length: ``(offset, byte_length)`` of the resource.
    :param size: ``(width, height, scale)`` tuple; pixel dimensions are
        width*scale by height*scale.
    :returns: ``{"RGB": image}`` channel dict.
    :raises SyntaxError: If a channel's RLE stream ends prematurely.
    """
    (start, length) = start_length
    fobj.seek(start)
    pixel_size = (size[0] * size[2], size[1] * size[2])
    sizesq = pixel_size[0] * pixel_size[1]
    if length == sizesq * 3:
        # uncompressed ("RGBRGBGB")
        indata = fobj.read(length)
        im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1)
    else:
        # decode image
        im = Image.new("RGB", pixel_size, None)
        # Channels are stored planar: one RLE stream per R, G, B band.
        for band_ix in range(3):
            data = []
            bytesleft = sizesq
            while bytesleft > 0:
                byte = fobj.read(1)
                if not byte:
                    break
                byte = byte[0]
                if byte & 0x80:
                    # Run: the next byte repeated (control - 125) times.
                    blocksize = byte - 125
                    byte = fobj.read(1)
                    for i in range(blocksize):
                        data.append(byte)
                else:
                    # Literal: copy the next (control + 1) bytes verbatim.
                    blocksize = byte + 1
                    data.append(fobj.read(blocksize))
                bytesleft -= blocksize
                if bytesleft <= 0:
                    break
            if bytesleft != 0:
                msg = f"Error reading channel [{repr(bytesleft)} left]"
                raise SyntaxError(msg)
            band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1)
            im.im.putband(band.im, band_ix)
    return {"RGB": im}
|
|
||||||
|
|
||||||
|
|
||||||
def read_mk(fobj, start_length, size):
    """Read an uncompressed 8-bit alpha-mask resource as an ``{"A": band}``
    channel dict."""
    # Alpha masks seem to be uncompressed
    fobj.seek(start_length[0])
    pixel_size = (size[0] * size[2], size[1] * size[2])
    pixel_count = pixel_size[0] * pixel_size[1]
    raw = fobj.read(pixel_count)
    band = Image.frombuffer("L", pixel_size, raw, "raw", "L", 0, 1)
    return {"A": band}
|
|
||||||
|
|
||||||
|
|
||||||
def read_png_or_jpeg2000(fobj, start_length, size):
    """Read an icon resource stored as PNG or JPEG 2000.

    :param fobj: Seekable file object containing the resource.
    :param start_length: ``(offset, byte_length)`` of the resource.
    :param size: Requested size tuple (unused here; payload carries its own).
    :returns: ``{"RGBA": image}`` channel dict.
    :raises ValueError: For unknown payloads, or JPEG 2000 data when the
        codec was not built in.
    """
    (start, length) = start_length
    fobj.seek(start)
    sig = fobj.read(12)
    if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a":
        fobj.seek(start)
        im = PngImagePlugin.PngImageFile(fobj)
        Image._decompression_bomb_check(im.size)
        return {"RGBA": im}
    elif (
        sig[:4] == b"\xff\x4f\xff\x51"
        or sig[:4] == b"\x0d\x0a\x87\x0a"
        or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a"
    ):
        if not enable_jpeg2k:
            msg = (
                "Unsupported icon subimage format (rebuild PIL "
                "with JPEG 2000 support to fix this)"
            )
            raise ValueError(msg)
        # j2k, jpc or j2c
        fobj.seek(start)
        jp2kstream = fobj.read(length)
        f = io.BytesIO(jp2kstream)
        im = Jpeg2KImagePlugin.Jpeg2KImageFile(f)
        Image._decompression_bomb_check(im.size)
        if im.mode != "RGBA":
            im = im.convert("RGBA")
        return {"RGBA": im}
    else:
        msg = "Unsupported icon subimage format"
        raise ValueError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
class IcnsFile:
    """Parser for the icns container: indexes resources by their 4-byte
    type code and assembles icon images via per-type reader functions."""

    # (width, height, scale) -> list of (resource type, reader), in order
    # of preference for that size.
    SIZES = {
        (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)],
        (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)],
        (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)],
        (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)],
        (128, 128, 2): [(b"ic13", read_png_or_jpeg2000)],
        (128, 128, 1): [
            (b"ic07", read_png_or_jpeg2000),
            (b"it32", read_32t),
            (b"t8mk", read_mk),
        ],
        (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)],
        (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)],
        (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)],
        (32, 32, 1): [
            (b"icp5", read_png_or_jpeg2000),
            (b"il32", read_32),
            (b"l8mk", read_mk),
        ],
        (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)],
        (16, 16, 1): [
            (b"icp4", read_png_or_jpeg2000),
            (b"is32", read_32),
            (b"s8mk", read_mk),
        ],
    }

    def __init__(self, fobj):
        """
        fobj is a file-like object as an icns resource
        """
        # signature : (start, length)
        self.dct = dct = {}
        self.fobj = fobj
        sig, filesize = nextheader(fobj)
        if not _accept(sig):
            msg = "not an icns file"
            raise SyntaxError(msg)
        i = HEADERSIZE
        # Walk top-level blocks, recording each payload's offset/length.
        while i < filesize:
            sig, blocksize = nextheader(fobj)
            if blocksize <= 0:
                msg = "invalid block header"
                raise SyntaxError(msg)
            i += HEADERSIZE
            # Stored block size includes the 8-byte header itself.
            blocksize -= HEADERSIZE
            dct[sig] = (i, blocksize)
            fobj.seek(blocksize, io.SEEK_CUR)
            i += blocksize

    def itersizes(self):
        # Sizes for which at least one known resource type is present.
        sizes = []
        for size, fmts in self.SIZES.items():
            for fmt, reader in fmts:
                if fmt in self.dct:
                    sizes.append(size)
                    break
        return sizes

    def bestsize(self):
        # Largest available (width, height, scale) tuple.
        sizes = self.itersizes()
        if not sizes:
            msg = "No 32bit icon resources found"
            raise SyntaxError(msg)
        return max(sizes)

    def dataforsize(self, size):
        """
        Get an icon resource as {channel: array}.  Note that
        the arrays are bottom-up like windows bitmaps and will likely
        need to be flipped or transposed in some way.
        """
        dct = {}
        for code, reader in self.SIZES[size]:
            desc = self.dct.get(code)
            if desc is not None:
                dct.update(reader(self.fobj, desc, size))
        return dct

    def getimage(self, size=None):
        """Assemble an RGBA image for the requested (or best) size,
        merging a separate alpha mask into the RGB data when present."""
        if size is None:
            size = self.bestsize()
        if size is not None and len(size) == 2:
            size = (size[0], size[1], 1)
        channels = self.dataforsize(size)

        im = channels.get("RGBA", None)
        if im:
            return im

        im = channels.get("RGB").copy()
        try:
            im.putalpha(channels["A"])
        except KeyError:
            pass
        return im
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for Mac OS icons.
|
|
||||||
|
|
||||||
|
|
||||||
class IcnsImageFile(ImageFile.ImageFile):
    """
    PIL image support for Mac OS .icns files.
    Chooses the best resolution, but will possibly load
    a different size image if you mutate the size attribute
    before calling 'load'.

    The info dictionary has a key 'sizes' that is a list
    of sizes that the icns file has.
    """

    format = "ICNS"
    format_description = "Mac OS icns resource"

    def _open(self):
        self.icns = IcnsFile(self.fp)
        self.mode = "RGBA"
        self.info["sizes"] = self.icns.itersizes()
        self.best_size = self.icns.bestsize()
        # Report pixel dimensions (points * scale).
        self.size = (
            self.best_size[0] * self.best_size[2],
            self.best_size[1] * self.best_size[2],
        )

    @property
    def size(self):
        return self._size

    @size.setter
    def size(self, value):
        # Accept both (w, h) and (w, h, scale); reject sizes the file
        # does not actually contain.
        info_size = value
        if info_size not in self.info["sizes"] and len(info_size) == 2:
            info_size = (info_size[0], info_size[1], 1)
        if (
            info_size not in self.info["sizes"]
            and len(info_size) == 3
            and info_size[2] == 1
        ):
            # A scale-1 triple may still match a retina entry whose
            # pixel dimensions equal the requested width/height.
            simple_sizes = [
                (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"]
            ]
            if value in simple_sizes:
                info_size = self.info["sizes"][simple_sizes.index(value)]
        if info_size not in self.info["sizes"]:
            msg = "This is not one of the allowed sizes of this image"
            raise ValueError(msg)
        self._size = value

    def load(self):
        if len(self.size) == 3:
            # A 3-tuple selects a specific (w, h, scale) entry; normalize
            # the public size back to pixel dimensions.
            self.best_size = self.size
            self.size = (
                self.best_size[0] * self.best_size[2],
                self.best_size[1] * self.best_size[2],
            )

        px = Image.Image.load(self)
        if self.im is not None and self.im.size == self.size:
            # Already loaded
            return px
        self.load_prepare()
        # This is likely NOT the best way to do it, but whatever.
        im = self.icns.getimage(self.best_size)

        # If this is a PNG or JPEG 2000, it won't be loaded yet
        px = im.load()

        self.im = im.im
        self.mode = im.mode
        self.size = im.size

        return px
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
    """
    Saves the image as a series of PNG files,
    that are then combined into a .icns file.

    :param im: Image to save; ``encoderinfo["append_images"]`` may supply
        pre-scaled frames keyed by their width.
    :param fp: Writable binary file object.
    :param filename: Target name (unused by this writer).
    """
    if hasattr(fp, "flush"):
        fp.flush()

    # Icon resource type -> pixel size of the PNG stored under that type.
    sizes = {
        b"ic07": 128,
        b"ic08": 256,
        b"ic09": 512,
        b"ic10": 1024,
        b"ic11": 32,
        b"ic12": 64,
        b"ic13": 256,
        b"ic14": 512,
    }
    # Caller-supplied frames, keyed by width, reused when sizes match.
    provided_images = {im.width: im for im in im.encoderinfo.get("append_images", [])}
    size_streams = {}
    for size in set(sizes.values()):
        # Encode each unique size once, resizing only when not provided.
        image = (
            provided_images[size]
            if size in provided_images
            else im.resize((size, size))
        )

        temp = io.BytesIO()
        image.save(temp, "png")
        size_streams[size] = temp.getvalue()

    entries = []
    for type, size in sizes.items():
        stream = size_streams[size]
        entries.append(
            {"type": type, "size": HEADERSIZE + len(stream), "stream": stream}
        )

    # Header
    fp.write(MAGIC)
    file_length = HEADERSIZE  # Header
    file_length += HEADERSIZE + 8 * len(entries)  # TOC
    file_length += sum(entry["size"] for entry in entries)
    fp.write(struct.pack(">i", file_length))

    # TOC
    fp.write(b"TOC ")
    fp.write(struct.pack(">i", HEADERSIZE + len(entries) * HEADERSIZE))
    for entry in entries:
        fp.write(entry["type"])
        fp.write(struct.pack(">i", entry["size"]))

    # Data
    for entry in entries:
        fp.write(entry["type"])
        fp.write(struct.pack(">i", entry["size"]))
        fp.write(entry["stream"])

    if hasattr(fp, "flush"):
        fp.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
    # An icns file starts with the 4-byte magic b"icns".
    return prefix.startswith(MAGIC)
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(IcnsImageFile.format, IcnsImageFile, _accept)
Image.register_extension(IcnsImageFile.format, ".icns")

Image.register_save(IcnsImageFile.format, _save)
Image.register_mime(IcnsImageFile.format, "image/icns")

if __name__ == "__main__":
    # CLI helper: dump every stored size of an .icns file to PNG files.
    if len(sys.argv) < 2:
        print("Syntax: python3 IcnsImagePlugin.py [file]")
        sys.exit()

    with open(sys.argv[1], "rb") as fp:
        imf = IcnsImageFile(fp)
        for size in imf.info["sizes"]:
            imf.size = size
            imf.save("out-%s-%s-%s.png" % size)
    with Image.open(sys.argv[1]) as im:
        im.save("out.png")
    # Fixed: sys.platform is "win32" on Windows (never "windows"), so the
    # os.startfile preview branch was previously unreachable.
    if sys.platform == "win32":
        os.startfile("out.png")
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user