Add venv
This commit is contained in:
parent
d67462ecfe
commit
dde9b24015
|
@ -1 +0,0 @@
|
||||||
.venv/
|
|
|
@ -0,0 +1,76 @@
|
||||||
|
# This file must be used with "source bin/activate" *from bash*
|
||||||
|
# you cannot run it directly
|
||||||
|
|
||||||
|
deactivate () {
|
||||||
|
# reset old environment variables
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||||
|
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||||
|
export PATH
|
||||||
|
unset _OLD_VIRTUAL_PATH
|
||||||
|
fi
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||||
|
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||||
|
export PYTHONHOME
|
||||||
|
unset _OLD_VIRTUAL_PYTHONHOME
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This should detect bash and zsh, which have a hash command that must
|
||||||
|
# be called to get it to forget past commands. Without forgetting
|
||||||
|
# past commands the $PATH changes we made may not be respected
|
||||||
|
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||||
|
hash -r
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||||
|
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||||
|
export PS1
|
||||||
|
unset _OLD_VIRTUAL_PS1
|
||||||
|
fi
|
||||||
|
|
||||||
|
unset VIRTUAL_ENV
|
||||||
|
if [ ! "$1" = "nondestructive" ] ; then
|
||||||
|
# Self destruct!
|
||||||
|
unset -f deactivate
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# unset irrelevant variables
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
VIRTUAL_ENV="/home/miguel456/PycharmProjects/PythonVPNGateway/venv"
|
||||||
|
export VIRTUAL_ENV
|
||||||
|
|
||||||
|
_OLD_VIRTUAL_PATH="$PATH"
|
||||||
|
PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||||
|
export PATH
|
||||||
|
|
||||||
|
# unset PYTHONHOME if set
|
||||||
|
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||||
|
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||||
|
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||||
|
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||||
|
unset PYTHONHOME
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||||
|
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||||
|
if [ "x(venv) " != x ] ; then
|
||||||
|
PS1="(venv) ${PS1:-}"
|
||||||
|
else
|
||||||
|
if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
|
||||||
|
# special case for Aspen magic directories
|
||||||
|
# see http://www.zetadev.com/software/aspen/
|
||||||
|
PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
|
||||||
|
else
|
||||||
|
PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
export PS1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This should detect bash and zsh, which have a hash command that must
|
||||||
|
# be called to get it to forget past commands. Without forgetting
|
||||||
|
# past commands the $PATH changes we made may not be respected
|
||||||
|
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||||
|
hash -r
|
||||||
|
fi
|
|
@ -0,0 +1,37 @@
|
||||||
|
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||||
|
# You cannot run it directly.
|
||||||
|
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||||
|
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||||
|
|
||||||
|
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||||
|
|
||||||
|
# Unset irrelevant variables.
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
setenv VIRTUAL_ENV "/home/miguel456/PycharmProjects/PythonVPNGateway/venv"
|
||||||
|
|
||||||
|
set _OLD_VIRTUAL_PATH="$PATH"
|
||||||
|
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
|
||||||
|
|
||||||
|
|
||||||
|
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||||
|
|
||||||
|
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||||
|
if ("venv" != "") then
|
||||||
|
set env_name = "venv"
|
||||||
|
else
|
||||||
|
if (`basename "VIRTUAL_ENV"` == "__") then
|
||||||
|
# special case for Aspen magic directories
|
||||||
|
# see http://www.zetadev.com/software/aspen/
|
||||||
|
set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
|
||||||
|
else
|
||||||
|
set env_name = `basename "$VIRTUAL_ENV"`
|
||||||
|
endif
|
||||||
|
endif
|
||||||
|
set prompt = "[$env_name] $prompt"
|
||||||
|
unset env_name
|
||||||
|
endif
|
||||||
|
|
||||||
|
alias pydoc python -m pydoc
|
||||||
|
|
||||||
|
rehash
|
|
@ -0,0 +1,75 @@
|
||||||
|
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
|
||||||
|
# you cannot run it directly
|
||||||
|
|
||||||
|
function deactivate -d "Exit virtualenv and return to normal shell environment"
|
||||||
|
# reset old environment variables
|
||||||
|
if test -n "$_OLD_VIRTUAL_PATH"
|
||||||
|
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||||
|
set -e _OLD_VIRTUAL_PATH
|
||||||
|
end
|
||||||
|
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||||
|
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||||
|
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||||
|
end
|
||||||
|
|
||||||
|
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||||
|
functions -e fish_prompt
|
||||||
|
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||||
|
functions -c _old_fish_prompt fish_prompt
|
||||||
|
functions -e _old_fish_prompt
|
||||||
|
end
|
||||||
|
|
||||||
|
set -e VIRTUAL_ENV
|
||||||
|
if test "$argv[1]" != "nondestructive"
|
||||||
|
# Self destruct!
|
||||||
|
functions -e deactivate
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# unset irrelevant variables
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
set -gx VIRTUAL_ENV "/home/miguel456/PycharmProjects/PythonVPNGateway/venv"
|
||||||
|
|
||||||
|
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||||
|
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
||||||
|
|
||||||
|
# unset PYTHONHOME if set
|
||||||
|
if set -q PYTHONHOME
|
||||||
|
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||||
|
set -e PYTHONHOME
|
||||||
|
end
|
||||||
|
|
||||||
|
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||||
|
# fish uses a function instead of an env var to generate the prompt.
|
||||||
|
|
||||||
|
# save the current fish_prompt function as the function _old_fish_prompt
|
||||||
|
functions -c fish_prompt _old_fish_prompt
|
||||||
|
|
||||||
|
# with the original prompt function renamed, we can override with our own.
|
||||||
|
function fish_prompt
|
||||||
|
# Save the return status of the last command
|
||||||
|
set -l old_status $status
|
||||||
|
|
||||||
|
# Prompt override?
|
||||||
|
if test -n "(venv) "
|
||||||
|
printf "%s%s" "(venv) " (set_color normal)
|
||||||
|
else
|
||||||
|
# ...Otherwise, prepend env
|
||||||
|
set -l _checkbase (basename "$VIRTUAL_ENV")
|
||||||
|
if test $_checkbase = "__"
|
||||||
|
# special case for Aspen magic directories
|
||||||
|
# see http://www.zetadev.com/software/aspen/
|
||||||
|
printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
|
||||||
|
else
|
||||||
|
printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Restore the return status of the previous command.
|
||||||
|
echo "exit $old_status" | .
|
||||||
|
_old_fish_prompt
|
||||||
|
end
|
||||||
|
|
||||||
|
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||||
|
end
|
|
@ -0,0 +1,12 @@
|
||||||
|
#!/home/miguel456/PycharmProjects/PythonVPNGateway/venv/bin/python
|
||||||
|
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
|
||||||
|
__requires__ = 'setuptools==40.8.0'
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pkg_resources import load_entry_point
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(
|
||||||
|
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
|
||||||
|
)
|
|
@ -0,0 +1,12 @@
|
||||||
|
#!/home/miguel456/PycharmProjects/PythonVPNGateway/venv/bin/python
|
||||||
|
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.6'
|
||||||
|
__requires__ = 'setuptools==40.8.0'
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pkg_resources import load_entry_point
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(
|
||||||
|
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.6')()
|
||||||
|
)
|
|
@ -0,0 +1,12 @@
|
||||||
|
#!/home/miguel456/PycharmProjects/PythonVPNGateway/venv/bin/python
|
||||||
|
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
|
||||||
|
__requires__ = 'pip==19.0.3'
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pkg_resources import load_entry_point
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(
|
||||||
|
load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
|
||||||
|
)
|
|
@ -0,0 +1,12 @@
|
||||||
|
#!/home/miguel456/PycharmProjects/PythonVPNGateway/venv/bin/python
|
||||||
|
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
|
||||||
|
__requires__ = 'pip==19.0.3'
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pkg_resources import load_entry_point
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(
|
||||||
|
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
|
||||||
|
)
|
|
@ -0,0 +1,12 @@
|
||||||
|
#!/home/miguel456/PycharmProjects/PythonVPNGateway/venv/bin/python
|
||||||
|
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.6'
|
||||||
|
__requires__ = 'pip==19.0.3'
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pkg_resources import load_entry_point
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(
|
||||||
|
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.6')()
|
||||||
|
)
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,2 @@
|
||||||
|
./setuptools-40.8.0-py3.6.egg
|
||||||
|
./pip-19.0.3-py3.6.egg
|
|
@ -0,0 +1 @@
|
||||||
|
pip
|
|
@ -0,0 +1,20 @@
|
||||||
|
ISC LICENSE
|
||||||
|
|
||||||
|
This license is approved by the OSI and FSF as GPL-compatible.
|
||||||
|
http://opensource.org/licenses/isc-license.txt
|
||||||
|
|
||||||
|
Copyright (c) 2013-2014, Pexpect development team
|
||||||
|
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
|
@ -0,0 +1,49 @@
|
||||||
|
Metadata-Version: 2.1
|
||||||
|
Name: pexpect
|
||||||
|
Version: 4.7.0
|
||||||
|
Summary: Pexpect allows easy control of interactive console applications.
|
||||||
|
Home-page: https://pexpect.readthedocs.io/
|
||||||
|
Author: Noah Spurrier; Thomas Kluyver; Jeff Quast
|
||||||
|
Author-email: noah@noah.org, thomas@kluyver.me.uk, contact@jeffquast.com
|
||||||
|
License: ISC license
|
||||||
|
Platform: UNIX
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Environment :: Console
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: Intended Audience :: System Administrators
|
||||||
|
Classifier: License :: OSI Approved :: ISC License (ISCL)
|
||||||
|
Classifier: Operating System :: POSIX
|
||||||
|
Classifier: Operating System :: MacOS :: MacOS X
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Topic :: Software Development
|
||||||
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||||
|
Classifier: Topic :: Software Development :: Quality Assurance
|
||||||
|
Classifier: Topic :: Software Development :: Testing
|
||||||
|
Classifier: Topic :: System
|
||||||
|
Classifier: Topic :: System :: Archiving :: Packaging
|
||||||
|
Classifier: Topic :: System :: Installation/Setup
|
||||||
|
Classifier: Topic :: System :: Shells
|
||||||
|
Classifier: Topic :: System :: Software Distribution
|
||||||
|
Classifier: Topic :: Terminals
|
||||||
|
Requires-Dist: ptyprocess (>=0.5)
|
||||||
|
|
||||||
|
|
||||||
|
Pexpect is a pure Python module for spawning child applications; controlling
|
||||||
|
them; and responding to expected patterns in their output. Pexpect works like
|
||||||
|
Don Libes' Expect. Pexpect allows your script to spawn a child application and
|
||||||
|
control it as if a human were typing commands.
|
||||||
|
|
||||||
|
Pexpect can be used for automating interactive applications such as ssh, ftp,
|
||||||
|
passwd, telnet, etc. It can be used to a automate setup scripts for duplicating
|
||||||
|
software package installations on different servers. It can be used for
|
||||||
|
automated software testing. Pexpect is in the spirit of Don Libes' Expect, but
|
||||||
|
Pexpect is pure Python.
|
||||||
|
|
||||||
|
The main features of Pexpect require the pty module in the Python standard
|
||||||
|
library, which is only available on Unix-like systems. Some features—waiting
|
||||||
|
for patterns from file descriptors or subprocesses—are also available on
|
||||||
|
Windows.
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,37 @@
|
||||||
|
pexpect-4.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
pexpect-4.7.0.dist-info/LICENSE,sha256=Skg64cTcc4psi3P-tJB04YNdoCq1qmhvJnUCmQb6Nk0,987
|
||||||
|
pexpect-4.7.0.dist-info/METADATA,sha256=uFn-yQJFlWw7o9tU0oqvzwMMdjh9ZGrRgvUdNkgcYrQ,2180
|
||||||
|
pexpect-4.7.0.dist-info/RECORD,,
|
||||||
|
pexpect-4.7.0.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110
|
||||||
|
pexpect-4.7.0.dist-info/top_level.txt,sha256=O-b3UY9VQZkW3yDAeFNatUOKO4GojVWO4TTHoI9-E7k,8
|
||||||
|
pexpect/ANSI.py,sha256=aA-3tdXz_FZ4G7PAqFZi5g1KBGQ6PzJzS0gm3ALZKZw,12177
|
||||||
|
pexpect/FSM.py,sha256=tluiyUGMyIH3q_wLG6Ak1NZVuXUAGNDjq6k6BK1q8RY,13419
|
||||||
|
pexpect/__init__.py,sha256=N7atAMzeTCxNKr2ZyA02q8c9kM1Up-EIWCZIqRE_N-w,3902
|
||||||
|
pexpect/__pycache__/ANSI.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/FSM.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/__init__.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/_async.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/exceptions.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/expect.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/fdpexpect.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/popen_spawn.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/pty_spawn.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/pxssh.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/replwrap.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/run.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/screen.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/spawnbase.cpython-36.pyc,,
|
||||||
|
pexpect/__pycache__/utils.cpython-36.pyc,,
|
||||||
|
pexpect/_async.py,sha256=NG99qvu4mbu5LBYoxvSm4T67CJxUSc_JwuVdzhlWZDc,3439
|
||||||
|
pexpect/bashrc.sh,sha256=CHK8qDg_HtDVdfyDULOV8MZDRDr4pOaIbo31XV58nQs,380
|
||||||
|
pexpect/exceptions.py,sha256=A9C1PWbBc2j9AKvnv7UkPCawhFTEGYmeULW0vwbMvXQ,1068
|
||||||
|
pexpect/expect.py,sha256=qbwIYKkbB2HZYpPbNjDl7oUqyCifDIRi9qjcaaKtghM,11013
|
||||||
|
pexpect/fdpexpect.py,sha256=ugTrwveFi-zfl_nOPjbRyLUER1Wmhu8YxczCWtZgZWc,5828
|
||||||
|
pexpect/popen_spawn.py,sha256=hVHOqr22jD2Pr-yVgsfwgqGAtULLi6kJLKQRrTBPvEg,6161
|
||||||
|
pexpect/pty_spawn.py,sha256=SIMR7qoIw4c5L-kUZsj6Dz1L0aP1bZJ8SOJZqxxEMRs,37057
|
||||||
|
pexpect/pxssh.py,sha256=bZHwFDOn1gC8U_Sl07eFFRlYfCjGCwEoC9WaZCHQo5Y,24279
|
||||||
|
pexpect/replwrap.py,sha256=Raq9XgYfIlF-rH_CALgFbzK1H_A4o0NqmK9q45anmVA,5633
|
||||||
|
pexpect/run.py,sha256=0QkC-tYvIL7fK_UBMc1THP5IzqUub5ogb68zrF9ZKfc,6632
|
||||||
|
pexpect/screen.py,sha256=UhWy544cw9oYiEdmmuSdHgNtUtPEQy-sFuX7FYZpYJU,13716
|
||||||
|
pexpect/spawnbase.py,sha256=v6T_teBR4UifDLESaMQPNvs5PAqvT8Jhtv-dYWJ3PBo,21067
|
||||||
|
pexpect/utils.py,sha256=1jIhzU7eBvY3pbW3LZoJhCOU2KWqgty5HgQ6VBYIp5U,6019
|
|
@ -0,0 +1,6 @@
|
||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: bdist_wheel (0.33.1)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py2-none-any
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
pexpect
|
|
@ -0,0 +1,351 @@
|
||||||
|
'''This implements an ANSI (VT100) terminal emulator as a subclass of screen.
|
||||||
|
|
||||||
|
PEXPECT LICENSE
|
||||||
|
|
||||||
|
This license is approved by the OSI and FSF as GPL-compatible.
|
||||||
|
http://opensource.org/licenses/isc-license.txt
|
||||||
|
|
||||||
|
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
|
||||||
|
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
|
||||||
|
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
|
||||||
|
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
# references:
|
||||||
|
# http://en.wikipedia.org/wiki/ANSI_escape_code
|
||||||
|
# http://www.retards.org/terminals/vt102.html
|
||||||
|
# http://vt100.net/docs/vt102-ug/contents.html
|
||||||
|
# http://vt100.net/docs/vt220-rm/
|
||||||
|
# http://www.termsys.demon.co.uk/vtansi.htm
|
||||||
|
|
||||||
|
from . import screen
|
||||||
|
from . import FSM
|
||||||
|
import string
|
||||||
|
|
||||||
|
#
|
||||||
|
# The 'Do.*' functions are helper functions for the ANSI class.
|
||||||
|
#
|
||||||
|
def DoEmit (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.write_ch(fsm.input_symbol)
|
||||||
|
|
||||||
|
def DoStartNumber (fsm):
|
||||||
|
|
||||||
|
fsm.memory.append (fsm.input_symbol)
|
||||||
|
|
||||||
|
def DoBuildNumber (fsm):
|
||||||
|
|
||||||
|
ns = fsm.memory.pop()
|
||||||
|
ns = ns + fsm.input_symbol
|
||||||
|
fsm.memory.append (ns)
|
||||||
|
|
||||||
|
def DoBackOne (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_back ()
|
||||||
|
|
||||||
|
def DoBack (fsm):
|
||||||
|
|
||||||
|
count = int(fsm.memory.pop())
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_back (count)
|
||||||
|
|
||||||
|
def DoDownOne (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_down ()
|
||||||
|
|
||||||
|
def DoDown (fsm):
|
||||||
|
|
||||||
|
count = int(fsm.memory.pop())
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_down (count)
|
||||||
|
|
||||||
|
def DoForwardOne (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_forward ()
|
||||||
|
|
||||||
|
def DoForward (fsm):
|
||||||
|
|
||||||
|
count = int(fsm.memory.pop())
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_forward (count)
|
||||||
|
|
||||||
|
def DoUpReverse (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_up_reverse()
|
||||||
|
|
||||||
|
def DoUpOne (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_up ()
|
||||||
|
|
||||||
|
def DoUp (fsm):
|
||||||
|
|
||||||
|
count = int(fsm.memory.pop())
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_up (count)
|
||||||
|
|
||||||
|
def DoHome (fsm):
|
||||||
|
|
||||||
|
c = int(fsm.memory.pop())
|
||||||
|
r = int(fsm.memory.pop())
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_home (r,c)
|
||||||
|
|
||||||
|
def DoHomeOrigin (fsm):
|
||||||
|
|
||||||
|
c = 1
|
||||||
|
r = 1
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_home (r,c)
|
||||||
|
|
||||||
|
def DoEraseDown (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.erase_down()
|
||||||
|
|
||||||
|
def DoErase (fsm):
|
||||||
|
|
||||||
|
arg = int(fsm.memory.pop())
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
if arg == 0:
|
||||||
|
screen.erase_down()
|
||||||
|
elif arg == 1:
|
||||||
|
screen.erase_up()
|
||||||
|
elif arg == 2:
|
||||||
|
screen.erase_screen()
|
||||||
|
|
||||||
|
def DoEraseEndOfLine (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.erase_end_of_line()
|
||||||
|
|
||||||
|
def DoEraseLine (fsm):
|
||||||
|
|
||||||
|
arg = int(fsm.memory.pop())
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
if arg == 0:
|
||||||
|
screen.erase_end_of_line()
|
||||||
|
elif arg == 1:
|
||||||
|
screen.erase_start_of_line()
|
||||||
|
elif arg == 2:
|
||||||
|
screen.erase_line()
|
||||||
|
|
||||||
|
def DoEnableScroll (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.scroll_screen()
|
||||||
|
|
||||||
|
def DoCursorSave (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_save_attrs()
|
||||||
|
|
||||||
|
def DoCursorRestore (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
screen.cursor_restore_attrs()
|
||||||
|
|
||||||
|
def DoScrollRegion (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
r2 = int(fsm.memory.pop())
|
||||||
|
r1 = int(fsm.memory.pop())
|
||||||
|
screen.scroll_screen_rows (r1,r2)
|
||||||
|
|
||||||
|
def DoMode (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
mode = fsm.memory.pop() # Should be 4
|
||||||
|
# screen.setReplaceMode ()
|
||||||
|
|
||||||
|
def DoLog (fsm):
|
||||||
|
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
fsm.memory = [screen]
|
||||||
|
fout = open ('log', 'a')
|
||||||
|
fout.write (fsm.input_symbol + ',' + fsm.current_state + '\n')
|
||||||
|
fout.close()
|
||||||
|
|
||||||
|
class term (screen.screen):
|
||||||
|
|
||||||
|
'''This class is an abstract, generic terminal.
|
||||||
|
This does nothing. This is a placeholder that
|
||||||
|
provides a common base class for other terminals
|
||||||
|
such as an ANSI terminal. '''
|
||||||
|
|
||||||
|
def __init__ (self, r=24, c=80, *args, **kwargs):
|
||||||
|
|
||||||
|
screen.screen.__init__(self, r,c,*args,**kwargs)
|
||||||
|
|
||||||
|
class ANSI (term):
|
||||||
|
'''This class implements an ANSI (VT100) terminal.
|
||||||
|
It is a stream filter that recognizes ANSI terminal
|
||||||
|
escape sequences and maintains the state of a screen object. '''
|
||||||
|
|
||||||
|
def __init__ (self, r=24,c=80,*args,**kwargs):
|
||||||
|
|
||||||
|
term.__init__(self,r,c,*args,**kwargs)
|
||||||
|
|
||||||
|
#self.screen = screen (24,80)
|
||||||
|
self.state = FSM.FSM ('INIT',[self])
|
||||||
|
self.state.set_default_transition (DoLog, 'INIT')
|
||||||
|
self.state.add_transition_any ('INIT', DoEmit, 'INIT')
|
||||||
|
self.state.add_transition ('\x1b', 'INIT', None, 'ESC')
|
||||||
|
self.state.add_transition_any ('ESC', DoLog, 'INIT')
|
||||||
|
self.state.add_transition ('(', 'ESC', None, 'G0SCS')
|
||||||
|
self.state.add_transition (')', 'ESC', None, 'G1SCS')
|
||||||
|
self.state.add_transition_list ('AB012', 'G0SCS', None, 'INIT')
|
||||||
|
self.state.add_transition_list ('AB012', 'G1SCS', None, 'INIT')
|
||||||
|
self.state.add_transition ('7', 'ESC', DoCursorSave, 'INIT')
|
||||||
|
self.state.add_transition ('8', 'ESC', DoCursorRestore, 'INIT')
|
||||||
|
self.state.add_transition ('M', 'ESC', DoUpReverse, 'INIT')
|
||||||
|
self.state.add_transition ('>', 'ESC', DoUpReverse, 'INIT')
|
||||||
|
self.state.add_transition ('<', 'ESC', DoUpReverse, 'INIT')
|
||||||
|
self.state.add_transition ('=', 'ESC', None, 'INIT') # Selects application keypad.
|
||||||
|
self.state.add_transition ('#', 'ESC', None, 'GRAPHICS_POUND')
|
||||||
|
self.state.add_transition_any ('GRAPHICS_POUND', None, 'INIT')
|
||||||
|
self.state.add_transition ('[', 'ESC', None, 'ELB')
|
||||||
|
# ELB means Escape Left Bracket. That is ^[[
|
||||||
|
self.state.add_transition ('H', 'ELB', DoHomeOrigin, 'INIT')
|
||||||
|
self.state.add_transition ('D', 'ELB', DoBackOne, 'INIT')
|
||||||
|
self.state.add_transition ('B', 'ELB', DoDownOne, 'INIT')
|
||||||
|
self.state.add_transition ('C', 'ELB', DoForwardOne, 'INIT')
|
||||||
|
self.state.add_transition ('A', 'ELB', DoUpOne, 'INIT')
|
||||||
|
self.state.add_transition ('J', 'ELB', DoEraseDown, 'INIT')
|
||||||
|
self.state.add_transition ('K', 'ELB', DoEraseEndOfLine, 'INIT')
|
||||||
|
self.state.add_transition ('r', 'ELB', DoEnableScroll, 'INIT')
|
||||||
|
self.state.add_transition ('m', 'ELB', self.do_sgr, 'INIT')
|
||||||
|
self.state.add_transition ('?', 'ELB', None, 'MODECRAP')
|
||||||
|
self.state.add_transition_list (string.digits, 'ELB', DoStartNumber, 'NUMBER_1')
|
||||||
|
self.state.add_transition_list (string.digits, 'NUMBER_1', DoBuildNumber, 'NUMBER_1')
|
||||||
|
self.state.add_transition ('D', 'NUMBER_1', DoBack, 'INIT')
|
||||||
|
self.state.add_transition ('B', 'NUMBER_1', DoDown, 'INIT')
|
||||||
|
self.state.add_transition ('C', 'NUMBER_1', DoForward, 'INIT')
|
||||||
|
self.state.add_transition ('A', 'NUMBER_1', DoUp, 'INIT')
|
||||||
|
self.state.add_transition ('J', 'NUMBER_1', DoErase, 'INIT')
|
||||||
|
self.state.add_transition ('K', 'NUMBER_1', DoEraseLine, 'INIT')
|
||||||
|
self.state.add_transition ('l', 'NUMBER_1', DoMode, 'INIT')
|
||||||
|
### It gets worse... the 'm' code can have infinite number of
|
||||||
|
### number;number;number before it. I've never seen more than two,
|
||||||
|
### but the specs say it's allowed. crap!
|
||||||
|
self.state.add_transition ('m', 'NUMBER_1', self.do_sgr, 'INIT')
|
||||||
|
### LED control. Same implementation problem as 'm' code.
|
||||||
|
self.state.add_transition ('q', 'NUMBER_1', self.do_decsca, 'INIT')
|
||||||
|
|
||||||
|
# \E[?47h switch to alternate screen
|
||||||
|
# \E[?47l restores to normal screen from alternate screen.
|
||||||
|
self.state.add_transition_list (string.digits, 'MODECRAP', DoStartNumber, 'MODECRAP_NUM')
|
||||||
|
self.state.add_transition_list (string.digits, 'MODECRAP_NUM', DoBuildNumber, 'MODECRAP_NUM')
|
||||||
|
self.state.add_transition ('l', 'MODECRAP_NUM', self.do_modecrap, 'INIT')
|
||||||
|
self.state.add_transition ('h', 'MODECRAP_NUM', self.do_modecrap, 'INIT')
|
||||||
|
|
||||||
|
#RM Reset Mode Esc [ Ps l none
|
||||||
|
self.state.add_transition (';', 'NUMBER_1', None, 'SEMICOLON')
|
||||||
|
self.state.add_transition_any ('SEMICOLON', DoLog, 'INIT')
|
||||||
|
self.state.add_transition_list (string.digits, 'SEMICOLON', DoStartNumber, 'NUMBER_2')
|
||||||
|
self.state.add_transition_list (string.digits, 'NUMBER_2', DoBuildNumber, 'NUMBER_2')
|
||||||
|
self.state.add_transition_any ('NUMBER_2', DoLog, 'INIT')
|
||||||
|
self.state.add_transition ('H', 'NUMBER_2', DoHome, 'INIT')
|
||||||
|
self.state.add_transition ('f', 'NUMBER_2', DoHome, 'INIT')
|
||||||
|
self.state.add_transition ('r', 'NUMBER_2', DoScrollRegion, 'INIT')
|
||||||
|
### It gets worse... the 'm' code can have infinite number of
|
||||||
|
### number;number;number before it. I've never seen more than two,
|
||||||
|
### but the specs say it's allowed. crap!
|
||||||
|
self.state.add_transition ('m', 'NUMBER_2', self.do_sgr, 'INIT')
|
||||||
|
### LED control. Same problem as 'm' code.
|
||||||
|
self.state.add_transition ('q', 'NUMBER_2', self.do_decsca, 'INIT')
|
||||||
|
self.state.add_transition (';', 'NUMBER_2', None, 'SEMICOLON_X')
|
||||||
|
|
||||||
|
# Create a state for 'q' and 'm' which allows an infinite number of ignored numbers
|
||||||
|
self.state.add_transition_any ('SEMICOLON_X', DoLog, 'INIT')
|
||||||
|
self.state.add_transition_list (string.digits, 'SEMICOLON_X', DoStartNumber, 'NUMBER_X')
|
||||||
|
self.state.add_transition_list (string.digits, 'NUMBER_X', DoBuildNumber, 'NUMBER_X')
|
||||||
|
self.state.add_transition_any ('NUMBER_X', DoLog, 'INIT')
|
||||||
|
self.state.add_transition ('m', 'NUMBER_X', self.do_sgr, 'INIT')
|
||||||
|
self.state.add_transition ('q', 'NUMBER_X', self.do_decsca, 'INIT')
|
||||||
|
self.state.add_transition (';', 'NUMBER_X', None, 'SEMICOLON_X')
|
||||||
|
|
||||||
|
def process (self, c):
|
||||||
|
"""Process a single character. Called by :meth:`write`."""
|
||||||
|
if isinstance(c, bytes):
|
||||||
|
c = self._decode(c)
|
||||||
|
self.state.process(c)
|
||||||
|
|
||||||
|
def process_list (self, l):
|
||||||
|
|
||||||
|
self.write(l)
|
||||||
|
|
||||||
|
def write (self, s):
|
||||||
|
"""Process text, writing it to the virtual screen while handling
|
||||||
|
ANSI escape codes.
|
||||||
|
"""
|
||||||
|
if isinstance(s, bytes):
|
||||||
|
s = self._decode(s)
|
||||||
|
for c in s:
|
||||||
|
self.process(c)
|
||||||
|
|
||||||
|
def flush (self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def write_ch (self, ch):
|
||||||
|
'''This puts a character at the current cursor position. The cursor
|
||||||
|
position is moved forward with wrap-around, but no scrolling is done if
|
||||||
|
the cursor hits the lower-right corner of the screen. '''
|
||||||
|
|
||||||
|
if isinstance(ch, bytes):
|
||||||
|
ch = self._decode(ch)
|
||||||
|
|
||||||
|
#\r and \n both produce a call to cr() and lf(), respectively.
|
||||||
|
ch = ch[0]
|
||||||
|
|
||||||
|
if ch == u'\r':
|
||||||
|
self.cr()
|
||||||
|
return
|
||||||
|
if ch == u'\n':
|
||||||
|
self.crlf()
|
||||||
|
return
|
||||||
|
if ch == chr(screen.BS):
|
||||||
|
self.cursor_back()
|
||||||
|
return
|
||||||
|
self.put_abs(self.cur_r, self.cur_c, ch)
|
||||||
|
old_r = self.cur_r
|
||||||
|
old_c = self.cur_c
|
||||||
|
self.cursor_forward()
|
||||||
|
if old_c == self.cur_c:
|
||||||
|
self.cursor_down()
|
||||||
|
if old_r != self.cur_r:
|
||||||
|
self.cursor_home (self.cur_r, 1)
|
||||||
|
else:
|
||||||
|
self.scroll_up ()
|
||||||
|
self.cursor_home (self.cur_r, 1)
|
||||||
|
self.erase_line()
|
||||||
|
|
||||||
|
def do_sgr (self, fsm):
|
||||||
|
'''Select Graphic Rendition, e.g. color. '''
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
fsm.memory = [screen]
|
||||||
|
|
||||||
|
def do_decsca (self, fsm):
|
||||||
|
'''Select character protection attribute. '''
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
fsm.memory = [screen]
|
||||||
|
|
||||||
|
def do_modecrap (self, fsm):
|
||||||
|
'''Handler for \x1b[?<number>h and \x1b[?<number>l. If anyone
|
||||||
|
wanted to actually use these, they'd need to add more states to the
|
||||||
|
FSM rather than just improve or override this method. '''
|
||||||
|
screen = fsm.memory[0]
|
||||||
|
fsm.memory = [screen]
|
|
@ -0,0 +1,334 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
'''This module implements a Finite State Machine (FSM). In addition to state
|
||||||
|
this FSM also maintains a user defined "memory". So this FSM can be used as a
|
||||||
|
Push-down Automata (PDA) since a PDA is a FSM + memory.
|
||||||
|
|
||||||
|
The following describes how the FSM works, but you will probably also need to
|
||||||
|
see the example function to understand how the FSM is used in practice.
|
||||||
|
|
||||||
|
You define an FSM by building tables of transitions. For a given input symbol
|
||||||
|
the process() method uses these tables to decide what action to call and what
|
||||||
|
the next state will be. The FSM has a table of transitions that associate:
|
||||||
|
|
||||||
|
(input_symbol, current_state) --> (action, next_state)
|
||||||
|
|
||||||
|
Where "action" is a function you define. The symbols and states can be any
|
||||||
|
objects. You use the add_transition() and add_transition_list() methods to add
|
||||||
|
to the transition table. The FSM also has a table of transitions that
|
||||||
|
associate:
|
||||||
|
|
||||||
|
(current_state) --> (action, next_state)
|
||||||
|
|
||||||
|
You use the add_transition_any() method to add to this transition table. The
|
||||||
|
FSM also has one default transition that is not associated with any specific
|
||||||
|
input_symbol or state. You use the set_default_transition() method to set the
|
||||||
|
default transition.
|
||||||
|
|
||||||
|
When an action function is called it is passed a reference to the FSM. The
|
||||||
|
action function may then access attributes of the FSM such as input_symbol,
|
||||||
|
current_state, or "memory". The "memory" attribute can be any object that you
|
||||||
|
want to pass along to the action functions. It is not used by the FSM itself.
|
||||||
|
For parsing you would typically pass a list to be used as a stack.
|
||||||
|
|
||||||
|
The processing sequence is as follows. The process() method is given an
|
||||||
|
input_symbol to process. The FSM will search the table of transitions that
|
||||||
|
associate:
|
||||||
|
|
||||||
|
(input_symbol, current_state) --> (action, next_state)
|
||||||
|
|
||||||
|
If the pair (input_symbol, current_state) is found then process() will call the
|
||||||
|
associated action function and then set the current state to the next_state.
|
||||||
|
|
||||||
|
If the FSM cannot find a match for (input_symbol, current_state) it will then
|
||||||
|
search the table of transitions that associate:
|
||||||
|
|
||||||
|
(current_state) --> (action, next_state)
|
||||||
|
|
||||||
|
If the current_state is found then the process() method will call the
|
||||||
|
associated action function and then set the current state to the next_state.
|
||||||
|
Notice that this table lacks an input_symbol. It lets you define transitions
|
||||||
|
for a current_state and ANY input_symbol. Hence, it is called the "any" table.
|
||||||
|
Remember, it is always checked after first searching the table for a specific
|
||||||
|
(input_symbol, current_state).
|
||||||
|
|
||||||
|
For the case where the FSM did not match either of the previous two cases the
|
||||||
|
FSM will try to use the default transition. If the default transition is
|
||||||
|
defined then the process() method will call the associated action function and
|
||||||
|
then set the current state to the next_state. This lets you define a default
|
||||||
|
transition as a catch-all case. You can think of it as an exception handler.
|
||||||
|
There can be only one default transition.
|
||||||
|
|
||||||
|
Finally, if none of the previous cases are defined for an input_symbol and
|
||||||
|
current_state then the FSM will raise an exception. This may be desirable, but
|
||||||
|
you can always prevent this just by defining a default transition.
|
||||||
|
|
||||||
|
Noah Spurrier 20020822
|
||||||
|
|
||||||
|
PEXPECT LICENSE
|
||||||
|
|
||||||
|
This license is approved by the OSI and FSF as GPL-compatible.
|
||||||
|
http://opensource.org/licenses/isc-license.txt
|
||||||
|
|
||||||
|
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
|
||||||
|
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
|
||||||
|
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
|
||||||
|
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
class ExceptionFSM(Exception):
|
||||||
|
|
||||||
|
'''This is the FSM Exception class.'''
|
||||||
|
|
||||||
|
def __init__(self, value):
|
||||||
|
self.value = value
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return 'ExceptionFSM: ' + str(self.value)
|
||||||
|
|
||||||
|
class FSM:
|
||||||
|
|
||||||
|
'''This is a Finite State Machine (FSM).
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(self, initial_state, memory=None):
|
||||||
|
|
||||||
|
'''This creates the FSM. You set the initial state here. The "memory"
|
||||||
|
attribute is any object that you want to pass along to the action
|
||||||
|
functions. It is not used by the FSM. For parsing you would typically
|
||||||
|
pass a list to be used as a stack. '''
|
||||||
|
|
||||||
|
# Map (input_symbol, current_state) --> (action, next_state).
|
||||||
|
self.state_transitions = {}
|
||||||
|
# Map (current_state) --> (action, next_state).
|
||||||
|
self.state_transitions_any = {}
|
||||||
|
self.default_transition = None
|
||||||
|
|
||||||
|
self.input_symbol = None
|
||||||
|
self.initial_state = initial_state
|
||||||
|
self.current_state = self.initial_state
|
||||||
|
self.next_state = None
|
||||||
|
self.action = None
|
||||||
|
self.memory = memory
|
||||||
|
|
||||||
|
def reset (self):
|
||||||
|
|
||||||
|
'''This sets the current_state to the initial_state and sets
|
||||||
|
input_symbol to None. The initial state was set by the constructor
|
||||||
|
__init__(). '''
|
||||||
|
|
||||||
|
self.current_state = self.initial_state
|
||||||
|
self.input_symbol = None
|
||||||
|
|
||||||
|
def add_transition (self, input_symbol, state, action=None, next_state=None):
|
||||||
|
|
||||||
|
'''This adds a transition that associates:
|
||||||
|
|
||||||
|
(input_symbol, current_state) --> (action, next_state)
|
||||||
|
|
||||||
|
The action may be set to None in which case the process() method will
|
||||||
|
ignore the action and only set the next_state. The next_state may be
|
||||||
|
set to None in which case the current state will be unchanged.
|
||||||
|
|
||||||
|
You can also set transitions for a list of symbols by using
|
||||||
|
add_transition_list(). '''
|
||||||
|
|
||||||
|
if next_state is None:
|
||||||
|
next_state = state
|
||||||
|
self.state_transitions[(input_symbol, state)] = (action, next_state)
|
||||||
|
|
||||||
|
def add_transition_list (self, list_input_symbols, state, action=None, next_state=None):
|
||||||
|
|
||||||
|
'''This adds the same transition for a list of input symbols.
|
||||||
|
You can pass a list or a string. Note that it is handy to use
|
||||||
|
string.digits, string.whitespace, string.letters, etc. to add
|
||||||
|
transitions that match character classes.
|
||||||
|
|
||||||
|
The action may be set to None in which case the process() method will
|
||||||
|
ignore the action and only set the next_state. The next_state may be
|
||||||
|
set to None in which case the current state will be unchanged. '''
|
||||||
|
|
||||||
|
if next_state is None:
|
||||||
|
next_state = state
|
||||||
|
for input_symbol in list_input_symbols:
|
||||||
|
self.add_transition (input_symbol, state, action, next_state)
|
||||||
|
|
||||||
|
def add_transition_any (self, state, action=None, next_state=None):
|
||||||
|
|
||||||
|
'''This adds a transition that associates:
|
||||||
|
|
||||||
|
(current_state) --> (action, next_state)
|
||||||
|
|
||||||
|
That is, any input symbol will match the current state.
|
||||||
|
The process() method checks the "any" state associations after it first
|
||||||
|
checks for an exact match of (input_symbol, current_state).
|
||||||
|
|
||||||
|
The action may be set to None in which case the process() method will
|
||||||
|
ignore the action and only set the next_state. The next_state may be
|
||||||
|
set to None in which case the current state will be unchanged. '''
|
||||||
|
|
||||||
|
if next_state is None:
|
||||||
|
next_state = state
|
||||||
|
self.state_transitions_any [state] = (action, next_state)
|
||||||
|
|
||||||
|
def set_default_transition (self, action, next_state):
|
||||||
|
|
||||||
|
'''This sets the default transition. This defines an action and
|
||||||
|
next_state if the FSM cannot find the input symbol and the current
|
||||||
|
state in the transition list and if the FSM cannot find the
|
||||||
|
current_state in the transition_any list. This is useful as a final
|
||||||
|
fall-through state for catching errors and undefined states.
|
||||||
|
|
||||||
|
The default transition can be removed by setting the attribute
|
||||||
|
default_transition to None. '''
|
||||||
|
|
||||||
|
self.default_transition = (action, next_state)
|
||||||
|
|
||||||
|
def get_transition (self, input_symbol, state):
|
||||||
|
|
||||||
|
'''This returns (action, next state) given an input_symbol and state.
|
||||||
|
This does not modify the FSM state, so calling this method has no side
|
||||||
|
effects. Normally you do not call this method directly. It is called by
|
||||||
|
process().
|
||||||
|
|
||||||
|
The sequence of steps to check for a defined transition goes from the
|
||||||
|
most specific to the least specific.
|
||||||
|
|
||||||
|
1. Check state_transitions[] that match exactly the tuple,
|
||||||
|
(input_symbol, state)
|
||||||
|
|
||||||
|
2. Check state_transitions_any[] that match (state)
|
||||||
|
In other words, match a specific state and ANY input_symbol.
|
||||||
|
|
||||||
|
3. Check if the default_transition is defined.
|
||||||
|
This catches any input_symbol and any state.
|
||||||
|
This is a handler for errors, undefined states, or defaults.
|
||||||
|
|
||||||
|
4. No transition was defined. If we get here then raise an exception.
|
||||||
|
'''
|
||||||
|
|
||||||
|
if (input_symbol, state) in self.state_transitions:
|
||||||
|
return self.state_transitions[(input_symbol, state)]
|
||||||
|
elif state in self.state_transitions_any:
|
||||||
|
return self.state_transitions_any[state]
|
||||||
|
elif self.default_transition is not None:
|
||||||
|
return self.default_transition
|
||||||
|
else:
|
||||||
|
raise ExceptionFSM ('Transition is undefined: (%s, %s).' %
|
||||||
|
(str(input_symbol), str(state)) )
|
||||||
|
|
||||||
|
def process (self, input_symbol):
|
||||||
|
|
||||||
|
'''This is the main method that you call to process input. This may
|
||||||
|
cause the FSM to change state and call an action. This method calls
|
||||||
|
get_transition() to find the action and next_state associated with the
|
||||||
|
input_symbol and current_state. If the action is None then the action
|
||||||
|
is not called and only the current state is changed. This method
|
||||||
|
processes one complete input symbol. You can process a list of symbols
|
||||||
|
(or a string) by calling process_list(). '''
|
||||||
|
|
||||||
|
self.input_symbol = input_symbol
|
||||||
|
(self.action, self.next_state) = self.get_transition (self.input_symbol, self.current_state)
|
||||||
|
if self.action is not None:
|
||||||
|
self.action (self)
|
||||||
|
self.current_state = self.next_state
|
||||||
|
self.next_state = None
|
||||||
|
|
||||||
|
def process_list (self, input_symbols):
|
||||||
|
|
||||||
|
'''This takes a list and sends each element to process(). The list may
|
||||||
|
be a string or any iterable object. '''
|
||||||
|
|
||||||
|
for s in input_symbols:
|
||||||
|
self.process (s)
|
||||||
|
|
||||||
|
##############################################################################
|
||||||
|
# The following is an example that demonstrates the use of the FSM class to
|
||||||
|
# process an RPN expression. Run this module from the command line. You will
|
||||||
|
# get a prompt > for input. Enter an RPN Expression. Numbers may be integers.
|
||||||
|
# Operators are * / + - Use the = sign to evaluate and print the expression.
|
||||||
|
# For example:
|
||||||
|
#
|
||||||
|
# 167 3 2 2 * * * 1 - =
|
||||||
|
#
|
||||||
|
# will print:
|
||||||
|
#
|
||||||
|
# 2003
|
||||||
|
##############################################################################
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import string
|
||||||
|
|
||||||
|
PY3 = (sys.version_info[0] >= 3)
|
||||||
|
|
||||||
|
#
|
||||||
|
# These define the actions.
|
||||||
|
# Note that "memory" is a list being used as a stack.
|
||||||
|
#
|
||||||
|
|
||||||
|
def BeginBuildNumber (fsm):
|
||||||
|
fsm.memory.append (fsm.input_symbol)
|
||||||
|
|
||||||
|
def BuildNumber (fsm):
|
||||||
|
s = fsm.memory.pop ()
|
||||||
|
s = s + fsm.input_symbol
|
||||||
|
fsm.memory.append (s)
|
||||||
|
|
||||||
|
def EndBuildNumber (fsm):
|
||||||
|
s = fsm.memory.pop ()
|
||||||
|
fsm.memory.append (int(s))
|
||||||
|
|
||||||
|
def DoOperator (fsm):
|
||||||
|
ar = fsm.memory.pop()
|
||||||
|
al = fsm.memory.pop()
|
||||||
|
if fsm.input_symbol == '+':
|
||||||
|
fsm.memory.append (al + ar)
|
||||||
|
elif fsm.input_symbol == '-':
|
||||||
|
fsm.memory.append (al - ar)
|
||||||
|
elif fsm.input_symbol == '*':
|
||||||
|
fsm.memory.append (al * ar)
|
||||||
|
elif fsm.input_symbol == '/':
|
||||||
|
fsm.memory.append (al / ar)
|
||||||
|
|
||||||
|
def DoEqual (fsm):
|
||||||
|
print(str(fsm.memory.pop()))
|
||||||
|
|
||||||
|
def Error (fsm):
|
||||||
|
print('That does not compute.')
|
||||||
|
print(str(fsm.input_symbol))
|
||||||
|
|
||||||
|
def main():
|
||||||
|
|
||||||
|
'''This is where the example starts and the FSM state transitions are
|
||||||
|
defined. Note that states are strings (such as 'INIT'). This is not
|
||||||
|
necessary, but it makes the example easier to read. '''
|
||||||
|
|
||||||
|
f = FSM ('INIT', [])
|
||||||
|
f.set_default_transition (Error, 'INIT')
|
||||||
|
f.add_transition_any ('INIT', None, 'INIT')
|
||||||
|
f.add_transition ('=', 'INIT', DoEqual, 'INIT')
|
||||||
|
f.add_transition_list (string.digits, 'INIT', BeginBuildNumber, 'BUILDING_NUMBER')
|
||||||
|
f.add_transition_list (string.digits, 'BUILDING_NUMBER', BuildNumber, 'BUILDING_NUMBER')
|
||||||
|
f.add_transition_list (string.whitespace, 'BUILDING_NUMBER', EndBuildNumber, 'INIT')
|
||||||
|
f.add_transition_list ('+-*/', 'INIT', DoOperator, 'INIT')
|
||||||
|
|
||||||
|
print()
|
||||||
|
print('Enter an RPN Expression.')
|
||||||
|
print('Numbers may be integers. Operators are * / + -')
|
||||||
|
print('Use the = sign to evaluate and print the expression.')
|
||||||
|
print('For example: ')
|
||||||
|
print(' 167 3 2 2 * * * 1 - =')
|
||||||
|
inputstr = (input if PY3 else raw_input)('> ') # analysis:ignore
|
||||||
|
f.process_list(inputstr)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
|
@ -0,0 +1,85 @@
|
||||||
|
'''Pexpect is a Python module for spawning child applications and controlling
|
||||||
|
them automatically. Pexpect can be used for automating interactive applications
|
||||||
|
such as ssh, ftp, passwd, telnet, etc. It can be used to a automate setup
|
||||||
|
scripts for duplicating software package installations on different servers. It
|
||||||
|
can be used for automated software testing. Pexpect is in the spirit of Don
|
||||||
|
Libes' Expect, but Pexpect is pure Python. Other Expect-like modules for Python
|
||||||
|
require TCL and Expect or require C extensions to be compiled. Pexpect does not
|
||||||
|
use C, Expect, or TCL extensions. It should work on any platform that supports
|
||||||
|
the standard Python pty module. The Pexpect interface focuses on ease of use so
|
||||||
|
that simple tasks are easy.
|
||||||
|
|
||||||
|
There are two main interfaces to the Pexpect system; these are the function,
|
||||||
|
run() and the class, spawn. The spawn class is more powerful. The run()
|
||||||
|
function is simpler than spawn, and is good for quickly calling program. When
|
||||||
|
you call the run() function it executes a given program and then returns the
|
||||||
|
output. This is a handy replacement for os.system().
|
||||||
|
|
||||||
|
For example::
|
||||||
|
|
||||||
|
pexpect.run('ls -la')
|
||||||
|
|
||||||
|
The spawn class is the more powerful interface to the Pexpect system. You can
|
||||||
|
use this to spawn a child program then interact with it by sending input and
|
||||||
|
expecting responses (waiting for patterns in the child's output).
|
||||||
|
|
||||||
|
For example::
|
||||||
|
|
||||||
|
child = pexpect.spawn('scp foo user@example.com:.')
|
||||||
|
child.expect('Password:')
|
||||||
|
child.sendline(mypassword)
|
||||||
|
|
||||||
|
This works even for commands that ask for passwords or other input outside of
|
||||||
|
the normal stdio streams. For example, ssh reads input directly from the TTY
|
||||||
|
device which bypasses stdin.
|
||||||
|
|
||||||
|
Credits: Noah Spurrier, Richard Holden, Marco Molteni, Kimberley Burchett,
|
||||||
|
Robert Stone, Hartmut Goebel, Chad Schroeder, Erick Tryzelaar, Dave Kirby, Ids
|
||||||
|
vander Molen, George Todd, Noel Taylor, Nicolas D. Cesar, Alexander Gattin,
|
||||||
|
Jacques-Etienne Baudoux, Geoffrey Marshall, Francisco Lourenco, Glen Mabey,
|
||||||
|
Karthik Gurusamy, Fernando Perez, Corey Minyard, Jon Cohen, Guillaume
|
||||||
|
Chazarain, Andrew Ryan, Nick Craig-Wood, Andrew Stone, Jorgen Grahn, John
|
||||||
|
Spiegel, Jan Grant, and Shane Kerr. Let me know if I forgot anyone.
|
||||||
|
|
||||||
|
Pexpect is free, open source, and all that good stuff.
|
||||||
|
http://pexpect.sourceforge.net/
|
||||||
|
|
||||||
|
PEXPECT LICENSE
|
||||||
|
|
||||||
|
This license is approved by the OSI and FSF as GPL-compatible.
|
||||||
|
http://opensource.org/licenses/isc-license.txt
|
||||||
|
|
||||||
|
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
|
||||||
|
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
|
||||||
|
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
|
||||||
|
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
import sys
|
||||||
|
PY3 = (sys.version_info[0] >= 3)
|
||||||
|
|
||||||
|
from .exceptions import ExceptionPexpect, EOF, TIMEOUT
|
||||||
|
from .utils import split_command_line, which, is_executable_file
|
||||||
|
from .expect import Expecter, searcher_re, searcher_string
|
||||||
|
|
||||||
|
if sys.platform != 'win32':
|
||||||
|
# On Unix, these are available at the top level for backwards compatibility
|
||||||
|
from .pty_spawn import spawn, spawnu
|
||||||
|
from .run import run, runu
|
||||||
|
|
||||||
|
__version__ = '4.7.0'
|
||||||
|
__revision__ = ''
|
||||||
|
__all__ = ['ExceptionPexpect', 'EOF', 'TIMEOUT', 'spawn', 'spawnu', 'run', 'runu',
|
||||||
|
'which', 'split_command_line', '__version__', '__revision__']
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# vim: set shiftround expandtab tabstop=4 shiftwidth=4 ft=python autoindent :
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,105 @@
|
||||||
|
import asyncio
|
||||||
|
import errno
|
||||||
|
import signal
|
||||||
|
|
||||||
|
from pexpect import EOF
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def expect_async(expecter, timeout=None):
|
||||||
|
# First process data that was previously read - if it maches, we don't need
|
||||||
|
# async stuff.
|
||||||
|
previously_read = expecter.spawn.buffer
|
||||||
|
expecter.spawn._buffer = expecter.spawn.buffer_type()
|
||||||
|
expecter.spawn._before = expecter.spawn.buffer_type()
|
||||||
|
idx = expecter.new_data(previously_read)
|
||||||
|
if idx is not None:
|
||||||
|
return idx
|
||||||
|
if not expecter.spawn.async_pw_transport:
|
||||||
|
pw = PatternWaiter()
|
||||||
|
pw.set_expecter(expecter)
|
||||||
|
transport, pw = yield from asyncio.get_event_loop()\
|
||||||
|
.connect_read_pipe(lambda: pw, expecter.spawn)
|
||||||
|
expecter.spawn.async_pw_transport = pw, transport
|
||||||
|
else:
|
||||||
|
pw, transport = expecter.spawn.async_pw_transport
|
||||||
|
pw.set_expecter(expecter)
|
||||||
|
transport.resume_reading()
|
||||||
|
try:
|
||||||
|
return (yield from asyncio.wait_for(pw.fut, timeout))
|
||||||
|
except asyncio.TimeoutError as e:
|
||||||
|
transport.pause_reading()
|
||||||
|
return expecter.timeout(e)
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def repl_run_command_async(repl, cmdlines, timeout=-1):
|
||||||
|
res = []
|
||||||
|
repl.child.sendline(cmdlines[0])
|
||||||
|
for line in cmdlines[1:]:
|
||||||
|
yield from repl._expect_prompt(timeout=timeout, async_=True)
|
||||||
|
res.append(repl.child.before)
|
||||||
|
repl.child.sendline(line)
|
||||||
|
|
||||||
|
# Command was fully submitted, now wait for the next prompt
|
||||||
|
prompt_idx = yield from repl._expect_prompt(timeout=timeout, async_=True)
|
||||||
|
if prompt_idx == 1:
|
||||||
|
# We got the continuation prompt - command was incomplete
|
||||||
|
repl.child.kill(signal.SIGINT)
|
||||||
|
yield from repl._expect_prompt(timeout=1, async_=True)
|
||||||
|
raise ValueError("Continuation prompt found - input was incomplete:")
|
||||||
|
return u''.join(res + [repl.child.before])
|
||||||
|
|
||||||
|
class PatternWaiter(asyncio.Protocol):
|
||||||
|
transport = None
|
||||||
|
|
||||||
|
def set_expecter(self, expecter):
|
||||||
|
self.expecter = expecter
|
||||||
|
self.fut = asyncio.Future()
|
||||||
|
|
||||||
|
def found(self, result):
|
||||||
|
if not self.fut.done():
|
||||||
|
self.fut.set_result(result)
|
||||||
|
self.transport.pause_reading()
|
||||||
|
|
||||||
|
def error(self, exc):
|
||||||
|
if not self.fut.done():
|
||||||
|
self.fut.set_exception(exc)
|
||||||
|
self.transport.pause_reading()
|
||||||
|
|
||||||
|
def connection_made(self, transport):
|
||||||
|
self.transport = transport
|
||||||
|
|
||||||
|
def data_received(self, data):
|
||||||
|
spawn = self.expecter.spawn
|
||||||
|
s = spawn._decoder.decode(data)
|
||||||
|
spawn._log(s, 'read')
|
||||||
|
|
||||||
|
if self.fut.done():
|
||||||
|
spawn._buffer.write(s)
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
index = self.expecter.new_data(s)
|
||||||
|
if index is not None:
|
||||||
|
# Found a match
|
||||||
|
self.found(index)
|
||||||
|
except Exception as e:
|
||||||
|
self.expecter.errored()
|
||||||
|
self.error(e)
|
||||||
|
|
||||||
|
def eof_received(self):
|
||||||
|
# N.B. If this gets called, async will close the pipe (the spawn object)
|
||||||
|
# for us
|
||||||
|
try:
|
||||||
|
self.expecter.spawn.flag_eof = True
|
||||||
|
index = self.expecter.eof()
|
||||||
|
except EOF as e:
|
||||||
|
self.error(e)
|
||||||
|
else:
|
||||||
|
self.found(index)
|
||||||
|
|
||||||
|
def connection_lost(self, exc):
|
||||||
|
if isinstance(exc, OSError) and exc.errno == errno.EIO:
|
||||||
|
# We may get here without eof_received being called, e.g on Linux
|
||||||
|
self.eof_received()
|
||||||
|
elif exc is not None:
|
||||||
|
self.error(exc)
|
|
@ -0,0 +1,16 @@
# Different platforms have different names for the systemwide bashrc
if [[ -f /etc/bashrc ]]; then
    source /etc/bashrc
fi
if [[ -f /etc/bash.bashrc ]]; then
    source /etc/bash.bashrc
fi
if [[ -f ~/.bashrc ]]; then
    source ~/.bashrc
fi

# Reset PS1 so pexpect can find it
PS1="$"

# Unset PROMPT_COMMAND, so that it can't change PS1 to something unexpected.
unset PROMPT_COMMAND
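
# The script above pins PS1 to a bare "$" and disables PROMPT_COMMAND so that a
# wrapper can reliably match the prompt. A minimal sketch of the higher-level
# helper that relies on this kind of setup, assuming a POSIX system with bash
# on PATH (the commands run are illustrative):
from pexpect import replwrap

bash = replwrap.bash()                    # bash wrapped behind a known prompt
print(bash.run_command('echo hello'))     # run one command, return its output
print(bash.run_command('pwd'))
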
@ -0,0 +1,35 @@
"""Exception classes used by Pexpect"""

import traceback
import sys


class ExceptionPexpect(Exception):
    '''Base class for all exceptions raised by this module.
    '''

    def __init__(self, value):
        super(ExceptionPexpect, self).__init__(value)
        self.value = value

    def __str__(self):
        return str(self.value)

    def get_trace(self):
        '''This returns an abbreviated stack trace with lines that only concern
        the caller. In other words, the stack trace inside the Pexpect module
        is not included. '''

        tblist = traceback.extract_tb(sys.exc_info()[2])
        tblist = [item for item in tblist if ('pexpect/__init__' not in item[0])
                  and ('pexpect/expect' not in item[0])]
        tblist = traceback.format_list(tblist)
        return ''.join(tblist)


class EOF(ExceptionPexpect):
    '''Raised when EOF is read from a child.
    This usually means the child has exited.'''


class TIMEOUT(ExceptionPexpect):
    '''Raised when a read time exceeds the timeout. '''
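
# EOF and TIMEOUT above are what the expect() family raises when the child exits
# or stays silent. A minimal sketch of handling both, assuming a POSIX system
# (the command and pattern are illustrative):
import pexpect

child = pexpect.spawn('cat', timeout=2)
child.sendline('ping')
try:
    child.expect('pong')          # never arrives; cat only echoes 'ping' back
except pexpect.TIMEOUT:
    print('no match within 2 seconds')
except pexpect.EOF:
    print('the child exited before matching')
finally:
    child.close()
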
@ -0,0 +1,306 @@
|
||||||
|
import time
|
||||||
|
|
||||||
|
from .exceptions import EOF, TIMEOUT
|
||||||
|
|
||||||
|
class Expecter(object):
|
||||||
|
def __init__(self, spawn, searcher, searchwindowsize=-1):
|
||||||
|
self.spawn = spawn
|
||||||
|
self.searcher = searcher
|
||||||
|
if searchwindowsize == -1:
|
||||||
|
searchwindowsize = spawn.searchwindowsize
|
||||||
|
self.searchwindowsize = searchwindowsize
|
||||||
|
|
||||||
|
def new_data(self, data):
|
||||||
|
spawn = self.spawn
|
||||||
|
searcher = self.searcher
|
||||||
|
|
||||||
|
pos = spawn._buffer.tell()
|
||||||
|
spawn._buffer.write(data)
|
||||||
|
spawn._before.write(data)
|
||||||
|
|
||||||
|
# determine which chunk of data to search; if a windowsize is
|
||||||
|
# specified, this is the *new* data + the preceding <windowsize> bytes
|
||||||
|
if self.searchwindowsize:
|
||||||
|
spawn._buffer.seek(max(0, pos - self.searchwindowsize))
|
||||||
|
window = spawn._buffer.read(self.searchwindowsize + len(data))
|
||||||
|
else:
|
||||||
|
# otherwise, search the whole buffer (really slow for large datasets)
|
||||||
|
window = spawn.buffer
|
||||||
|
index = searcher.search(window, len(data))
|
||||||
|
if index >= 0:
|
||||||
|
spawn._buffer = spawn.buffer_type()
|
||||||
|
spawn._buffer.write(window[searcher.end:])
|
||||||
|
spawn.before = spawn._before.getvalue()[0:-(len(window) - searcher.start)]
|
||||||
|
spawn._before = spawn.buffer_type()
|
||||||
|
spawn.after = window[searcher.start: searcher.end]
|
||||||
|
spawn.match = searcher.match
|
||||||
|
spawn.match_index = index
|
||||||
|
# Found a match
|
||||||
|
return index
|
||||||
|
elif self.searchwindowsize:
|
||||||
|
spawn._buffer = spawn.buffer_type()
|
||||||
|
spawn._buffer.write(window)
|
||||||
|
|
||||||
|
def eof(self, err=None):
|
||||||
|
spawn = self.spawn
|
||||||
|
|
||||||
|
spawn.before = spawn.buffer
|
||||||
|
spawn._buffer = spawn.buffer_type()
|
||||||
|
spawn._before = spawn.buffer_type()
|
||||||
|
spawn.after = EOF
|
||||||
|
index = self.searcher.eof_index
|
||||||
|
if index >= 0:
|
||||||
|
spawn.match = EOF
|
||||||
|
spawn.match_index = index
|
||||||
|
return index
|
||||||
|
else:
|
||||||
|
spawn.match = None
|
||||||
|
spawn.match_index = None
|
||||||
|
msg = str(spawn)
|
||||||
|
msg += '\nsearcher: %s' % self.searcher
|
||||||
|
if err is not None:
|
||||||
|
msg = str(err) + '\n' + msg
|
||||||
|
raise EOF(msg)
|
||||||
|
|
||||||
|
def timeout(self, err=None):
|
||||||
|
spawn = self.spawn
|
||||||
|
|
||||||
|
spawn.before = spawn.buffer
|
||||||
|
spawn.after = TIMEOUT
|
||||||
|
index = self.searcher.timeout_index
|
||||||
|
if index >= 0:
|
||||||
|
spawn.match = TIMEOUT
|
||||||
|
spawn.match_index = index
|
||||||
|
return index
|
||||||
|
else:
|
||||||
|
spawn.match = None
|
||||||
|
spawn.match_index = None
|
||||||
|
msg = str(spawn)
|
||||||
|
msg += '\nsearcher: %s' % self.searcher
|
||||||
|
if err is not None:
|
||||||
|
msg = str(err) + '\n' + msg
|
||||||
|
raise TIMEOUT(msg)
|
||||||
|
|
||||||
|
def errored(self):
|
||||||
|
spawn = self.spawn
|
||||||
|
spawn.before = spawn.buffer
|
||||||
|
spawn.after = None
|
||||||
|
spawn.match = None
|
||||||
|
spawn.match_index = None
|
||||||
|
|
||||||
|
def expect_loop(self, timeout=-1):
|
||||||
|
"""Blocking expect"""
|
||||||
|
spawn = self.spawn
|
||||||
|
|
||||||
|
if timeout is not None:
|
||||||
|
end_time = time.time() + timeout
|
||||||
|
|
||||||
|
try:
|
||||||
|
incoming = spawn.buffer
|
||||||
|
spawn._buffer = spawn.buffer_type()
|
||||||
|
spawn._before = spawn.buffer_type()
|
||||||
|
while True:
|
||||||
|
idx = self.new_data(incoming)
|
||||||
|
# Keep reading until exception or return.
|
||||||
|
if idx is not None:
|
||||||
|
return idx
|
||||||
|
# No match at this point
|
||||||
|
if (timeout is not None) and (timeout < 0):
|
||||||
|
return self.timeout()
|
||||||
|
# Still have time left, so read more data
|
||||||
|
incoming = spawn.read_nonblocking(spawn.maxread, timeout)
|
||||||
|
if self.spawn.delayafterread is not None:
|
||||||
|
time.sleep(self.spawn.delayafterread)
|
||||||
|
if timeout is not None:
|
||||||
|
timeout = end_time - time.time()
|
||||||
|
except EOF as e:
|
||||||
|
return self.eof(e)
|
||||||
|
except TIMEOUT as e:
|
||||||
|
return self.timeout(e)
|
||||||
|
except:
|
||||||
|
self.errored()
|
||||||
|
raise
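
# expect_loop() above is the blocking engine behind spawn.expect(). A minimal
# sketch of the caller-facing behaviour it produces, with EOF and TIMEOUT passed
# as patterns so neither is raised (command and patterns are illustrative):
import pexpect

child = pexpect.spawn('bash', ['-c', 'echo ready; sleep 30'], timeout=5)
index = child.expect(['ready', pexpect.EOF, pexpect.TIMEOUT])
if index == 0:
    print('matched; text before the match: %r' % child.before)
elif index == 1:
    print('the child exited first')
else:
    print('timed out waiting for a match')
child.close(force=True)
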
|
||||||
|
|
||||||
|
|
||||||
|
class searcher_string(object):
|
||||||
|
'''This is a plain string search helper for the spawn.expect_any() method.
|
||||||
|
This helper class is for speed. For more powerful regex patterns
|
||||||
|
see the helper class, searcher_re.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
|
||||||
|
eof_index - index of EOF, or -1
|
||||||
|
timeout_index - index of TIMEOUT, or -1
|
||||||
|
|
||||||
|
After a successful match by the search() method the following attributes
|
||||||
|
are available:
|
||||||
|
|
||||||
|
start - index into the buffer, first byte of match
|
||||||
|
end - index into the buffer, first byte after match
|
||||||
|
match - the matching string itself
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(self, strings):
|
||||||
|
'''This creates an instance of searcher_string. The argument 'strings'
may be a list or other sequence of strings, and may also include the
EOF or TIMEOUT types. '''
|
||||||
|
|
||||||
|
self.eof_index = -1
|
||||||
|
self.timeout_index = -1
|
||||||
|
self._strings = []
|
||||||
|
for n, s in enumerate(strings):
|
||||||
|
if s is EOF:
|
||||||
|
self.eof_index = n
|
||||||
|
continue
|
||||||
|
if s is TIMEOUT:
|
||||||
|
self.timeout_index = n
|
||||||
|
continue
|
||||||
|
self._strings.append((n, s))
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
'''This returns a human-readable string that represents the state of
|
||||||
|
the object.'''
|
||||||
|
|
||||||
|
ss = [(ns[0], ' %d: %r' % ns) for ns in self._strings]
|
||||||
|
ss.append((-1, 'searcher_string:'))
|
||||||
|
if self.eof_index >= 0:
|
||||||
|
ss.append((self.eof_index, ' %d: EOF' % self.eof_index))
|
||||||
|
if self.timeout_index >= 0:
|
||||||
|
ss.append((self.timeout_index,
|
||||||
|
' %d: TIMEOUT' % self.timeout_index))
|
||||||
|
ss.sort()
|
||||||
|
ss = list(zip(*ss))[1]
|
||||||
|
return '\n'.join(ss)
|
||||||
|
|
||||||
|
def search(self, buffer, freshlen, searchwindowsize=None):
|
||||||
|
'''This searches 'buffer' for the first occurrence of one of the search
|
||||||
|
strings. 'freshlen' must indicate the number of bytes at the end of
|
||||||
|
'buffer' which have not been searched before. It helps to avoid
|
||||||
|
searching the same, possibly big, buffer over and over again.
|
||||||
|
|
||||||
|
See class spawn for the 'searchwindowsize' argument.
|
||||||
|
|
||||||
|
If there is a match this returns the index of that string, and sets
|
||||||
|
'start', 'end' and 'match'. Otherwise, this returns -1. '''
|
||||||
|
|
||||||
|
first_match = None
|
||||||
|
|
||||||
|
# 'freshlen' helps a lot here. Further optimizations could
|
||||||
|
# possibly include:
|
||||||
|
#
|
||||||
|
# using something like the Boyer-Moore Fast String Searching
|
||||||
|
# Algorithm; pre-compiling the search through a list of
|
||||||
|
# strings into something that can scan the input once to
|
||||||
|
# search for all N strings; realize that if we search for
|
||||||
|
# ['bar', 'baz'] and the input is '...foo' we need not bother
|
||||||
|
# rescanning until we've read three more bytes.
|
||||||
|
#
|
||||||
|
# Sadly, I don't know enough about this interesting topic. /grahn
|
||||||
|
|
||||||
|
for index, s in self._strings:
|
||||||
|
if searchwindowsize is None:
|
||||||
|
# the match, if any, can only be in the fresh data,
|
||||||
|
# or at the very end of the old data
|
||||||
|
offset = -(freshlen + len(s))
|
||||||
|
else:
|
||||||
|
# better obey searchwindowsize
|
||||||
|
offset = -searchwindowsize
|
||||||
|
n = buffer.find(s, offset)
|
||||||
|
if n >= 0 and (first_match is None or n < first_match):
|
||||||
|
first_match = n
|
||||||
|
best_index, best_match = index, s
|
||||||
|
if first_match is None:
|
||||||
|
return -1
|
||||||
|
self.match = best_match
|
||||||
|
self.start = first_match
|
||||||
|
self.end = self.start + len(self.match)
|
||||||
|
return best_index
|
||||||
|
|
||||||
|
|
||||||
|
class searcher_re(object):
|
||||||
|
'''This is a regular expression search helper for the
|
||||||
|
spawn.expect_any() method. This helper class is for powerful
|
||||||
|
pattern matching. For speed, see the helper class, searcher_string.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
|
||||||
|
eof_index - index of EOF, or -1
|
||||||
|
timeout_index - index of TIMEOUT, or -1
|
||||||
|
|
||||||
|
After a successful match by the search() method the following attributes
|
||||||
|
are available:
|
||||||
|
|
||||||
|
start - index into the buffer, first byte of match
|
||||||
|
end - index into the buffer, first byte after match
|
||||||
|
match - the re.match object returned by a successful re.search
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(self, patterns):
|
||||||
|
'''This creates an instance that searches for 'patterns', where
|
||||||
|
'patterns' may be a list or other sequence of compiled regular
|
||||||
|
expressions, or the EOF or TIMEOUT types.'''
|
||||||
|
|
||||||
|
self.eof_index = -1
|
||||||
|
self.timeout_index = -1
|
||||||
|
self._searches = []
|
||||||
|
for n, s in enumerate(patterns):
|
||||||
|
if s is EOF:
|
||||||
|
self.eof_index = n
|
||||||
|
continue
|
||||||
|
if s is TIMEOUT:
|
||||||
|
self.timeout_index = n
|
||||||
|
continue
|
||||||
|
self._searches.append((n, s))
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
'''This returns a human-readable string that represents the state of
|
||||||
|
the object.'''
|
||||||
|
|
||||||
|
#ss = [(n, ' %d: re.compile("%s")' %
|
||||||
|
# (n, repr(s.pattern))) for n, s in self._searches]
|
||||||
|
ss = list()
|
||||||
|
for n, s in self._searches:
|
||||||
|
ss.append((n, ' %d: re.compile(%r)' % (n, s.pattern)))
|
||||||
|
ss.append((-1, 'searcher_re:'))
|
||||||
|
if self.eof_index >= 0:
|
||||||
|
ss.append((self.eof_index, ' %d: EOF' % self.eof_index))
|
||||||
|
if self.timeout_index >= 0:
|
||||||
|
ss.append((self.timeout_index, ' %d: TIMEOUT' %
|
||||||
|
self.timeout_index))
|
||||||
|
ss.sort()
|
||||||
|
ss = list(zip(*ss))[1]
|
||||||
|
return '\n'.join(ss)
|
||||||
|
|
||||||
|
def search(self, buffer, freshlen, searchwindowsize=None):
|
||||||
|
'''This searches 'buffer' for the first occurrence of one of the regular
|
||||||
|
expressions. 'freshlen' must indicate the number of bytes at the end of
|
||||||
|
'buffer' which have not been searched before.
|
||||||
|
|
||||||
|
See class spawn for the 'searchwindowsize' argument.
|
||||||
|
|
||||||
|
If there is a match this returns the index of that string, and sets
|
||||||
|
'start', 'end' and 'match'. Otherwise, returns -1.'''
|
||||||
|
|
||||||
|
first_match = None
|
||||||
|
# 'freshlen' doesn't help here -- we cannot predict the
|
||||||
|
# length of a match, and the re module provides no help.
|
||||||
|
if searchwindowsize is None:
|
||||||
|
searchstart = 0
|
||||||
|
else:
|
||||||
|
searchstart = max(0, len(buffer) - searchwindowsize)
|
||||||
|
for index, s in self._searches:
|
||||||
|
match = s.search(buffer, searchstart)
|
||||||
|
if match is None:
|
||||||
|
continue
|
||||||
|
n = match.start()
|
||||||
|
if first_match is None or n < first_match:
|
||||||
|
first_match = n
|
||||||
|
the_match = match
|
||||||
|
best_index = index
|
||||||
|
if first_match is None:
|
||||||
|
return -1
|
||||||
|
self.start = first_match
|
||||||
|
self.match = the_match
|
||||||
|
self.end = self.match.end()
|
||||||
|
return best_index
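
# searcher_string and searcher_re above back expect_exact() and expect()
# respectively. A minimal sketch of the visible difference (commands and
# patterns are illustrative):
import pexpect

child = pexpect.spawn('echo', ['error: code 42'], encoding='utf-8')
child.expect(r'error: code (\d+)')     # expect() uses regexes (searcher_re)
print(child.match.group(1))            # -> '42'

child2 = pexpect.spawn('echo', ['100% done'], encoding='utf-8')
child2.expect_exact('100% done')       # plain substring match (searcher_string),
                                       # so '%' needs no escaping
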
|
|
@ -0,0 +1,148 @@
|
||||||
|
'''This is like pexpect, but it will work with any file descriptor that you
|
||||||
|
pass it. You are responsible for opening and closing the file descriptor.
|
||||||
|
This allows you to use Pexpect with sockets and named pipes (FIFOs).
|
||||||
|
|
||||||
|
PEXPECT LICENSE
|
||||||
|
|
||||||
|
This license is approved by the OSI and FSF as GPL-compatible.
|
||||||
|
http://opensource.org/licenses/isc-license.txt
|
||||||
|
|
||||||
|
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
|
||||||
|
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
|
||||||
|
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
|
||||||
|
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
from .spawnbase import SpawnBase
|
||||||
|
from .exceptions import ExceptionPexpect, TIMEOUT
|
||||||
|
from .utils import select_ignore_interrupts, poll_ignore_interrupts
|
||||||
|
import os
|
||||||
|
|
||||||
|
__all__ = ['fdspawn']
|
||||||
|
|
||||||
|
class fdspawn(SpawnBase):
|
||||||
|
'''This is like pexpect.spawn but allows you to supply your own open file
|
||||||
|
descriptor. For example, you could use it to read through a file looking
|
||||||
|
for patterns, or to control a modem or serial device. '''
|
||||||
|
|
||||||
|
def __init__ (self, fd, args=None, timeout=30, maxread=2000, searchwindowsize=None,
|
||||||
|
logfile=None, encoding=None, codec_errors='strict', use_poll=False):
|
||||||
|
'''This takes a file descriptor (an int) or an object that supports the
|
||||||
|
fileno() method (returning an int). All Python file-like objects
|
||||||
|
support fileno(). '''
|
||||||
|
|
||||||
|
if type(fd) != type(0) and hasattr(fd, 'fileno'):
|
||||||
|
fd = fd.fileno()
|
||||||
|
|
||||||
|
if type(fd) != type(0):
|
||||||
|
raise ExceptionPexpect('The fd argument is not an int. If this is a command string then maybe you want to use pexpect.spawn.')
|
||||||
|
|
||||||
|
try: # make sure fd is a valid file descriptor
|
||||||
|
os.fstat(fd)
|
||||||
|
except OSError:
|
||||||
|
raise ExceptionPexpect('The fd argument is not a valid file descriptor.')
|
||||||
|
|
||||||
|
self.args = None
|
||||||
|
self.command = None
|
||||||
|
SpawnBase.__init__(self, timeout, maxread, searchwindowsize, logfile,
|
||||||
|
encoding=encoding, codec_errors=codec_errors)
|
||||||
|
self.child_fd = fd
|
||||||
|
self.own_fd = False
|
||||||
|
self.closed = False
|
||||||
|
self.name = '<file descriptor %d>' % fd
|
||||||
|
self.use_poll = use_poll
|
||||||
|
|
||||||
|
def close (self):
|
||||||
|
"""Close the file descriptor.
|
||||||
|
|
||||||
|
Calling this method a second time does nothing, but if the file
|
||||||
|
descriptor was closed elsewhere, :class:`OSError` will be raised.
|
||||||
|
"""
|
||||||
|
if self.child_fd == -1:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.flush()
|
||||||
|
os.close(self.child_fd)
|
||||||
|
self.child_fd = -1
|
||||||
|
self.closed = True
|
||||||
|
|
||||||
|
def isalive (self):
|
||||||
|
'''This checks if the file descriptor is still valid. If :func:`os.fstat`
|
||||||
|
does not raise an exception then we assume it is alive. '''
|
||||||
|
|
||||||
|
if self.child_fd == -1:
|
||||||
|
return False
|
||||||
|
try:
|
||||||
|
os.fstat(self.child_fd)
|
||||||
|
return True
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def terminate (self, force=False): # pragma: no cover
|
||||||
|
'''Deprecated and invalid. Just raises an exception.'''
|
||||||
|
raise ExceptionPexpect('This method is not valid for file descriptors.')
|
||||||
|
|
||||||
|
# These four methods are left around for backwards compatibility, but not
|
||||||
|
# documented as part of fdpexpect. You're encouraged to use os.write
|
||||||
|
# directly.
|
||||||
|
def send(self, s):
|
||||||
|
"Write to fd, return number of bytes written"
|
||||||
|
s = self._coerce_send_string(s)
|
||||||
|
self._log(s, 'send')
|
||||||
|
|
||||||
|
b = self._encoder.encode(s, final=False)
|
||||||
|
return os.write(self.child_fd, b)
|
||||||
|
|
||||||
|
def sendline(self, s):
|
||||||
|
"Write to fd with trailing newline, return number of bytes written"
|
||||||
|
s = self._coerce_send_string(s)
|
||||||
|
return self.send(s + self.linesep)
|
||||||
|
|
||||||
|
def write(self, s):
|
||||||
|
"Write to fd, return None"
|
||||||
|
self.send(s)
|
||||||
|
|
||||||
|
def writelines(self, sequence):
|
||||||
|
"Call self.write() for each item in sequence"
|
||||||
|
for s in sequence:
|
||||||
|
self.write(s)
|
||||||
|
|
||||||
|
def read_nonblocking(self, size=1, timeout=-1):
|
||||||
|
"""
|
||||||
|
Read from the file descriptor and return the result as a string.
|
||||||
|
|
||||||
|
The read_nonblocking method of :class:`SpawnBase` assumes that a call
|
||||||
|
to os.read will not block (timeout parameter is ignored). This is not
|
||||||
|
the case for POSIX file-like objects such as sockets and serial ports.
|
||||||
|
|
||||||
|
This uses :func:`select.select` (or :func:`select.poll` when ``use_poll``
is set); the timeout is only honored on POSIX systems.
|
||||||
|
|
||||||
|
:param int size: Read at most *size* bytes.
|
||||||
|
:param int timeout: Wait timeout seconds for file descriptor to be
|
||||||
|
ready to read. When -1 (default), use self.timeout. When 0, poll.
|
||||||
|
:return: String containing the bytes read
|
||||||
|
"""
|
||||||
|
if os.name == 'posix':
|
||||||
|
if timeout == -1:
|
||||||
|
timeout = self.timeout
|
||||||
|
rlist = [self.child_fd]
|
||||||
|
wlist = []
|
||||||
|
xlist = []
|
||||||
|
if self.use_poll:
|
||||||
|
rlist = poll_ignore_interrupts(rlist, timeout)
|
||||||
|
else:
|
||||||
|
rlist, wlist, xlist = select_ignore_interrupts(
|
||||||
|
rlist, wlist, xlist, timeout
|
||||||
|
)
|
||||||
|
if self.child_fd not in rlist:
|
||||||
|
raise TIMEOUT('Timeout exceeded.')
|
||||||
|
return super(fdspawn, self).read_nonblocking(size)
|
|
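
# fdspawn above wraps a file descriptor you already opened. A minimal sketch
# driving a pre-connected TCP socket, assuming some line-oriented service is
# listening locally (the address and replies are illustrative):
import socket
from pexpect import fdpexpect

sock = socket.create_connection(('localhost', 8025))
session = fdpexpect.fdspawn(sock.fileno(), timeout=10)
session.expect(b'220')          # wait for the greeting banner
session.sendline(b'QUIT')
session.expect(b'221')          # goodbye reply
sock.close()
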
@ -0,0 +1,188 @@
|
||||||
|
"""Provides an interface like pexpect.spawn interface using subprocess.Popen
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import threading
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import signal
|
||||||
|
import shlex
|
||||||
|
|
||||||
|
try:
|
||||||
|
from queue import Queue, Empty # Python 3
|
||||||
|
except ImportError:
|
||||||
|
from Queue import Queue, Empty # Python 2
|
||||||
|
|
||||||
|
from .spawnbase import SpawnBase, PY3
|
||||||
|
from .exceptions import EOF
|
||||||
|
from .utils import string_types
|
||||||
|
|
||||||
|
class PopenSpawn(SpawnBase):
|
||||||
|
def __init__(self, cmd, timeout=30, maxread=2000, searchwindowsize=None,
|
||||||
|
logfile=None, cwd=None, env=None, encoding=None,
|
||||||
|
codec_errors='strict', preexec_fn=None):
|
||||||
|
super(PopenSpawn, self).__init__(timeout=timeout, maxread=maxread,
|
||||||
|
searchwindowsize=searchwindowsize, logfile=logfile,
|
||||||
|
encoding=encoding, codec_errors=codec_errors)
|
||||||
|
|
||||||
|
# Note that `SpawnBase` initializes `self.crlf` to `\r\n`
|
||||||
|
# because the default behaviour for a PTY is to convert
|
||||||
|
# incoming LF to `\r\n` (see the `onlcr` flag and
|
||||||
|
# https://stackoverflow.com/a/35887657/5397009). Here we set
|
||||||
|
# it to `os.linesep` because that is what the spawned
|
||||||
|
# application outputs by default and `popen` doesn't translate
|
||||||
|
# anything.
|
||||||
|
if encoding is None:
    self.crlf = os.linesep.encode("ascii")
else:
    self.crlf = self.string_type(os.linesep)
|
||||||
|
|
||||||
|
kwargs = dict(bufsize=0, stdin=subprocess.PIPE,
|
||||||
|
stderr=subprocess.STDOUT, stdout=subprocess.PIPE,
|
||||||
|
cwd=cwd, preexec_fn=preexec_fn, env=env)
|
||||||
|
|
||||||
|
if sys.platform == 'win32':
|
||||||
|
startupinfo = subprocess.STARTUPINFO()
|
||||||
|
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
|
||||||
|
kwargs['startupinfo'] = startupinfo
|
||||||
|
kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
|
||||||
|
|
||||||
|
if isinstance(cmd, string_types) and sys.platform != 'win32':
|
||||||
|
cmd = shlex.split(cmd, posix=os.name == 'posix')
|
||||||
|
|
||||||
|
self.proc = subprocess.Popen(cmd, **kwargs)
|
||||||
|
self.pid = self.proc.pid
|
||||||
|
self.closed = False
|
||||||
|
self._buf = self.string_type()
|
||||||
|
|
||||||
|
self._read_queue = Queue()
|
||||||
|
self._read_thread = threading.Thread(target=self._read_incoming)
|
||||||
|
self._read_thread.daemon = True
|
||||||
|
self._read_thread.start()
|
||||||
|
|
||||||
|
_read_reached_eof = False
|
||||||
|
|
||||||
|
def read_nonblocking(self, size, timeout):
|
||||||
|
buf = self._buf
|
||||||
|
if self._read_reached_eof:
|
||||||
|
# We have already finished reading. Use up any buffered data,
|
||||||
|
# then raise EOF
|
||||||
|
if buf:
|
||||||
|
self._buf = buf[size:]
|
||||||
|
return buf[:size]
|
||||||
|
else:
|
||||||
|
self.flag_eof = True
|
||||||
|
raise EOF('End Of File (EOF).')
|
||||||
|
|
||||||
|
if timeout == -1:
|
||||||
|
timeout = self.timeout
|
||||||
|
elif timeout is None:
|
||||||
|
timeout = 1e6
|
||||||
|
|
||||||
|
t0 = time.time()
|
||||||
|
while (time.time() - t0) < timeout and size and len(buf) < size:
|
||||||
|
try:
|
||||||
|
incoming = self._read_queue.get_nowait()
|
||||||
|
except Empty:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
if incoming is None:
|
||||||
|
self._read_reached_eof = True
|
||||||
|
break
|
||||||
|
|
||||||
|
buf += self._decoder.decode(incoming, final=False)
|
||||||
|
|
||||||
|
r, self._buf = buf[:size], buf[size:]
|
||||||
|
|
||||||
|
self._log(r, 'read')
|
||||||
|
return r
|
||||||
|
|
||||||
|
def _read_incoming(self):
|
||||||
|
"""Run in a thread to move output from a pipe to a queue."""
|
||||||
|
fileno = self.proc.stdout.fileno()
|
||||||
|
while 1:
|
||||||
|
buf = b''
|
||||||
|
try:
|
||||||
|
buf = os.read(fileno, 1024)
|
||||||
|
except OSError as e:
|
||||||
|
self._log(e, 'read')
|
||||||
|
|
||||||
|
if not buf:
|
||||||
|
# This indicates we have reached EOF
|
||||||
|
self._read_queue.put(None)
|
||||||
|
return
|
||||||
|
|
||||||
|
self._read_queue.put(buf)
|
||||||
|
|
||||||
|
def write(self, s):
|
||||||
|
'''This is similar to send() except that there is no return value.
|
||||||
|
'''
|
||||||
|
self.send(s)
|
||||||
|
|
||||||
|
def writelines(self, sequence):
|
||||||
|
'''This calls write() for each element in the sequence.
|
||||||
|
|
||||||
|
The sequence can be any iterable object producing strings, typically a
|
||||||
|
list of strings. This does not add line separators. There is no return
|
||||||
|
value.
|
||||||
|
'''
|
||||||
|
for s in sequence:
|
||||||
|
self.send(s)
|
||||||
|
|
||||||
|
def send(self, s):
|
||||||
|
'''Send data to the subprocess' stdin.
|
||||||
|
|
||||||
|
Returns the number of bytes written.
|
||||||
|
'''
|
||||||
|
s = self._coerce_send_string(s)
|
||||||
|
self._log(s, 'send')
|
||||||
|
|
||||||
|
b = self._encoder.encode(s, final=False)
|
||||||
|
if PY3:
|
||||||
|
return self.proc.stdin.write(b)
|
||||||
|
else:
|
||||||
|
# On Python 2, .write() returns None, so we return the length of
|
||||||
|
# bytes written ourselves. This assumes they all got written.
|
||||||
|
self.proc.stdin.write(b)
|
||||||
|
return len(b)
|
||||||
|
|
||||||
|
def sendline(self, s=''):
|
||||||
|
'''Wraps send(), sending string ``s`` to child process, with os.linesep
|
||||||
|
automatically appended. Returns number of bytes written. '''
|
||||||
|
|
||||||
|
n = self.send(s)
|
||||||
|
return n + self.send(self.linesep)
|
||||||
|
|
||||||
|
def wait(self):
|
||||||
|
'''Wait for the subprocess to finish.
|
||||||
|
|
||||||
|
Returns the exit code.
|
||||||
|
'''
|
||||||
|
status = self.proc.wait()
|
||||||
|
if status >= 0:
|
||||||
|
self.exitstatus = status
|
||||||
|
self.signalstatus = None
|
||||||
|
else:
|
||||||
|
self.exitstatus = None
|
||||||
|
self.signalstatus = -status
|
||||||
|
self.terminated = True
|
||||||
|
return status
|
||||||
|
|
||||||
|
def kill(self, sig):
|
||||||
|
'''Sends a Unix signal to the subprocess.
|
||||||
|
|
||||||
|
Use constants from the :mod:`signal` module to specify which signal.
|
||||||
|
'''
|
||||||
|
if sys.platform == 'win32':
|
||||||
|
if sig in [signal.SIGINT, signal.CTRL_C_EVENT]:
|
||||||
|
sig = signal.CTRL_C_EVENT
|
||||||
|
elif sig in [signal.SIGBREAK, signal.CTRL_BREAK_EVENT]:
|
||||||
|
sig = signal.CTRL_BREAK_EVENT
|
||||||
|
else:
|
||||||
|
sig = signal.SIGTERM
|
||||||
|
|
||||||
|
os.kill(self.proc.pid, sig)
|
||||||
|
|
||||||
|
def sendeof(self):
|
||||||
|
'''Closes the stdin pipe from the writing end.'''
|
||||||
|
self.proc.stdin.close()
|
|
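
# PopenSpawn above offers the expect()/sendline() interface without a pty, which
# also makes it usable on Windows. A minimal sketch (the command is illustrative;
# note there is no tty echo, so sent lines do not come back in the output):
from pexpect.popen_spawn import PopenSpawn

child = PopenSpawn('python -i', encoding='utf-8', timeout=10)
child.expect('>>>')              # stderr is merged into stdout, so the prompt is visible
child.sendline('print(6 * 7)')
child.expect('>>>')
print(child.before)              # '42' plus line endings
child.sendeof()                  # close stdin so the interpreter exits
child.wait()
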
@ -0,0 +1,855 @@
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import pty
|
||||||
|
import tty
|
||||||
|
import errno
|
||||||
|
import signal
|
||||||
|
from contextlib import contextmanager
|
||||||
|
|
||||||
|
import ptyprocess
|
||||||
|
from ptyprocess.ptyprocess import use_native_pty_fork
|
||||||
|
|
||||||
|
from .exceptions import ExceptionPexpect, EOF, TIMEOUT
|
||||||
|
from .spawnbase import SpawnBase
|
||||||
|
from .utils import (
|
||||||
|
which, split_command_line, select_ignore_interrupts, poll_ignore_interrupts
|
||||||
|
)
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def _wrap_ptyprocess_err():
|
||||||
|
"""Turn ptyprocess errors into our own ExceptionPexpect errors"""
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except ptyprocess.PtyProcessError as e:
|
||||||
|
raise ExceptionPexpect(*e.args)
|
||||||
|
|
||||||
|
PY3 = (sys.version_info[0] >= 3)
|
||||||
|
|
||||||
|
class spawn(SpawnBase):
|
||||||
|
'''This is the main class interface for Pexpect. Use this class to start
|
||||||
|
and control child applications. '''
|
||||||
|
|
||||||
|
# This is purely informational now - changing it has no effect
|
||||||
|
use_native_pty_fork = use_native_pty_fork
|
||||||
|
|
||||||
|
def __init__(self, command, args=[], timeout=30, maxread=2000,
|
||||||
|
searchwindowsize=None, logfile=None, cwd=None, env=None,
|
||||||
|
ignore_sighup=False, echo=True, preexec_fn=None,
|
||||||
|
encoding=None, codec_errors='strict', dimensions=None,
|
||||||
|
use_poll=False):
|
||||||
|
'''This is the constructor. The command parameter may be a string that
|
||||||
|
includes a command and any arguments to the command. For example::
|
||||||
|
|
||||||
|
child = pexpect.spawn('/usr/bin/ftp')
|
||||||
|
child = pexpect.spawn('/usr/bin/ssh user@example.com')
|
||||||
|
child = pexpect.spawn('ls -latr /tmp')
|
||||||
|
|
||||||
|
You may also construct it with a list of arguments like so::
|
||||||
|
|
||||||
|
child = pexpect.spawn('/usr/bin/ftp', [])
|
||||||
|
child = pexpect.spawn('/usr/bin/ssh', ['user@example.com'])
|
||||||
|
child = pexpect.spawn('ls', ['-latr', '/tmp'])
|
||||||
|
|
||||||
|
After this the child application will be created and will be ready to
|
||||||
|
talk to. For normal use, see expect() and send() and sendline().
|
||||||
|
|
||||||
|
Remember that Pexpect does NOT interpret shell meta characters such as
|
||||||
|
redirect, pipe, or wild cards (``>``, ``|``, or ``*``). This is a
|
||||||
|
common mistake. If you want to run a command and pipe it through
|
||||||
|
another command then you must also start a shell. For example::
|
||||||
|
|
||||||
|
child = pexpect.spawn('/bin/bash -c "ls -l | grep LOG > logs.txt"')
|
||||||
|
child.expect(pexpect.EOF)
|
||||||
|
|
||||||
|
The second form of spawn (where you pass a list of arguments) is useful
|
||||||
|
in situations where you wish to spawn a command and pass it its own
|
||||||
|
argument list. This can make syntax more clear. For example, the
|
||||||
|
following is equivalent to the previous example::
|
||||||
|
|
||||||
|
shell_cmd = 'ls -l | grep LOG > logs.txt'
|
||||||
|
child = pexpect.spawn('/bin/bash', ['-c', shell_cmd])
|
||||||
|
child.expect(pexpect.EOF)
|
||||||
|
|
||||||
|
The maxread attribute sets the read buffer size. This is maximum number
|
||||||
|
of bytes that Pexpect will try to read from a TTY at one time. Setting
|
||||||
|
the maxread size to 1 will turn off buffering. Setting the maxread
|
||||||
|
value higher may help performance in cases where large amounts of
|
||||||
|
output are read back from the child. This feature is useful in
|
||||||
|
conjunction with searchwindowsize.
|
||||||
|
|
||||||
|
When the keyword argument *searchwindowsize* is None (default), the
|
||||||
|
full buffer is searched at each iteration of receiving incoming data.
|
||||||
|
The default number of bytes scanned at each iteration is very large
|
||||||
|
and may be reduced to collaterally reduce search cost. After
|
||||||
|
:meth:`~.expect` returns, the full buffer attribute remains up to
|
||||||
|
size *maxread* irrespective of *searchwindowsize* value.
|
||||||
|
|
||||||
|
When the keyword argument ``timeout`` is specified as a number,
|
||||||
|
(default: *30*), then :class:`TIMEOUT` will be raised after the value
|
||||||
|
specified has elapsed, in seconds, for any of the :meth:`~.expect`
|
||||||
|
family of method calls. When None, TIMEOUT will not be raised, and
|
||||||
|
:meth:`~.expect` may block indefinitely until match.
|
||||||
|
|
||||||
|
|
||||||
|
The logfile member turns on or off logging. All input and output will
|
||||||
|
be copied to the given file object. Set logfile to None to stop
|
||||||
|
logging. This is the default. Set logfile to sys.stdout to echo
|
||||||
|
everything to standard output. The logfile is flushed after each write.
|
||||||
|
|
||||||
|
Example log input and output to a file::
|
||||||
|
|
||||||
|
child = pexpect.spawn('some_command')
|
||||||
|
fout = open('mylog.txt','wb')
|
||||||
|
child.logfile = fout
|
||||||
|
|
||||||
|
Example log to stdout::
|
||||||
|
|
||||||
|
# In Python 2:
|
||||||
|
child = pexpect.spawn('some_command')
|
||||||
|
child.logfile = sys.stdout
|
||||||
|
|
||||||
|
# In Python 3, we'll use the ``encoding`` argument to decode data
|
||||||
|
# from the subprocess and handle it as unicode:
|
||||||
|
child = pexpect.spawn('some_command', encoding='utf-8')
|
||||||
|
child.logfile = sys.stdout
|
||||||
|
|
||||||
|
The logfile_read and logfile_send members can be used to separately log
|
||||||
|
the input from the child and output sent to the child. Sometimes you
|
||||||
|
don't want to see everything you write to the child. You only want to
|
||||||
|
log what the child sends back. For example::
|
||||||
|
|
||||||
|
child = pexpect.spawn('some_command')
|
||||||
|
child.logfile_read = sys.stdout
|
||||||
|
|
||||||
|
You will need to pass an encoding to spawn in the above code if you are
|
||||||
|
using Python 3.
|
||||||
|
|
||||||
|
To separately log output sent to the child use logfile_send::
|
||||||
|
|
||||||
|
child.logfile_send = fout
|
||||||
|
|
||||||
|
If ``ignore_sighup`` is True, the child process will ignore SIGHUP
|
||||||
|
signals. The default is False from Pexpect 4.0, meaning that SIGHUP
|
||||||
|
will be handled normally by the child.
|
||||||
|
|
||||||
|
The delaybeforesend helps overcome a weird behavior that many users
|
||||||
|
were experiencing. The typical problem was that a user would expect() a
|
||||||
|
"Password:" prompt and then immediately call sendline() to send the
|
||||||
|
password. The user would then see that their password was echoed back
|
||||||
|
to them. Passwords don't normally echo. The problem is caused by the
|
||||||
|
fact that most applications print out the "Password" prompt and then
|
||||||
|
turn off stdin echo, but if you send your password before the
|
||||||
|
application turned off echo, then you get your password echoed.
|
||||||
|
Normally this wouldn't be a problem when interacting with a human at a
|
||||||
|
real keyboard. If you introduce a slight delay just before writing then
|
||||||
|
this seems to clear up the problem. This was such a common problem for
|
||||||
|
many users that I decided that the default pexpect behavior should be
|
||||||
|
to sleep just before writing to the child application. 1/20th of a
|
||||||
|
second (50 ms) seems to be enough to clear up the problem. You can set
|
||||||
|
delaybeforesend to None to return to the old behavior.
|
||||||
|
|
||||||
|
Note that spawn is clever about finding commands on your path.
|
||||||
|
It uses the same logic that "which" uses to find executables.
|
||||||
|
|
||||||
|
If you wish to get the exit status of the child you must call the
|
||||||
|
close() method. The exit or signal status of the child will be stored
|
||||||
|
in self.exitstatus or self.signalstatus. If the child exited normally
|
||||||
|
then exitstatus will store the exit return code and signalstatus will
|
||||||
|
be None. If the child was terminated abnormally with a signal then
|
||||||
|
signalstatus will store the signal value and exitstatus will be None::
|
||||||
|
|
||||||
|
child = pexpect.spawn('some_command')
|
||||||
|
child.close()
|
||||||
|
print(child.exitstatus, child.signalstatus)
|
||||||
|
|
||||||
|
If you need more detail you can also read the self.status member which
|
||||||
|
stores the status returned by os.waitpid. You can interpret this using
|
||||||
|
os.WIFEXITED/os.WEXITSTATUS or os.WIFSIGNALED/os.WTERMSIG.
|
||||||
|
|
||||||
|
The echo attribute may be set to False to disable echoing of input.
|
||||||
|
As a pseudo-terminal, all input echoed by the "keyboard" (send()
|
||||||
|
or sendline()) will be repeated to output. For many cases, it is
|
||||||
|
not desirable to have echo enabled, and it may be later disabled
|
||||||
|
using setecho(False) followed by waitnoecho(). However, for some
|
||||||
|
platforms such as Solaris, this is not possible, and should be
|
||||||
|
disabled immediately on spawn.
|
||||||
|
|
||||||
|
If preexec_fn is given, it will be called in the child process before
|
||||||
|
launching the given command. This is useful to e.g. reset inherited
|
||||||
|
signal handlers.
|
||||||
|
|
||||||
|
The dimensions attribute specifies the size of the pseudo-terminal as
|
||||||
|
seen by the subprocess, and is specified as a two-entry tuple (rows,
|
||||||
|
columns). If this is unspecified, the defaults in ptyprocess will apply.
|
||||||
|
|
||||||
|
The use_poll attribute enables using select.poll() over select.select()
|
||||||
|
for socket handling. This is handy if your system could have > 1024 fds
|
||||||
|
'''
|
||||||
|
super(spawn, self).__init__(timeout=timeout, maxread=maxread, searchwindowsize=searchwindowsize,
|
||||||
|
logfile=logfile, encoding=encoding, codec_errors=codec_errors)
|
||||||
|
self.STDIN_FILENO = pty.STDIN_FILENO
|
||||||
|
self.STDOUT_FILENO = pty.STDOUT_FILENO
|
||||||
|
self.STDERR_FILENO = pty.STDERR_FILENO
|
||||||
|
self.cwd = cwd
|
||||||
|
self.env = env
|
||||||
|
self.echo = echo
|
||||||
|
self.ignore_sighup = ignore_sighup
|
||||||
|
self.__irix_hack = sys.platform.lower().startswith('irix')
|
||||||
|
if command is None:
|
||||||
|
self.command = None
|
||||||
|
self.args = None
|
||||||
|
self.name = '<pexpect factory incomplete>'
|
||||||
|
else:
|
||||||
|
self._spawn(command, args, preexec_fn, dimensions)
|
||||||
|
self.use_poll = use_poll
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
'''This returns a human-readable string that represents the state of
|
||||||
|
the object. '''
|
||||||
|
|
||||||
|
s = []
|
||||||
|
s.append(repr(self))
|
||||||
|
s.append('command: ' + str(self.command))
|
||||||
|
s.append('args: %r' % (self.args,))
|
||||||
|
s.append('buffer (last 100 chars): %r' % self.buffer[-100:])
|
||||||
|
s.append('before (last 100 chars): %r' % self.before[-100:] if self.before else '')
|
||||||
|
s.append('after: %r' % (self.after,))
|
||||||
|
s.append('match: %r' % (self.match,))
|
||||||
|
s.append('match_index: ' + str(self.match_index))
|
||||||
|
s.append('exitstatus: ' + str(self.exitstatus))
|
||||||
|
if hasattr(self, 'ptyproc'):
|
||||||
|
s.append('flag_eof: ' + str(self.flag_eof))
|
||||||
|
s.append('pid: ' + str(self.pid))
|
||||||
|
s.append('child_fd: ' + str(self.child_fd))
|
||||||
|
s.append('closed: ' + str(self.closed))
|
||||||
|
s.append('timeout: ' + str(self.timeout))
|
||||||
|
s.append('delimiter: ' + str(self.delimiter))
|
||||||
|
s.append('logfile: ' + str(self.logfile))
|
||||||
|
s.append('logfile_read: ' + str(self.logfile_read))
|
||||||
|
s.append('logfile_send: ' + str(self.logfile_send))
|
||||||
|
s.append('maxread: ' + str(self.maxread))
|
||||||
|
s.append('ignorecase: ' + str(self.ignorecase))
|
||||||
|
s.append('searchwindowsize: ' + str(self.searchwindowsize))
|
||||||
|
s.append('delaybeforesend: ' + str(self.delaybeforesend))
|
||||||
|
s.append('delayafterclose: ' + str(self.delayafterclose))
|
||||||
|
s.append('delayafterterminate: ' + str(self.delayafterterminate))
|
||||||
|
return '\n'.join(s)
|
||||||
|
|
||||||
|
def _spawn(self, command, args=[], preexec_fn=None, dimensions=None):
|
||||||
|
'''This starts the given command in a child process. This does all the
|
||||||
|
fork/exec type of stuff for a pty. This is called by __init__. If args
|
||||||
|
is empty then command will be parsed (split on spaces) and args will be
|
||||||
|
set to parsed arguments. '''
|
||||||
|
|
||||||
|
# The pid and child_fd of this object get set by this method.
|
||||||
|
# Note that it is difficult for this method to fail.
|
||||||
|
# You cannot detect if the child process cannot start.
|
||||||
|
# So the only way you can tell if the child process started
|
||||||
|
# or not is to try to read from the file descriptor. If you get
|
||||||
|
# EOF immediately then it means that the child is already dead.
|
||||||
|
# That may not necessarily be bad because you may have spawned a child
|
||||||
|
# that performs some task; creates no stdout output; and then dies.
|
||||||
|
|
||||||
|
# If command is an int type then it may represent a file descriptor.
|
||||||
|
if isinstance(command, type(0)):
|
||||||
|
raise ExceptionPexpect('Command is an int type. ' +
|
||||||
|
'If this is a file descriptor then maybe you want to ' +
|
||||||
|
'use fdpexpect.fdspawn which takes an existing ' +
|
||||||
|
'file descriptor instead of a command string.')
|
||||||
|
|
||||||
|
if not isinstance(args, type([])):
|
||||||
|
raise TypeError('The argument, args, must be a list.')
|
||||||
|
|
||||||
|
if args == []:
|
||||||
|
self.args = split_command_line(command)
|
||||||
|
self.command = self.args[0]
|
||||||
|
else:
|
||||||
|
# Make a shallow copy of the args list.
|
||||||
|
self.args = args[:]
|
||||||
|
self.args.insert(0, command)
|
||||||
|
self.command = command
|
||||||
|
|
||||||
|
command_with_path = which(self.command, env=self.env)
|
||||||
|
if command_with_path is None:
|
||||||
|
raise ExceptionPexpect('The command was not found or was not ' +
|
||||||
|
'executable: %s.' % self.command)
|
||||||
|
self.command = command_with_path
|
||||||
|
self.args[0] = self.command
|
||||||
|
|
||||||
|
self.name = '<' + ' '.join(self.args) + '>'
|
||||||
|
|
||||||
|
assert self.pid is None, 'The pid member must be None.'
|
||||||
|
assert self.command is not None, 'The command member must not be None.'
|
||||||
|
|
||||||
|
kwargs = {'echo': self.echo, 'preexec_fn': preexec_fn}
|
||||||
|
if self.ignore_sighup:
|
||||||
|
def preexec_wrapper():
|
||||||
|
"Set SIGHUP to be ignored, then call the real preexec_fn"
|
||||||
|
signal.signal(signal.SIGHUP, signal.SIG_IGN)
|
||||||
|
if preexec_fn is not None:
|
||||||
|
preexec_fn()
|
||||||
|
kwargs['preexec_fn'] = preexec_wrapper
|
||||||
|
|
||||||
|
if dimensions is not None:
|
||||||
|
kwargs['dimensions'] = dimensions
|
||||||
|
|
||||||
|
if self.encoding is not None:
|
||||||
|
# Encode command line using the specified encoding
|
||||||
|
self.args = [a if isinstance(a, bytes) else a.encode(self.encoding)
|
||||||
|
for a in self.args]
|
||||||
|
|
||||||
|
self.ptyproc = self._spawnpty(self.args, env=self.env,
|
||||||
|
cwd=self.cwd, **kwargs)
|
||||||
|
|
||||||
|
self.pid = self.ptyproc.pid
|
||||||
|
self.child_fd = self.ptyproc.fd
|
||||||
|
|
||||||
|
|
||||||
|
self.terminated = False
|
||||||
|
self.closed = False
|
||||||
|
|
||||||
|
def _spawnpty(self, args, **kwargs):
|
||||||
|
'''Spawn a pty and return an instance of PtyProcess.'''
|
||||||
|
return ptyprocess.PtyProcess.spawn(args, **kwargs)
|
||||||
|
|
||||||
|
def close(self, force=True):
|
||||||
|
'''This closes the connection with the child application. Note that
|
||||||
|
calling close() more than once is valid. This emulates standard Python
|
||||||
|
behavior with files. Set force to True if you want to make sure that
|
||||||
|
the child is terminated (SIGKILL is sent if the child ignores SIGHUP
|
||||||
|
and SIGINT). '''
|
||||||
|
|
||||||
|
self.flush()
|
||||||
|
with _wrap_ptyprocess_err():
|
||||||
|
# PtyProcessError may be raised if it is not possible to terminate
|
||||||
|
# the child.
|
||||||
|
self.ptyproc.close(force=force)
|
||||||
|
self.isalive() # Update exit status from ptyproc
|
||||||
|
self.child_fd = -1
|
||||||
|
self.closed = True
|
||||||
|
|
||||||
|
def isatty(self):
|
||||||
|
'''This returns True if the file descriptor is open and connected to a
|
||||||
|
tty(-like) device, else False.
|
||||||
|
|
||||||
|
On SVR4-style platforms implementing streams, such as SunOS and HP-UX,
|
||||||
|
the child pty may not appear as a terminal device. This means
|
||||||
|
methods such as setecho(), setwinsize(), getwinsize() may raise an
|
||||||
|
IOError. '''
|
||||||
|
|
||||||
|
return os.isatty(self.child_fd)
|
||||||
|
|
||||||
|
def waitnoecho(self, timeout=-1):
|
||||||
|
'''This waits until the terminal ECHO flag is set False. This returns
|
||||||
|
True if the echo mode is off. This returns False if the ECHO flag was
|
||||||
|
not set False before the timeout. This can be used to detect when the
|
||||||
|
child is waiting for a password. Usually a child application will turn
|
||||||
|
off echo mode when it is waiting for the user to enter a password. For
|
||||||
|
example, instead of expecting the "password:" prompt you can wait for
|
||||||
|
the child to set ECHO off::
|
||||||
|
|
||||||
|
p = pexpect.spawn('ssh user@example.com')
|
||||||
|
p.waitnoecho()
|
||||||
|
p.sendline(mypassword)
|
||||||
|
|
||||||
|
If timeout==-1 then this method will use the value in self.timeout.
|
||||||
|
If timeout==None then this method will block until the ECHO flag is False.
|
||||||
|
'''
|
||||||
|
|
||||||
|
if timeout == -1:
|
||||||
|
timeout = self.timeout
|
||||||
|
if timeout is not None:
|
||||||
|
end_time = time.time() + timeout
|
||||||
|
while True:
|
||||||
|
if not self.getecho():
|
||||||
|
return True
|
||||||
|
if timeout < 0 and timeout is not None:
|
||||||
|
return False
|
||||||
|
if timeout is not None:
|
||||||
|
timeout = end_time - time.time()
|
||||||
|
time.sleep(0.1)
|
||||||
|
|
||||||
|
def getecho(self):
|
||||||
|
'''This returns the terminal echo mode. This returns True if echo is
|
||||||
|
on or False if echo is off. Child applications that are expecting you
|
||||||
|
to enter a password often set ECHO False. See waitnoecho().
|
||||||
|
|
||||||
|
Not supported on platforms where ``isatty()`` returns False. '''
|
||||||
|
return self.ptyproc.getecho()
|
||||||
|
|
||||||
|
def setecho(self, state):
|
||||||
|
'''This sets the terminal echo mode on or off. Note that anything the
|
||||||
|
child sent before the echo will be lost, so you should be sure that
|
||||||
|
your input buffer is empty before you call setecho(). For example, the
|
||||||
|
following will work as expected::
|
||||||
|
|
||||||
|
p = pexpect.spawn('cat') # Echo is on by default.
|
||||||
|
p.sendline('1234') # We expect to see this twice from the child...
|
||||||
|
p.expect(['1234']) # ... once from the tty echo...
|
||||||
|
p.expect(['1234']) # ... and again from cat itself.
|
||||||
|
p.setecho(False) # Turn off tty echo
|
||||||
|
p.sendline('abcd') # We will see this only once (echoed by cat).
p.sendline('wxyz') # We will see this only once (echoed by cat).
|
||||||
|
p.expect(['abcd'])
|
||||||
|
p.expect(['wxyz'])
|
||||||
|
|
||||||
|
The following WILL NOT WORK because the lines sent before the setecho
|
||||||
|
will be lost::
|
||||||
|
|
||||||
|
p = pexpect.spawn('cat')
|
||||||
|
p.sendline('1234')
|
||||||
|
p.setecho(False) # Turn off tty echo
|
||||||
|
p.sendline('abcd') # We will see this only once (echoed by cat).
p.sendline('wxyz') # We will see this only once (echoed by cat).
|
||||||
|
p.expect(['1234'])
|
||||||
|
p.expect(['1234'])
|
||||||
|
p.expect(['abcd'])
|
||||||
|
p.expect(['wxyz'])
|
||||||
|
|
||||||
|
|
||||||
|
Not supported on platforms where ``isatty()`` returns False.
|
||||||
|
'''
|
||||||
|
return self.ptyproc.setecho(state)
|
||||||
|
|
||||||
|
def read_nonblocking(self, size=1, timeout=-1):
|
||||||
|
'''This reads at most size characters from the child application. It
|
||||||
|
includes a timeout. If the read does not complete within the timeout
|
||||||
|
period then a TIMEOUT exception is raised. If the end of file is read
|
||||||
|
then an EOF exception will be raised. If a logfile is specified, a
|
||||||
|
copy is written to that log.
|
||||||
|
|
||||||
|
If timeout is None then the read may block indefinitely.
|
||||||
|
If timeout is -1 then the self.timeout value is used. If timeout is 0
|
||||||
|
then the child is polled and if there is no data immediately ready
|
||||||
|
then this will raise a TIMEOUT exception.
|
||||||
|
|
||||||
|
The timeout refers only to the amount of time to read at least one
|
||||||
|
character. This is not affected by the 'size' parameter, so if you call
|
||||||
|
read_nonblocking(size=100, timeout=30) and only one character is
|
||||||
|
available right away then one character will be returned immediately.
|
||||||
|
It will not wait for 30 seconds for another 99 characters to come in.
|
||||||
|
|
||||||
|
On the other hand, if there are bytes available to read immediately,
|
||||||
|
all those bytes will be read (up to the buffer size). So, if the
|
||||||
|
buffer size is 1 megabyte and there is 1 megabyte of data available
|
||||||
|
to read, the buffer will be filled, regardless of timeout.
|
||||||
|
|
||||||
|
This is a wrapper around os.read(). It uses select.select() or
|
||||||
|
select.poll() to implement the timeout. '''
|
||||||
|
|
||||||
|
if self.closed:
|
||||||
|
raise ValueError('I/O operation on closed file.')
|
||||||
|
|
||||||
|
if self.use_poll:
|
||||||
|
def select(timeout):
|
||||||
|
return poll_ignore_interrupts([self.child_fd], timeout)
|
||||||
|
else:
|
||||||
|
def select(timeout):
|
||||||
|
return select_ignore_interrupts([self.child_fd], [], [], timeout)[0]
|
||||||
|
|
||||||
|
# If there is data available to read right now, read as much as
|
||||||
|
# we can. We do this to increase performance if there are a lot
|
||||||
|
# of bytes to be read. This also avoids calling isalive() too
|
||||||
|
# often. See also:
|
||||||
|
# * https://github.com/pexpect/pexpect/pull/304
|
||||||
|
# * http://trac.sagemath.org/ticket/10295
|
||||||
|
if select(0):
|
||||||
|
try:
|
||||||
|
incoming = super(spawn, self).read_nonblocking(size)
|
||||||
|
except EOF:
|
||||||
|
# Maybe the child is dead: update some attributes in that case
|
||||||
|
self.isalive()
|
||||||
|
raise
|
||||||
|
while len(incoming) < size and select(0):
|
||||||
|
try:
|
||||||
|
incoming += super(spawn, self).read_nonblocking(size - len(incoming))
|
||||||
|
except EOF:
|
||||||
|
# Maybe the child is dead: update some attributes in that case
|
||||||
|
self.isalive()
|
||||||
|
# Don't raise EOF, just return what we read so far.
|
||||||
|
return incoming
|
||||||
|
return incoming
|
||||||
|
|
||||||
|
if timeout == -1:
|
||||||
|
timeout = self.timeout
|
||||||
|
|
||||||
|
if not self.isalive():
|
||||||
|
# The process is dead, but there may or may not be data
|
||||||
|
# available to read. Note that some systems such as Solaris
|
||||||
|
# do not give an EOF when the child dies. In fact, you can
|
||||||
|
# still try to read from the child_fd -- it will block
|
||||||
|
# forever or until TIMEOUT. For that reason, it's important
|
||||||
|
# to do this check before calling select() with timeout.
|
||||||
|
if select(0):
|
||||||
|
return super(spawn, self).read_nonblocking(size)
|
||||||
|
self.flag_eof = True
|
||||||
|
raise EOF('End Of File (EOF). Braindead platform.')
|
||||||
|
elif self.__irix_hack:
|
||||||
|
# Irix takes a long time before it realizes a child was terminated.
|
||||||
|
# Make sure that the timeout is at least 2 seconds.
|
||||||
|
# FIXME So does this mean Irix systems are forced to always have
|
||||||
|
# FIXME a 2 second delay when calling read_nonblocking? That sucks.
|
||||||
|
if timeout is not None and timeout < 2:
|
||||||
|
timeout = 2
|
||||||
|
|
||||||
|
# Because of the select(0) check above, we know that no data
|
||||||
|
# is available right now. But if a non-zero timeout is given
|
||||||
|
# (possibly timeout=None), we call select() with a timeout.
|
||||||
|
if (timeout != 0) and select(timeout):
|
||||||
|
return super(spawn, self).read_nonblocking(size)
|
||||||
|
|
||||||
|
if not self.isalive():
|
||||||
|
# Some platforms, such as Irix, will claim that their
|
||||||
|
# processes are alive; timeout on the select; and
|
||||||
|
# then finally admit that they are not alive.
|
||||||
|
self.flag_eof = True
|
||||||
|
raise EOF('End of File (EOF). Very slow platform.')
|
||||||
|
else:
|
||||||
|
raise TIMEOUT('Timeout exceeded.')
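
# A minimal sketch of read_nonblocking() as documented above: drain whatever
# output becomes ready without committing to an expect() pattern (the command
# is illustrative):
import pexpect

child = pexpect.spawn('bash', ['-c', 'echo one; sleep 1; echo two'],
                      encoding='utf-8')
try:
    while True:
        chunk = child.read_nonblocking(size=1024, timeout=2)
        print('got: %r' % chunk)
except pexpect.EOF:
    print('child closed its output')
except pexpect.TIMEOUT:
    print('no output for 2 seconds')
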
|
||||||
|
|
||||||
|
def write(self, s):
|
||||||
|
'''This is similar to send() except that there is no return value.
|
||||||
|
'''
|
||||||
|
|
||||||
|
self.send(s)
|
||||||
|
|
||||||
|
def writelines(self, sequence):
|
||||||
|
'''This calls write() for each element in the sequence. The sequence
|
||||||
|
can be any iterable object producing strings, typically a list of
|
||||||
|
strings. This does not add line separators. There is no return value.
|
||||||
|
'''
|
||||||
|
|
||||||
|
for s in sequence:
|
||||||
|
self.write(s)
|
||||||
|
|
||||||
|
def send(self, s):
|
||||||
|
'''Sends string ``s`` to the child process, returning the number of
|
||||||
|
bytes written. If a logfile is specified, a copy is written to that
|
||||||
|
log.
|
||||||
|
|
||||||
|
The default terminal input mode is canonical processing unless set
|
||||||
|
otherwise by the child process. This allows backspace and other line
|
||||||
|
processing to be performed prior to transmitting to the receiving
|
||||||
|
program. As this is buffered, there is a limited size of such buffer.
|
||||||
|
|
||||||
|
On Linux systems, this is 4096 (defined by N_TTY_BUF_SIZE). All
|
||||||
|
other systems honor the POSIX.1 definition PC_MAX_CANON -- 1024
|
||||||
|
on OSX, 256 on OpenSolaris, and 1920 on FreeBSD.
|
||||||
|
|
||||||
|
This value may be discovered using fpathconf(3)::
|
||||||
|
|
||||||
|
>>> from os import fpathconf
|
||||||
|
>>> print(fpathconf(0, 'PC_MAX_CANON'))
|
||||||
|
256
|
||||||
|
|
||||||
|
On such a system, only 256 bytes may be received per line. Any
|
||||||
|
subsequent bytes received will be discarded. BEL (``'\a'``) is then
|
||||||
|
sent to output if IMAXBEL (termios.h) is set by the tty driver.
|
||||||
|
This is usually enabled by default. Linux does not honor this as
|
||||||
|
an option -- it behaves as though it is always set on.
|
||||||
|
|
||||||
|
Canonical input processing may be disabled altogether by executing
|
||||||
|
a shell, then stty(1), before executing the final program::
|
||||||
|
|
||||||
|
>>> bash = pexpect.spawn('/bin/bash', echo=False)
|
||||||
|
>>> bash.sendline('stty -icanon')
|
||||||
|
>>> bash.sendline('base64')
|
||||||
|
>>> bash.sendline('x' * 5000)
|
||||||
|
'''
|
||||||
|
|
||||||
|
if self.delaybeforesend is not None:
|
||||||
|
time.sleep(self.delaybeforesend)
|
||||||
|
|
||||||
|
s = self._coerce_send_string(s)
|
||||||
|
self._log(s, 'send')
|
||||||
|
|
||||||
|
b = self._encoder.encode(s, final=False)
|
||||||
|
return os.write(self.child_fd, b)
|
||||||
|
|
||||||
|
def sendline(self, s=''):
|
||||||
|
'''Wraps send(), sending string ``s`` to child process, with
|
||||||
|
``os.linesep`` automatically appended. Returns number of bytes
|
||||||
|
written. Only a limited number of bytes may be sent for each
|
||||||
|
line in the default terminal mode, see docstring of :meth:`send`.
|
||||||
|
'''
|
||||||
|
s = self._coerce_send_string(s)
|
||||||
|
return self.send(s + self.linesep)
|
||||||
|
|
||||||
|
def _log_control(self, s):
|
||||||
|
"""Write control characters to the appropriate log files"""
|
||||||
|
if self.encoding is not None:
|
||||||
|
s = s.decode(self.encoding, 'replace')
|
||||||
|
self._log(s, 'send')
|
||||||
|
|
||||||
|
def sendcontrol(self, char):
|
||||||
|
'''Helper method that wraps send() with mnemonic access for sending control
|
||||||
|
character to the child (such as Ctrl-C or Ctrl-D). For example, to send
|
||||||
|
Ctrl-G (ASCII 7, bell, '\a')::
|
||||||
|
|
||||||
|
child.sendcontrol('g')
|
||||||
|
|
||||||
|
See also, sendintr() and sendeof().
|
||||||
|
'''
|
||||||
|
n, byte = self.ptyproc.sendcontrol(char)
|
||||||
|
self._log_control(byte)
|
||||||
|
return n
|
||||||
|
|
||||||
|
def sendeof(self):
|
||||||
|
'''This sends an EOF to the child. This sends a character which causes
|
||||||
|
the pending parent output buffer to be sent to the waiting child
|
||||||
|
program without waiting for end-of-line. If it is the first character
|
||||||
|
of the line, the read() in the user program returns 0, which signifies
|
||||||
|
end-of-file. This means to work as expected a sendeof() has to be
|
||||||
|
called at the beginning of a line. This method does not send a newline.
|
||||||
|
It is the responsibility of the caller to ensure the eof is sent at the
|
||||||
|
beginning of a line. '''
|
||||||
|
|
||||||
|
n, byte = self.ptyproc.sendeof()
|
||||||
|
self._log_control(byte)
|
||||||
|
|
||||||
|
def sendintr(self):
|
||||||
|
'''This sends a SIGINT to the child. It does not require
|
||||||
|
the SIGINT to be the first character on a line. '''
|
||||||
|
|
||||||
|
n, byte = self.ptyproc.sendintr()
|
||||||
|
self._log_control(byte)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def flag_eof(self):
|
||||||
|
return self.ptyproc.flag_eof
|
||||||
|
|
||||||
|
@flag_eof.setter
|
||||||
|
def flag_eof(self, value):
|
||||||
|
self.ptyproc.flag_eof = value
|
||||||
|
|
||||||
|
def eof(self):
|
||||||
|
'''This returns True if the EOF exception was ever raised.
|
||||||
|
'''
|
||||||
|
return self.flag_eof
|
||||||
|
|
||||||
|
def terminate(self, force=False):
|
||||||
|
'''This forces a child process to terminate. It starts nicely with
|
||||||
|
SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This
|
||||||
|
returns True if the child was terminated. This returns False if the
|
||||||
|
child could not be terminated. '''
|
||||||
|
|
||||||
|
if not self.isalive():
|
||||||
|
return True
|
||||||
|
try:
|
||||||
|
self.kill(signal.SIGHUP)
|
||||||
|
time.sleep(self.delayafterterminate)
|
||||||
|
if not self.isalive():
|
||||||
|
return True
|
||||||
|
self.kill(signal.SIGCONT)
|
||||||
|
time.sleep(self.delayafterterminate)
|
||||||
|
if not self.isalive():
|
||||||
|
return True
|
||||||
|
self.kill(signal.SIGINT)
|
||||||
|
time.sleep(self.delayafterterminate)
|
||||||
|
if not self.isalive():
|
||||||
|
return True
|
||||||
|
if force:
|
||||||
|
self.kill(signal.SIGKILL)
|
||||||
|
time.sleep(self.delayafterterminate)
|
||||||
|
if not self.isalive():
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
return False
|
||||||
|
except OSError:
|
||||||
|
# I think there are kernel timing issues that sometimes cause
|
||||||
|
# this to happen. I think isalive() reports True, but the
|
||||||
|
# process is dead to the kernel.
|
||||||
|
# Make one last attempt to see if the kernel is up to date.
|
||||||
|
time.sleep(self.delayafterterminate)
|
||||||
|
if not self.isalive():
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
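# Illustrative sketch of terminate(): it escalates through SIGHUP/SIGCONT/SIGINT
# and only sends SIGKILL when force=True, so a cautious shutdown can try both.
# The spawned command is an assumption.
import pexpect
child = pexpect.spawn('sleep 1000')
if not child.terminate():
    child.terminate(force=True)   # last resort: SIGKILL
assert not child.isalive()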
|
||||||
|
|
||||||
|
def wait(self):
|
||||||
|
'''This waits until the child exits. This is a blocking call. This will
|
||||||
|
not read any data from the child, so this will block forever if the
|
||||||
|
child has unread output and has terminated. In other words, the child
|
||||||
|
may have printed output and then called exit(), but the child is
|
||||||
|
technically still alive until its output is read by the parent.
|
||||||
|
|
||||||
|
This method is non-blocking if :meth:`wait` has already been called
|
||||||
|
previously or :meth:`isalive` method returns False. It simply returns
|
||||||
|
the previously determined exit status.
|
||||||
|
'''
|
||||||
|
|
||||||
|
ptyproc = self.ptyproc
|
||||||
|
with _wrap_ptyprocess_err():
|
||||||
|
# exception may occur if "Is some other process attempting
|
||||||
|
# "job control with our child pid?"
|
||||||
|
exitstatus = ptyproc.wait()
|
||||||
|
self.status = ptyproc.status
|
||||||
|
self.exitstatus = ptyproc.exitstatus
|
||||||
|
self.signalstatus = ptyproc.signalstatus
|
||||||
|
self.terminated = True
|
||||||
|
|
||||||
|
return exitstatus
|
||||||
|
|
||||||
|
def isalive(self):
|
||||||
|
'''This tests if the child process is running or not. This is
|
||||||
|
non-blocking. If the child was terminated then this will read the
|
||||||
|
exitstatus or signalstatus of the child. This returns True if the child
|
||||||
|
process appears to be running or False if not. It can take literally
|
||||||
|
SECONDS for Solaris to return the right status. '''
|
||||||
|
|
||||||
|
ptyproc = self.ptyproc
|
||||||
|
with _wrap_ptyprocess_err():
|
||||||
|
alive = ptyproc.isalive()
|
||||||
|
|
||||||
|
if not alive:
|
||||||
|
self.status = ptyproc.status
|
||||||
|
self.exitstatus = ptyproc.exitstatus
|
||||||
|
self.signalstatus = ptyproc.signalstatus
|
||||||
|
self.terminated = True
|
||||||
|
|
||||||
|
return alive
|
||||||
|
|
||||||
|
def kill(self, sig):
|
||||||
|
|
||||||
|
'''This sends the given signal to the child application. In keeping
|
||||||
|
with UNIX tradition it has a misleading name. It does not necessarily
|
||||||
|
kill the child unless you send the right signal. '''
|
||||||
|
|
||||||
|
# Same as os.kill, but the pid is given for you.
|
||||||
|
if self.isalive():
|
||||||
|
os.kill(self.pid, sig)
|
||||||
|
|
||||||
|
def getwinsize(self):
|
||||||
|
'''This returns the terminal window size of the child tty. The return
|
||||||
|
value is a tuple of (rows, cols). '''
|
||||||
|
return self.ptyproc.getwinsize()
|
||||||
|
|
||||||
|
def setwinsize(self, rows, cols):
|
||||||
|
'''This sets the terminal window size of the child tty. This will cause
|
||||||
|
a SIGWINCH signal to be sent to the child. This does not change the
|
||||||
|
physical window size. It changes the size reported to TTY-aware
|
||||||
|
applications like vi or curses -- applications that respond to the
|
||||||
|
SIGWINCH signal. '''
|
||||||
|
return self.ptyproc.setwinsize(rows, cols)
|
||||||
|
|
||||||
|
|
||||||
|
def interact(self, escape_character=chr(29),
|
||||||
|
input_filter=None, output_filter=None):
|
||||||
|
|
||||||
|
'''This gives control of the child process to the interactive user (the
|
||||||
|
human at the keyboard). Keystrokes are sent to the child process, and
|
||||||
|
the stdout and stderr output of the child process is printed. This
|
||||||
|
simply echoes the child stdout and child stderr to the real stdout and
it echoes the real stdin to the child stdin. When the user types the
|
||||||
|
escape_character this method will return None. The escape_character
|
||||||
|
will not be transmitted. The default escape_character is
``Ctrl-]``, the same one used by BSD telnet. To prevent
|
||||||
|
escaping, escape_character may be set to None.
|
||||||
|
|
||||||
|
If a logfile is specified, then the data sent and received from the
|
||||||
|
child process in interact mode is duplicated to the given log.
|
||||||
|
|
||||||
|
You may pass in optional input and output filter functions. These
|
||||||
|
functions should take a string and return a string. The output_filter
|
||||||
|
will be passed all the output from the child process. The input_filter
|
||||||
|
will be passed all the keyboard input from the user. The input_filter
|
||||||
|
is run BEFORE the check for the escape_character.
|
||||||
|
|
||||||
|
Note that if you change the window size of the parent the SIGWINCH
|
||||||
|
signal will not be passed through to the child. If you want the child
|
||||||
|
window size to change when the parent's window size changes then do
|
||||||
|
something like the following example::
|
||||||
|
|
||||||
|
import pexpect, struct, fcntl, termios, signal, sys
|
||||||
|
def sigwinch_passthrough (sig, data):
|
||||||
|
s = struct.pack("HHHH", 0, 0, 0, 0)
|
||||||
|
a = struct.unpack('hhhh', fcntl.ioctl(sys.stdout.fileno(),
|
||||||
|
termios.TIOCGWINSZ , s))
|
||||||
|
if not p.closed:
|
||||||
|
p.setwinsize(a[0],a[1])
|
||||||
|
|
||||||
|
# Note this 'p' is global and used in sigwinch_passthrough.
|
||||||
|
p = pexpect.spawn('/bin/bash')
|
||||||
|
signal.signal(signal.SIGWINCH, sigwinch_passthrough)
|
||||||
|
p.interact()
|
||||||
|
'''
|
||||||
|
|
||||||
|
# Flush the buffer.
|
||||||
|
self.write_to_stdout(self.buffer)
|
||||||
|
self.stdout.flush()
|
||||||
|
self._buffer = self.buffer_type()
|
||||||
|
mode = tty.tcgetattr(self.STDIN_FILENO)
|
||||||
|
tty.setraw(self.STDIN_FILENO)
|
||||||
|
if escape_character is not None and PY3:
|
||||||
|
escape_character = escape_character.encode('latin-1')
|
||||||
|
try:
|
||||||
|
self.__interact_copy(escape_character, input_filter, output_filter)
|
||||||
|
finally:
|
||||||
|
tty.tcsetattr(self.STDIN_FILENO, tty.TCSAFLUSH, mode)
|
||||||
|
|
||||||
|
def __interact_writen(self, fd, data):
|
||||||
|
'''This is used by the interact() method.
|
||||||
|
'''
|
||||||
|
|
||||||
|
while data != b'' and self.isalive():
|
||||||
|
n = os.write(fd, data)
|
||||||
|
data = data[n:]
|
||||||
|
|
||||||
|
def __interact_read(self, fd):
|
||||||
|
'''This is used by the interact() method.
|
||||||
|
'''
|
||||||
|
|
||||||
|
return os.read(fd, 1000)
|
||||||
|
|
||||||
|
def __interact_copy(
|
||||||
|
self, escape_character=None, input_filter=None, output_filter=None
|
||||||
|
):
|
||||||
|
|
||||||
|
'''This is used by the interact() method.
|
||||||
|
'''
|
||||||
|
|
||||||
|
while self.isalive():
|
||||||
|
if self.use_poll:
|
||||||
|
r = poll_ignore_interrupts([self.child_fd, self.STDIN_FILENO])
|
||||||
|
else:
|
||||||
|
r, w, e = select_ignore_interrupts(
|
||||||
|
[self.child_fd, self.STDIN_FILENO], [], []
|
||||||
|
)
|
||||||
|
if self.child_fd in r:
|
||||||
|
try:
|
||||||
|
data = self.__interact_read(self.child_fd)
|
||||||
|
except OSError as err:
|
||||||
|
if err.args[0] == errno.EIO:
|
||||||
|
# Linux-style EOF
|
||||||
|
break
|
||||||
|
raise
|
||||||
|
if data == b'':
|
||||||
|
# BSD-style EOF
|
||||||
|
break
|
||||||
|
if output_filter:
|
||||||
|
data = output_filter(data)
|
||||||
|
self._log(data, 'read')
|
||||||
|
os.write(self.STDOUT_FILENO, data)
|
||||||
|
if self.STDIN_FILENO in r:
|
||||||
|
data = self.__interact_read(self.STDIN_FILENO)
|
||||||
|
if input_filter:
|
||||||
|
data = input_filter(data)
|
||||||
|
i = -1
|
||||||
|
if escape_character is not None:
|
||||||
|
i = data.rfind(escape_character)
|
||||||
|
if i != -1:
|
||||||
|
data = data[:i]
|
||||||
|
if data:
|
||||||
|
self._log(data, 'send')
|
||||||
|
self.__interact_writen(self.child_fd, data)
|
||||||
|
break
|
||||||
|
self._log(data, 'send')
|
||||||
|
self.__interact_writen(self.child_fd, data)
|
||||||
|
|
||||||
|
|
||||||
|
def spawnu(*args, **kwargs):
|
||||||
|
"""Deprecated: pass encoding to spawn() instead."""
|
||||||
|
kwargs.setdefault('encoding', 'utf-8')
|
||||||
|
return spawn(*args, **kwargs)
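# spawnu() merely presets encoding='utf-8'; new code can pass encoding to
# spawn() directly and work with str instead of bytes. The command is an
# illustrative assumption.
import pexpect
child = pexpect.spawn('/bin/ls', encoding='utf-8')
child.expect(pexpect.EOF)
print(child.before)      # a str, not bytes, because an encoding was given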
|
|
@ -0,0 +1,537 @@
|
||||||
|
'''This class extends pexpect.spawn to specialize setting up SSH connections.
|
||||||
|
This adds methods for login, logout, and expecting the shell prompt.
|
||||||
|
|
||||||
|
PEXPECT LICENSE
|
||||||
|
|
||||||
|
This license is approved by the OSI and FSF as GPL-compatible.
|
||||||
|
http://opensource.org/licenses/isc-license.txt
|
||||||
|
|
||||||
|
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
|
||||||
|
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
|
||||||
|
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
|
||||||
|
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
from pexpect import ExceptionPexpect, TIMEOUT, EOF, spawn
|
||||||
|
import time
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
|
||||||
|
__all__ = ['ExceptionPxssh', 'pxssh']
|
||||||
|
|
||||||
|
# Exception classes used by this module.
|
||||||
|
class ExceptionPxssh(ExceptionPexpect):
|
||||||
|
'''Raised for pxssh exceptions.
|
||||||
|
'''
|
||||||
|
|
||||||
|
if sys.version_info > (3, 0):
|
||||||
|
from shlex import quote
|
||||||
|
else:
|
||||||
|
_find_unsafe = re.compile(r'[^\w@%+=:,./-]').search
|
||||||
|
|
||||||
|
def quote(s):
|
||||||
|
"""Return a shell-escaped version of the string *s*."""
|
||||||
|
if not s:
|
||||||
|
return "''"
|
||||||
|
if _find_unsafe(s) is None:
|
||||||
|
return s
|
||||||
|
|
||||||
|
# use single quotes, and put single quotes into double quotes
|
||||||
|
# the string $'b is then quoted as '$'"'"'b'
|
||||||
|
return "'" + s.replace("'", "'\"'\"'") + "'"
|
||||||
|
|
||||||
|
class pxssh (spawn):
|
||||||
|
'''This class extends pexpect.spawn to specialize setting up SSH
|
||||||
|
connections. This adds methods for login, logout, and expecting the shell
|
||||||
|
prompt. It does various tricky things to handle many situations in the SSH
|
||||||
|
login process. For example, if the session is your first login, then pxssh
|
||||||
|
automatically accepts the remote certificate; or if you have public key
|
||||||
|
authentication setup then pxssh won't wait for the password prompt.
|
||||||
|
|
||||||
|
pxssh uses the shell prompt to synchronize output from the remote host. In
|
||||||
|
order to make this more robust it sets the shell prompt to something more
|
||||||
|
unique than just $ or #. This should work on most Bourne/Bash or Csh style
|
||||||
|
shells.
|
||||||
|
|
||||||
|
Example that runs a few commands on a remote server and prints the result::
|
||||||
|
|
||||||
|
from pexpect import pxssh
|
||||||
|
import getpass
|
||||||
|
try:
|
||||||
|
s = pxssh.pxssh()
|
||||||
|
hostname = raw_input('hostname: ')
|
||||||
|
username = raw_input('username: ')
|
||||||
|
password = getpass.getpass('password: ')
|
||||||
|
s.login(hostname, username, password)
|
||||||
|
s.sendline('uptime') # run a command
|
||||||
|
s.prompt() # match the prompt
|
||||||
|
print(s.before) # print everything before the prompt.
|
||||||
|
s.sendline('ls -l')
|
||||||
|
s.prompt()
|
||||||
|
print(s.before)
|
||||||
|
s.sendline('df')
|
||||||
|
s.prompt()
|
||||||
|
print(s.before)
|
||||||
|
s.logout()
|
||||||
|
except pxssh.ExceptionPxssh as e:
|
||||||
|
print("pxssh failed on login.")
|
||||||
|
print(e)
|
||||||
|
|
||||||
|
Example showing how to specify SSH options::
|
||||||
|
|
||||||
|
from pexpect import pxssh
|
||||||
|
s = pxssh.pxssh(options={
|
||||||
|
"StrictHostKeyChecking": "no",
|
||||||
|
"UserKnownHostsFile": "/dev/null"})
|
||||||
|
...
|
||||||
|
|
||||||
|
Note that if you have ssh-agent running while doing development with pxssh
|
||||||
|
then this can lead to a lot of confusion. Many X display managers (xdm,
|
||||||
|
gdm, kdm, etc.) will automatically start a GUI agent. You may see a GUI
|
||||||
|
dialog box popup asking for a password during development. You should turn
|
||||||
|
off any key agents during testing. The 'force_password' attribute will turn
|
||||||
|
off public key authentication. This will only work if the remote SSH server
|
||||||
|
is configured to allow password logins. Example of using 'force_password'
|
||||||
|
attribute::
|
||||||
|
|
||||||
|
s = pxssh.pxssh()
|
||||||
|
s.force_password = True
|
||||||
|
hostname = raw_input('hostname: ')
|
||||||
|
username = raw_input('username: ')
|
||||||
|
password = getpass.getpass('password: ')
|
||||||
|
s.login (hostname, username, password)
|
||||||
|
|
||||||
|
`debug_command_string` is only for the test suite to confirm that the string
generated for SSH is correct; enabling it will not allow you to do
anything other than get a string back from `pxssh.pxssh.login()`.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None,
|
||||||
|
logfile=None, cwd=None, env=None, ignore_sighup=True, echo=True,
|
||||||
|
options={}, encoding=None, codec_errors='strict',
|
||||||
|
debug_command_string=False, use_poll=False):
|
||||||
|
|
||||||
|
spawn.__init__(self, None, timeout=timeout, maxread=maxread,
|
||||||
|
searchwindowsize=searchwindowsize, logfile=logfile,
|
||||||
|
cwd=cwd, env=env, ignore_sighup=ignore_sighup, echo=echo,
|
||||||
|
encoding=encoding, codec_errors=codec_errors, use_poll=use_poll)
|
||||||
|
|
||||||
|
self.name = '<pxssh>'
|
||||||
|
|
||||||
|
#SUBTLE HACK ALERT! Note that the command that SETS the prompt uses a
|
||||||
|
#slightly different string than the regular expression to match it. This
|
||||||
|
#is because when you set the prompt the command will echo back, but we
|
||||||
|
#don't want to match the echoed command. So if we make the set command
|
||||||
|
#slightly different than the regex we eliminate the problem. To make the
|
||||||
|
#set command different we add a backslash in front of $. The $ doesn't
|
||||||
|
#need to be escaped, but it doesn't hurt and serves to make the set
|
||||||
|
#prompt command different than the regex.
|
||||||
|
|
||||||
|
# used to match the command-line prompt
|
||||||
|
self.UNIQUE_PROMPT = r"\[PEXPECT\][\$\#] "
|
||||||
|
self.PROMPT = self.UNIQUE_PROMPT
|
||||||
|
|
||||||
|
# used to set shell command-line prompt to UNIQUE_PROMPT.
|
||||||
|
self.PROMPT_SET_SH = r"PS1='[PEXPECT]\$ '"
|
||||||
|
self.PROMPT_SET_CSH = r"set prompt='[PEXPECT]\$ '"
|
||||||
|
self.SSH_OPTS = ("-o'RSAAuthentication=no'"
|
||||||
|
+ " -o 'PubkeyAuthentication=no'")
|
||||||
|
# Disabling host key checking, makes you vulnerable to MITM attacks.
|
||||||
|
# + " -o 'StrictHostKeyChecking=no'"
|
||||||
|
# + " -o 'UserKnownHostsFile /dev/null' ")
|
||||||
|
# Disabling X11 forwarding gets rid of the annoying SSH_ASKPASS from
|
||||||
|
# displaying a GUI password dialog. I have not figured out how to
|
||||||
|
# disable only SSH_ASKPASS without also disabling X11 forwarding.
|
||||||
|
# Unsetting SSH_ASKPASS on the remote side doesn't disable it! Annoying!
|
||||||
|
#self.SSH_OPTS = "-x -o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'"
|
||||||
|
self.force_password = False
|
||||||
|
|
||||||
|
self.debug_command_string = debug_command_string
|
||||||
|
|
||||||
|
# User defined SSH options, eg,
|
||||||
|
# ssh.options = dict(StrictHostKeyChecking="no", UserKnownHostsFile="/dev/null")
|
||||||
|
self.options = options
|
||||||
|
|
||||||
|
def levenshtein_distance(self, a, b):
|
||||||
|
'''This calculates the Levenshtein distance between a and b.
|
||||||
|
'''
|
||||||
|
|
||||||
|
n, m = len(a), len(b)
|
||||||
|
if n > m:
|
||||||
|
a,b = b,a
|
||||||
|
n,m = m,n
|
||||||
|
current = range(n+1)
|
||||||
|
for i in range(1,m+1):
|
||||||
|
previous, current = current, [i]+[0]*n
|
||||||
|
for j in range(1,n+1):
|
||||||
|
add, delete = previous[j]+1, current[j-1]+1
|
||||||
|
change = previous[j-1]
|
||||||
|
if a[j-1] != b[i-1]:
|
||||||
|
change = change + 1
|
||||||
|
current[j] = min(add, delete, change)
|
||||||
|
return current[n]
|
||||||
|
|
||||||
|
def try_read_prompt(self, timeout_multiplier):
|
||||||
|
'''This facilitates using communication timeouts to perform
|
||||||
|
synchronization as quickly as possible, while supporting high latency
|
||||||
|
connections with a tunable worst case performance. Fast connections
|
||||||
|
should be read almost immediately. Worst case performance for this
|
||||||
|
method is timeout_multiplier * 3 seconds.
|
||||||
|
'''
|
||||||
|
|
||||||
|
# maximum time allowed to read the first response
|
||||||
|
first_char_timeout = timeout_multiplier * 0.5
|
||||||
|
|
||||||
|
# maximum time allowed between subsequent characters
|
||||||
|
inter_char_timeout = timeout_multiplier * 0.1
|
||||||
|
|
||||||
|
# maximum time for reading the entire prompt
|
||||||
|
total_timeout = timeout_multiplier * 3.0
|
||||||
|
|
||||||
|
prompt = self.string_type()
|
||||||
|
begin = time.time()
|
||||||
|
expired = 0.0
|
||||||
|
timeout = first_char_timeout
|
||||||
|
|
||||||
|
while expired < total_timeout:
|
||||||
|
try:
|
||||||
|
prompt += self.read_nonblocking(size=1, timeout=timeout)
|
||||||
|
expired = time.time() - begin # updated total time expired
|
||||||
|
timeout = inter_char_timeout
|
||||||
|
except TIMEOUT:
|
||||||
|
break
|
||||||
|
|
||||||
|
return prompt
|
||||||
|
|
||||||
|
def sync_original_prompt (self, sync_multiplier=1.0):
|
||||||
|
'''This attempts to find the prompt. Basically, press enter and record
|
||||||
|
the response; press enter again and record the response; if the two
|
||||||
|
responses are similar then assume we are at the original prompt.
|
||||||
|
This can be a slow function. Worst case with the default sync_multiplier
|
||||||
|
can take 12 seconds. Low latency connections are more likely to fail
|
||||||
|
with a low sync_multiplier. Best case sync time gets worse with a
|
||||||
|
high sync multiplier (500 ms with default). '''
|
||||||
|
|
||||||
|
# All of these timing pace values are magic.
|
||||||
|
# I came up with these based on what seemed reliable for
|
||||||
|
# connecting to a heavily loaded machine I have.
|
||||||
|
self.sendline()
|
||||||
|
time.sleep(0.1)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Clear the buffer before getting the prompt.
|
||||||
|
self.try_read_prompt(sync_multiplier)
|
||||||
|
except TIMEOUT:
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.sendline()
|
||||||
|
x = self.try_read_prompt(sync_multiplier)
|
||||||
|
|
||||||
|
self.sendline()
|
||||||
|
a = self.try_read_prompt(sync_multiplier)
|
||||||
|
|
||||||
|
self.sendline()
|
||||||
|
b = self.try_read_prompt(sync_multiplier)
|
||||||
|
|
||||||
|
ld = self.levenshtein_distance(a,b)
|
||||||
|
len_a = len(a)
|
||||||
|
if len_a == 0:
|
||||||
|
return False
|
||||||
|
if float(ld)/len_a < 0.4:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
### TODO: This is getting messy and I'm pretty sure this isn't perfect.
|
||||||
|
### TODO: I need to draw a flow chart for this.
|
||||||
|
### TODO: Unit tests for SSH tunnels, remote SSH command exec, disabling original prompt sync
|
||||||
|
def login (self, server, username=None, password='', terminal_type='ansi',
|
||||||
|
original_prompt=r"[#$]", login_timeout=10, port=None,
|
||||||
|
auto_prompt_reset=True, ssh_key=None, quiet=True,
|
||||||
|
sync_multiplier=1, check_local_ip=True,
|
||||||
|
password_regex=r'(?i)(?:password:)|(?:passphrase for key)',
|
||||||
|
ssh_tunnels={}, spawn_local_ssh=True,
|
||||||
|
sync_original_prompt=True, ssh_config=None, cmd='ssh'):
|
||||||
|
'''This logs the user into the given server.
|
||||||
|
|
||||||
|
It uses 'original_prompt' to try to find the prompt right after login.
|
||||||
|
When it finds the prompt it immediately tries to reset the prompt to
|
||||||
|
something more easily matched. The default 'original_prompt' is very
|
||||||
|
optimistic and is easily fooled. It's more reliable to try to match the original
|
||||||
|
prompt as exactly as possible to prevent false matches by server
|
||||||
|
strings such as the "Message Of The Day". On many systems you can
|
||||||
|
disable the MOTD on the remote server by creating a zero-length file
|
||||||
|
called :file:`~/.hushlogin` on the remote server. If a prompt cannot be found
|
||||||
|
then this will not necessarily cause the login to fail. In the case of
|
||||||
|
a timeout when looking for the prompt we assume that the original
|
||||||
|
prompt was so weird that we could not match it, so we use a few tricks
|
||||||
|
to guess when we have reached the prompt. Then we hope for the best and
|
||||||
|
blindly try to reset the prompt to something more unique. If that fails
|
||||||
|
then login() raises an :class:`ExceptionPxssh` exception.
|
||||||
|
|
||||||
|
In some situations it is not possible or desirable to reset the
|
||||||
|
original prompt. In this case, pass ``auto_prompt_reset=False`` to
|
||||||
|
inhibit setting the prompt to the UNIQUE_PROMPT. Remember that pxssh
|
||||||
|
uses a unique prompt in the :meth:`prompt` method. If the original prompt is
|
||||||
|
not reset then this will disable the :meth:`prompt` method unless you
|
||||||
|
manually set the :attr:`PROMPT` attribute.
|
||||||
|
|
||||||
|
Set ``password_regex`` if there is a MOTD message with `password` in it.
|
||||||
|
Changing this is like playing in traffic, don't (p)expect it to match straight
|
||||||
|
away.
|
||||||
|
|
||||||
|
If you need to connect to another SSH server from your original SSH
connection, set ``spawn_local_ssh`` to `False`; the current session will
then be used to run the new ssh command. Setting this option to `False`
without an active session will trigger an error.
|
||||||
|
|
||||||
|
Set ``ssh_key`` to a file path to an SSH private key to use that SSH key
|
||||||
|
for the session authentication.
|
||||||
|
Set ``ssh_key`` to `True` to force passing the current SSH authentication socket
|
||||||
|
to the desired ``hostname``.
|
||||||
|
|
||||||
|
Set ``ssh_config`` to a file path string of an SSH client config file to pass that
|
||||||
|
file to the client to handle itself. You may set any options you wish in here, however
|
||||||
|
doing so will require you to post extra information that you may not want to if you
|
||||||
|
run into issues.
|
||||||
|
|
||||||
|
Alter the ``cmd`` to change the ssh client used, or to prepend it with network
|
||||||
|
namespaces. For example, ```cmd="ip netns exec vlan2 ssh"``` executes ssh in the
network namespace named ```vlan2```.
|
||||||
|
'''
|
||||||
|
|
||||||
|
session_regex_array = ["(?i)are you sure you want to continue connecting", original_prompt, password_regex, "(?i)permission denied", "(?i)terminal type", TIMEOUT]
|
||||||
|
session_init_regex_array = []
|
||||||
|
session_init_regex_array.extend(session_regex_array)
|
||||||
|
session_init_regex_array.extend(["(?i)connection closed by remote host", EOF])
|
||||||
|
|
||||||
|
ssh_options = ''.join([" -o '%s=%s'" % (o, v) for (o, v) in self.options.items()])
|
||||||
|
if quiet:
|
||||||
|
ssh_options = ssh_options + ' -q'
|
||||||
|
if not check_local_ip:
|
||||||
|
ssh_options = ssh_options + " -o'NoHostAuthenticationForLocalhost=yes'"
|
||||||
|
if self.force_password:
|
||||||
|
ssh_options = ssh_options + ' ' + self.SSH_OPTS
|
||||||
|
if ssh_config is not None:
|
||||||
|
if spawn_local_ssh and not os.path.isfile(ssh_config):
|
||||||
|
raise ExceptionPxssh('SSH config does not exist or is not a file.')
|
||||||
|
ssh_options = ssh_options + ' -F ' + ssh_config
|
||||||
|
if port is not None:
|
||||||
|
ssh_options = ssh_options + ' -p %s'%(str(port))
|
||||||
|
if ssh_key is not None:
|
||||||
|
# Allow forwarding our SSH key to the current session
|
||||||
|
if ssh_key==True:
|
||||||
|
ssh_options = ssh_options + ' -A'
|
||||||
|
else:
|
||||||
|
if spawn_local_ssh and not os.path.isfile(ssh_key):
|
||||||
|
raise ExceptionPxssh('private ssh key does not exist or is not a file.')
|
||||||
|
ssh_options = ssh_options + ' -i %s' % (ssh_key)
|
||||||
|
|
||||||
|
# SSH tunnels, make sure you know what you're putting into the lists
|
||||||
|
# under each heading. Do not expect these to open 100% of the time,
|
||||||
|
# The port you're requesting might be bound.
|
||||||
|
#
|
||||||
|
# The structure should be like this:
|
||||||
|
# { 'local': ['2424:localhost:22'], # Local SSH tunnels
|
||||||
|
# 'remote': ['2525:localhost:22'], # Remote SSH tunnels
|
||||||
|
# 'dynamic': [8888] } # Dynamic/SOCKS tunnels
|
||||||
|
if ssh_tunnels!={} and isinstance({},type(ssh_tunnels)):
|
||||||
|
tunnel_types = {
|
||||||
|
'local':'L',
|
||||||
|
'remote':'R',
|
||||||
|
'dynamic':'D'
|
||||||
|
}
|
||||||
|
for tunnel_type in tunnel_types:
|
||||||
|
cmd_type = tunnel_types[tunnel_type]
|
||||||
|
if tunnel_type in ssh_tunnels:
|
||||||
|
tunnels = ssh_tunnels[tunnel_type]
|
||||||
|
for tunnel in tunnels:
|
||||||
|
if spawn_local_ssh==False:
|
||||||
|
tunnel = quote(str(tunnel))
|
||||||
|
ssh_options = ssh_options + ' -' + cmd_type + ' ' + str(tunnel)
|
||||||
|
|
||||||
|
if username is not None:
|
||||||
|
ssh_options = ssh_options + ' -l ' + username
|
||||||
|
elif ssh_config is None:
|
||||||
|
raise TypeError('login() needs either a username or an ssh_config')
|
||||||
|
else: # make sure ssh_config has an entry for the server with a username
|
||||||
|
with open(ssh_config, 'rt') as f:
|
||||||
|
lines = [l.strip() for l in f.readlines()]
|
||||||
|
|
||||||
|
server_regex = r'^Host\s+%s\s*$' % server
|
||||||
|
user_regex = r'^User\s+\w+\s*$'
|
||||||
|
config_has_server = False
|
||||||
|
server_has_username = False
|
||||||
|
for line in lines:
|
||||||
|
if not config_has_server and re.match(server_regex, line, re.IGNORECASE):
|
||||||
|
config_has_server = True
|
||||||
|
elif config_has_server and 'hostname' in line.lower():
|
||||||
|
pass
|
||||||
|
elif config_has_server and 'host' in line.lower():
|
||||||
|
server_has_username = False # insurance
|
||||||
|
break # we have left the relevant section
|
||||||
|
elif config_has_server and re.match(user_regex, line, re.IGNORECASE):
|
||||||
|
server_has_username = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if lines:
|
||||||
|
del line
|
||||||
|
|
||||||
|
del lines
|
||||||
|
|
||||||
|
if not config_has_server:
|
||||||
|
raise TypeError('login() ssh_config has no Host entry for %s' % server)
|
||||||
|
elif not server_has_username:
|
||||||
|
raise TypeError('login() ssh_config has no user entry for %s' % server)
|
||||||
|
|
||||||
|
cmd += " %s %s" % (ssh_options, server)
|
||||||
|
if self.debug_command_string:
|
||||||
|
return(cmd)
|
||||||
|
|
||||||
|
# Are we asking for a local ssh command or to spawn one in another session?
|
||||||
|
if spawn_local_ssh:
|
||||||
|
spawn._spawn(self, cmd)
|
||||||
|
else:
|
||||||
|
self.sendline(cmd)
|
||||||
|
|
||||||
|
# This does not distinguish between a remote server 'password' prompt
|
||||||
|
# and a local ssh 'passphrase' prompt (for unlocking a private key).
|
||||||
|
i = self.expect(session_init_regex_array, timeout=login_timeout)
|
||||||
|
|
||||||
|
# First phase
|
||||||
|
if i==0:
|
||||||
|
# New certificate -- always accept it.
|
||||||
|
# This is what you get if SSH does not have the remote host's
|
||||||
|
# public key stored in the 'known_hosts' cache.
|
||||||
|
self.sendline("yes")
|
||||||
|
i = self.expect(session_regex_array)
|
||||||
|
if i==2: # password or passphrase
|
||||||
|
self.sendline(password)
|
||||||
|
i = self.expect(session_regex_array)
|
||||||
|
if i==4:
|
||||||
|
self.sendline(terminal_type)
|
||||||
|
i = self.expect(session_regex_array)
|
||||||
|
if i==7:
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('Could not establish connection to host')
|
||||||
|
|
||||||
|
# Second phase
|
||||||
|
if i==0:
|
||||||
|
# This is weird. This should not happen twice in a row.
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('Weird error. Got "are you sure" prompt twice.')
|
||||||
|
elif i==1: # can occur if you have a public key pair set to authenticate.
|
||||||
|
### TODO: May NOT be OK if expect() got tricked and matched a false prompt.
|
||||||
|
pass
|
||||||
|
elif i==2: # password prompt again
|
||||||
|
# For incorrect passwords, some ssh servers will
|
||||||
|
# ask for the password again, others return 'denied' right away.
|
||||||
|
# If we get the password prompt again then this means
|
||||||
|
# we didn't get the password right the first time.
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('password refused')
|
||||||
|
elif i==3: # permission denied -- password was bad.
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('permission denied')
|
||||||
|
elif i==4: # terminal type again? WTF?
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('Weird error. Got "terminal type" prompt twice.')
|
||||||
|
elif i==5: # Timeout
|
||||||
|
#This is tricky... I presume that we are at the command-line prompt.
|
||||||
|
#It may be that the shell prompt was so weird that we couldn't match
|
||||||
|
#it. Or it may be that we couldn't log in for some other reason. I
|
||||||
|
#can't be sure, but it's safe to guess that we did login because if
|
||||||
|
#I presume wrong and we are not logged in then this should be caught
|
||||||
|
#later when I try to set the shell prompt.
|
||||||
|
pass
|
||||||
|
elif i==6: # Connection closed by remote host
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('connection closed')
|
||||||
|
else: # Unexpected
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('unexpected login response')
|
||||||
|
if sync_original_prompt:
|
||||||
|
if not self.sync_original_prompt(sync_multiplier):
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('could not synchronize with original prompt')
|
||||||
|
# We appear to be in.
|
||||||
|
# set shell prompt to something unique.
|
||||||
|
if auto_prompt_reset:
|
||||||
|
if not self.set_unique_prompt():
|
||||||
|
self.close()
|
||||||
|
raise ExceptionPxssh('could not set shell prompt '
|
||||||
|
'(received: %r, expected: %r).' % (
|
||||||
|
self.before, self.PROMPT,))
|
||||||
|
return True
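# A hedged sketch of the ssh_tunnels layout documented above (keys 'local',
# 'remote' and 'dynamic' map to -L, -R and -D). Host, user and password are
# placeholder assumptions.
from pexpect import pxssh
s = pxssh.pxssh()
s.login('example.com', 'user', 'secret',
        ssh_tunnels={'local': ['2424:localhost:22'],   # -L 2424:localhost:22
                     'dynamic': [8888]})               # -D 8888
s.sendline('uptime')
s.prompt()
print(s.before)
s.logout()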
|
||||||
|
|
||||||
|
def logout (self):
|
||||||
|
'''Sends exit to the remote shell.
|
||||||
|
|
||||||
|
If there are stopped jobs then this automatically sends exit twice.
|
||||||
|
'''
|
||||||
|
self.sendline("exit")
|
||||||
|
index = self.expect([EOF, "(?i)there are stopped jobs"])
|
||||||
|
if index==1:
|
||||||
|
self.sendline("exit")
|
||||||
|
self.expect(EOF)
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def prompt(self, timeout=-1):
|
||||||
|
'''Match the next shell prompt.
|
||||||
|
|
||||||
|
This is little more than a short-cut to the :meth:`~pexpect.spawn.expect`
|
||||||
|
method. Note that if you called :meth:`login` with
|
||||||
|
``auto_prompt_reset=False``, then before calling :meth:`prompt` you must
|
||||||
|
set the :attr:`PROMPT` attribute to a regex that it will use for
|
||||||
|
matching the prompt.
|
||||||
|
|
||||||
|
Calling :meth:`prompt` will erase the contents of the :attr:`before`
|
||||||
|
attribute even if no prompt is ever matched. If timeout is not given or
|
||||||
|
it is set to -1 then self.timeout is used.
|
||||||
|
|
||||||
|
:return: True if the shell prompt was matched, False if the timeout was
|
||||||
|
reached.
|
||||||
|
'''
|
||||||
|
|
||||||
|
if timeout == -1:
|
||||||
|
timeout = self.timeout
|
||||||
|
i = self.expect([self.PROMPT, TIMEOUT], timeout=timeout)
|
||||||
|
if i==1:
|
||||||
|
return False
|
||||||
|
return True
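# Hedged sketch of prompt() with a custom prompt: when login() was called with
# auto_prompt_reset=False, PROMPT must be set manually before prompt() can
# match anything. Host, credentials and the prompt regex are assumptions.
from pexpect import pxssh
s = pxssh.pxssh()
s.login('example.com', 'user', 'secret', auto_prompt_reset=False)
s.PROMPT = r'\$ $'            # assumed regex for the remote shell prompt
s.sendline('uptime')
if not s.prompt(timeout=15):
    raise RuntimeError('prompt not matched before the timeout')
print(s.before)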
|
||||||
|
|
||||||
|
def set_unique_prompt(self):
|
||||||
|
'''This sets the remote prompt to something more unique than ``#`` or ``$``.
|
||||||
|
This makes it easier for the :meth:`prompt` method to match the shell prompt
|
||||||
|
unambiguously. This method is called automatically by the :meth:`login`
|
||||||
|
method, but you may want to call it manually if you somehow reset the
|
||||||
|
shell prompt. For example, if you 'su' to a different user then you
|
||||||
|
will need to manually reset the prompt. This sends shell commands to
|
||||||
|
the remote host to set the prompt, so this assumes the remote host is
|
||||||
|
ready to receive commands.
|
||||||
|
|
||||||
|
Alternatively, you may use your own prompt pattern. In this case you
|
||||||
|
should call :meth:`login` with ``auto_prompt_reset=False``; then set the
|
||||||
|
:attr:`PROMPT` attribute to a regular expression. After that, the
|
||||||
|
:meth:`prompt` method will try to match your prompt pattern.
|
||||||
|
'''
|
||||||
|
|
||||||
|
self.sendline("unset PROMPT_COMMAND")
|
||||||
|
self.sendline(self.PROMPT_SET_SH) # sh-style
|
||||||
|
i = self.expect ([TIMEOUT, self.PROMPT], timeout=10)
|
||||||
|
if i == 0: # csh-style
|
||||||
|
self.sendline(self.PROMPT_SET_CSH)
|
||||||
|
i = self.expect([TIMEOUT, self.PROMPT], timeout=10)
|
||||||
|
if i == 0:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
# vi:ts=4:sw=4:expandtab:ft=python:
|
|
@ -0,0 +1,130 @@
|
||||||
|
"""Generic wrapper for read-eval-print-loops, a.k.a. interactive shells
|
||||||
|
"""
|
||||||
|
import os.path
|
||||||
|
import signal
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import pexpect
|
||||||
|
|
||||||
|
PY3 = (sys.version_info[0] >= 3)
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
basestring = str
|
||||||
|
|
||||||
|
PEXPECT_PROMPT = u'[PEXPECT_PROMPT>'
|
||||||
|
PEXPECT_CONTINUATION_PROMPT = u'[PEXPECT_PROMPT+'
|
||||||
|
|
||||||
|
class REPLWrapper(object):
|
||||||
|
"""Wrapper for a REPL.
|
||||||
|
|
||||||
|
:param cmd_or_spawn: This can either be an instance of :class:`pexpect.spawn`
|
||||||
|
in which a REPL has already been started, or a str command to start a new
|
||||||
|
REPL process.
|
||||||
|
:param str orig_prompt: The prompt to expect at first.
|
||||||
|
:param str prompt_change: A command to change the prompt to something more
|
||||||
|
unique. If this is ``None``, the prompt will not be changed. This will
|
||||||
|
be formatted with the new and continuation prompts as positional
|
||||||
|
parameters, so you can use ``{}`` style formatting to insert them into
|
||||||
|
the command.
|
||||||
|
:param str new_prompt: The more unique prompt to expect after the change.
|
||||||
|
:param str extra_init_cmd: Commands to do extra initialisation, such as
|
||||||
|
disabling pagers.
|
||||||
|
"""
|
||||||
|
def __init__(self, cmd_or_spawn, orig_prompt, prompt_change,
|
||||||
|
new_prompt=PEXPECT_PROMPT,
|
||||||
|
continuation_prompt=PEXPECT_CONTINUATION_PROMPT,
|
||||||
|
extra_init_cmd=None):
|
||||||
|
if isinstance(cmd_or_spawn, basestring):
|
||||||
|
self.child = pexpect.spawn(cmd_or_spawn, echo=False, encoding='utf-8')
|
||||||
|
else:
|
||||||
|
self.child = cmd_or_spawn
|
||||||
|
if self.child.echo:
|
||||||
|
# Existing spawn instance has echo enabled, disable it
|
||||||
|
# to prevent our input from being repeated to output.
|
||||||
|
self.child.setecho(False)
|
||||||
|
self.child.waitnoecho()
|
||||||
|
|
||||||
|
if prompt_change is None:
|
||||||
|
self.prompt = orig_prompt
|
||||||
|
else:
|
||||||
|
self.set_prompt(orig_prompt,
|
||||||
|
prompt_change.format(new_prompt, continuation_prompt))
|
||||||
|
self.prompt = new_prompt
|
||||||
|
self.continuation_prompt = continuation_prompt
|
||||||
|
|
||||||
|
self._expect_prompt()
|
||||||
|
|
||||||
|
if extra_init_cmd is not None:
|
||||||
|
self.run_command(extra_init_cmd)
|
||||||
|
|
||||||
|
def set_prompt(self, orig_prompt, prompt_change):
|
||||||
|
self.child.expect(orig_prompt)
|
||||||
|
self.child.sendline(prompt_change)
|
||||||
|
|
||||||
|
def _expect_prompt(self, timeout=-1, async_=False):
|
||||||
|
return self.child.expect_exact([self.prompt, self.continuation_prompt],
|
||||||
|
timeout=timeout, async_=async_)
|
||||||
|
|
||||||
|
def run_command(self, command, timeout=-1, async_=False):
|
||||||
|
"""Send a command to the REPL, wait for and return output.
|
||||||
|
|
||||||
|
:param str command: The command to send. Trailing newlines are not needed.
|
||||||
|
This should be a complete block of input that will trigger execution;
|
||||||
|
if a continuation prompt is found after sending input, :exc:`ValueError`
|
||||||
|
will be raised.
|
||||||
|
:param int timeout: How long to wait for the next prompt. -1 means the
|
||||||
|
default from the :class:`pexpect.spawn` object (default 30 seconds).
|
||||||
|
None means to wait indefinitely.
|
||||||
|
:param bool async_: On Python 3.4, or Python 3.3 with asyncio
|
||||||
|
installed, passing ``async_=True`` will make this return an
|
||||||
|
:mod:`asyncio` Future, which you can yield from to get the same
|
||||||
|
result that this method would normally give directly.
|
||||||
|
"""
|
||||||
|
# Split up multiline commands and feed them in bit-by-bit
|
||||||
|
cmdlines = command.splitlines()
|
||||||
|
# splitlines ignores trailing newlines - add it back in manually
|
||||||
|
if command.endswith('\n'):
|
||||||
|
cmdlines.append('')
|
||||||
|
if not cmdlines:
|
||||||
|
raise ValueError("No command was given")
|
||||||
|
|
||||||
|
if async_:
|
||||||
|
from ._async import repl_run_command_async
|
||||||
|
return repl_run_command_async(self, cmdlines, timeout)
|
||||||
|
|
||||||
|
res = []
|
||||||
|
self.child.sendline(cmdlines[0])
|
||||||
|
for line in cmdlines[1:]:
|
||||||
|
self._expect_prompt(timeout=timeout)
|
||||||
|
res.append(self.child.before)
|
||||||
|
self.child.sendline(line)
|
||||||
|
|
||||||
|
# Command was fully submitted, now wait for the next prompt
|
||||||
|
if self._expect_prompt(timeout=timeout) == 1:
|
||||||
|
# We got the continuation prompt - command was incomplete
|
||||||
|
self.child.kill(signal.SIGINT)
|
||||||
|
self._expect_prompt(timeout=1)
|
||||||
|
raise ValueError("Continuation prompt found - input was incomplete:\n"
|
||||||
|
+ command)
|
||||||
|
return u''.join(res + [self.child.before])
|
||||||
|
|
||||||
|
def python(command="python"):
|
||||||
|
"""Start a Python shell and return a :class:`REPLWrapper` object."""
|
||||||
|
return REPLWrapper(command, u">>> ", u"import sys; sys.ps1={0!r}; sys.ps2={1!r}")
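# Illustrative use of python(): run_command() waits for the changed sys.ps1
# prompt and returns everything printed before it. Assumes a "python"
# executable is on PATH.
from pexpect import replwrap
py = replwrap.python()
print(py.run_command('1 + 1'))            # '2\r\n'
print(py.run_command('x = 5\nx * 3'))     # multi-line input is fed line by line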
|
||||||
|
|
||||||
|
def bash(command="bash"):
|
||||||
|
"""Start a bash shell and return a :class:`REPLWrapper` object."""
|
||||||
|
bashrc = os.path.join(os.path.dirname(__file__), 'bashrc.sh')
|
||||||
|
child = pexpect.spawn(command, ['--rcfile', bashrc], echo=False,
|
||||||
|
encoding='utf-8')
|
||||||
|
|
||||||
|
# If the user runs 'env', the value of PS1 will be in the output. To avoid
|
||||||
|
# replwrap seeing that as the next prompt, we'll embed the marker characters
|
||||||
|
# for invisible characters in the prompt; these show up when inspecting the
|
||||||
|
# environment variable, but not when bash displays the prompt.
|
||||||
|
ps1 = PEXPECT_PROMPT[:5] + u'\\[\\]' + PEXPECT_PROMPT[5:]
|
||||||
|
ps2 = PEXPECT_CONTINUATION_PROMPT[:5] + u'\\[\\]' + PEXPECT_CONTINUATION_PROMPT[5:]
|
||||||
|
prompt_change = u"PS1='{0}' PS2='{1}' PROMPT_COMMAND=''".format(ps1, ps2)
|
||||||
|
|
||||||
|
return REPLWrapper(child, u'\\$', prompt_change,
|
||||||
|
extra_init_cmd="export PAGER=cat")
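# Illustrative use of bash(): the wrapper rewrites PS1/PS2 (using the invisible
# marker trick described above) and disables the pager, so shell commands can
# be scripted synchronously.
from pexpect import replwrap
sh = replwrap.bash()
print(sh.run_command('echo hello'))                       # 'hello\r\n'
print(sh.run_command('for i in 1 2 3; do echo $i; done'))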
|
|
@ -0,0 +1,157 @@
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
from .exceptions import EOF, TIMEOUT
|
||||||
|
from .pty_spawn import spawn
|
||||||
|
|
||||||
|
def run(command, timeout=30, withexitstatus=False, events=None,
|
||||||
|
extra_args=None, logfile=None, cwd=None, env=None, **kwargs):
|
||||||
|
|
||||||
|
'''
|
||||||
|
This function runs the given command; waits for it to finish; then
|
||||||
|
returns all output as a string. STDERR is included in output. If the full
|
||||||
|
path to the command is not given then the path is searched.
|
||||||
|
|
||||||
|
Note that lines are terminated by CR/LF (\\r\\n) combination even on
|
||||||
|
UNIX-like systems because this is the standard for pseudottys. If you set
|
||||||
|
'withexitstatus' to true, then run will return a tuple of (command_output,
|
||||||
|
exitstatus). If 'withexitstatus' is false then this returns just
|
||||||
|
command_output.
|
||||||
|
|
||||||
|
The run() function can often be used instead of creating a spawn instance.
|
||||||
|
For example, the following code uses spawn::
|
||||||
|
|
||||||
|
from pexpect import *
|
||||||
|
child = spawn('scp foo user@example.com:.')
|
||||||
|
child.expect('(?i)password')
|
||||||
|
child.sendline(mypassword)
|
||||||
|
|
||||||
|
The previous code can be replaced with the following::
|
||||||
|
|
||||||
|
from pexpect import *
|
||||||
|
run('scp foo user@example.com:.', events={'(?i)password': mypassword})
|
||||||
|
|
||||||
|
**Examples**
|
||||||
|
|
||||||
|
Start the apache daemon on the local machine::
|
||||||
|
|
||||||
|
from pexpect import *
|
||||||
|
run("/usr/local/apache/bin/apachectl start")
|
||||||
|
|
||||||
|
Check in a file using SVN::
|
||||||
|
|
||||||
|
from pexpect import *
|
||||||
|
run("svn ci -m 'automatic commit' my_file.py")
|
||||||
|
|
||||||
|
Run a command and capture exit status::
|
||||||
|
|
||||||
|
from pexpect import *
|
||||||
|
(command_output, exitstatus) = run('ls -l /bin', withexitstatus=1)
|
||||||
|
|
||||||
|
The following will run SSH and execute 'ls -l' on the remote machine. The
|
||||||
|
password 'secret' will be sent if the '(?i)password' pattern is ever seen::
|
||||||
|
|
||||||
|
run("ssh username@machine.example.com 'ls -l'",
|
||||||
|
events={'(?i)password':'secret\\n'})
|
||||||
|
|
||||||
|
This will start mencoder to rip a video from DVD. This will also display
|
||||||
|
progress ticks every 5 seconds as it runs. For example::
|
||||||
|
|
||||||
|
from pexpect import *
|
||||||
|
def print_ticks(d):
|
||||||
|
print d['event_count'],
|
||||||
|
run("mencoder dvd://1 -o video.avi -oac copy -ovc copy",
|
||||||
|
events={TIMEOUT:print_ticks}, timeout=5)
|
||||||
|
|
||||||
|
The 'events' argument should be either a dictionary or a tuple list that
|
||||||
|
contains patterns and responses. Whenever one of the patterns is seen
|
||||||
|
in the command output, run() will send the associated response string.
|
||||||
|
So, run() in the above example can be also written as:
|
||||||
|
|
||||||
|
run("mencoder dvd://1 -o video.avi -oac copy -ovc copy",
|
||||||
|
events=[(TIMEOUT,print_ticks)], timeout=5)
|
||||||
|
|
||||||
|
Use a tuple list for events if the command output requires delicate
control over what pattern should be matched, since the tuple list is passed
|
||||||
|
to pexpect() as its pattern list, with the order of patterns preserved.
|
||||||
|
|
||||||
|
Note that you should put newlines in your string if Enter is necessary.
|
||||||
|
|
||||||
|
Like the example above, the responses may also contain a callback, either
|
||||||
|
a function or method. It should accept a dictionary value as an argument.
|
||||||
|
The dictionary contains all the locals from the run() function, so you can
|
||||||
|
access the child spawn object or any other variable defined in run()
|
||||||
|
(event_count, child, and extra_args are the most useful). A callback may
|
||||||
|
return True to stop the current run process. Otherwise run() continues
|
||||||
|
until the next event. A callback may also return a string which will be
|
||||||
|
sent to the child. 'extra_args' is not used directly by run(). It provides
|
||||||
|
a way to pass data to a callback function through run() through the locals
|
||||||
|
dictionary passed to a callback.
|
||||||
|
|
||||||
|
Like :class:`spawn`, passing *encoding* will make it work with unicode
|
||||||
|
instead of bytes. You can pass *codec_errors* to control how errors in
|
||||||
|
encoding and decoding are handled.
|
||||||
|
'''
|
||||||
|
if timeout == -1:
|
||||||
|
child = spawn(command, maxread=2000, logfile=logfile, cwd=cwd, env=env,
|
||||||
|
**kwargs)
|
||||||
|
else:
|
||||||
|
child = spawn(command, timeout=timeout, maxread=2000, logfile=logfile,
|
||||||
|
cwd=cwd, env=env, **kwargs)
|
||||||
|
if isinstance(events, list):
|
||||||
|
patterns= [x for x,y in events]
|
||||||
|
responses = [y for x,y in events]
|
||||||
|
elif isinstance(events, dict):
|
||||||
|
patterns = list(events.keys())
|
||||||
|
responses = list(events.values())
|
||||||
|
else:
|
||||||
|
# This assumes EOF or TIMEOUT will eventually cause run to terminate.
|
||||||
|
patterns = None
|
||||||
|
responses = None
|
||||||
|
child_result_list = []
|
||||||
|
event_count = 0
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
index = child.expect(patterns)
|
||||||
|
if isinstance(child.after, child.allowed_string_types):
|
||||||
|
child_result_list.append(child.before + child.after)
|
||||||
|
else:
|
||||||
|
# child.after may have been a TIMEOUT or EOF,
|
||||||
|
# which we don't want appended to the list.
|
||||||
|
child_result_list.append(child.before)
|
||||||
|
if isinstance(responses[index], child.allowed_string_types):
|
||||||
|
child.send(responses[index])
|
||||||
|
elif (isinstance(responses[index], types.FunctionType) or
|
||||||
|
isinstance(responses[index], types.MethodType)):
|
||||||
|
callback_result = responses[index](locals())
|
||||||
|
sys.stdout.flush()
|
||||||
|
if isinstance(callback_result, child.allowed_string_types):
|
||||||
|
child.send(callback_result)
|
||||||
|
elif callback_result:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
raise TypeError("parameter `event' at index {index} must be "
|
||||||
|
"a string, method, or function: {value!r}"
|
||||||
|
.format(index=index, value=responses[index]))
|
||||||
|
event_count = event_count + 1
|
||||||
|
except TIMEOUT:
|
||||||
|
child_result_list.append(child.before)
|
||||||
|
break
|
||||||
|
except EOF:
|
||||||
|
child_result_list.append(child.before)
|
||||||
|
break
|
||||||
|
child_result = child.string_type().join(child_result_list)
|
||||||
|
if withexitstatus:
|
||||||
|
child.close()
|
||||||
|
return (child_result, child.exitstatus)
|
||||||
|
else:
|
||||||
|
return child_result
|
||||||
|
|
||||||
|
def runu(command, timeout=30, withexitstatus=False, events=None,
|
||||||
|
extra_args=None, logfile=None, cwd=None, env=None, **kwargs):
|
||||||
|
"""Deprecated: pass encoding to run() instead.
|
||||||
|
"""
|
||||||
|
kwargs.setdefault('encoding', 'utf-8')
|
||||||
|
return run(command, timeout=timeout, withexitstatus=withexitstatus,
|
||||||
|
events=events, extra_args=extra_args, logfile=logfile, cwd=cwd,
|
||||||
|
env=env, **kwargs)
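# Illustrative sketch of run(): a one-shot spawn/expect/collect helper. The
# command is an assumption; withexitstatus=True returns (output, exitstatus),
# and encoding is forwarded to spawn() so the output comes back as str.
import pexpect
output, status = pexpect.run('ls -l /tmp', withexitstatus=True, encoding='utf-8')
print(status)
print(output)                  # CR/LF line endings, as noted in the docstring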
|
|
@ -0,0 +1,431 @@
|
||||||
|
'''This implements a virtual screen. This is used to support ANSI terminal
|
||||||
|
emulation. The screen representation and state is implemented in this class.
|
||||||
|
Most of the methods are inspired by ANSI screen control codes. The
|
||||||
|
:class:`~pexpect.ANSI.ANSI` class extends this class to add parsing of ANSI
|
||||||
|
escape codes.
|
||||||
|
|
||||||
|
PEXPECT LICENSE
|
||||||
|
|
||||||
|
This license is approved by the OSI and FSF as GPL-compatible.
|
||||||
|
http://opensource.org/licenses/isc-license.txt
|
||||||
|
|
||||||
|
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
|
||||||
|
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
|
||||||
|
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
|
||||||
|
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
import codecs
|
||||||
|
import copy
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
warnings.warn(("pexpect.screen and pexpect.ANSI are deprecated. "
|
||||||
|
"We recommend using pyte to emulate a terminal screen: "
|
||||||
|
"https://pypi.python.org/pypi/pyte"),
|
||||||
|
stacklevel=2)
|
||||||
|
|
||||||
|
NUL = 0 # Fill character; ignored on input.
|
||||||
|
ENQ = 5 # Transmit answerback message.
|
||||||
|
BEL = 7 # Ring the bell.
|
||||||
|
BS = 8 # Move cursor left.
|
||||||
|
HT = 9 # Move cursor to next tab stop.
|
||||||
|
LF = 10 # Line feed.
|
||||||
|
VT = 11 # Same as LF.
|
||||||
|
FF = 12 # Same as LF.
|
||||||
|
CR = 13 # Move cursor to left margin or newline.
|
||||||
|
SO = 14 # Invoke G1 character set.
|
||||||
|
SI = 15 # Invoke G0 character set.
|
||||||
|
XON = 17 # Resume transmission.
|
||||||
|
XOFF = 19 # Halt transmission.
|
||||||
|
CAN = 24 # Cancel escape sequence.
|
||||||
|
SUB = 26 # Same as CAN.
|
||||||
|
ESC = 27 # Introduce a control sequence.
|
||||||
|
DEL = 127 # Fill character; ignored on input.
|
||||||
|
SPACE = u' ' # Space or blank character.
|
||||||
|
|
||||||
|
PY3 = (sys.version_info[0] >= 3)
|
||||||
|
if PY3:
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
def constrain (n, min, max):
|
||||||
|
|
||||||
|
'''This returns a number, n constrained to the min and max bounds. '''
|
||||||
|
|
||||||
|
if n < min:
|
||||||
|
return min
|
||||||
|
if n > max:
|
||||||
|
return max
|
||||||
|
return n
|
||||||
|
|
||||||
|
class screen:
|
||||||
|
'''This object maintains the state of a virtual text screen as a
|
||||||
|
rectangular array. This maintains a virtual cursor position and handles
|
||||||
|
scrolling as characters are added. This supports most of the methods needed
|
||||||
|
by an ANSI text screen. Row and column indexes are 1-based (not zero-based,
|
||||||
|
like arrays).
|
||||||
|
|
||||||
|
Characters are represented internally using unicode. Methods that accept
|
||||||
|
input characters, when passed 'bytes' (which in Python 2 is equivalent to
|
||||||
|
'str'), convert them from the encoding specified in the 'encoding'
|
||||||
|
parameter to the constructor. Methods that return screen contents return
|
||||||
|
unicode strings, with the exception of __str__() under Python 2. Passing
|
||||||
|
``encoding=None`` limits the API to only accept unicode input, so passing
|
||||||
|
bytes in will raise :exc:`TypeError`.
|
||||||
|
'''
|
||||||
|
def __init__(self, r=24, c=80, encoding='latin-1', encoding_errors='replace'):
|
||||||
|
'''This initializes a blank screen of the given dimensions.'''
|
||||||
|
|
||||||
|
self.rows = r
|
||||||
|
self.cols = c
|
||||||
|
self.encoding = encoding
|
||||||
|
self.encoding_errors = encoding_errors
|
||||||
|
if encoding is not None:
|
||||||
|
self.decoder = codecs.getincrementaldecoder(encoding)(encoding_errors)
|
||||||
|
else:
|
||||||
|
self.decoder = None
|
||||||
|
self.cur_r = 1
|
||||||
|
self.cur_c = 1
|
||||||
|
self.cur_saved_r = 1
|
||||||
|
self.cur_saved_c = 1
|
||||||
|
self.scroll_row_start = 1
|
||||||
|
self.scroll_row_end = self.rows
|
||||||
|
self.w = [ [SPACE] * self.cols for _ in range(self.rows)]
|
||||||
|
|
||||||
|
def _decode(self, s):
|
||||||
|
'''This converts from the external coding system (as passed to
|
||||||
|
the constructor) to the internal one (unicode). '''
|
||||||
|
if self.decoder is not None:
|
||||||
|
return self.decoder.decode(s)
|
||||||
|
else:
|
||||||
|
raise TypeError("This screen was constructed with encoding=None, "
|
||||||
|
"so it does not handle bytes.")
|
||||||
|
|
||||||
|
def _unicode(self):
|
||||||
|
'''This returns a printable representation of the screen as a unicode
|
||||||
|
string (which, under Python 3.x, is the same as 'str'). The end of each
|
||||||
|
screen line is terminated by a newline.'''
|
||||||
|
|
||||||
|
return u'\n'.join ([ u''.join(c) for c in self.w ])
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
__str__ = _unicode
|
||||||
|
else:
|
||||||
|
__unicode__ = _unicode
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
'''This returns a printable representation of the screen. The end of
|
||||||
|
each screen line is terminated by a newline. '''
|
||||||
|
encoding = self.encoding or 'ascii'
|
||||||
|
return self._unicode().encode(encoding, 'replace')
|
||||||
|
|
||||||
|
def dump (self):
|
||||||
|
'''This returns a copy of the screen as a unicode string. This is similar to
|
||||||
|
__str__/__unicode__ except that lines are not terminated with line
|
||||||
|
feeds.'''
|
||||||
|
|
||||||
|
return u''.join ([ u''.join(c) for c in self.w ])
|
||||||
|
|
||||||
|
def pretty (self):
|
||||||
|
'''This returns a copy of the screen as a unicode string with an ASCII
|
||||||
|
text box around the screen border. This is similar to
|
||||||
|
__str__/__unicode__ except that it adds a box.'''
|
||||||
|
|
||||||
|
top_bot = u'+' + u'-'*self.cols + u'+\n'
|
||||||
|
return top_bot + u'\n'.join([u'|'+line+u'|' for line in unicode(self).split(u'\n')]) + u'\n' + top_bot
|
||||||
|
|
||||||
|
def fill (self, ch=SPACE):
|
||||||
|
|
||||||
|
if isinstance(ch, bytes):
|
||||||
|
ch = self._decode(ch)
|
||||||
|
|
||||||
|
self.fill_region (1,1,self.rows,self.cols, ch)
|
||||||
|
|
||||||
|
def fill_region (self, rs,cs, re,ce, ch=SPACE):
|
||||||
|
|
||||||
|
if isinstance(ch, bytes):
|
||||||
|
ch = self._decode(ch)
|
||||||
|
|
||||||
|
rs = constrain (rs, 1, self.rows)
|
||||||
|
re = constrain (re, 1, self.rows)
|
||||||
|
cs = constrain (cs, 1, self.cols)
|
||||||
|
ce = constrain (ce, 1, self.cols)
|
||||||
|
if rs > re:
|
||||||
|
rs, re = re, rs
|
||||||
|
if cs > ce:
|
||||||
|
cs, ce = ce, cs
|
||||||
|
for r in range (rs, re+1):
|
||||||
|
for c in range (cs, ce + 1):
|
||||||
|
self.put_abs (r,c,ch)
|
||||||
|
|
||||||
|
def cr (self):
|
||||||
|
'''This moves the cursor to the beginning (col 1) of the current row.
|
||||||
|
'''
|
||||||
|
|
||||||
|
self.cursor_home (self.cur_r, 1)
|
||||||
|
|
||||||
|
def lf (self):
|
||||||
|
'''This moves the cursor down with scrolling.
|
||||||
|
'''
|
||||||
|
|
||||||
|
old_r = self.cur_r
|
||||||
|
self.cursor_down()
|
||||||
|
if old_r == self.cur_r:
|
||||||
|
self.scroll_up ()
|
||||||
|
self.erase_line()
|
||||||
|
|
||||||
|
def crlf (self):
|
||||||
|
'''This advances the cursor with CRLF properties.
|
||||||
|
The cursor will line wrap and the screen may scroll.
|
||||||
|
'''
|
||||||
|
|
||||||
|
self.cr ()
|
||||||
|
self.lf ()
|
||||||
|
|
||||||
|
def newline (self):
|
||||||
|
'''This is an alias for crlf().
|
||||||
|
'''
|
||||||
|
|
||||||
|
self.crlf()
|
||||||
|
|
||||||
|
def put_abs (self, r, c, ch):
|
||||||
|
'''Screen array starts at 1 index.'''
|
||||||
|
|
||||||
|
r = constrain (r, 1, self.rows)
|
||||||
|
c = constrain (c, 1, self.cols)
|
||||||
|
if isinstance(ch, bytes):
|
||||||
|
ch = self._decode(ch)[0]
|
||||||
|
else:
|
||||||
|
ch = ch[0]
|
||||||
|
self.w[r-1][c-1] = ch
|
||||||
|
|
||||||
|
def put (self, ch):
|
||||||
|
'''This puts a character at the current cursor position.
|
||||||
|
'''
|
||||||
|
|
||||||
|
if isinstance(ch, bytes):
|
||||||
|
ch = self._decode(ch)
|
||||||
|
|
||||||
|
self.put_abs (self.cur_r, self.cur_c, ch)
|
||||||
|
|
||||||
|
def insert_abs (self, r, c, ch):
|
||||||
|
'''This inserts a character at (r,c). Everything under
|
||||||
|
and to the right is shifted right one character.
|
||||||
|
The last character of the line is lost.
|
||||||
|
'''
|
||||||
|
|
||||||
|
if isinstance(ch, bytes):
|
||||||
|
ch = self._decode(ch)
|
||||||
|
|
||||||
|
r = constrain (r, 1, self.rows)
|
||||||
|
c = constrain (c, 1, self.cols)
|
||||||
|
for ci in range (self.cols, c, -1):
|
||||||
|
self.put_abs (r,ci, self.get_abs(r,ci-1))
|
||||||
|
self.put_abs (r,c,ch)
|
||||||
|
|
||||||
|
def insert (self, ch):
|
||||||
|
|
||||||
|
if isinstance(ch, bytes):
|
||||||
|
ch = self._decode(ch)
|
||||||
|
|
||||||
|
self.insert_abs (self.cur_r, self.cur_c, ch)
|
||||||
|
|
||||||
|
def get_abs (self, r, c):
|
||||||
|
|
||||||
|
r = constrain (r, 1, self.rows)
|
||||||
|
c = constrain (c, 1, self.cols)
|
||||||
|
return self.w[r-1][c-1]
|
||||||
|
|
||||||
|
def get (self):
|
||||||
|
|
||||||
|
self.get_abs (self.cur_r, self.cur_c)
|
||||||
|
|
||||||
|
def get_region (self, rs,cs, re,ce):
|
||||||
|
'''This returns a list of lines representing the region.
|
||||||
|
'''
|
||||||
|
|
||||||
|
rs = constrain (rs, 1, self.rows)
|
||||||
|
re = constrain (re, 1, self.rows)
|
||||||
|
cs = constrain (cs, 1, self.cols)
|
||||||
|
ce = constrain (ce, 1, self.cols)
|
||||||
|
if rs > re:
|
||||||
|
rs, re = re, rs
|
||||||
|
if cs > ce:
|
||||||
|
cs, ce = ce, cs
|
||||||
|
sc = []
|
||||||
|
for r in range (rs, re+1):
|
||||||
|
line = u''
|
||||||
|
for c in range (cs, ce + 1):
|
||||||
|
ch = self.get_abs (r,c)
|
||||||
|
line = line + ch
|
||||||
|
sc.append (line)
|
||||||
|
return sc
|
||||||
|
|
||||||
|
def cursor_constrain (self):
|
||||||
|
'''This keeps the cursor within the screen area.
|
||||||
|
'''
|
||||||
|
|
||||||
|
self.cur_r = constrain (self.cur_r, 1, self.rows)
|
||||||
|
self.cur_c = constrain (self.cur_c, 1, self.cols)
|
||||||
|
|
||||||
|
def cursor_home (self, r=1, c=1): # <ESC>[{ROW};{COLUMN}H
|
||||||
|
|
||||||
|
self.cur_r = r
|
||||||
|
self.cur_c = c
|
||||||
|
self.cursor_constrain ()
|
||||||
|
|
||||||
|
def cursor_back (self,count=1): # <ESC>[{COUNT}D (not confused with down)
|
||||||
|
|
||||||
|
self.cur_c = self.cur_c - count
|
||||||
|
self.cursor_constrain ()
|
||||||
|
|
||||||
|
def cursor_down (self,count=1): # <ESC>[{COUNT}B (not confused with back)
|
||||||
|
|
||||||
|
self.cur_r = self.cur_r + count
|
||||||
|
self.cursor_constrain ()
|
||||||
|
|
||||||
|
def cursor_forward (self,count=1): # <ESC>[{COUNT}C
|
||||||
|
|
||||||
|
self.cur_c = self.cur_c + count
|
||||||
|
self.cursor_constrain ()
|
||||||
|
|
||||||
|
def cursor_up (self,count=1): # <ESC>[{COUNT}A
|
||||||
|
|
||||||
|
self.cur_r = self.cur_r - count
|
||||||
|
self.cursor_constrain ()
|
||||||
|
|
||||||
|
def cursor_up_reverse (self): # <ESC> M (called RI -- Reverse Index)
|
||||||
|
|
||||||
|
old_r = self.cur_r
|
||||||
|
self.cursor_up()
|
||||||
|
if old_r == self.cur_r:
|
||||||
|
self.scroll_up()
|
||||||
|
|
||||||
|
def cursor_force_position (self, r, c): # <ESC>[{ROW};{COLUMN}f
|
||||||
|
'''Identical to Cursor Home.'''
|
||||||
|
|
||||||
|
self.cursor_home (r, c)
|
||||||
|
|
||||||
|
def cursor_save (self): # <ESC>[s
|
||||||
|
'''Save current cursor position.'''
|
||||||
|
|
||||||
|
self.cursor_save_attrs()
|
||||||
|
|
||||||
|
def cursor_unsave (self): # <ESC>[u
|
||||||
|
'''Restores cursor position after a Save Cursor.'''
|
||||||
|
|
||||||
|
self.cursor_restore_attrs()
|
||||||
|
|
||||||
|
def cursor_save_attrs (self): # <ESC>7
|
||||||
|
'''Save current cursor position.'''
|
||||||
|
|
||||||
|
self.cur_saved_r = self.cur_r
|
||||||
|
self.cur_saved_c = self.cur_c
|
||||||
|
|
||||||
|
def cursor_restore_attrs (self): # <ESC>8
|
||||||
|
'''Restores cursor position after a Save Cursor.'''
|
||||||
|
|
||||||
|
self.cursor_home (self.cur_saved_r, self.cur_saved_c)
|
||||||
|
|
||||||
|
def scroll_constrain (self):
|
||||||
|
'''This keeps the scroll region within the screen region.'''
|
||||||
|
|
||||||
|
if self.scroll_row_start <= 0:
|
||||||
|
self.scroll_row_start = 1
|
||||||
|
if self.scroll_row_end > self.rows:
|
||||||
|
self.scroll_row_end = self.rows
|
||||||
|
|
||||||
|
def scroll_screen (self): # <ESC>[r
|
||||||
|
'''Enable scrolling for entire display.'''
|
||||||
|
|
||||||
|
self.scroll_row_start = 1
|
||||||
|
self.scroll_row_end = self.rows
|
||||||
|
|
||||||
|
def scroll_screen_rows (self, rs, re): # <ESC>[{start};{end}r
|
||||||
|
'''Enable scrolling from row {start} to row {end}.'''
|
||||||
|
|
||||||
|
self.scroll_row_start = rs
|
||||||
|
self.scroll_row_end = re
|
||||||
|
self.scroll_constrain()
|
||||||
|
|
||||||
|
def scroll_down (self): # <ESC>D
|
||||||
|
'''Scroll display down one line.'''
|
||||||
|
|
||||||
|
# Screen is indexed from 1, but arrays are indexed from 0.
|
||||||
|
s = self.scroll_row_start - 1
|
||||||
|
e = self.scroll_row_end - 1
|
||||||
|
self.w[s+1:e+1] = copy.deepcopy(self.w[s:e])
|
||||||
|
|
||||||
|
def scroll_up (self): # <ESC>M
|
||||||
|
'''Scroll display up one line.'''
|
||||||
|
|
||||||
|
# Screen is indexed from 1, but arrays are indexed from 0.
|
||||||
|
s = self.scroll_row_start - 1
|
||||||
|
e = self.scroll_row_end - 1
|
||||||
|
self.w[s:e] = copy.deepcopy(self.w[s+1:e+1])
|
||||||
|
|
||||||
|
def erase_end_of_line (self): # <ESC>[0K -or- <ESC>[K
|
||||||
|
'''Erases from the current cursor position to the end of the current
|
||||||
|
line.'''
|
||||||
|
|
||||||
|
self.fill_region (self.cur_r, self.cur_c, self.cur_r, self.cols)
|
||||||
|
|
||||||
|
def erase_start_of_line (self): # <ESC>[1K
|
||||||
|
'''Erases from the current cursor position to the start of the current
|
||||||
|
line.'''
|
||||||
|
|
||||||
|
self.fill_region (self.cur_r, 1, self.cur_r, self.cur_c)
|
||||||
|
|
||||||
|
def erase_line (self): # <ESC>[2K
|
||||||
|
'''Erases the entire current line.'''
|
||||||
|
|
||||||
|
self.fill_region (self.cur_r, 1, self.cur_r, self.cols)
|
||||||
|
|
||||||
|
def erase_down (self): # <ESC>[0J -or- <ESC>[J
|
||||||
|
'''Erases the screen from the current line down to the bottom of the
|
||||||
|
screen.'''
|
||||||
|
|
||||||
|
self.erase_end_of_line ()
|
||||||
|
self.fill_region (self.cur_r + 1, 1, self.rows, self.cols)
|
||||||
|
|
||||||
|
def erase_up (self): # <ESC>[1J
|
||||||
|
'''Erases the screen from the current line up to the top of the
|
||||||
|
screen.'''
|
||||||
|
|
||||||
|
self.erase_start_of_line ()
|
||||||
|
self.fill_region (self.cur_r-1, 1, 1, self.cols)
|
||||||
|
|
||||||
|
def erase_screen (self): # <ESC>[2J
|
||||||
|
'''Erases the screen with the background color.'''
|
||||||
|
|
||||||
|
self.fill ()
|
||||||
|
|
||||||
|
def set_tab (self): # <ESC>H
|
||||||
|
'''Sets a tab at the current position.'''
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
def clear_tab (self): # <ESC>[g
|
||||||
|
'''Clears tab at the current position.'''
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
def clear_all_tabs (self): # <ESC>[3g
|
||||||
|
'''Clears all tabs.'''
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Insert line Esc [ Pn L
|
||||||
|
# Delete line Esc [ Pn M
|
||||||
|
# Delete character Esc [ Pn P
|
||||||
|
# Scrolling region Esc [ Pn(top);Pn(bot) r
|
||||||
|
|
|
@ -0,0 +1,522 @@
from io import StringIO, BytesIO
import codecs
import os
import sys
import re
import errno
from .exceptions import ExceptionPexpect, EOF, TIMEOUT
from .expect import Expecter, searcher_string, searcher_re

PY3 = (sys.version_info[0] >= 3)
text_type = str if PY3 else unicode

class _NullCoder(object):
    """Pass bytes through unchanged."""
    @staticmethod
    def encode(b, final=False):
        return b

    @staticmethod
    def decode(b, final=False):
        return b

class SpawnBase(object):
    """A base class providing the backwards-compatible spawn API for Pexpect.

    This should not be instantiated directly: use :class:`pexpect.spawn` or
    :class:`pexpect.fdpexpect.fdspawn`.
    """
    encoding = None
    pid = None
    flag_eof = False

    def __init__(self, timeout=30, maxread=2000, searchwindowsize=None,
                 logfile=None, encoding=None, codec_errors='strict'):
        self.stdin = sys.stdin
        self.stdout = sys.stdout
        self.stderr = sys.stderr

        self.searcher = None
        self.ignorecase = False
        self.before = None
        self.after = None
        self.match = None
        self.match_index = None
        self.terminated = True
        self.exitstatus = None
        self.signalstatus = None
        # status returned by os.waitpid
        self.status = None
        # the child file descriptor is initially closed
        self.child_fd = -1
        self.timeout = timeout
        self.delimiter = EOF
        self.logfile = logfile
        # input from child (read_nonblocking)
        self.logfile_read = None
        # output to send (send, sendline)
        self.logfile_send = None
        # max bytes to read at one time into buffer
        self.maxread = maxread
        # Data before searchwindowsize point is preserved, but not searched.
        self.searchwindowsize = searchwindowsize
        # Delay used before sending data to child. Time in seconds.
        # Set this to None to skip the time.sleep() call completely.
        self.delaybeforesend = 0.05
        # Used by close() to give kernel time to update process status.
        # Time in seconds.
        self.delayafterclose = 0.1
        # Used by terminate() to give kernel time to update process status.
        # Time in seconds.
        self.delayafterterminate = 0.1
        # Delay in seconds to sleep after each call to read_nonblocking().
        # Set this to None to skip the time.sleep() call completely: that
        # would restore the behavior from pexpect-2.0 (for performance
        # reasons or because you don't want to release Python's global
        # interpreter lock).
        self.delayafterread = 0.0001
        self.softspace = False
        self.name = '<' + repr(self) + '>'
        self.closed = True

        # Unicode interface
        self.encoding = encoding
        self.codec_errors = codec_errors
        if encoding is None:
            # bytes mode (accepts some unicode for backwards compatibility)
            self._encoder = self._decoder = _NullCoder()
            self.string_type = bytes
            self.buffer_type = BytesIO
            self.crlf = b'\r\n'
            if PY3:
                self.allowed_string_types = (bytes, str)
                self.linesep = os.linesep.encode('ascii')
                def write_to_stdout(b):
                    try:
                        return sys.stdout.buffer.write(b)
                    except AttributeError:
                        # If stdout has been replaced, it may not have .buffer
                        return sys.stdout.write(b.decode('ascii', 'replace'))
                self.write_to_stdout = write_to_stdout
            else:
                self.allowed_string_types = (basestring,)  # analysis:ignore
                self.linesep = os.linesep
                self.write_to_stdout = sys.stdout.write
        else:
            # unicode mode
            self._encoder = codecs.getincrementalencoder(encoding)(codec_errors)
            self._decoder = codecs.getincrementaldecoder(encoding)(codec_errors)
            self.string_type = text_type
            self.buffer_type = StringIO
            self.crlf = u'\r\n'
            self.allowed_string_types = (text_type, )
            if PY3:
                self.linesep = os.linesep
            else:
                self.linesep = os.linesep.decode('ascii')
            # This can handle unicode in both Python 2 and 3
            self.write_to_stdout = sys.stdout.write
        # storage for async transport
        self.async_pw_transport = None
        # This is the read buffer. See maxread.
        self._buffer = self.buffer_type()

    def _log(self, s, direction):
        if self.logfile is not None:
            self.logfile.write(s)
            self.logfile.flush()
        second_log = self.logfile_send if (direction=='send') else self.logfile_read
        if second_log is not None:
            second_log.write(s)
            second_log.flush()

    # For backwards compatibility, in bytes mode (when encoding is None)
    # unicode is accepted for send and expect. Unicode mode is strictly unicode
    # only.
    def _coerce_expect_string(self, s):
        if self.encoding is None and not isinstance(s, bytes):
            return s.encode('ascii')
        return s

    def _coerce_send_string(self, s):
        if self.encoding is None and not isinstance(s, bytes):
            return s.encode('utf-8')
        return s

    def _get_buffer(self):
        return self._buffer.getvalue()

    def _set_buffer(self, value):
        self._buffer = self.buffer_type()
        self._buffer.write(value)

    # This property is provided for backwards compatability (self.buffer used
    # to be a string/bytes object)
    buffer = property(_get_buffer, _set_buffer)

    def read_nonblocking(self, size=1, timeout=None):
        """This reads data from the file descriptor.

        This is a simple implementation suitable for a regular file. Subclasses using ptys or pipes should override it.

        The timeout parameter is ignored.
        """

        try:
            s = os.read(self.child_fd, size)
        except OSError as err:
            if err.args[0] == errno.EIO:
                # Linux-style EOF
                self.flag_eof = True
                raise EOF('End Of File (EOF). Exception style platform.')
            raise
        if s == b'':
            # BSD-style EOF
            self.flag_eof = True
            raise EOF('End Of File (EOF). Empty string style platform.')

        s = self._decoder.decode(s, final=False)
        self._log(s, 'read')
        return s

    def _pattern_type_err(self, pattern):
        raise TypeError('got {badtype} ({badobj!r}) as pattern, must be one'
                        ' of: {goodtypes}, pexpect.EOF, pexpect.TIMEOUT'\
                        .format(badtype=type(pattern),
                                badobj=pattern,
                                goodtypes=', '.join([str(ast)\
                                    for ast in self.allowed_string_types])
                                )
                        )

    def compile_pattern_list(self, patterns):
        '''This compiles a pattern-string or a list of pattern-strings.
        Patterns must be a StringType, EOF, TIMEOUT, SRE_Pattern, or a list of
        those. Patterns may also be None which results in an empty list (you
        might do this if waiting for an EOF or TIMEOUT condition without
        expecting any pattern).

        This is used by expect() when calling expect_list(). Thus expect() is
        nothing more than::

            cpl = self.compile_pattern_list(pl)
            return self.expect_list(cpl, timeout)

        If you are using expect() within a loop it may be more
        efficient to compile the patterns first and then call expect_list().
        This avoid calls in a loop to compile_pattern_list()::

            cpl = self.compile_pattern_list(my_pattern)
            while some_condition:
                ...
                i = self.expect_list(cpl, timeout)
                ...
        '''

        if patterns is None:
            return []
        if not isinstance(patterns, list):
            patterns = [patterns]

        # Allow dot to match \n
        compile_flags = re.DOTALL
        if self.ignorecase:
            compile_flags = compile_flags | re.IGNORECASE
        compiled_pattern_list = []
        for idx, p in enumerate(patterns):
            if isinstance(p, self.allowed_string_types):
                p = self._coerce_expect_string(p)
                compiled_pattern_list.append(re.compile(p, compile_flags))
            elif p is EOF:
                compiled_pattern_list.append(EOF)
            elif p is TIMEOUT:
                compiled_pattern_list.append(TIMEOUT)
            elif isinstance(p, type(re.compile(''))):
                compiled_pattern_list.append(p)
            else:
                self._pattern_type_err(p)
        return compiled_pattern_list

    def expect(self, pattern, timeout=-1, searchwindowsize=-1, async_=False, **kw):
        '''This seeks through the stream until a pattern is matched. The
        pattern is overloaded and may take several types. The pattern can be a
        StringType, EOF, a compiled re, or a list of any of those types.
        Strings will be compiled to re types. This returns the index into the
        pattern list. If the pattern was not a list this returns index 0 on a
        successful match. This may raise exceptions for EOF or TIMEOUT. To
        avoid the EOF or TIMEOUT exceptions add EOF or TIMEOUT to the pattern
        list. That will cause expect to match an EOF or TIMEOUT condition
        instead of raising an exception.

        If you pass a list of patterns and more than one matches, the first
        match in the stream is chosen. If more than one pattern matches at that
        point, the leftmost in the pattern list is chosen. For example::

            # the input is 'foobar'
            index = p.expect(['bar', 'foo', 'foobar'])
            # returns 1('foo') even though 'foobar' is a "better" match

        Please note, however, that buffering can affect this behavior, since
        input arrives in unpredictable chunks. For example::

            # the input is 'foobar'
            index = p.expect(['foobar', 'foo'])
            # returns 0('foobar') if all input is available at once,
            # but returns 1('foo') if parts of the final 'bar' arrive late

        When a match is found for the given pattern, the class instance
        attribute *match* becomes an re.MatchObject result. Should an EOF
        or TIMEOUT pattern match, then the match attribute will be an instance
        of that exception class. The pairing before and after class
        instance attributes are views of the data preceding and following
        the matching pattern. On general exception, class attribute
        *before* is all data received up to the exception, while *match* and
        *after* attributes are value None.

        When the keyword argument timeout is -1 (default), then TIMEOUT will
        raise after the default value specified by the class timeout
        attribute. When None, TIMEOUT will not be raised and may block
        indefinitely until match.

        When the keyword argument searchwindowsize is -1 (default), then the
        value specified by the class maxread attribute is used.

        A list entry may be EOF or TIMEOUT instead of a string. This will
        catch these exceptions and return the index of the list entry instead
        of raising the exception. The attribute 'after' will be set to the
        exception type. The attribute 'match' will be None. This allows you to
        write code like this::

            index = p.expect(['good', 'bad', pexpect.EOF, pexpect.TIMEOUT])
            if index == 0:
                do_something()
            elif index == 1:
                do_something_else()
            elif index == 2:
                do_some_other_thing()
            elif index == 3:
                do_something_completely_different()

        instead of code like this::

            try:
                index = p.expect(['good', 'bad'])
                if index == 0:
                    do_something()
                elif index == 1:
                    do_something_else()
            except EOF:
                do_some_other_thing()
            except TIMEOUT:
                do_something_completely_different()

        These two forms are equivalent. It all depends on what you want. You
        can also just expect the EOF if you are waiting for all output of a
        child to finish. For example::

            p = pexpect.spawn('/bin/ls')
            p.expect(pexpect.EOF)
            print p.before

        If you are trying to optimize for speed then see expect_list().

        On Python 3.4, or Python 3.3 with asyncio installed, passing
        ``async_=True`` will make this return an :mod:`asyncio` coroutine,
        which you can yield from to get the same result that this method would
        normally give directly. So, inside a coroutine, you can replace this code::

            index = p.expect(patterns)

        With this non-blocking form::

            index = yield from p.expect(patterns, async_=True)
        '''
        if 'async' in kw:
            async_ = kw.pop('async')
        if kw:
            raise TypeError("Unknown keyword arguments: {}".format(kw))

        compiled_pattern_list = self.compile_pattern_list(pattern)
        return self.expect_list(compiled_pattern_list,
                timeout, searchwindowsize, async_)

    def expect_list(self, pattern_list, timeout=-1, searchwindowsize=-1,
                    async_=False, **kw):
        '''This takes a list of compiled regular expressions and returns the
        index into the pattern_list that matched the child output. The list may
        also contain EOF or TIMEOUT(which are not compiled regular
        expressions). This method is similar to the expect() method except that
        expect_list() does not recompile the pattern list on every call. This
        may help if you are trying to optimize for speed, otherwise just use
        the expect() method. This is called by expect().


        Like :meth:`expect`, passing ``async_=True`` will make this return an
        asyncio coroutine.
        '''
        if timeout == -1:
            timeout = self.timeout
        if 'async' in kw:
            async_ = kw.pop('async')
        if kw:
            raise TypeError("Unknown keyword arguments: {}".format(kw))

        exp = Expecter(self, searcher_re(pattern_list), searchwindowsize)
        if async_:
            from ._async import expect_async
            return expect_async(exp, timeout)
        else:
            return exp.expect_loop(timeout)

    def expect_exact(self, pattern_list, timeout=-1, searchwindowsize=-1,
                     async_=False, **kw):

        '''This is similar to expect(), but uses plain string matching instead
        of compiled regular expressions in 'pattern_list'. The 'pattern_list'
        may be a string; a list or other sequence of strings; or TIMEOUT and
        EOF.

        This call might be faster than expect() for two reasons: string
        searching is faster than RE matching and it is possible to limit the
        search to just the end of the input buffer.

        This method is also useful when you don't want to have to worry about
        escaping regular expression characters that you want to match.

        Like :meth:`expect`, passing ``async_=True`` will make this return an
        asyncio coroutine.
        '''
        if timeout == -1:
            timeout = self.timeout
        if 'async' in kw:
            async_ = kw.pop('async')
        if kw:
            raise TypeError("Unknown keyword arguments: {}".format(kw))

        if (isinstance(pattern_list, self.allowed_string_types) or
                pattern_list in (TIMEOUT, EOF)):
            pattern_list = [pattern_list]

        def prepare_pattern(pattern):
            if pattern in (TIMEOUT, EOF):
                return pattern
            if isinstance(pattern, self.allowed_string_types):
                return self._coerce_expect_string(pattern)
            self._pattern_type_err(pattern)

        try:
            pattern_list = iter(pattern_list)
        except TypeError:
            self._pattern_type_err(pattern_list)
        pattern_list = [prepare_pattern(p) for p in pattern_list]

        exp = Expecter(self, searcher_string(pattern_list), searchwindowsize)
        if async_:
            from ._async import expect_async
            return expect_async(exp, timeout)
        else:
            return exp.expect_loop(timeout)

    def expect_loop(self, searcher, timeout=-1, searchwindowsize=-1):
        '''This is the common loop used inside expect. The 'searcher' should be
        an instance of searcher_re or searcher_string, which describes how and
        what to search for in the input.

        See expect() for other arguments, return value and exceptions. '''

        exp = Expecter(self, searcher, searchwindowsize)
        return exp.expect_loop(timeout)

    def read(self, size=-1):
        '''This reads at most "size" bytes from the file (less if the read hits
        EOF before obtaining size bytes). If the size argument is negative or
        omitted, read all data until EOF is reached. The bytes are returned as
        a string object. An empty string is returned when EOF is encountered
        immediately. '''

        if size == 0:
            return self.string_type()
        if size < 0:
            # delimiter default is EOF
            self.expect(self.delimiter)
            return self.before

        # I could have done this more directly by not using expect(), but
        # I deliberately decided to couple read() to expect() so that
        # I would catch any bugs early and ensure consistent behavior.
        # It's a little less efficient, but there is less for me to
        # worry about if I have to later modify read() or expect().
        # Note, it's OK if size==-1 in the regex. That just means it
        # will never match anything in which case we stop only on EOF.
        cre = re.compile(self._coerce_expect_string('.{%d}' % size), re.DOTALL)
        # delimiter default is EOF
        index = self.expect([cre, self.delimiter])
        if index == 0:
            ### FIXME self.before should be ''. Should I assert this?
            return self.after
        return self.before

    def readline(self, size=-1):
        '''This reads and returns one entire line. The newline at the end of
        line is returned as part of the string, unless the file ends without a
        newline. An empty string is returned if EOF is encountered immediately.
        This looks for a newline as a CR/LF pair (\\r\\n) even on UNIX because
        this is what the pseudotty device returns. So contrary to what you may
        expect you will receive newlines as \\r\\n.

        If the size argument is 0 then an empty string is returned. In all
        other cases the size argument is ignored, which is not standard
        behavior for a file-like object. '''

        if size == 0:
            return self.string_type()
        # delimiter default is EOF
        index = self.expect([self.crlf, self.delimiter])
        if index == 0:
            return self.before + self.crlf
        else:
            return self.before

    def __iter__(self):
        '''This is to support iterators over a file-like object.
        '''
        return iter(self.readline, self.string_type())

    def readlines(self, sizehint=-1):
        '''This reads until EOF using readline() and returns a list containing
        the lines thus read. The optional 'sizehint' argument is ignored.
        Remember, because this reads until EOF that means the child
        process should have closed its stdout. If you run this method on
        a child that is still running with its stdout open then this
        method will block until it timesout.'''

        lines = []
        while True:
            line = self.readline()
            if not line:
                break
            lines.append(line)
        return lines

    def fileno(self):
        '''Expose file descriptor for a file-like interface
        '''
        return self.child_fd

    def flush(self):
        '''This does nothing. It is here to support the interface for a
        File-like object. '''
        pass

    def isatty(self):
        """Overridden in subclass using tty"""
        return False

    # For 'with spawn(...) as child:'
    def __enter__(self):
        return self

    def __exit__(self, etype, evalue, tb):
        # We rely on subclasses to implement close(). If they don't, it's not
        # clear what a context manager should do.
        self.close()
@ -0,0 +1,187 @@
import os
import sys
import stat
import select
import time
import errno

try:
    InterruptedError
except NameError:
    # Alias Python2 exception to Python3
    InterruptedError = select.error

if sys.version_info[0] >= 3:
    string_types = (str,)
else:
    string_types = (unicode, str)


def is_executable_file(path):
    """Checks that path is an executable regular file, or a symlink towards one.

    This is roughly ``os.path.isfile(path) and os.access(path, os.X_OK)``.
    """
    # follow symlinks,
    fpath = os.path.realpath(path)

    if not os.path.isfile(fpath):
        # non-files (directories, fifo, etc.)
        return False

    mode = os.stat(fpath).st_mode

    if (sys.platform.startswith('sunos')
            and os.getuid() == 0):
        # When root on Solaris, os.X_OK is True for *all* files, irregardless
        # of their executability -- instead, any permission bit of any user,
        # group, or other is fine enough.
        #
        # (This may be true for other "Unix98" OS's such as HP-UX and AIX)
        return bool(mode & (stat.S_IXUSR |
                            stat.S_IXGRP |
                            stat.S_IXOTH))

    return os.access(fpath, os.X_OK)


def which(filename, env=None):
    '''This takes a given filename; tries to find it in the environment path;
    then checks if it is executable. This returns the full path to the filename
    if found and executable. Otherwise this returns None.'''

    # Special case where filename contains an explicit path.
    if os.path.dirname(filename) != '' and is_executable_file(filename):
        return filename
    if env is None:
        env = os.environ
    p = env.get('PATH')
    if not p:
        p = os.defpath
    pathlist = p.split(os.pathsep)
    for path in pathlist:
        ff = os.path.join(path, filename)
        if is_executable_file(ff):
            return ff
    return None


def split_command_line(command_line):

    '''This splits a command line into a list of arguments. It splits arguments
    on spaces, but handles embedded quotes, doublequotes, and escaped
    characters. It's impossible to do this with a regular expression, so I
    wrote a little state machine to parse the command line. '''

    arg_list = []
    arg = ''

    # Constants to name the states we can be in.
    state_basic = 0
    state_esc = 1
    state_singlequote = 2
    state_doublequote = 3
    # The state when consuming whitespace between commands.
    state_whitespace = 4
    state = state_basic

    for c in command_line:
        if state == state_basic or state == state_whitespace:
            if c == '\\':
                # Escape the next character
                state = state_esc
            elif c == r"'":
                # Handle single quote
                state = state_singlequote
            elif c == r'"':
                # Handle double quote
                state = state_doublequote
            elif c.isspace():
                # Add arg to arg_list if we aren't in the middle of whitespace.
                if state == state_whitespace:
                    # Do nothing.
                    None
                else:
                    arg_list.append(arg)
                    arg = ''
                    state = state_whitespace
            else:
                arg = arg + c
                state = state_basic
        elif state == state_esc:
            arg = arg + c
            state = state_basic
        elif state == state_singlequote:
            if c == r"'":
                state = state_basic
            else:
                arg = arg + c
        elif state == state_doublequote:
            if c == r'"':
                state = state_basic
            else:
                arg = arg + c

    if arg != '':
        arg_list.append(arg)
    return arg_list


def select_ignore_interrupts(iwtd, owtd, ewtd, timeout=None):

    '''This is a wrapper around select.select() that ignores signals. If
    select.select raises a select.error exception and errno is an EINTR
    error then it is ignored. Mainly this is used to ignore sigwinch
    (terminal resize). '''

    # if select() is interrupted by a signal (errno==EINTR) then
    # we loop back and enter the select() again.
    if timeout is not None:
        end_time = time.time() + timeout
    while True:
        try:
            return select.select(iwtd, owtd, ewtd, timeout)
        except InterruptedError:
            err = sys.exc_info()[1]
            if err.args[0] == errno.EINTR:
                # if we loop back we have to subtract the
                # amount of time we already waited.
                if timeout is not None:
                    timeout = end_time - time.time()
                    if timeout < 0:
                        return([], [], [])
            else:
                # something else caused the select.error, so
                # this actually is an exception.
                raise


def poll_ignore_interrupts(fds, timeout=None):
    '''Simple wrapper around poll to register file descriptors and
    ignore signals.'''

    if timeout is not None:
        end_time = time.time() + timeout

    poller = select.poll()
    for fd in fds:
        poller.register(fd, select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR)

    while True:
        try:
            timeout_ms = None if timeout is None else timeout * 1000
            results = poller.poll(timeout_ms)
            return [afd for afd, _ in results]
        except InterruptedError:
            err = sys.exc_info()[1]
            if err.args[0] == errno.EINTR:
                # if we loop back we have to subtract the
                # amount of time we already waited.
                if timeout is not None:
                    timeout = end_time - time.time()
                    if timeout < 0:
                        return []
            else:
                # something else caused the select.error, so
                # this actually is an exception.
                raise
@ -0,0 +1,73 @@
Metadata-Version: 1.2
Name: pip
Version: 19.0.3
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: pypa-dev@groups.google.com
License: MIT
Description: pip - The Python Package Installer
        ==================================

        .. image:: https://img.shields.io/pypi/v/pip.svg
           :target: https://pypi.org/project/pip/

        .. image:: https://readthedocs.org/projects/pip/badge/?version=latest
           :target: https://pip.pypa.io/en/latest

        pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.

        Please take a look at our documentation for how to install and use pip:

        * `Installation`_
        * `Usage`_
        * `Release notes`_

        If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms:

        * `Issue tracking`_
        * `Discourse channel`_
        * `User IRC`_

        If you want to get involved head over to GitHub to get the source code and feel free to jump on the developer mailing lists and chat rooms:

        * `GitHub page`_
        * `Dev mailing list`_
        * `Dev IRC`_

        Code of Conduct
        ---------------

        Everyone interacting in the pip project's codebases, issue trackers, chat
        rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.

        .. _package installer: https://packaging.python.org/en/latest/current/
        .. _Python Package Index: https://pypi.org
        .. _Installation: https://pip.pypa.io/en/stable/installing.html
        .. _Usage: https://pip.pypa.io/en/stable/
        .. _Release notes: https://pip.pypa.io/en/stable/news.html
        .. _GitHub page: https://github.com/pypa/pip
        .. _Issue tracking: https://github.com/pypa/pip/issues
        .. _Discourse channel: https://discuss.python.org/c/packaging
        .. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev
        .. _User IRC: https://webchat.freenode.net/?channels=%23pypa
        .. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev
        .. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/

Keywords: distutils easy_install egg setuptools wheel virtualenv
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
@ -0,0 +1,391 @@
AUTHORS.txt
LICENSE.txt
MANIFEST.in
NEWS.rst
README.rst
pyproject.toml
setup.cfg
setup.py
docs/pip_sphinxext.py
docs/html/conf.py
docs/html/cookbook.rst
docs/html/index.rst
docs/html/installing.rst
docs/html/logic.rst
docs/html/news.rst
docs/html/quickstart.rst
docs/html/usage.rst
docs/html/user_guide.rst
docs/html/development/configuration.rst
docs/html/development/contributing.rst
docs/html/development/getting-started.rst
docs/html/development/index.rst
docs/html/development/release-process.rst
docs/html/development/vendoring-policy.rst
docs/html/reference/index.rst
docs/html/reference/pip.rst
docs/html/reference/pip_check.rst
docs/html/reference/pip_config.rst
docs/html/reference/pip_download.rst
docs/html/reference/pip_freeze.rst
docs/html/reference/pip_hash.rst
docs/html/reference/pip_install.rst
docs/html/reference/pip_list.rst
docs/html/reference/pip_search.rst
docs/html/reference/pip_show.rst
docs/html/reference/pip_uninstall.rst
docs/html/reference/pip_wheel.rst
docs/man/index.rst
docs/man/commands/check.rst
docs/man/commands/config.rst
docs/man/commands/download.rst
docs/man/commands/freeze.rst
docs/man/commands/hash.rst
docs/man/commands/help.rst
docs/man/commands/install.rst
docs/man/commands/list.rst
docs/man/commands/search.rst
docs/man/commands/show.rst
docs/man/commands/uninstall.rst
docs/man/commands/wheel.rst
src/pip/__init__.py
src/pip/__main__.py
src/pip.egg-info/PKG-INFO
src/pip.egg-info/SOURCES.txt
src/pip.egg-info/dependency_links.txt
src/pip.egg-info/entry_points.txt
src/pip.egg-info/not-zip-safe
src/pip.egg-info/top_level.txt
src/pip/_internal/__init__.py
src/pip/_internal/build_env.py
src/pip/_internal/cache.py
src/pip/_internal/configuration.py
src/pip/_internal/download.py
src/pip/_internal/exceptions.py
src/pip/_internal/index.py
src/pip/_internal/locations.py
src/pip/_internal/pep425tags.py
src/pip/_internal/pyproject.py
src/pip/_internal/resolve.py
src/pip/_internal/wheel.py
src/pip/_internal/cli/__init__.py
src/pip/_internal/cli/autocompletion.py
src/pip/_internal/cli/base_command.py
src/pip/_internal/cli/cmdoptions.py
src/pip/_internal/cli/main_parser.py
src/pip/_internal/cli/parser.py
src/pip/_internal/cli/status_codes.py
src/pip/_internal/commands/__init__.py
src/pip/_internal/commands/check.py
src/pip/_internal/commands/completion.py
src/pip/_internal/commands/configuration.py
src/pip/_internal/commands/download.py
src/pip/_internal/commands/freeze.py
src/pip/_internal/commands/hash.py
src/pip/_internal/commands/help.py
src/pip/_internal/commands/install.py
src/pip/_internal/commands/list.py
src/pip/_internal/commands/search.py
src/pip/_internal/commands/show.py
src/pip/_internal/commands/uninstall.py
src/pip/_internal/commands/wheel.py
src/pip/_internal/models/__init__.py
src/pip/_internal/models/candidate.py
src/pip/_internal/models/format_control.py
src/pip/_internal/models/index.py
src/pip/_internal/models/link.py
src/pip/_internal/operations/__init__.py
src/pip/_internal/operations/check.py
src/pip/_internal/operations/freeze.py
src/pip/_internal/operations/prepare.py
src/pip/_internal/req/__init__.py
src/pip/_internal/req/constructors.py
src/pip/_internal/req/req_file.py
src/pip/_internal/req/req_install.py
src/pip/_internal/req/req_set.py
src/pip/_internal/req/req_tracker.py
src/pip/_internal/req/req_uninstall.py
src/pip/_internal/utils/__init__.py
src/pip/_internal/utils/appdirs.py
src/pip/_internal/utils/compat.py
src/pip/_internal/utils/deprecation.py
src/pip/_internal/utils/encoding.py
src/pip/_internal/utils/filesystem.py
src/pip/_internal/utils/glibc.py
src/pip/_internal/utils/hashes.py
src/pip/_internal/utils/logging.py
src/pip/_internal/utils/misc.py
src/pip/_internal/utils/models.py
src/pip/_internal/utils/outdated.py
src/pip/_internal/utils/packaging.py
src/pip/_internal/utils/setuptools_build.py
src/pip/_internal/utils/temp_dir.py
src/pip/_internal/utils/typing.py
src/pip/_internal/utils/ui.py
src/pip/_internal/vcs/__init__.py
src/pip/_internal/vcs/bazaar.py
src/pip/_internal/vcs/git.py
src/pip/_internal/vcs/mercurial.py
src/pip/_internal/vcs/subversion.py
src/pip/_vendor/README.rst
src/pip/_vendor/__init__.py
src/pip/_vendor/appdirs.LICENSE.txt
src/pip/_vendor/appdirs.py
src/pip/_vendor/distro.LICENSE
src/pip/_vendor/distro.py
src/pip/_vendor/ipaddress.LICENSE
src/pip/_vendor/ipaddress.py
src/pip/_vendor/pyparsing.LICENSE
src/pip/_vendor/pyparsing.py
src/pip/_vendor/retrying.LICENSE
src/pip/_vendor/retrying.py
src/pip/_vendor/six.LICENSE
src/pip/_vendor/six.py
src/pip/_vendor/vendor.txt
src/pip/_vendor/cachecontrol/LICENSE.txt
src/pip/_vendor/cachecontrol/__init__.py
src/pip/_vendor/cachecontrol/_cmd.py
src/pip/_vendor/cachecontrol/adapter.py
src/pip/_vendor/cachecontrol/cache.py
src/pip/_vendor/cachecontrol/compat.py
src/pip/_vendor/cachecontrol/controller.py
src/pip/_vendor/cachecontrol/filewrapper.py
src/pip/_vendor/cachecontrol/heuristics.py
src/pip/_vendor/cachecontrol/serialize.py
src/pip/_vendor/cachecontrol/wrapper.py
src/pip/_vendor/cachecontrol/caches/__init__.py
src/pip/_vendor/cachecontrol/caches/file_cache.py
src/pip/_vendor/cachecontrol/caches/redis_cache.py
src/pip/_vendor/certifi/LICENSE
src/pip/_vendor/certifi/__init__.py
src/pip/_vendor/certifi/__main__.py
src/pip/_vendor/certifi/cacert.pem
src/pip/_vendor/certifi/core.py
src/pip/_vendor/chardet/LICENSE
src/pip/_vendor/chardet/__init__.py
src/pip/_vendor/chardet/big5freq.py
src/pip/_vendor/chardet/big5prober.py
src/pip/_vendor/chardet/chardistribution.py
src/pip/_vendor/chardet/charsetgroupprober.py
src/pip/_vendor/chardet/charsetprober.py
src/pip/_vendor/chardet/codingstatemachine.py
src/pip/_vendor/chardet/compat.py
src/pip/_vendor/chardet/cp949prober.py
src/pip/_vendor/chardet/enums.py
src/pip/_vendor/chardet/escprober.py
src/pip/_vendor/chardet/escsm.py
src/pip/_vendor/chardet/eucjpprober.py
src/pip/_vendor/chardet/euckrfreq.py
src/pip/_vendor/chardet/euckrprober.py
src/pip/_vendor/chardet/euctwfreq.py
src/pip/_vendor/chardet/euctwprober.py
src/pip/_vendor/chardet/gb2312freq.py
src/pip/_vendor/chardet/gb2312prober.py
src/pip/_vendor/chardet/hebrewprober.py
src/pip/_vendor/chardet/jisfreq.py
src/pip/_vendor/chardet/jpcntx.py
src/pip/_vendor/chardet/langbulgarianmodel.py
src/pip/_vendor/chardet/langcyrillicmodel.py
src/pip/_vendor/chardet/langgreekmodel.py
src/pip/_vendor/chardet/langhebrewmodel.py
src/pip/_vendor/chardet/langhungarianmodel.py
src/pip/_vendor/chardet/langthaimodel.py
src/pip/_vendor/chardet/langturkishmodel.py
src/pip/_vendor/chardet/latin1prober.py
src/pip/_vendor/chardet/mbcharsetprober.py
src/pip/_vendor/chardet/mbcsgroupprober.py
src/pip/_vendor/chardet/mbcssm.py
src/pip/_vendor/chardet/sbcharsetprober.py
src/pip/_vendor/chardet/sbcsgroupprober.py
src/pip/_vendor/chardet/sjisprober.py
src/pip/_vendor/chardet/universaldetector.py
src/pip/_vendor/chardet/utf8prober.py
src/pip/_vendor/chardet/version.py
src/pip/_vendor/chardet/cli/__init__.py
src/pip/_vendor/chardet/cli/chardetect.py
src/pip/_vendor/colorama/LICENSE.txt
src/pip/_vendor/colorama/__init__.py
src/pip/_vendor/colorama/ansi.py
src/pip/_vendor/colorama/ansitowin32.py
src/pip/_vendor/colorama/initialise.py
src/pip/_vendor/colorama/win32.py
src/pip/_vendor/colorama/winterm.py
src/pip/_vendor/distlib/LICENSE.txt
src/pip/_vendor/distlib/__init__.py
src/pip/_vendor/distlib/compat.py
src/pip/_vendor/distlib/database.py
src/pip/_vendor/distlib/index.py
src/pip/_vendor/distlib/locators.py
src/pip/_vendor/distlib/manifest.py
src/pip/_vendor/distlib/markers.py
src/pip/_vendor/distlib/metadata.py
src/pip/_vendor/distlib/resources.py
src/pip/_vendor/distlib/scripts.py
src/pip/_vendor/distlib/t32.exe
src/pip/_vendor/distlib/t64.exe
src/pip/_vendor/distlib/util.py
src/pip/_vendor/distlib/version.py
src/pip/_vendor/distlib/w32.exe
src/pip/_vendor/distlib/w64.exe
src/pip/_vendor/distlib/wheel.py
src/pip/_vendor/distlib/_backport/__init__.py
src/pip/_vendor/distlib/_backport/misc.py
src/pip/_vendor/distlib/_backport/shutil.py
src/pip/_vendor/distlib/_backport/sysconfig.cfg
src/pip/_vendor/distlib/_backport/sysconfig.py
src/pip/_vendor/distlib/_backport/tarfile.py
src/pip/_vendor/html5lib/LICENSE
src/pip/_vendor/html5lib/__init__.py
src/pip/_vendor/html5lib/_ihatexml.py
src/pip/_vendor/html5lib/_inputstream.py
src/pip/_vendor/html5lib/_tokenizer.py
src/pip/_vendor/html5lib/_utils.py
src/pip/_vendor/html5lib/constants.py
src/pip/_vendor/html5lib/html5parser.py
src/pip/_vendor/html5lib/serializer.py
src/pip/_vendor/html5lib/_trie/__init__.py
src/pip/_vendor/html5lib/_trie/_base.py
src/pip/_vendor/html5lib/_trie/datrie.py
src/pip/_vendor/html5lib/_trie/py.py
src/pip/_vendor/html5lib/filters/__init__.py
src/pip/_vendor/html5lib/filters/alphabeticalattributes.py
src/pip/_vendor/html5lib/filters/base.py
src/pip/_vendor/html5lib/filters/inject_meta_charset.py
src/pip/_vendor/html5lib/filters/lint.py
src/pip/_vendor/html5lib/filters/optionaltags.py
src/pip/_vendor/html5lib/filters/sanitizer.py
src/pip/_vendor/html5lib/filters/whitespace.py
src/pip/_vendor/html5lib/treeadapters/__init__.py
src/pip/_vendor/html5lib/treeadapters/genshi.py
src/pip/_vendor/html5lib/treeadapters/sax.py
src/pip/_vendor/html5lib/treebuilders/__init__.py
src/pip/_vendor/html5lib/treebuilders/base.py
src/pip/_vendor/html5lib/treebuilders/dom.py
src/pip/_vendor/html5lib/treebuilders/etree.py
src/pip/_vendor/html5lib/treebuilders/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/__init__.py
src/pip/_vendor/html5lib/treewalkers/base.py
src/pip/_vendor/html5lib/treewalkers/dom.py
src/pip/_vendor/html5lib/treewalkers/etree.py
src/pip/_vendor/html5lib/treewalkers/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/genshi.py
src/pip/_vendor/idna/LICENSE.rst
src/pip/_vendor/idna/__init__.py
src/pip/_vendor/idna/codec.py
src/pip/_vendor/idna/compat.py
src/pip/_vendor/idna/core.py
src/pip/_vendor/idna/idnadata.py
src/pip/_vendor/idna/intranges.py
src/pip/_vendor/idna/package_data.py
src/pip/_vendor/idna/uts46data.py
src/pip/_vendor/lockfile/LICENSE
src/pip/_vendor/lockfile/__init__.py
src/pip/_vendor/lockfile/linklockfile.py
src/pip/_vendor/lockfile/mkdirlockfile.py
src/pip/_vendor/lockfile/pidlockfile.py
src/pip/_vendor/lockfile/sqlitelockfile.py
src/pip/_vendor/lockfile/symlinklockfile.py
src/pip/_vendor/msgpack/COPYING
src/pip/_vendor/msgpack/__init__.py
src/pip/_vendor/msgpack/_version.py
src/pip/_vendor/msgpack/exceptions.py
src/pip/_vendor/msgpack/fallback.py
src/pip/_vendor/packaging/LICENSE
src/pip/_vendor/packaging/LICENSE.APACHE
src/pip/_vendor/packaging/LICENSE.BSD
src/pip/_vendor/packaging/__about__.py
src/pip/_vendor/packaging/__init__.py
src/pip/_vendor/packaging/_compat.py
src/pip/_vendor/packaging/_structures.py
src/pip/_vendor/packaging/markers.py
src/pip/_vendor/packaging/requirements.py
src/pip/_vendor/packaging/specifiers.py
src/pip/_vendor/packaging/utils.py
src/pip/_vendor/packaging/version.py
src/pip/_vendor/pep517/LICENSE
src/pip/_vendor/pep517/__init__.py
src/pip/_vendor/pep517/_in_process.py
src/pip/_vendor/pep517/build.py
src/pip/_vendor/pep517/check.py
src/pip/_vendor/pep517/colorlog.py
src/pip/_vendor/pep517/compat.py
src/pip/_vendor/pep517/envbuild.py
src/pip/_vendor/pep517/wrappers.py
src/pip/_vendor/pkg_resources/LICENSE
src/pip/_vendor/pkg_resources/__init__.py
src/pip/_vendor/pkg_resources/py31compat.py
src/pip/_vendor/progress/LICENSE
src/pip/_vendor/progress/__init__.py
src/pip/_vendor/progress/bar.py
src/pip/_vendor/progress/counter.py
src/pip/_vendor/progress/helpers.py
src/pip/_vendor/progress/spinner.py
src/pip/_vendor/pytoml/LICENSE
src/pip/_vendor/pytoml/__init__.py
src/pip/_vendor/pytoml/core.py
src/pip/_vendor/pytoml/parser.py
src/pip/_vendor/pytoml/test.py
src/pip/_vendor/pytoml/utils.py
src/pip/_vendor/pytoml/writer.py
src/pip/_vendor/requests/LICENSE
src/pip/_vendor/requests/__init__.py
src/pip/_vendor/requests/__version__.py
src/pip/_vendor/requests/_internal_utils.py
src/pip/_vendor/requests/adapters.py
src/pip/_vendor/requests/api.py
src/pip/_vendor/requests/auth.py
src/pip/_vendor/requests/certs.py
src/pip/_vendor/requests/compat.py
src/pip/_vendor/requests/cookies.py
src/pip/_vendor/requests/exceptions.py
src/pip/_vendor/requests/help.py
src/pip/_vendor/requests/hooks.py
src/pip/_vendor/requests/models.py
src/pip/_vendor/requests/packages.py
src/pip/_vendor/requests/sessions.py
src/pip/_vendor/requests/status_codes.py
src/pip/_vendor/requests/structures.py
src/pip/_vendor/requests/utils.py
src/pip/_vendor/urllib3/LICENSE.txt
src/pip/_vendor/urllib3/__init__.py
src/pip/_vendor/urllib3/_collections.py
src/pip/_vendor/urllib3/connection.py
src/pip/_vendor/urllib3/connectionpool.py
src/pip/_vendor/urllib3/exceptions.py
src/pip/_vendor/urllib3/fields.py
src/pip/_vendor/urllib3/filepost.py
src/pip/_vendor/urllib3/poolmanager.py
src/pip/_vendor/urllib3/request.py
src/pip/_vendor/urllib3/response.py
src/pip/_vendor/urllib3/contrib/__init__.py
src/pip/_vendor/urllib3/contrib/_appengine_environ.py
src/pip/_vendor/urllib3/contrib/appengine.py
src/pip/_vendor/urllib3/contrib/ntlmpool.py
src/pip/_vendor/urllib3/contrib/pyopenssl.py
src/pip/_vendor/urllib3/contrib/securetransport.py
src/pip/_vendor/urllib3/contrib/socks.py
src/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
src/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
src/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
src/pip/_vendor/urllib3/packages/__init__.py
src/pip/_vendor/urllib3/packages/six.py
src/pip/_vendor/urllib3/packages/backports/__init__.py
src/pip/_vendor/urllib3/packages/backports/makefile.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py
src/pip/_vendor/urllib3/util/__init__.py
src/pip/_vendor/urllib3/util/connection.py
src/pip/_vendor/urllib3/util/queue.py
src/pip/_vendor/urllib3/util/request.py
src/pip/_vendor/urllib3/util/response.py
src/pip/_vendor/urllib3/util/retry.py
src/pip/_vendor/urllib3/util/ssl_.py
src/pip/_vendor/urllib3/util/timeout.py
src/pip/_vendor/urllib3/util/url.py
src/pip/_vendor/urllib3/util/wait.py
src/pip/_vendor/webencodings/LICENSE
src/pip/_vendor/webencodings/__init__.py
src/pip/_vendor/webencodings/labels.py
src/pip/_vendor/webencodings/mklabels.py
src/pip/_vendor/webencodings/tests.py
src/pip/_vendor/webencodings/x_user_defined.py
@@ -0,0 +1 @@

@@ -0,0 +1,5 @@
[console_scripts]
pip = pip._internal:main
pip3 = pip._internal:main
pip3.6 = pip._internal:main

@@ -0,0 +1 @@

@@ -0,0 +1 @@
pip

@@ -0,0 +1 @@
__version__ = "19.0.3"
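The `[console_scripts]` entries above are what produce the `pip`, `pip3` and `pip3.6` executables in the venv's `bin/` directory: for each entry, setuptools writes a small wrapper script that imports the named callable and exits with its return value. A rough sketch of such a wrapper follows; it is illustrative only and not a file from this commit.

# Rough equivalent of the generated bin/pip wrapper for
# "pip = pip._internal:main" (sketch, not the literal generated file).
import re
import sys

from pip._internal import main

if __name__ == '__main__':
    # Generated wrappers strip a trailing "-script.pyw"/".exe" suffix
    # from argv[0] before handing control to the entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())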
@@ -0,0 +1,19 @@
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

from pip._internal import main as _main  # isort:skip # noqa

if __name__ == '__main__':
    sys.exit(_main())
@@ -0,0 +1,78 @@
#!/usr/bin/env python
from __future__ import absolute_import

import locale
import logging
import os
import warnings

import sys

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available.  requests unconditionally imports urllib3's socks contrib
# module, triggering this warning.  The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage.  I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning.  pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    pass
else:
    # Checks for OpenSSL 1.0.1 on MacOS
    if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            pass
        else:
            securetransport.inject_into_urllib3()

from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import commands_dict
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation
from pip._internal.vcs import git, mercurial, subversion, bazaar  # noqa
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def main(args=None):
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
    return command.main(cmd_args)
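Because `main()` takes an explicit argument list, the vendored pip can also be driven in-process rather than through the console scripts. A small, hedged example (the package name is illustrative only):

from pip._internal import main as pip_main

# Equivalent to running "pip install requests==2.21.0" in this venv.
# Calling pip in-process like this can be handy in bootstrap scripts,
# though a subprocess invocation is generally more robust.
exit_code = pip_main(['install', 'requests==2.21.0'])
print('pip exited with', exit_code)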
@@ -0,0 +1,215 @@
"""Build Environment used for isolation during sdist building
"""

import logging
import os
import sys
import textwrap
from collections import OrderedDict
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths

from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet

from pip import __file__ as pip_location
from pip._internal.utils.misc import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import open_spinner

if MYPY_CHECK_RUNNING:
    from typing import Tuple, Set, Iterable, Optional, List  # noqa: F401
    from pip._internal.index import PackageFinder  # noqa: F401

logger = logging.getLogger(__name__)


class _Prefix:

    def __init__(self, path):
        # type: (str) -> None
        self.path = path
        self.setup = False
        self.bin_dir = get_paths(
            'nt' if os.name == 'nt' else 'posix_prefix',
            vars={'base': path, 'platbase': path}
        )['scripts']
        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=False, prefix=path)
        platlib = get_python_lib(plat_specific=True, prefix=path)
        if purelib == platlib:
            self.lib_dirs = [purelib]
        else:
            self.lib_dirs = [purelib, platlib]


class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self):
        # type: () -> None
        self._temp_dir = TempDirectory(kind="build-env")
        self._temp_dir.create()

        self._prefixes = OrderedDict((
            (name, _Prefix(os.path.join(self._temp_dir.path, name)))
            for name in ('normal', 'overlay')
        ))

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site) for site in (
                get_python_lib(plat_specific=False),
                get_python_lib(plat_specific=True),
            )
        }
        self._site_dir = os.path.join(self._temp_dir.path, 'site')
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(textwrap.dedent(
                '''
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories.
                # ensuring .pth file are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                '''
            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))

    def __enter__(self):
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
        }

        path = self._bin_dirs[:]
        old_path = self._save_env['PATH']
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update({
            'PATH': os.pathsep.join(path),
            'PYTHONNOUSERSITE': '1',
            'PYTHONPATH': os.pathsep.join(pythonpath),
        })

    def __exit__(self, exc_type, exc_val, exc_tb):
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def cleanup(self):
        # type: () -> None
        self._temp_dir.cleanup()

    def check_requirements(self, reqs):
        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
        """Return 2 sets:
            - conflicting requirements: set of (installed, wanted) reqs tuples
            - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            ws = WorkingSet(self._lib_dirs)
            for req in reqs:
                try:
                    if ws.find(Requirement.parse(req)) is None:
                        missing.add(req)
                except VersionConflict as e:
                    conflicting.add((str(e.args[0].as_requirement()),
                                     str(e.args[1])))
        return conflicting, missing

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: Optional[str]
    ):
        # type: (...) -> None
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        args = [
            sys.executable, os.path.dirname(pip_location), 'install',
            '--ignore-installed', '--no-user', '--prefix', prefix.path,
            '--no-warn-script-location',
        ]  # type: List[str]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append('-v')
        for format_control in ('no_binary', 'only_binary'):
            formats = getattr(finder.format_control, format_control)
            args.extend(('--' + format_control.replace('_', '-'),
                         ','.join(sorted(formats or {':none:'}))))
        if finder.index_urls:
            args.extend(['-i', finder.index_urls[0]])
            for extra_index in finder.index_urls[1:]:
                args.extend(['--extra-index-url', extra_index])
        else:
            args.append('--no-index')
        for link in finder.find_links:
            args.extend(['--find-links', link])
        for _, host, _ in finder.secure_origins:
            args.extend(['--trusted-host', host])
        if finder.allow_all_prereleases:
            args.append('--pre')
        args.append('--')
        args.extend(requirements)
        with open_spinner(message) as spinner:
            call_subprocess(args, show_stdout=False, spinner=spinner)


class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """

    def __init__(self):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass

    def install_requirements(self, finder, requirements, prefix, message):
        raise NotImplementedError()
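The isolation above is driven purely by environment variables: entering the `BuildEnvironment` context swaps PATH, PYTHONPATH and PYTHONNOUSERSITE so that child processes only see the temporary prefixes, and leaving it restores the previous values. A minimal sketch of that behaviour, assuming this vendored pip is importable:

import os

from pip._internal.build_env import BuildEnvironment

env = BuildEnvironment()
with env:
    # Inside the block, child interpreters load the generated sitecustomize
    # and resolve imports against the isolated 'normal'/'overlay' prefixes.
    print(os.environ['PYTHONNOUSERSITE'])  # '1'
    print(os.environ['PYTHONPATH'])        # points at the generated site dir
# On exit the saved values (or their absence) are restored.
env.cleanup()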
@@ -0,0 +1,224 @@
"""Cache Management
"""

import errno
import hashlib
import logging
import os

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.download import path_to_url
from pip._internal.models.link import Link
from pip._internal.utils.compat import expanduser
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import InvalidWheelFilename, Wheel

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, List, Any  # noqa: F401
    from pip._internal.index import FormatControl  # noqa: F401

logger = logging.getLogger(__name__)


class Cache(object):
    """An abstract class - provides cache directories for data from links


        :param cache_dir: The root of the cache.
        :param format_control: An object of FormatControl class to limit
            binaries being read from the cache.
        :param allowed_formats: which formats of files the cache should store.
            ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        # type: (str, FormatControl, Set[str]) -> None
        super(Cache, self).__init__()
        self.cache_dir = expanduser(cache_dir) if cache_dir else None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        # type: (Link) -> List[str]
        """Get parts of part that must be os.path.joined with cache_dir
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            key_parts.append("=".join([link.hash_name, link.hash]))
        key_url = "#".join(key_parts)

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link, package_name):
        # type: (Link, Optional[str]) -> List[Any]
        can_not_cache = (
            not self.cache_dir or
            not package_name or
            not link
        )
        if can_not_cache:
            return []

        canonical_name = canonicalize_name(package_name)
        formats = self.format_control.get_allowed_formats(
            canonical_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        root = self.get_path_for_link(link)
        try:
            return os.listdir(root)
        except OSError as err:
            if err.errno in {errno.ENOENT, errno.ENOTDIR}:
                return []
            raise

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()

    def _link_for_candidate(self, link, candidate):
        # type: (Link, str) -> Link
        root = self.get_path_for_link(link)
        path = os.path.join(root, candidate)

        return Link(path_to_url(path))

    def cleanup(self):
        # type: () -> None
        pass


class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)

        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        candidates = []

        for wheel_name in self._get_candidates(link, package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                continue
            if not wheel.supported():
                # Built for a different python/arch/etc
                continue
            candidates.append((wheel.support_index_min(), wheel_name))

        if not candidates:
            return link

        return self._link_for_candidate(link, min(candidates)[1])


class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates it's own temporary cache directory
    """

    def __init__(self, format_control):
        # type: (FormatControl) -> None
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        # type: () -> None
        self._temp_dir.cleanup()


class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for gracefully degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        # type: (Link) -> str
        return self._ephem_cache.get_path_for_link(link)

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        retval = self._wheel_cache.get(link, package_name)
        if retval is link:
            retval = self._ephem_cache.get(link, package_name)
        return retval

    def cleanup(self):
        # type: () -> None
        self._wheel_cache.cleanup()
        self._ephem_cache.cleanup()
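The cache layout comes entirely from `_get_cache_path_parts` above: the requirement URL is hashed with SHA-224 and the digest is split into nested directories so no single directory accumulates too many entries. A self-contained sketch of the same computation (the URL and cache root here are examples, not values from this commit):

import hashlib
import os

def wheel_cache_subdirs(url):
    # Mirrors Cache._get_cache_path_parts: sha224 the cache key URL and
    # split the hex digest into 2/2/2/rest nested directory names.
    hashed = hashlib.sha224(url.encode()).hexdigest()
    return [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

url = 'https://files.pythonhosted.org/packages/example/example-1.0.tar.gz'
print(os.path.join('~/.cache/pip', 'wheels', *wheel_cache_subdirs(url)))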
@@ -0,0 +1,4 @@
"""Subpackage containing all of pip's command line interface related code
"""

# This file intentionally does not import submodules
@@ -0,0 +1,152 @@
"""Logic that powers autocompletion installed by ``pip completion``.
"""

import optparse
import os
import sys

from pip._internal.cli.main_parser import create_main_parser
from pip._internal.commands import commands_dict, get_summaries
from pip._internal.utils.misc import get_installed_distributions


def autocomplete():
    """Entry Point for completion of main and subcommand options.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()

        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords, cword, subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            options = auto_complete_paths(current, completion_type)
            options = ((opt, 0) for opt in options)
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        opts = (o for it in opts for o in it)
        if current.startswith('-'):
            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, opts)
            if completion_type:
                subcommands = auto_complete_paths(current, completion_type)

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def get_path_completion_type(cwords, cword, opts):
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    if cword < 2 or not cwords[cword - 2].startswith('-'):
        return
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        for o in str(opt).split('/'):
            if cwords[cword - 2].split('=')[0] == o:
                if not opt.metavar or any(
                        x in ('path', 'file', 'dir')
                        for x in opt.metavar.split('/')):
                    return opt.metavar


def auto_complete_paths(current, completion_type):
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type(`file`, `path` or `dir`)i
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(current_path, os.R_OK):
        return
    filename = os.path.normcase(filename)
    # list all files that start with ``filename``
    file_list = (x for x in os.listdir(current_path)
                 if os.path.normcase(x).startswith(filename))
    for f in file_list:
        opt = os.path.join(current_path, f)
        comp_file = os.path.normcase(os.path.join(directory, f))
        # complete regular files when there is not ``<dir>`` after option
        # complete directories when there is ``<file>``, ``<path>`` or
        # ``<dir>``after option
        if completion_type != 'dir' and os.path.isfile(opt):
            yield comp_file
        elif os.path.isdir(opt):
            yield os.path.join(comp_file, '')
@@ -0,0 +1,341 @@
"""Base Command class, and related routines"""
from __future__ import absolute_import, print_function

import logging
import logging.config
import optparse
import os
import platform
import sys
import traceback

from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import (
    ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.cli.status_codes import (
    ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
    VIRTUALENV_NOT_FOUND,
)
from pip._internal.download import PipSession
from pip._internal.exceptions import (
    BadCommand, CommandError, InstallationError, PreviousBuildDirError,
    UninstallationError,
)
from pip._internal.index import PackageFinder
from pip._internal.locations import running_under_virtualenv
from pip._internal.req.constructors import (
    install_req_from_editable, install_req_from_line,
)
from pip._internal.req.req_file import parse_requirements
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import (
    get_prog, normalize_path, redact_password_from_url,
)
from pip._internal.utils.outdated import pip_version_check
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, List, Tuple, Any  # noqa: F401
    from optparse import Values  # noqa: F401
    from pip._internal.cache import WheelCache  # noqa: F401
    from pip._internal.req.req_set import RequirementSet  # noqa: F401

__all__ = ['Command']

logger = logging.getLogger(__name__)


class Command(object):
    name = None  # type: Optional[str]
    usage = None  # type: Optional[str]
    hidden = False  # type: bool
    ignore_require_venv = False  # type: bool

    def __init__(self, isolated=False):
        # type: (bool) -> None
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def run(self, options, args):
        # type: (Values, List[Any]) -> Any
        raise NotImplementedError

    def _build_session(self, options, retries=None, timeout=None):
        # type: (Values, Optional[int], Optional[int]) -> PipSession
        session = PipSession(
            cache=(
                normalize_path(os.path.join(options.cache_dir, "http"))
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            insecure_hosts=options.trusted_hosts,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session

    def parse_args(self, args):
        # type: (List[str]) -> Tuple
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        # type: (List[str]) -> int
        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        if sys.version_info[:2] == (3, 4):
            deprecated(
                "Python 3.4 support has been deprecated. pip 19.1 will be the "
                "last one supporting it. Please upgrade your Python as Python "
                "3.4 won't be maintained after March 2019 (cf PEP 429).",
                replacement=None,
                gone_in='19.2',
            )
        elif sys.version_info[:2] == (2, 7):
            message = (
                "A future version of pip will drop support for Python 2.7."
            )
            if platform.python_implementation() == "CPython":
                message = (
                    "Python 2.7 will reach the end of its life on January "
                    "1st, 2020. Please upgrade your Python as Python 2.7 "
                    "won't be maintained after that date. "
                ) + message
            deprecated(message, replacement=None, gone_in=None)

        # TODO: Try to get these passing down from the command?
        #       without resorting to os.environ to hold these.
        #       This also affects isolated builds and it should.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('ERROR: %s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to stderr
            # because stdout no longer works.
            print('ERROR: Pipe to stdout was broken', file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BaseException:
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            allow_version_check = (
                # Does this command have the index_group options?
                hasattr(options, "no_index") and
                # Is this command allowed to perform this check?
                not (options.disable_pip_version_check or options.no_index)
            )
            # Check if we're using the latest version of pip available
            if allow_version_check:
                session = self._build_session(
                    options,
                    retries=0,
                    timeout=min(5, options.timeout)
                )
                with session:
                    pip_version_check(session, options)

            # Shutdown the logging module
            logging.shutdown()

        return SUCCESS


class RequirementCommand(Command):

    @staticmethod
    def populate_requirement_set(requirement_set,  # type: RequirementSet
                                 args,  # type: List[str]
                                 options,  # type: Values
                                 finder,  # type: PackageFinder
                                 session,  # type: PipSession
                                 name,  # type: str
                                 wheel_cache  # type: Optional[WheelCache]
                                 ):
        # type: (...) -> None
        """
        Marshal cmd line args into a requirement set.
        """
        # NOTE: As a side-effect, options.require_hashes and
        #       requirement_set.require_hashes may be updated

        for filename in options.constraints:
            for req_to_add in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        for req in args:
            req_to_add = install_req_from_line(
                req, None, isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for filename in options.requirements:
            for req_to_add in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache,
                    use_pep517=options.use_pep517):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or options.requirements):
            opts = {'name': name}
            if options.find_links:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(maybe you meant "pip %(name)s %(links)s"?)' %
                    dict(opts, links=' '.join(options.find_links)))
            else:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(see "pip help %(name)s")' % opts)

    def _build_package_finder(
        self,
        options,  # type: Values
        session,  # type: PipSession
        platform=None,  # type: Optional[str]
        python_versions=None,  # type: Optional[List[str]]
        abi=None,  # type: Optional[str]
        implementation=None  # type: Optional[str]
    ):
        # type: (...) -> PackageFinder
        """
        Create a package finder appropriate to this requirement command.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug(
                'Ignoring indexes: %s',
                ','.join(redact_password_from_url(url) for url in index_urls),
            )
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
            prefer_binary=options.prefer_binary,
        )
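Every pip subcommand is a subclass of the `Command` class above: it sets the class attributes, adds its options, implements `run()`, and `main()` wraps that with logging, deprecation warnings and exit-code handling. A toy sketch of such a subclass, assuming this vendored pip is importable (it is not a command from this commit, and it would still need to be registered in pip._internal.commands.commands_dict before `pip hello` resolved to it):

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS

class HelloCommand(Command):
    """Print a greeting (illustrative example only)."""
    name = 'hello'
    usage = '%prog'
    summary = 'Say hello.'

    def run(self, options, args):
        # run() returns one of the status codes that main() passes back
        # to the shell as the process exit code.
        print('hello from the %s command' % self.name)
        return SUCCESS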
@ -0,0 +1,809 @@
|
||||||
|
"""
|
||||||
|
shared options and groups
|
||||||
|
|
||||||
|
The principle here is to define options once, but *not* instantiate them
|
||||||
|
globally. One reason being that options with action='append' can carry state
|
||||||
|
between parses. pip parses general options twice internally, and shouldn't
|
||||||
|
pass on state. To be consistent, all options will follow this design.
|
||||||
|
|
||||||
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import textwrap
|
||||||
|
import warnings
|
||||||
|
from distutils.util import strtobool
|
||||||
|
from functools import partial
|
||||||
|
from optparse import SUPPRESS_HELP, Option, OptionGroup
|
||||||
|
|
||||||
|
from pip._internal.exceptions import CommandError
|
||||||
|
from pip._internal.locations import USER_CACHE_DIR, src_prefix
|
||||||
|
from pip._internal.models.format_control import FormatControl
|
||||||
|
from pip._internal.models.index import PyPI
|
||||||
|
from pip._internal.utils.hashes import STRONG_HASHES
|
||||||
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||||
|
from pip._internal.utils.ui import BAR_TYPES
|
||||||
|
|
||||||
|
if MYPY_CHECK_RUNNING:
|
||||||
|
from typing import Any, Callable, Dict, List, Optional, Union # noqa: F401
|
||||||
|
from optparse import OptionParser, Values # noqa: F401
|
||||||
|
from pip._internal.cli.parser import ConfigOptionParser # noqa: F401
|
||||||
|
|
||||||
|
|
||||||
|
def raise_option_error(parser, option, msg):
|
||||||
|
"""
|
||||||
|
Raise an option parsing error using parser.error().
|
||||||
|
|
||||||
|
Args:
|
||||||
|
parser: an OptionParser instance.
|
||||||
|
option: an Option instance.
|
||||||
|
msg: the error text.
|
||||||
|
"""
|
||||||
|
msg = '{} error: {}'.format(option, msg)
|
||||||
|
msg = textwrap.fill(' '.join(msg.split()))
|
||||||
|
parser.error(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def make_option_group(group, parser):
|
||||||
|
# type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
|
||||||
|
"""
|
||||||
|
Return an OptionGroup object
|
||||||
|
group -- assumed to be dict with 'name' and 'options' keys
|
||||||
|
parser -- an optparse Parser
|
||||||
|
"""
|
||||||
|
option_group = OptionGroup(parser, group['name'])
|
||||||
|
for option in group['options']:
|
||||||
|
option_group.add_option(option())
|
||||||
|
return option_group
|
||||||
|
|
||||||
|
|
||||||
|
def check_install_build_global(options, check_options=None):
|
||||||
|
# type: (Values, Optional[Values]) -> None
|
||||||
|
"""Disable wheels if per-setup.py call options are set.
|
||||||
|
|
||||||
|
:param options: The OptionParser options to update.
|
||||||
|
:param check_options: The options to check, if not supplied defaults to
|
||||||
|
options.
|
||||||
|
"""
|
||||||
|
if check_options is None:
|
||||||
|
check_options = options
|
||||||
|
|
||||||
|
def getname(n):
|
||||||
|
return getattr(check_options, n, None)
|
||||||
|
names = ["build_options", "global_options", "install_options"]
|
||||||
|
if any(map(getname, names)):
|
||||||
|
control = options.format_control
|
||||||
|
control.disallow_binaries()
|
||||||
|
warnings.warn(
|
||||||
|
'Disabling all use of wheels due to the use of --build-options '
|
||||||
|
'/ --global-options / --install-options.', stacklevel=2,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def check_dist_restriction(options, check_target=False):
|
||||||
|
# type: (Values, bool) -> None
|
||||||
|
"""Function for determining if custom platform options are allowed.
|
||||||
|
|
||||||
|
:param options: The OptionParser options.
|
||||||
|
:param check_target: Whether or not to check if --target is being used.
|
||||||
|
"""
|
||||||
|
dist_restriction_set = any([
|
||||||
|
options.python_version,
|
||||||
|
options.platform,
|
||||||
|
options.abi,
|
||||||
|
options.implementation,
|
||||||
|
])
|
||||||
|
|
||||||
|
binary_only = FormatControl(set(), {':all:'})
|
||||||
|
sdist_dependencies_allowed = (
|
||||||
|
options.format_control != binary_only and
|
||||||
|
not options.ignore_dependencies
|
||||||
|
)
|
||||||
|
|
||||||
|
# Installations or downloads using dist restrictions must not combine
|
||||||
|
# source distributions and dist-specific wheels, as they are not
|
||||||
|
# gauranteed to be locally compatible.
|
||||||
|
if dist_restriction_set and sdist_dependencies_allowed:
|
||||||
|
raise CommandError(
|
||||||
|
"When restricting platform and interpreter constraints using "
|
||||||
|
"--python-version, --platform, --abi, or --implementation, "
|
||||||
|
"either --no-deps must be set, or --only-binary=:all: must be "
|
||||||
|
"set and --no-binary must not be set (or must be set to "
|
||||||
|
":none:)."
|
||||||
|
)
|
||||||
|
|
||||||
|
if check_target:
|
||||||
|
if dist_restriction_set and not options.target_dir:
|
||||||
|
raise CommandError(
|
||||||
|
"Can not use any platform or abi specific options unless "
|
||||||
|
"installing via '--target'"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
###########
|
||||||
|
# options #
|
||||||
|
###########
|
||||||
|
|
||||||
|
help_ = partial(
|
||||||
|
Option,
|
||||||
|
'-h', '--help',
|
||||||
|
dest='help',
|
||||||
|
action='help',
|
||||||
|
help='Show help.',
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
isolated_mode = partial(
|
||||||
|
Option,
|
||||||
|
"--isolated",
|
||||||
|
dest="isolated_mode",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help=(
|
||||||
|
"Run pip in an isolated mode, ignoring environment variables and user "
|
||||||
|
"configuration."
|
||||||
|
),
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
require_virtualenv = partial(
|
||||||
|
Option,
|
||||||
|
# Run only if inside a virtualenv, bail if not.
|
||||||
|
'--require-virtualenv', '--require-venv',
|
||||||
|
dest='require_venv',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help=SUPPRESS_HELP
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
verbose = partial(
|
||||||
|
Option,
|
||||||
|
'-v', '--verbose',
|
||||||
|
dest='verbose',
|
||||||
|
action='count',
|
||||||
|
default=0,
|
||||||
|
help='Give more output. Option is additive, and can be used up to 3 times.'
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
no_color = partial(
|
||||||
|
Option,
|
||||||
|
'--no-color',
|
||||||
|
dest='no_color',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help="Suppress colored output",
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
version = partial(
|
||||||
|
Option,
|
||||||
|
'-V', '--version',
|
||||||
|
dest='version',
|
||||||
|
action='store_true',
|
||||||
|
help='Show version and exit.',
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
quiet = partial(
|
||||||
|
Option,
|
||||||
|
'-q', '--quiet',
|
||||||
|
dest='quiet',
|
||||||
|
action='count',
|
||||||
|
default=0,
|
||||||
|
help=(
|
||||||
|
'Give less output. Option is additive, and can be used up to 3'
|
||||||
|
' times (corresponding to WARNING, ERROR, and CRITICAL logging'
|
||||||
|
' levels).'
|
||||||
|
),
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
progress_bar = partial(
|
||||||
|
Option,
|
||||||
|
'--progress-bar',
|
||||||
|
dest='progress_bar',
|
||||||
|
type='choice',
|
||||||
|
choices=list(BAR_TYPES.keys()),
|
||||||
|
default='on',
|
||||||
|
help=(
|
||||||
|
'Specify type of progress to be displayed [' +
|
||||||
|
'|'.join(BAR_TYPES.keys()) + '] (default: %default)'
|
||||||
|
),
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
log = partial(
|
||||||
|
Option,
|
||||||
|
"--log", "--log-file", "--local-log",
|
||||||
|
dest="log",
|
||||||
|
metavar="path",
|
||||||
|
help="Path to a verbose appending log."
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
no_input = partial(
|
||||||
|
Option,
|
||||||
|
# Don't ask for input
|
||||||
|
'--no-input',
|
||||||
|
dest='no_input',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help=SUPPRESS_HELP
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
proxy = partial(
|
||||||
|
Option,
|
||||||
|
'--proxy',
|
||||||
|
dest='proxy',
|
||||||
|
type='str',
|
||||||
|
default='',
|
||||||
|
help="Specify a proxy in the form [user:passwd@]proxy.server:port."
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
retries = partial(
|
||||||
|
Option,
|
||||||
|
'--retries',
|
||||||
|
dest='retries',
|
||||||
|
type='int',
|
||||||
|
default=5,
|
||||||
|
help="Maximum number of retries each connection should attempt "
|
||||||
|
"(default %default times).",
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
timeout = partial(
|
||||||
|
Option,
|
||||||
|
'--timeout', '--default-timeout',
|
||||||
|
metavar='sec',
|
||||||
|
dest='timeout',
|
||||||
|
type='float',
|
||||||
|
default=15,
|
||||||
|
help='Set the socket timeout (default %default seconds).',
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
skip_requirements_regex = partial(
|
||||||
|
Option,
|
||||||
|
# A regex to be used to skip requirements
|
||||||
|
'--skip-requirements-regex',
|
||||||
|
dest='skip_requirements_regex',
|
||||||
|
type='str',
|
||||||
|
default='',
|
||||||
|
help=SUPPRESS_HELP,
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
|
||||||
|
def exists_action():
|
||||||
|
# type: () -> Option
|
||||||
|
return Option(
|
||||||
|
# Option when path already exist
|
||||||
|
'--exists-action',
|
||||||
|
dest='exists_action',
|
||||||
|
type='choice',
|
||||||
|
choices=['s', 'i', 'w', 'b', 'a'],
|
||||||
|
default=[],
|
||||||
|
action='append',
|
||||||
|
metavar='action',
|
||||||
|
help="Default action when a path already exists: "
|
||||||
|
"(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
cert = partial(
|
||||||
|
Option,
|
||||||
|
'--cert',
|
||||||
|
dest='cert',
|
||||||
|
type='str',
|
||||||
|
metavar='path',
|
||||||
|
help="Path to alternate CA bundle.",
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
client_cert = partial(
|
||||||
|
Option,
|
||||||
|
'--client-cert',
|
||||||
|
dest='client_cert',
|
||||||
|
type='str',
|
||||||
|
default=None,
|
||||||
|
metavar='path',
|
||||||
|
help="Path to SSL client certificate, a single file containing the "
|
||||||
|
"private key and the certificate in PEM format.",
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
index_url = partial(
|
||||||
|
Option,
|
||||||
|
'-i', '--index-url', '--pypi-url',
|
||||||
|
dest='index_url',
|
||||||
|
metavar='URL',
|
||||||
|
default=PyPI.simple_url,
|
||||||
|
help="Base URL of Python Package Index (default %default). "
|
||||||
|
"This should point to a repository compliant with PEP 503 "
|
||||||
|
"(the simple repository API) or a local directory laid out "
|
||||||
|
"in the same format.",
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
|
||||||
|
def extra_index_url():
|
||||||
|
return Option(
|
||||||
|
'--extra-index-url',
|
||||||
|
dest='extra_index_urls',
|
||||||
|
metavar='URL',
|
||||||
|
action='append',
|
||||||
|
default=[],
|
||||||
|
help="Extra URLs of package indexes to use in addition to "
|
||||||
|
"--index-url. Should follow the same rules as "
|
||||||
|
"--index-url.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
no_index = partial(
|
||||||
|
Option,
|
||||||
|
'--no-index',
|
||||||
|
dest='no_index',
|
||||||
|
action='store_true',
|
||||||
|
default=False,
|
||||||
|
help='Ignore package index (only looking at --find-links URLs instead).',
|
||||||
|
) # type: Callable[..., Option]
|
||||||
|
|
||||||
|
|
||||||
|
def find_links():
|
||||||
|
# type: () -> Option
|
||||||
|
return Option(
|
||||||
|
'-f', '--find-links',
|
||||||
|
dest='find_links',
|
||||||
|
action='append',
|
||||||
|
default=[],
|
||||||
|
metavar='url',
|
||||||
|
help="If a url or path to an html file, then parse for links to "
|
||||||
|
"archives. If a local path or file:// url that's a directory, "
|
||||||
|
"then look for archives in the directory listing.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def trusted_host():
    # type: () -> Option
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )


def constraints():
    # type: () -> Option
    return Option(
        '-c', '--constraint',
        dest='constraints',
        action='append',
        default=[],
        metavar='file',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )


def requirements():
    # type: () -> Option
    return Option(
        '-r', '--requirement',
        dest='requirements',
        action='append',
        default=[],
        metavar='file',
        help='Install from the given requirements file. '
             'This option can be used multiple times.'
    )


def editable():
    # type: () -> Option
    return Option(
        '-e', '--editable',
        dest='editables',
        action='append',
        default=[],
        metavar='path/url',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )


src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
    'The default in a virtualenv is "<venv path>/src". '
    'The default for global installs is "<current dir>/src".'
)  # type: Callable[..., Option]


def _get_format_control(values, option):
    # type: (Values, Option) -> Any
    """Get a format_control object."""
    return getattr(values, option.dest)


def _handle_no_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.no_binary, existing.only_binary,
    )


def _handle_only_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.only_binary, existing.no_binary,
    )


def no_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=format_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.",
    )


def only_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=format_control,
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.",
    )
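
# Illustrative example (not part of this module): --no-binary and --only-binary
# both write to values.format_control, and the callbacks above keep the two
# name sets mutually exclusive, so an invocation such as
#
#   pip install --no-binary :all: --only-binary lxml somepackage
#
# (package names are placeholders) disables wheels for everything except the
# names handed to --only-binary.
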
platform = partial(
    Option,
    '--platform',
    dest='platform',
    metavar='platform',
    default=None,
    help=("Only use wheels compatible with <platform>. "
          "Defaults to the platform of the running system."),
)  # type: Callable[..., Option]


python_version = partial(
    Option,
    '--python-version',
    dest='python_version',
    metavar='python_version',
    default=None,
    help=("Only use wheels compatible with Python "
          "interpreter version <version>. If not specified, then the "
          "current system interpreter minor version is used. A major "
          "version (e.g. '2') can be specified to match all "
          "minor revs of that major version. A minor version "
          "(e.g. '34') can also be specified."),
)  # type: Callable[..., Option]


implementation = partial(
    Option,
    '--implementation',
    dest='implementation',
    metavar='implementation',
    default=None,
    help=("Only use wheels compatible with Python "
          "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
          " or 'ip'. If not specified, then the current "
          "interpreter implementation is used. Use 'py' to force "
          "implementation-agnostic wheels."),
)  # type: Callable[..., Option]


abi = partial(
    Option,
    '--abi',
    dest='abi',
    metavar='abi',
    default=None,
    help=("Only use wheels compatible with Python "
          "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
          "current interpreter abi tag is used. Generally "
          "you will need to specify --implementation, "
          "--platform, and --python-version when using "
          "this option."),
)  # type: Callable[..., Option]


def prefer_binary():
    # type: () -> Option
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help="Prefer older binary packages over newer source packages."
    )


cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)  # type: Callable[..., Option]


def no_cache_dir_callback(option, opt, value, parser):
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments. However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False


no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="callback",
    callback=no_cache_dir_callback,
    help="Disable the cache.",
)  # type: Callable[..., Option]

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
)  # type: Callable[..., Option]

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
)  # type: Callable[..., Option]

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
)  # type: Callable[..., Option]

no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
)  # type: Callable[..., Option]


def no_use_pep517_callback(option, opt, value, parser):
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False


use_pep517 = partial(
    Option,
    '--use-pep517',
    dest='use_pep517',
    action='store_true',
    default=None,
    help='Use PEP 517 for building source distributions '
         '(use --no-use-pep517 to force legacy behaviour).'
)  # type: Any

no_use_pep517 = partial(
    Option,
    '--no-use-pep517',
    dest='use_pep517',
    action='callback',
    callback=no_use_pep517_callback,
    default=None,
    help=SUPPRESS_HELP
)  # type: Any

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.",
)  # type: Callable[..., Option]

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
)  # type: Callable[..., Option]

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories."
)  # type: Callable[..., Option]

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
)  # type: Callable[..., Option]

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
)  # type: Callable[..., Option]


# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]


def _merge_hash(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}  # type: ignore
    try:
        algo, digest = value.split(':', 1)
    except ValueError:
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)
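
# Illustrative sketch (not part of this module): repeated --hash values are
# merged by _merge_hash above into a dict keyed by algorithm name, roughly:
#
#   hashes = {}
#   for value in ['sha256:aaaa', 'sha256:bbbb']:  # placeholder digests
#       algo, digest = value.split(':', 1)
#       hashes.setdefault(algo, []).append(digest)
#   # hashes == {'sha256': ['aaaa', 'bbbb']}
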
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Callable[..., Option]


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
)  # type: Callable[..., Option]


##########
# groups #
##########

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
    ]
}  # type: Dict[str, Any]

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ]
}  # type: Dict[str, Any]
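# Illustrative example (not part of this module): general_group and index_group
# are consumed by make_option_group (defined earlier in this module), as the
# main parser below does for the general options:
#
#   gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
#   parser.add_option_group(gen_opts)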
@@ -0,0 +1,104 @@
"""A single place for constructing and exposing the main parser
"""

import os
import sys

from pip import __version__
from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import (
    ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.commands import (
    commands_dict, get_similar_commands, get_summaries,
)
from pip._internal.exceptions import CommandError
from pip._internal.utils.misc import get_prog
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Tuple, List  # noqa: F401


__all__ = ["create_main_parser", "parse_command"]


def create_main_parser():
    # type: () -> ConfigOptionParser
    """Creates and returns the main parser for pip's CLI
    """

    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.abspath(os.path.join(
        os.path.dirname(__file__), "..", "..",
    ))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3],
    )

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    # so the help formatter knows
    parser.main = True  # type: ignore

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parse_command(args):
    # type: (List[str]) -> Tuple[str, List[str]]
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)  # type: ignore
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
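# Illustrative example (not part of this module), following the worked case in
# the comment inside parse_command above:
#
#   cmd_name, cmd_args = parse_command(
#       ['--timeout=5', 'install', '--user', 'INITools'])
#   # cmd_name == 'install'
#   # cmd_args == ['--timeout=5', '--user', 'INITools']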
@@ -0,0 +1,261 @@
"""Base option parser setup"""
from __future__ import absolute_import

import logging
import optparse
import sys
import textwrap
from distutils.util import strtobool

from pip._vendor.six import string_types

from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
from pip._internal.utils.compat import get_terminal_size

logger = logging.getLogger(__name__)


class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep:  separator
        """
        opts = []

        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt % metavar.lower())

        return ''.join(opts)

    def format_heading(self, heading):
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = '%s:\n%s\n' % (label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)


class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This is updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    """

    def expand_default(self, option):
        if self.parser is not None:
            self.parser._update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)


class CustomOptionParser(optparse.OptionParser):

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)

        self.option_groups.pop()
        self.option_groups.insert(idx, group)

        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        res = self.option_list[:]
        for i in self.option_groups:
            res.extend(i.option_list)

        return res


class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        self.name = kwargs.pop('name')

        isolated = kwargs.pop("isolated", False)
        self.config = Configuration(isolated)

        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def check_default(self, option, key, val):
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _get_ordered_configuration_items(self):
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items = {name: [] for name in override_order}
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as it's value is empty.",
                    section_key
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option('--' + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                try:
                    val = strtobool(val)
                except ValueError:
                    error_msg = invalid_config_error_message(
                        option.action, key, val
                    )
                    self.error(error_msg)

            elif option.action == 'append':
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, "%s\n" % msg)


def invalid_config_error_message(action, key, val):
    """Returns a better error message when invalid configuration option
    is provided."""
    if action in ('store_true', 'store_false'):
        return ("{0} is not a valid value for {1} option, "
                "please specify a boolean value like yes/no, "
                "true/false or 1/0 instead.").format(val, key)

    return ("{0} is not a valid value for {1} option, "
            "please specify a numerical value like 1/0 "
            "instead.").format(val, key)
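# Illustrative example (not part of this module): _update_defaults looks up each
# configuration key as a long option name ('--' + key), so a value stored under
# 'global.timeout' in a pip config file (or exported via an environment
# variable such as PIP_TIMEOUT, which lands in the ':env:' section) becomes the
# default for the --timeout option; 'global' is applied first, then the
# command's own section, then ':env:', each overriding the previous.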
@@ -0,0 +1,8 @@
from __future__ import absolute_import

SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
@@ -0,0 +1,79 @@
"""
Package containing all pip commands
"""
from __future__ import absolute_import

from pip._internal.commands.completion import CompletionCommand
from pip._internal.commands.configuration import ConfigurationCommand
from pip._internal.commands.download import DownloadCommand
from pip._internal.commands.freeze import FreezeCommand
from pip._internal.commands.hash import HashCommand
from pip._internal.commands.help import HelpCommand
from pip._internal.commands.list import ListCommand
from pip._internal.commands.check import CheckCommand
from pip._internal.commands.search import SearchCommand
from pip._internal.commands.show import ShowCommand
from pip._internal.commands.install import InstallCommand
from pip._internal.commands.uninstall import UninstallCommand
from pip._internal.commands.wheel import WheelCommand

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Type  # noqa: F401
    from pip._internal.cli.base_command import Command  # noqa: F401

commands_order = [
    InstallCommand,
    DownloadCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    CheckCommand,
    ConfigurationCommand,
    SearchCommand,
    WheelCommand,
    HashCommand,
    CompletionCommand,
    HelpCommand,
]  # type: List[Type[Command]]

commands_dict = {c.name: c for c in commands_order}


def get_summaries(ordered=True):
    """Yields sorted (command name, command summary) tuples."""

    if ordered:
        cmditems = _sort_commands(commands_dict, commands_order)
    else:
        cmditems = commands_dict.items()

    for name, command_class in cmditems:
        yield (name, command_class.summary)


def get_similar_commands(name):
    """Command name auto-correct."""
    from difflib import get_close_matches

    name = name.lower()

    close_commands = get_close_matches(name, commands_dict.keys())

    if close_commands:
        return close_commands[0]
    else:
        return False


def _sort_commands(cmddict, order):
    def keyfn(key):
        try:
            return order.index(key[1])
        except ValueError:
            # unordered items should come last
            return 0xff

    return sorted(cmddict.items(), key=keyfn)
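
# Illustrative example (not part of this module): get_similar_commands relies on
# difflib.get_close_matches, so a near-miss such as 'instal' would typically be
# corrected to 'install':
#
#   >>> get_similar_commands('instal')
#   'install'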
11 binary files are not shown in this diff.
Some files were not shown because too many files have changed in this diff.