Mock Version: 5.5 Mock Version: 5.5 Mock Version: 5.5 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-requests-unixsocket.spec'], chrootPath='/var/lib/mock/dist-an23.3-build-430255-73048/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=990gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '23c18e715d2d4b9ab0022a96178b738b', '-D', '/var/lib/mock/dist-an23.3-build-430255-73048/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-requests-unixsocket.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: noarch Building for target noarch setting SOURCE_DATE_EPOCH=1745193600 Wrote: /builddir/build/SRPMS/python-requests-unixsocket-0.3.0-1.an23.src.rpm Child return code was: 0 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-requests-unixsocket.spec'], chrootPath='/var/lib/mock/dist-an23.3-build-430255-73048/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=990gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf', 
'--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'a028a099c1ce4b378dc129e74fbbd33c', '-D', '/var/lib/mock/dist-an23.3-build-430255-73048/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-requests-unixsocket.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: noarch Building for target noarch setting SOURCE_DATE_EPOCH=1745193600 Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.NeoM6u + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf requests-unixsocket-0.3.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/requests-unixsocket-0.3.0.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd requests-unixsocket-0.3.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . 
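Each stage above runs inside a systemd-nspawn container managed by mock: the "Executing command:" entry is the fully expanded argv, combining the chroot path, the /dev/loop* and resolv.conf bind mounts, and the per-stage environment around a single "bash --login -c rpmbuild ..." invocation. A minimal Python sketch of how such an argv can be assembled (illustrative only, not mock's actual implementation; the chroot path is taken from the log above):

    import shlex

    chroot = "/var/lib/mock/dist-an23.3-build-430255-73048/root"
    env = {
        "TERM": "vt100",
        "SHELL": "/bin/bash",
        "HOME": "/builddir",
        "HOSTNAME": "mock",
        "PATH": "/usr/bin:/bin:/usr/sbin:/sbin",
        "LANG": "C.UTF-8",
    }
    rpmbuild = ("/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps "
                "/builddir/build/SPECS/python-requests-unixsocket.spec")

    argv = ["/usr/bin/systemd-nspawn", "-q", "-D", chroot, "-a", "-u", "mockbuild",
            "--capability=cap_ipc_lock", "--resolv-conf=off"]
    argv += ["--setenv=%s=%s" % item for item in env.items()]
    argv += ["bash", "--login", "-c", rpmbuild]

    # Print the command for inspection; actually executing it needs root and
    # an initialized mock chroot.
    print(shlex.join(argv))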
+ sed -i 1d requests_unixsocket/tests/test_requests_unixsocket.py + sed -i 1d setup.py + rm pytest.ini + sed -i /pytest-pep8/d test-requirements.txt + sed -i /pytest-capturelog/d test-requirements.txt + RPM_EC=0 ++ jobs -p + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.Ga5QAQ + umask 022 + cd /builddir/build/BUILD + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/anolis/anolis-hardened-ld -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -Wl,--build-id=sha1 ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd requests-unixsocket-0.3.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/anolis/anolis-hardened-ld -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -Wl,--build-id=sha1 ' + /usr/bin/python3 setup.py build '--executable=/usr/bin/python3 -sP' /usr/lib/python3.11/site-packages/setuptools/__init__.py:84: _DeprecatedInstaller: setuptools.installer and fetch_build_eggs are deprecated. !! ******************************************************************************** Requirements should be satisfied by a PEP 517 installer. If you are using pip, you can try `pip install --use-pep517`. ******************************************************************************** !! dist.fetch_build_eggs(dist.setup_requires) WARNING: The wheel package is not available. 
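The _DeprecatedInstaller warning above is emitted because %build drives the package with "setup.py build" while its setup_requires (pbr) triggers fetch_build_eggs; setuptools now recommends a PEP 517 front end for this. A hedged sketch of the recommended flow, assuming the python3 "build" front end were available in the buildroot (the spec in this log does not do this):

    # PEP 517 equivalent of the `setup.py build` call above; shown only to
    # illustrate the warning, the spec itself does not use it.
    import subprocess
    import sys

    subprocess.run(
        [sys.executable, "-m", "build", "--wheel", "--no-isolation", "."],
        check=True,
    )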
/usr/lib/python3.11/site-packages/setuptools/command/develop.py:40: EasyInstallDeprecationWarning: easy_install command is deprecated. !! ******************************************************************************** Please avoid running ``setup.py`` and ``easy_install``. Instead, use pypa/build, pypa/installer or other standards-based tools. See https://github.com/pypa/setuptools/issues/917 for details. ******************************************************************************** !! easy_install.initialize_options(self) /usr/lib/python3.11/site-packages/setuptools/_distutils/cmd.py:66: SetuptoolsDeprecationWarning: setup.py install is deprecated. !! ******************************************************************************** Please avoid running ``setup.py`` directly. Instead, use pypa/build, pypa/installer or other standards-based tools. See https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html for details. ******************************************************************************** !! self.initialize_options() running build running build_py creating build creating build/lib creating build/lib/requests_unixsocket copying requests_unixsocket/__init__.py -> build/lib/requests_unixsocket copying requests_unixsocket/adapters.py -> build/lib/requests_unixsocket copying requests_unixsocket/testutils.py -> build/lib/requests_unixsocket running egg_info writing requests_unixsocket.egg-info/PKG-INFO writing dependency_links to requests_unixsocket.egg-info/dependency_links.txt writing requirements to requests_unixsocket.egg-info/requires.txt writing top-level names to requests_unixsocket.egg-info/top_level.txt [pbr] Reusing existing SOURCES.txt /usr/lib/python3.11/site-packages/setuptools/command/build_py.py:201: _Warning: Package 'requests_unixsocket.tests' is absent from the `packages` configuration. !! ******************************************************************************** ############################ # Package would be ignored # ############################ Python recognizes 'requests_unixsocket.tests' as an importable package[^1], but it is absent from setuptools' `packages` configuration. This leads to an ambiguous overall configuration. If you want to distribute this package, please make sure that 'requests_unixsocket.tests' is explicitly added to the `packages` configuration field. Alternatively, you can also rely on setuptools' discovery methods (for example by using `find_namespace_packages(...)`/`find_namespace:` instead of `find_packages(...)`/`find:`). You can read more about "package discovery" on setuptools documentation page: - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html If you don't want 'requests_unixsocket.tests' to be distributed and are already explicitly excluding 'requests_unixsocket.tests' via `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`, you can try to use `exclude_package_data`, or `include-package-data=False` in combination with a more fine grained `package-data` configuration. You can read more about "package data files" on setuptools documentation page: - https://setuptools.pypa.io/en/latest/userguide/datafiles.html [^1]: For Python, any directory (with suitable naming) can be imported, even if it does not contain any `.py` files. On the other hand, currently there is no concept of package data directory, all directories are treated like packages. ******************************************************************************** !! 
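The "Package would be ignored" warning above means setuptools can import requests_unixsocket/tests but the directory is not listed in the packages configuration, so whether it ships is ambiguous (in this build it is copied anyway, as the install_lib output further down shows). A hypothetical setup.py illustrating the explicit configuration the warning asks for; the real project declares its packaging metadata through pbr and setup.cfg:

    # Hypothetical illustration only; upstream uses pbr/setup.cfg, not this file.
    from setuptools import find_packages, setup

    setup(
        name="requests-unixsocket",
        packages=find_packages(
            include=["requests_unixsocket", "requests_unixsocket.*"],
        ),
    )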
check.warn(importable) creating build/lib/requests_unixsocket/tests copying requests_unixsocket/tests/test_requests_unixsocket.py -> build/lib/requests_unixsocket/tests + RPM_EC=0 ++ jobs -p + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.ZmZRd9 + umask 022 + cd /builddir/build/BUILD + '[' /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch '!=' / ']' + rm -rf /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch ++ dirname /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch + mkdir -p /builddir/build/BUILDROOT + mkdir /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/anolis/anolis-hardened-ld -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -Wl,--build-id=sha1 ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd requests-unixsocket-0.3.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/anolis/anolis-hardened-ld -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -Wl,--build-id=sha1 ' + /usr/bin/python3 setup.py install -O1 --skip-build --root /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch --prefix /usr /usr/lib/python3.11/site-packages/setuptools/__init__.py:84: _DeprecatedInstaller: setuptools.installer and fetch_build_eggs are deprecated. !! 
******************************************************************************** Requirements should be satisfied by a PEP 517 installer. If you are using pip, you can try `pip install --use-pep517`. ******************************************************************************** !! dist.fetch_build_eggs(dist.setup_requires) WARNING: The wheel package is not available. /usr/lib/python3.11/site-packages/setuptools/command/develop.py:40: EasyInstallDeprecationWarning: easy_install command is deprecated. !! ******************************************************************************** Please avoid running ``setup.py`` and ``easy_install``. Instead, use pypa/build, pypa/installer or other standards-based tools. See https://github.com/pypa/setuptools/issues/917 for details. ******************************************************************************** !! easy_install.initialize_options(self) /usr/lib/python3.11/site-packages/setuptools/_distutils/cmd.py:66: SetuptoolsDeprecationWarning: setup.py install is deprecated. !! ******************************************************************************** Please avoid running ``setup.py`` directly. Instead, use pypa/build, pypa/installer or other standards-based tools. See https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html for details. ******************************************************************************** !! self.initialize_options() running install [pbr] Generating AUTHORS [pbr] AUTHORS complete (0.0s) running install_lib creating /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr creating /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib creating /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11 creating /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages creating /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket copying build/lib/requests_unixsocket/__init__.py -> /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket copying build/lib/requests_unixsocket/adapters.py -> /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket copying build/lib/requests_unixsocket/testutils.py -> /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket creating /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket/tests copying build/lib/requests_unixsocket/tests/test_requests_unixsocket.py -> /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket/tests byte-compiling /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket/adapters.py to adapters.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket/testutils.py to testutils.cpython-311.pyc byte-compiling 
/builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket/tests/test_requests_unixsocket.py to test_requests_unixsocket.cpython-311.pyc writing byte-compilation script '/tmp/tmpnoi1l1r5.py' /usr/bin/python3 /tmp/tmpnoi1l1r5.py removing /tmp/tmpnoi1l1r5.py running install_egg_info running egg_info writing requests_unixsocket.egg-info/PKG-INFO writing dependency_links to requests_unixsocket.egg-info/dependency_links.txt writing requirements to requests_unixsocket.egg-info/requires.txt writing top-level names to requests_unixsocket.egg-info/top_level.txt [pbr] Reusing existing SOURCES.txt Copying requests_unixsocket.egg-info to /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11/site-packages/requests_unixsocket-0.3.0-py3.11.egg-info running install_scripts + rm -rfv /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/bin/__pycache__ + /usr/bin/find-debuginfo -j80 --strict-build-id -m -i --build-id-seed 0.3.0-1.an23 --unique-debug-suffix -0.3.0-1.an23.noarch --unique-debug-src-base python-requests-unixsocket-0.3.0-1.an23.noarch --run-dwz --dwz-low-mem-die-limit 10000000 --dwz-max-die-limit 50000000 -S debugsourcefiles.list /builddir/build/BUILD/requests-unixsocket-0.3.0 find: 'debug': No such file or directory + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/anolis/brp-ldconfig + COMPRESS='zstd -f --rm -19 -T0' + COMPRESS_EXT=.zst + /usr/lib/rpm/brp-compress + /usr/lib/rpm/anolis/brp-strip-lto /usr/bin/strip + /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip + /usr/lib/rpm/check-rpaths + /usr/lib/rpm/brp-remove-la-files + /usr/lib/rpm/anolis/clean_perl + /usr/lib/rpm/anolis/check_elf_files + /usr/lib/rpm/anolis/brp-mangle-shebangs *** WARNING: ./usr/lib/python3.11/site-packages/requests_unixsocket/tests/test_requests_unixsocket.py is executable but has no shebang, removing executable bit + /usr/lib/rpm/anolis/remove-info-dir + /usr/lib/rpm/anolis/check-desktop-files + env -u SOURCE_DATE_EPOCH /usr/lib/rpm/anolis/brp-python-bytecompile '' 1 0 -j80 Bytecompiling .py files below /builddir/build/BUILDROOT/python-requests-unixsocket-0.3.0-1.an23.noarch/usr/lib/python3.11 using python3.11 Not clamping source mtimes, $SOURCE_DATE_EPOCH not set + /usr/lib/rpm/anolis/brp-python-hardlink Executing(%check): /bin/sh -e /var/tmp/rpm-tmp.DW6Emm + umask 022 + cd /builddir/build/BUILD + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 
-mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/anolis/anolis-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/anolis/anolis-hardened-ld -specs=/usr/lib/rpm/anolis/anolis-annobin-cc1 -Wl,--build-id=sha1 ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd requests-unixsocket-0.3.0 + /usr/bin/python3 -m pytest -v ============================= test session starts ============================== platform linux -- Python 3.11.6, pytest-7.3.1, pluggy-1.0.0 -- /usr/bin/python3 cachedir: .pytest_cache rootdir: /builddir/build/BUILD/requests-unixsocket-0.3.0 collecting ... collected 5 items requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_ok FAILED [ 20%] requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_url_with_query_params FAILED [ 40%] requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_connection_error FAILED [ 60%] requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_connection_proxies_error FAILED [ 80%] requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_monkeypatch FAILED [100%] =================================== FAILURES =================================== _________________________ test_unix_domain_adapter_ok __________________________ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. 
:rtype: requests.Response """ try: > conn = self._get_connection(request, verify, proxies=proxies, cert=cert) /usr/lib/python3.11/site-packages/requests/adapters.py:532: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , verify = True, proxies = OrderedDict() cert = None def _get_connection(self, request, verify, proxies=None, cert=None): # Replace the existing get_connection without breaking things and # ensure that TLS settings are considered when we interact with # urllib3 HTTP Pools proxy = select_proxy(request.url, proxies) try: host_params, pool_kwargs = _urllib3_request_context(request, verify, cert) except ValueError as e: raise InvalidURL(e, request=request) if proxy: proxy = prepend_scheme_if_needed(proxy, "http") proxy_url = parse_url(proxy) if not proxy_url.host: raise InvalidProxyURL( "Please check proxy URL. It is malformed " "and could be missing the host." ) proxy_manager = self.proxy_manager_for(proxy) conn = proxy_manager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) else: # Only scheme should be lower case > conn = self.poolmanager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) /usr/lib/python3.11/site-packages/requests/adapters.py:400: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = host = '%2Ftmp%2Ftest_requests.1426325333_325_6000dafc', port = 80 scheme = 'http+unix' pool_kwargs = {'cert_reqs': 'CERT_REQUIRED', 'ssl_context': } def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """ if not host: raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) request_context["scheme"] = scheme or "http" if not port: port = port_by_scheme.get(request_context["scheme"].lower(), 80) request_context["port"] = port request_context["host"] = host > return self.connection_from_context(request_context) /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:246: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request_context = {'block': False, 'cert_reqs': 'CERT_REQUIRED', 'host': '%2Ftmp%2Ftest_requests.1426325333_325_6000dafc', 'maxsize': 10, ...} def connection_from_context(self, request_context): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. 
""" scheme = request_context["scheme"].lower() pool_key_constructor = self.key_fn_by_scheme.get(scheme) if not pool_key_constructor: > raise URLSchemeUnknown(scheme) E urllib3.exceptions.URLSchemeUnknown: Not supported URL scheme http+unix /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:258: URLSchemeUnknown During handling of the above exception, another exception occurred: def test_unix_domain_adapter_ok(): with UnixSocketServerThread() as usock_thread: session = requests_unixsocket.Session('http+unix://') urlencoded_usock = requests.compat.quote_plus(usock_thread.usock) url = 'http+unix://%s/path/to/page' % urlencoded_usock for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: logger.debug('Calling session.%s(%r) ...', method, url) > r = getattr(session, method)(url) requests_unixsocket/tests/test_requests_unixsocket.py:26: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib/python3.11/site-packages/requests/sessions.py:602: in get return self.request("GET", url, **kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:589: in request resp = self.send(prep, **send_kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:703: in send r = adapter.send(request, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ try: conn = self._get_connection(request, verify, proxies=proxies, cert=cert) except LocationValueError as e: > raise InvalidURL(e, request=request) E requests.exceptions.InvalidURL: Not supported URL scheme http+unix /usr/lib/python3.11/site-packages/requests/adapters.py:534: InvalidURL ________________ test_unix_domain_adapter_url_with_query_params ________________ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. 
:param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ try: > conn = self._get_connection(request, verify, proxies=proxies, cert=cert) /usr/lib/python3.11/site-packages/requests/adapters.py:532: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , verify = True, proxies = OrderedDict() cert = None def _get_connection(self, request, verify, proxies=None, cert=None): # Replace the existing get_connection without breaking things and # ensure that TLS settings are considered when we interact with # urllib3 HTTP Pools proxy = select_proxy(request.url, proxies) try: host_params, pool_kwargs = _urllib3_request_context(request, verify, cert) except ValueError as e: raise InvalidURL(e, request=request) if proxy: proxy = prepend_scheme_if_needed(proxy, "http") proxy_url = parse_url(proxy) if not proxy_url.host: raise InvalidProxyURL( "Please check proxy URL. It is malformed " "and could be missing the host." ) proxy_manager = self.proxy_manager_for(proxy) conn = proxy_manager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) else: # Only scheme should be lower case > conn = self.poolmanager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) /usr/lib/python3.11/site-packages/requests/adapters.py:400: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = host = '%2Ftmp%2Ftest_requests.1426325333_325_afe4c328', port = 80 scheme = 'http+unix' pool_kwargs = {'cert_reqs': 'CERT_REQUIRED', 'ssl_context': } def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """ if not host: raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) request_context["scheme"] = scheme or "http" if not port: port = port_by_scheme.get(request_context["scheme"].lower(), 80) request_context["port"] = port request_context["host"] = host > return self.connection_from_context(request_context) /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:246: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request_context = {'block': False, 'cert_reqs': 'CERT_REQUIRED', 'host': '%2Ftmp%2Ftest_requests.1426325333_325_afe4c328', 'maxsize': 10, ...} def connection_from_context(self, request_context): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. 
""" scheme = request_context["scheme"].lower() pool_key_constructor = self.key_fn_by_scheme.get(scheme) if not pool_key_constructor: > raise URLSchemeUnknown(scheme) E urllib3.exceptions.URLSchemeUnknown: Not supported URL scheme http+unix /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:258: URLSchemeUnknown During handling of the above exception, another exception occurred: def test_unix_domain_adapter_url_with_query_params(): with UnixSocketServerThread() as usock_thread: session = requests_unixsocket.Session('http+unix://') urlencoded_usock = requests.compat.quote_plus(usock_thread.usock) url = ('http+unix://%s' '/containers/nginx/logs?timestamp=true' % urlencoded_usock) for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: logger.debug('Calling session.%s(%r) ...', method, url) > r = getattr(session, method)(url) requests_unixsocket/tests/test_requests_unixsocket.py:53: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib/python3.11/site-packages/requests/sessions.py:602: in get return self.request("GET", url, **kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:589: in request resp = self.send(prep, **send_kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:703: in send r = adapter.send(request, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ try: conn = self._get_connection(request, verify, proxies=proxies, cert=cert) except LocationValueError as e: > raise InvalidURL(e, request=request) E requests.exceptions.InvalidURL: Not supported URL scheme http+unix /usr/lib/python3.11/site-packages/requests/adapters.py:534: InvalidURL __________________ test_unix_domain_adapter_connection_error ___________________ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. 
:type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ try: > conn = self._get_connection(request, verify, proxies=proxies, cert=cert) /usr/lib/python3.11/site-packages/requests/adapters.py:532: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , verify = True, proxies = OrderedDict() cert = None def _get_connection(self, request, verify, proxies=None, cert=None): # Replace the existing get_connection without breaking things and # ensure that TLS settings are considered when we interact with # urllib3 HTTP Pools proxy = select_proxy(request.url, proxies) try: host_params, pool_kwargs = _urllib3_request_context(request, verify, cert) except ValueError as e: raise InvalidURL(e, request=request) if proxy: proxy = prepend_scheme_if_needed(proxy, "http") proxy_url = parse_url(proxy) if not proxy_url.host: raise InvalidProxyURL( "Please check proxy URL. It is malformed " "and could be missing the host." ) proxy_manager = self.proxy_manager_for(proxy) conn = proxy_manager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) else: # Only scheme should be lower case > conn = self.poolmanager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) /usr/lib/python3.11/site-packages/requests/adapters.py:400: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = host = 'socket_does_not_exist', port = 80, scheme = 'http+unix' pool_kwargs = {'cert_reqs': 'CERT_REQUIRED', 'ssl_context': } def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """ if not host: raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) request_context["scheme"] = scheme or "http" if not port: port = port_by_scheme.get(request_context["scheme"].lower(), 80) request_context["port"] = port request_context["host"] = host > return self.connection_from_context(request_context) /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:246: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request_context = {'block': False, 'cert_reqs': 'CERT_REQUIRED', 'host': 'socket_does_not_exist', 'maxsize': 10, ...} def connection_from_context(self, request_context): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. 
""" scheme = request_context["scheme"].lower() pool_key_constructor = self.key_fn_by_scheme.get(scheme) if not pool_key_constructor: > raise URLSchemeUnknown(scheme) E urllib3.exceptions.URLSchemeUnknown: Not supported URL scheme http+unix /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:258: URLSchemeUnknown During handling of the above exception, another exception occurred: def test_unix_domain_adapter_connection_error(): session = requests_unixsocket.Session('http+unix://') for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: with pytest.raises(requests.ConnectionError): > getattr(session, method)( 'http+unix://socket_does_not_exist/path/to/page') requests_unixsocket/tests/test_requests_unixsocket.py:76: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib/python3.11/site-packages/requests/sessions.py:602: in get return self.request("GET", url, **kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:589: in request resp = self.send(prep, **send_kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:703: in send r = adapter.send(request, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ try: conn = self._get_connection(request, verify, proxies=proxies, cert=cert) except LocationValueError as e: > raise InvalidURL(e, request=request) E requests.exceptions.InvalidURL: Not supported URL scheme http+unix /usr/lib/python3.11/site-packages/requests/adapters.py:534: InvalidURL ______________ test_unix_domain_adapter_connection_proxies_error _______________ self = def _new_conn(self): """Establish a socket connection and set nodelay settings on it. :return: New socket connection. """ extra_kw = {} if self.source_address: extra_kw["source_address"] = self.source_address if self.socket_options: extra_kw["socket_options"] = self.socket_options try: > conn = connection.create_connection( (self._dns_host, self.port), self.timeout, **extra_kw ) /usr/lib/python3.11/site-packages/urllib3/connection.py:174: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ address = ('10.10.1.10', 1080), timeout = None, source_address = None socket_options = None def create_connection( address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, socket_options=None, ): """Connect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, port)``) and return the socket object. 
Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. If no *timeout* is supplied, the global default timeout setting returned by :func:`socket.getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. An host of '' or port 0 tells the OS to use the default. """ host, port = address if host.startswith("["): host = host.strip("[]") err = None # Using the value from allowed_gai_family() in the context of getaddrinfo lets # us select whether to work with IPv4 DNS records, IPv6 records, or both. # The original create_connection function always returns all records. family = allowed_gai_family() try: host.encode("idna") except UnicodeError: return six.raise_from( LocationParseError(u"'%s', label empty or too long" % host), None ) for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res sock = None try: sock = socket.socket(af, socktype, proto) # If provided, set socket level options before connecting. _set_socket_options(sock, socket_options) if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: sock.settimeout(timeout) if source_address: sock.bind(source_address) sock.connect(sa) return sock except socket.error as e: err = e if sock is not None: sock.close() sock = None if err is not None: > raise err /usr/lib/python3.11/site-packages/urllib3/util/connection.py:95: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ address = ('10.10.1.10', 1080), timeout = None, source_address = None socket_options = None def create_connection( address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, socket_options=None, ): """Connect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, port)``) and return the socket object. Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. If no *timeout* is supplied, the global default timeout setting returned by :func:`socket.getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. An host of '' or port 0 tells the OS to use the default. """ host, port = address if host.startswith("["): host = host.strip("[]") err = None # Using the value from allowed_gai_family() in the context of getaddrinfo lets # us select whether to work with IPv4 DNS records, IPv6 records, or both. # The original create_connection function always returns all records. family = allowed_gai_family() try: host.encode("idna") except UnicodeError: return six.raise_from( LocationParseError(u"'%s', label empty or too long" % host), None ) for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res sock = None try: sock = socket.socket(af, socktype, proto) # If provided, set socket level options before connecting. 
_set_socket_options(sock, socket_options) if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: sock.settimeout(timeout) if source_address: sock.bind(source_address) > sock.connect(sa) E TimeoutError: [Errno 110] Connection timed out /usr/lib/python3.11/site-packages/urllib3/util/connection.py:85: TimeoutError During handling of the above exception, another exception occurred: self = method = 'GET', url = '/path/to/page', body = None headers = {'User-Agent': 'python-requests/2.32.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'} retries = Retry(total=0, connect=None, read=False, redirect=None, status=None) redirect = False, assert_same_host = False timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None release_conn = False, chunked = False, body_pos = None response_kw = {'decode_content': False, 'preload_content': False} parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/path/to/page', query=None, fragment=None) destination_scheme = None, conn = None, release_this_conn = True http_tunnel_required = True, err = None, clean_exit = False def urlopen( self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, chunked=False, body_pos=None, **response_kw ): """ Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all the raw details. .. note:: More commonly, it's appropriate to use a convenience method provided by :class:`.RequestMethods`, such as :meth:`request`. .. note:: `release_conn` will only behave as expected if `preload_content=False` because we want to make `preload_content=False` the default behaviour someday soon without breaking backwards compatibility. :param method: HTTP request method (such as GET, POST, PUT, etc.) :param url: The URL to perform the request on. :param body: Data to send in the request body, either :class:`str`, :class:`bytes`, an iterable of :class:`str`/:class:`bytes`, or a file-like object. :param headers: Dictionary of custom headers to send, such as User-Agent, If-None-Match, etc. If None, pool headers are used. If provided, these headers completely replace any pool-specific headers. :param retries: Configure the number of retries to allow before raising a :class:`~urllib3.exceptions.MaxRetryError` exception. Pass ``None`` to retry until you receive a response. Pass a :class:`~urllib3.util.retry.Retry` object for fine-grained control over different types of retries. Pass an integer number to retry connection errors that many times, but no other types of errors. Pass zero to never retry. If ``False``, then retries are disabled and any exception is raised immediately. Also, instead of raising a MaxRetryError on redirects, the redirect response will be returned. :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param redirect: If True, automatically handle redirects (status codes 301, 302, 303, 307, 308). Each redirect counts as a retry. Disabling retries will disable redirect, too. :param assert_same_host: If ``True``, will make sure that the host of the pool requests is consistent else will raise HostChangedError. When ``False``, you can use the pool on an HTTP proxy and request foreign hosts. :param timeout: If specified, overrides the default timeout for this one request. It may be a float (in seconds) or an instance of :class:`urllib3.util.Timeout`. 
:param pool_timeout: If set and the pool is set to block=True, then this method will block for ``pool_timeout`` seconds and raise EmptyPoolError if no connection is available within the time period. :param release_conn: If False, then the urlopen call will not release the connection back into the pool once a response is received (but will release if you read the entire contents of the response such as when `preload_content=True`). This is useful if you're not preloading the response's content immediately. You will need to call ``r.release_conn()`` on the response ``r`` to return the connection back into the pool. If None, it takes the value of ``response_kw.get('preload_content', True)``. :param chunked: If True, urllib3 will send the body using chunked transfer encoding. Otherwise, urllib3 will send the body using the standard content-length form. Defaults to False. :param int body_pos: Position to seek to in file-like body in the event of a retry or redirect. Typically this won't need to be set because urllib3 will auto-populate the value when needed. :param \\**response_kw: Additional parameters are passed to :meth:`urllib3.response.HTTPResponse.from_httplib` """ parsed_url = parse_url(url) destination_scheme = parsed_url.scheme if headers is None: headers = self.headers if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: release_conn = response_kw.get("preload_content", True) # Check host if assert_same_host and not self.is_same_host(url): raise HostChangedError(self, url, retries) # Ensure that the URL we're connecting to is properly encoded if url.startswith("/"): url = six.ensure_str(_encode_target(url)) else: url = six.ensure_str(parsed_url.url) conn = None # Track whether `conn` needs to be released before # returning/raising/recursing. Update this variable if necessary, and # leave `release_conn` constant throughout the function. That way, if # the function recurses, the original value of `release_conn` will be # passed down into the recursive call, and its value will be respected. # # See issue #651 [1] for details. # # [1] release_this_conn = release_conn http_tunnel_required = connection_requires_http_tunnel( self.proxy, self.proxy_config, destination_scheme ) # Merge the proxy headers. Only done when not using HTTP CONNECT. We # have to copy the headers dict so we can safely change it without those # changes being reflected in anyone else's copy. if not http_tunnel_required: headers = headers.copy() headers.update(self.proxy_headers) # Must keep the exception bound to a separate variable or else Python 3 # complains about UnboundLocalError. err = None # Keep track of whether we cleanly exited the except block. This # ensures we do proper cleanup in finally. clean_exit = False # Rewind body position, if needed. Record current position # for future rewinds in the event of a redirect/retry. body_pos = set_file_position(body, body_pos) try: # Request a connection from the queue. timeout_obj = self._get_timeout(timeout) conn = self._get_conn(timeout=pool_timeout) conn.timeout = timeout_obj.connect_timeout is_new_proxy_conn = self.proxy is not None and not getattr( conn, "sock", None ) if is_new_proxy_conn and http_tunnel_required: self._prepare_proxy(conn) # Make the request on the httplib connection object. 
> httplib_response = self._make_request( conn, method, url, timeout=timeout_obj, body=body, headers=headers, chunked=chunked, ) /usr/lib/python3.11/site-packages/urllib3/connectionpool.py:715: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = conn = method = 'GET', url = '/path/to/page' timeout = Timeout(connect=None, read=None, total=None), chunked = False httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'python-requests/2.32.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}} timeout_obj = Timeout(connect=None, read=None, total=None) def _make_request( self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw ): """ Perform a request on a given urllib connection object taken from our pool. :param conn: a connection from one of our connection pools :param timeout: Socket timeout in seconds for the request. This can be a float or integer, which will set the same timeout value for the socket connect and the socket read, or an instance of :class:`urllib3.util.Timeout`, which gives you more fine-grained control over your timeouts. """ self.num_requests += 1 timeout_obj = self._get_timeout(timeout) timeout_obj.start_connect() conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) # Trigger any extra validation we need to do. try: self._validate_conn(conn) except (SocketTimeout, BaseSSLError) as e: # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) raise # conn.request() calls http.client.*.request, not the method in # urllib3.request. It also calls makefile (recv) on the socket. try: if chunked: conn.request_chunked(method, url, **httplib_request_kw) else: > conn.request(method, url, **httplib_request_kw) /usr/lib/python3.11/site-packages/urllib3/connectionpool.py:416: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = method = 'GET', url = '/path/to/page', body = None headers = {'User-Agent': 'python-requests/2.32.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'} def request(self, method, url, body=None, headers=None): # Update the inner socket's timeout value to send the request. # This only triggers if the connection is re-used. 
if getattr(self, "sock", None) is not None: self.sock.settimeout(self.timeout) if headers is None: headers = {} else: # Avoid modifying the headers passed into .request() headers = headers.copy() if "user-agent" not in (six.ensure_str(k.lower()) for k in headers): headers["User-Agent"] = _get_default_user_agent() > super(HTTPConnection, self).request(method, url, body=body, headers=headers) /usr/lib/python3.11/site-packages/urllib3/connection.py:244: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = method = 'GET', url = '/path/to/page', body = None headers = {'User-Agent': 'python-requests/2.32.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'} def request(self, method, url, body=None, headers={}, *, encode_chunked=False): """Send a complete request to the server.""" > self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1286: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = method = 'GET', url = '/path/to/page', body = None headers = {'User-Agent': 'python-requests/2.32.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'} encode_chunked = False def _send_request(self, method, url, body, headers, encode_chunked): # Honor explicitly requested Host: and Accept-Encoding: headers. header_names = frozenset(k.lower() for k in headers) skips = {} if 'host' in header_names: skips['skip_host'] = 1 if 'accept-encoding' in header_names: skips['skip_accept_encoding'] = 1 self.putrequest(method, url, **skips) # chunked encoding will happen if HTTP/1.1 is used and either # the caller passes encode_chunked=True or the following # conditions hold: # 1. content-length has not been explicitly set # 2. the body is a file or iterable, but not a str or bytes-like # 3. Transfer-Encoding has NOT been explicitly set by the caller if 'content-length' not in header_names: # only chunk body if not explicitly set for backwards # compatibility, assuming the client code is already handling the # chunking if 'transfer-encoding' not in header_names: # if content-length cannot be automatically determined, fall # back to chunked encoding encode_chunked = False content_length = self._get_content_length(body, method) if content_length is None: if body is not None: if self.debuglevel > 0: print('Unable to determine size of %r' % body) encode_chunked = True self.putheader('Transfer-Encoding', 'chunked') else: self.putheader('Content-Length', str(content_length)) else: encode_chunked = False for hdr, value in headers.items(): self.putheader(hdr, value) if isinstance(body, str): # RFC 2616 Section 3.7.1 says that text default has a # default charset of iso-8859-1. body = _encode(body, 'body') > self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1332: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = message_body = None def endheaders(self, message_body=None, *, encode_chunked=False): """Indicate that the last header line has been sent to the server. This method sends the request to the server. The optional message_body argument can be used to pass a message body associated with the request. 
""" if self.__state == _CS_REQ_STARTED: self.__state = _CS_REQ_SENT else: raise CannotSendHeader() > self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = message_body = None, encode_chunked = False def _send_output(self, message_body=None, encode_chunked=False): """Send the currently buffered request and clear the buffer. Appends an extra \\r\\n to the buffer. A message_body may be specified, to be appended to the request. """ self._buffer.extend((b"", b"")) msg = b"\r\n".join(self._buffer) del self._buffer[:] > self.send(msg) /usr/lib64/python3.11/http/client.py:1041: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = data = b'GET /path/to/page HTTP/1.1\r\nHost: 10.10.1.10:1080\r\nUser-Agent: python-requests/2.32.0\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nConnection: keep-alive\r\n\r\n' def send(self, data): """Send `data' to the server. ``data`` can be a string object, a bytes object, an array object, a file-like object that supports a .read() method, or an iterable object. """ if self.sock is None: if self.auto_open: > self.connect() /usr/lib64/python3.11/http/client.py:979: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = def connect(self): > conn = self._new_conn() /usr/lib/python3.11/site-packages/urllib3/connection.py:205: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = def _new_conn(self): """Establish a socket connection and set nodelay settings on it. :return: New socket connection. """ extra_kw = {} if self.source_address: extra_kw["source_address"] = self.source_address if self.socket_options: extra_kw["socket_options"] = self.socket_options try: conn = connection.create_connection( (self._dns_host, self.port), self.timeout, **extra_kw ) except SocketTimeout: > raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout), ) E urllib3.exceptions.ConnectTimeoutError: (, 'Connection to 10.10.1.10 timed out. (connect timeout=None)') /usr/lib/python3.11/site-packages/urllib3/connection.py:179: ConnectTimeoutError During handling of the above exception, another exception occurred: self = request = , stream = False timeout = Timeout(connect=None, read=None, total=None), verify = True cert = None, proxies = OrderedDict([('http+unix', 'http://10.10.1.10:1080')]) def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. 
:rtype: requests.Response """ try: conn = self._get_connection(request, verify, proxies=proxies, cert=cert) except LocationValueError as e: raise InvalidURL(e, request=request) self.cert_verify(conn, request.url, verify, cert) url = self.request_url(request, proxies) self.add_headers( request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies, ) chunked = not (request.body is None or "Content-Length" in request.headers) if isinstance(timeout, tuple): try: connect, read = timeout timeout = TimeoutSauce(connect=connect, read=read) except ValueError: raise ValueError( f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " f"or a single float to set both timeouts to the same value." ) elif isinstance(timeout, TimeoutSauce): pass else: timeout = TimeoutSauce(connect=timeout, read=timeout) try: > resp = conn.urlopen( method=request.method, url=url, body=request.body, headers=request.headers, redirect=False, assert_same_host=False, preload_content=False, decode_content=False, retries=self.max_retries, timeout=timeout, chunked=chunked, ) /usr/lib/python3.11/site-packages/requests/adapters.py:564: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = method = 'GET', url = '/path/to/page', body = None headers = {'User-Agent': 'python-requests/2.32.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'} retries = Retry(total=0, connect=None, read=False, redirect=None, status=None) redirect = False, assert_same_host = False timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None release_conn = False, chunked = False, body_pos = None response_kw = {'decode_content': False, 'preload_content': False} parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/path/to/page', query=None, fragment=None) destination_scheme = None, conn = None, release_this_conn = True http_tunnel_required = True, err = None, clean_exit = False def urlopen( self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, chunked=False, body_pos=None, **response_kw ): """ Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all the raw details. .. note:: More commonly, it's appropriate to use a convenience method provided by :class:`.RequestMethods`, such as :meth:`request`. .. note:: `release_conn` will only behave as expected if `preload_content=False` because we want to make `preload_content=False` the default behaviour someday soon without breaking backwards compatibility. :param method: HTTP request method (such as GET, POST, PUT, etc.) :param url: The URL to perform the request on. :param body: Data to send in the request body, either :class:`str`, :class:`bytes`, an iterable of :class:`str`/:class:`bytes`, or a file-like object. :param headers: Dictionary of custom headers to send, such as User-Agent, If-None-Match, etc. If None, pool headers are used. If provided, these headers completely replace any pool-specific headers. :param retries: Configure the number of retries to allow before raising a :class:`~urllib3.exceptions.MaxRetryError` exception. Pass ``None`` to retry until you receive a response. Pass a :class:`~urllib3.util.retry.Retry` object for fine-grained control over different types of retries. Pass an integer number to retry connection errors that many times, but no other types of errors. 
Pass zero to never retry. If ``False``, then retries are disabled and any exception is raised immediately. Also, instead of raising a MaxRetryError on redirects, the redirect response will be returned. :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param redirect: If True, automatically handle redirects (status codes 301, 302, 303, 307, 308). Each redirect counts as a retry. Disabling retries will disable redirect, too. :param assert_same_host: If ``True``, will make sure that the host of the pool requests is consistent else will raise HostChangedError. When ``False``, you can use the pool on an HTTP proxy and request foreign hosts. :param timeout: If specified, overrides the default timeout for this one request. It may be a float (in seconds) or an instance of :class:`urllib3.util.Timeout`. :param pool_timeout: If set and the pool is set to block=True, then this method will block for ``pool_timeout`` seconds and raise EmptyPoolError if no connection is available within the time period. :param release_conn: If False, then the urlopen call will not release the connection back into the pool once a response is received (but will release if you read the entire contents of the response such as when `preload_content=True`). This is useful if you're not preloading the response's content immediately. You will need to call ``r.release_conn()`` on the response ``r`` to return the connection back into the pool. If None, it takes the value of ``response_kw.get('preload_content', True)``. :param chunked: If True, urllib3 will send the body using chunked transfer encoding. Otherwise, urllib3 will send the body using the standard content-length form. Defaults to False. :param int body_pos: Position to seek to in file-like body in the event of a retry or redirect. Typically this won't need to be set because urllib3 will auto-populate the value when needed. :param \\**response_kw: Additional parameters are passed to :meth:`urllib3.response.HTTPResponse.from_httplib` """ parsed_url = parse_url(url) destination_scheme = parsed_url.scheme if headers is None: headers = self.headers if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: release_conn = response_kw.get("preload_content", True) # Check host if assert_same_host and not self.is_same_host(url): raise HostChangedError(self, url, retries) # Ensure that the URL we're connecting to is properly encoded if url.startswith("/"): url = six.ensure_str(_encode_target(url)) else: url = six.ensure_str(parsed_url.url) conn = None # Track whether `conn` needs to be released before # returning/raising/recursing. Update this variable if necessary, and # leave `release_conn` constant throughout the function. That way, if # the function recurses, the original value of `release_conn` will be # passed down into the recursive call, and its value will be respected. # # See issue #651 [1] for details. # # [1] release_this_conn = release_conn http_tunnel_required = connection_requires_http_tunnel( self.proxy, self.proxy_config, destination_scheme ) # Merge the proxy headers. Only done when not using HTTP CONNECT. We # have to copy the headers dict so we can safely change it without those # changes being reflected in anyone else's copy. if not http_tunnel_required: headers = headers.copy() headers.update(self.proxy_headers) # Must keep the exception bound to a separate variable or else Python 3 # complains about UnboundLocalError. 
err = None # Keep track of whether we cleanly exited the except block. This # ensures we do proper cleanup in finally. clean_exit = False # Rewind body position, if needed. Record current position # for future rewinds in the event of a redirect/retry. body_pos = set_file_position(body, body_pos) try: # Request a connection from the queue. timeout_obj = self._get_timeout(timeout) conn = self._get_conn(timeout=pool_timeout) conn.timeout = timeout_obj.connect_timeout is_new_proxy_conn = self.proxy is not None and not getattr( conn, "sock", None ) if is_new_proxy_conn and http_tunnel_required: self._prepare_proxy(conn) # Make the request on the httplib connection object. httplib_response = self._make_request( conn, method, url, timeout=timeout_obj, body=body, headers=headers, chunked=chunked, ) # If we're going to release the connection in ``finally:``, then # the response doesn't need to know about the connection. Otherwise # it will also try to release it and we'll have a double-release # mess. response_conn = conn if not release_conn else None # Pass method to Response for length checking response_kw["request_method"] = method # Import httplib's response into our own wrapper object response = self.ResponseCls.from_httplib( httplib_response, pool=self, connection=response_conn, retries=retries, **response_kw ) # Everything went great! clean_exit = True except EmptyPoolError: # Didn't get a connection from the pool, no need to clean up clean_exit = True release_this_conn = False raise except ( TimeoutError, HTTPException, SocketError, ProtocolError, BaseSSLError, SSLError, CertificateError, ) as e: # Discard the connection for these exceptions. It will be # replaced during the next _get_conn() call. clean_exit = False def _is_ssl_error_message_from_http_proxy(ssl_error): # We're trying to detect the message 'WRONG_VERSION_NUMBER' but # SSLErrors are kinda all over the place when it comes to the message, # so we try to cover our bases here! message = " ".join(re.split("[^a-z]", str(ssl_error).lower())) return ( "wrong version number" in message or "unknown protocol" in message or "record layer failure" in message ) # Try to detect a common user error with proxies which is to # set an HTTP proxy to be HTTPS when it should be 'http://' # (ie {'http': 'http://proxy', 'https': 'https://proxy'}) # Instead we add a nice error message and point to a URL. if ( isinstance(e, BaseSSLError) and self.proxy and _is_ssl_error_message_from_http_proxy(e) and conn.proxy and conn.proxy.scheme == "https" ): e = ProxyError( "Your proxy appears to only use HTTP and not HTTPS, " "try changing your proxy URL to be HTTP. See: " "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" "#https-proxy-error-http-proxy", SSLError(e), ) elif isinstance(e, (BaseSSLError, CertificateError)): e = SSLError(e) elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: e = ProxyError("Cannot connect to proxy.", e) elif isinstance(e, (SocketError, HTTPException)): e = ProtocolError("Connection aborted.", e) > retries = retries.increment( method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2] ) /usr/lib/python3.11/site-packages/urllib3/connectionpool.py:801: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = Retry(total=0, connect=None, read=False, redirect=None, status=None) method = 'GET', url = '/path/to/page', response = None error = ConnectTimeoutError(, 'Connection to 10.10.1.10 timed out. 
(connect timeout=None)') _pool = _stacktrace = def increment( self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None, ): """Return a new Retry object with incremented retry counters. :param response: A response object, or None, if the server did not return a response. :type response: :class:`~urllib3.response.HTTPResponse` :param Exception error: An error encountered during the request, or None if the response was received successfully. :return: A new ``Retry`` object. """ if self.total is False and error: # Disabled, indicate to re-raise the error. raise six.reraise(type(error), error, _stacktrace) total = self.total if total is not None: total -= 1 connect = self.connect read = self.read redirect = self.redirect status_count = self.status other = self.other cause = "unknown" status = None redirect_location = None if error and self._is_connection_error(error): # Connect retry? if connect is False: raise six.reraise(type(error), error, _stacktrace) elif connect is not None: connect -= 1 elif error and self._is_read_error(error): # Read retry? if read is False or not self._is_method_retryable(method): raise six.reraise(type(error), error, _stacktrace) elif read is not None: read -= 1 elif error: # Other retry? if other is not None: other -= 1 elif response and response.get_redirect_location(): # Redirect retry? if redirect is not None: redirect -= 1 cause = "too many redirects" redirect_location = response.get_redirect_location() status = response.status else: # Incrementing because of a server error like a 500 in # status_forcelist and the given method is in the allowed_methods cause = ResponseError.GENERIC_ERROR if response and response.status: if status_count is not None: status_count -= 1 cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) status = response.status history = self.history + ( RequestHistory(method, url, error, status, redirect_location), ) new_retry = self.new( total=total, connect=connect, read=read, redirect=redirect, status=status_count, other=other, history=history, ) if new_retry.is_exhausted(): > raise MaxRetryError(_pool, url, error or ResponseError(cause)) E urllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='10.10.1.10', port=1080): Max retries exceeded with url: /path/to/page (Caused by ConnectTimeoutError(, 'Connection to 10.10.1.10 timed out. 
(connect timeout=None)')) /usr/lib/python3.11/site-packages/urllib3/util/retry.py:594: MaxRetryError During handling of the above exception, another exception occurred: def test_unix_domain_adapter_connection_proxies_error(): session = requests_unixsocket.Session('http+unix://') for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: with pytest.raises(ValueError) as excinfo: > getattr(session, method)( 'http+unix://socket_does_not_exist/path/to/page', proxies={"http+unix": "http://10.10.1.10:1080"}) requests_unixsocket/tests/test_requests_unixsocket.py:85: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib/python3.11/site-packages/requests/sessions.py:602: in get return self.request("GET", url, **kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:589: in request resp = self.send(prep, **send_kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:703: in send r = adapter.send(request, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , stream = False timeout = Timeout(connect=None, read=None, total=None), verify = True cert = None, proxies = OrderedDict([('http+unix', 'http://10.10.1.10:1080')]) def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ try: conn = self._get_connection(request, verify, proxies=proxies, cert=cert) except LocationValueError as e: raise InvalidURL(e, request=request) self.cert_verify(conn, request.url, verify, cert) url = self.request_url(request, proxies) self.add_headers( request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies, ) chunked = not (request.body is None or "Content-Length" in request.headers) if isinstance(timeout, tuple): try: connect, read = timeout timeout = TimeoutSauce(connect=connect, read=read) except ValueError: raise ValueError( f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " f"or a single float to set both timeouts to the same value." 
) elif isinstance(timeout, TimeoutSauce): pass else: timeout = TimeoutSauce(connect=timeout, read=timeout) try: resp = conn.urlopen( method=request.method, url=url, body=request.body, headers=request.headers, redirect=False, assert_same_host=False, preload_content=False, decode_content=False, retries=self.max_retries, timeout=timeout, chunked=chunked, ) except (ProtocolError, OSError) as err: raise ConnectionError(err, request=request) except MaxRetryError as e: if isinstance(e.reason, ConnectTimeoutError): # TODO: Remove this in 3.0.0: see #2811 if not isinstance(e.reason, NewConnectionError): > raise ConnectTimeout(e, request=request) E requests.exceptions.ConnectTimeout: HTTPConnectionPool(host='10.10.1.10', port=1080): Max retries exceeded with url: /path/to/page (Caused by ConnectTimeoutError(, 'Connection to 10.10.1.10 timed out. (connect timeout=None)')) /usr/lib/python3.11/site-packages/requests/adapters.py:585: ConnectTimeout _____________________ test_unix_domain_adapter_monkeypatch _____________________ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ try: > conn = self._get_connection(request, verify, proxies=proxies, cert=cert) /usr/lib/python3.11/site-packages/requests/adapters.py:532: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , verify = True, proxies = OrderedDict() cert = None def _get_connection(self, request, verify, proxies=None, cert=None): # Replace the existing get_connection without breaking things and # ensure that TLS settings are considered when we interact with # urllib3 HTTP Pools proxy = select_proxy(request.url, proxies) try: host_params, pool_kwargs = _urllib3_request_context(request, verify, cert) except ValueError as e: raise InvalidURL(e, request=request) if proxy: proxy = prepend_scheme_if_needed(proxy, "http") proxy_url = parse_url(proxy) if not proxy_url.host: raise InvalidProxyURL( "Please check proxy URL. It is malformed " "and could be missing the host." 
) proxy_manager = self.proxy_manager_for(proxy) conn = proxy_manager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) else: # Only scheme should be lower case > conn = self.poolmanager.connection_from_host( **host_params, pool_kwargs=pool_kwargs ) /usr/lib/python3.11/site-packages/requests/adapters.py:400: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = host = '%2Ftmp%2Ftest_requests.1426325333_325_c9cccae7', port = 80 scheme = 'http+unix' pool_kwargs = {'cert_reqs': 'CERT_REQUIRED', 'ssl_context': } def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """ if not host: raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) request_context["scheme"] = scheme or "http" if not port: port = port_by_scheme.get(request_context["scheme"].lower(), 80) request_context["port"] = port request_context["host"] = host > return self.connection_from_context(request_context) /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:246: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request_context = {'block': False, 'cert_reqs': 'CERT_REQUIRED', 'host': '%2Ftmp%2Ftest_requests.1426325333_325_c9cccae7', 'maxsize': 10, ...} def connection_from_context(self, request_context): """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. 
""" scheme = request_context["scheme"].lower() pool_key_constructor = self.key_fn_by_scheme.get(scheme) if not pool_key_constructor: > raise URLSchemeUnknown(scheme) E urllib3.exceptions.URLSchemeUnknown: Not supported URL scheme http+unix /usr/lib/python3.11/site-packages/urllib3/poolmanager.py:258: URLSchemeUnknown During handling of the above exception, another exception occurred: def test_unix_domain_adapter_monkeypatch(): with UnixSocketServerThread() as usock_thread: with requests_unixsocket.monkeypatch('http+unix://'): urlencoded_usock = requests.compat.quote_plus(usock_thread.usock) url = 'http+unix://%s/path/to/page' % urlencoded_usock for method in ['get', 'post', 'head', 'patch', 'put', 'delete', 'options']: logger.debug('Calling session.%s(%r) ...', method, url) > r = getattr(requests, method)(url) requests_unixsocket/tests/test_requests_unixsocket.py:101: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ requests_unixsocket/__init__.py:51: in get return request('get', url, **kwargs) requests_unixsocket/__init__.py:46: in request return session.request(method=method, url=url, **kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:589: in request resp = self.send(prep, **send_kwargs) /usr/lib/python3.11/site-packages/requests/sessions.py:703: in send r = adapter.send(request, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = request = , stream = False, timeout = None, verify = True cert = None, proxies = OrderedDict() def send( self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None ): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. 
        :rtype: requests.Response
        """
        try:
            conn = self._get_connection(request, verify, proxies=proxies, cert=cert)
        except LocationValueError as e:
>           raise InvalidURL(e, request=request)
E           requests.exceptions.InvalidURL: Not supported URL scheme http+unix

/usr/lib/python3.11/site-packages/requests/adapters.py:534: InvalidURL
=========================== short test summary info ============================
FAILED requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_ok
FAILED requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_url_with_query_params
FAILED requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_connection_error
FAILED requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_connection_proxies_error
FAILED requests_unixsocket/tests/test_requests_unixsocket.py::test_unix_domain_adapter_monkeypatch
======================== 5 failed in 130.63s (0:02:10) =========================
error: Bad exit status from /var/tmp/rpm-tmp.DW6Emm (%check)
    Bad exit status from /var/tmp/rpm-tmp.DW6Emm (%check)
RPM build errors:
Child return code was: 1
EXCEPTION: [Error('Command failed: \n # /usr/bin/systemd-nspawn -q -M a028a099c1ce4b378dc129e74fbbd33c -D /var/lib/mock/dist-an23.3-build-430255-73048/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf --bind=/dev/mapper/control --bind=/dev/fuse --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin \'--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"\' \'--setenv=PS1= \\s-\\v\\$ \' --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c \'/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-requests-unixsocket.spec\'\n', 1)]
Traceback (most recent call last):
  File "/usr/lib/python3.6/site-packages/mockbuild/trace_decorator.py", line 93, in trace
    result = func(*args, **kw)
  File "/usr/lib/python3.6/site-packages/mockbuild/util.py", line 612, in do_with_status
    raise exception.Error("Command failed: \n # %s\n%s" % (cmd_pretty(command, env), output), child.returncode)
mockbuild.exception.Error: Command failed:
 # /usr/bin/systemd-nspawn -q -M a028a099c1ce4b378dc129e74fbbd33c -D /var/lib/mock/dist-an23.3-build-430255-73048/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.mdqe8hut:/etc/resolv.conf --bind=/dev/mapper/control --bind=/dev/fuse --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin '--setenv=PROMPT_COMMAND=printf "\033]0;\007"' '--setenv=PS1= \s-\v\$ ' --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c '/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-requests-unixsocket.spec'
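Editor's note on the %check failures above: the tracebacks show that requests 2.32's HTTPAdapter.send() resolves connections through its own _get_connection() path, which asks urllib3's PoolManager for a pool keyed by URL scheme; the unknown http+unix scheme raises URLSchemeUnknown, which requests re-raises as InvalidURL. The snippet below is a minimal reproduction sketch under the assumption of an environment matching this build root (requests 2.32.0, urllib3 1.26.x, requests-unixsocket 0.3.0); the socket path is hypothetical and no server needs to be listening, since the failure occurs before any connection attempt.

import requests
import requests_unixsocket

# Session with the http+unix scheme, as used in the failing tests above.
session = requests_unixsocket.Session('http+unix://')

# Hypothetical, percent-encoded socket path (illustration only).
url = 'http+unix://%2Ftmp%2Fexample.sock/path/to/page'

try:
    session.get(url)
except requests.exceptions.InvalidURL as exc:
    # requests 2.32 builds the pool via HTTPAdapter._get_connection(), which
    # consults urllib3's PoolManager by scheme; "http+unix" is unknown there,
    # so URLSchemeUnknown surfaces as InvalidURL -- the same error recorded
    # in the test log above.
    print('reproduced:', exc)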