Mock Version: 5.6 Mock Version: 5.6 Mock Version: 5.6 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target s390x --nodeps /builddir/build/SPECS/python-geopandas.spec'], chrootPath='/var/lib/mock/f42-build-55024301-6531854/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=201600uid=1000gid=425user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '2b8e2245a74641ad9fb389f03750c6e3', '-D', '/var/lib/mock/f42-build-55024301-6531854/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target s390x --nodeps /builddir/build/SPECS/python-geopandas.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: s390x Building for target s390x setting SOURCE_DATE_EPOCH=1731801600 Wrote: /builddir/build/SRPMS/python-geopandas-1.0.1-2.fc42.src.rpm Child return code was: 0 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target s390x --nodeps /builddir/build/SPECS/python-geopandas.spec'], chrootPath='/var/lib/mock/f42-build-55024301-6531854/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=201600uid=1000gid=425user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', 
'--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueraiseExc=FalseprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '3a7f3329f0cb4edaa1047d2d8bc54afe', '-D', '/var/lib/mock/f42-build-55024301-6531854/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target s390x --nodeps /builddir/build/SPECS/python-geopandas.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: s390x Building for target s390x setting SOURCE_DATE_EPOCH=1731801600 Executing(%mkbuilddir): /bin/sh -e /var/tmp/rpm-tmp.lElerv + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + test -d /builddir/build/BUILD/python-geopandas-1.0.1-build + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w /builddir/build/BUILD/python-geopandas-1.0.1-build + /usr/bin/rm -rf /builddir/build/BUILD/python-geopandas-1.0.1-build + /usr/bin/mkdir -p /builddir/build/BUILD/python-geopandas-1.0.1-build + /usr/bin/mkdir -p /builddir/build/BUILD/python-geopandas-1.0.1-build/SPECPARTS + RPM_EC=0 ++ jobs -p + exit 0 Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.A2M44a + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + rm -rf geopandas-1.0.1 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/geopandas-1.0.1.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd geopandas-1.0.1 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . 
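
Up to this point the %prep scriptlet has only unpacked Source0 and normalized file permissions. Outside of mock, roughly the same preparation can be reproduced by hand; the following is a sketch that assumes the tarball sits in the current directory rather than /builddir/build/SOURCES and uses plain tar in place of rpmuncompress:

    # Rough standalone equivalent of the %prep trace above (paths illustrative)
    rm -rf geopandas-1.0.1
    tar -xzf geopandas-1.0.1.tar.gz          # rpmuncompress -x picks the decompressor automatically
    cd geopandas-1.0.1
    chmod -Rf a+rX,u+w,g-w,o-w .             # same permission normalization rpm applies
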
+ RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.lZE7XL + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + cd geopandas-1.0.1 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(packaging)' + echo 'python3dist(pip) >= 19' + '[' -f pyproject.toml ']' + echo '(python3dist(tomli) if python3-devel < 3.11)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + mkdir -p /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + echo -n + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + VALAFLAGS=-g + RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Clink-arg=-specs=/usr/lib/rpm/redhat/redhat-package-notes --cap-lints=warn' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + LT_SYS_LIBRARY_PATH=/usr/lib64: + CC=gcc + CXX=g++ + TMPDIR=/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + RPM_TOXENV=py313 + HOSTNAME=rpmbuild + /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject-wheeldir --output /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-buildrequires Handling setuptools>=61.0.0 from build-system.requires Requirement satisfied: setuptools>=61.0.0 (installed: setuptools 74.1.3) Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running egg_info creating geopandas.egg-info writing geopandas.egg-info/PKG-INFO writing dependency_links to geopandas.egg-info/dependency_links.txt writing requirements to geopandas.egg-info/requires.txt writing top-level names to geopandas.egg-info/top_level.txt writing 
manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file 'geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running dist_info writing geopandas.egg-info/PKG-INFO writing dependency_links to geopandas.egg-info/dependency_links.txt writing requirements to geopandas.egg-info/requires.txt writing top-level names to geopandas.egg-info/top_level.txt reading manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file 'geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg creating '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas-1.0.1.dist-info' Handling numpy >=1.22 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: numpy >=1.22 (installed: numpy 1.26.4) Handling pyogrio >=0.7.2 from hook generated metadata: Requires-Dist (geopandas) Requirement not satisfied: pyogrio >=0.7.2 Handling packaging from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: packaging (installed: packaging 24.2) Handling pandas >=1.4.0 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: pandas >=1.4.0 (installed: pandas 2.2.1) Handling pyproj >=3.3.0 from hook generated metadata: Requires-Dist (geopandas) Requirement not satisfied: pyproj >=3.3.0 Handling shapely >=2.0.0 from hook generated metadata: Requires-Dist (geopandas) Requirement not satisfied: shapely >=2.0.0 Handling psycopg-binary >=3.1.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: psycopg-binary >=3.1.0 ; extra == 'all' Handling SQLAlchemy >=1.3 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: SQLAlchemy >=1.3 ; extra == 'all' Handling geopy ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: geopy ; extra == 'all' Handling matplotlib >=3.5.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: matplotlib >=3.5.0 ; extra == 'all' Handling mapclassify ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: mapclassify ; extra == 'all' Handling xyzservices ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: xyzservices ; extra == 'all' Handling folium ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: folium ; extra == 'all' Handling GeoAlchemy2 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: GeoAlchemy2 ; extra == 'all' Handling pyarrow >=8.0.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pyarrow >=8.0.0 ; extra == 'all' Handling pytest >=3.1.0 ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest >=3.1.0 ; extra == 'dev' 
Handling pytest-cov ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest-cov ; extra == 'dev' Handling pytest-xdist ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest-xdist ; extra == 'dev' Handling codecov ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: codecov ; extra == 'dev' Handling black ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: black ; extra == 'dev' Handling pre-commit ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pre-commit ; extra == 'dev' + cat /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-buildrequires + rm -rfv geopandas-1.0.1.dist-info/ removed 'geopandas-1.0.1.dist-info/top_level.txt' removed 'geopandas-1.0.1.dist-info/METADATA' removed 'geopandas-1.0.1.dist-info/LICENSE.txt' removed directory 'geopandas-1.0.1.dist-info/' + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-geopandas-1.0.1-2.fc42.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noprep --noclean --target s390x --nodeps /builddir/build/SPECS/python-geopandas.spec'], chrootPath='/var/lib/mock/f42-build-55024301-6531854/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=201600uid=1000gid=425user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueraiseExc=FalseprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '31e5e8fc16af4bfd8de66c02b6fba21b', '-D', '/var/lib/mock/f42-build-55024301-6531854/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', 
'--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --noprep --noclean --target s390x --nodeps /builddir/build/SPECS/python-geopandas.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: s390x Building for target s390x setting SOURCE_DATE_EPOCH=1731801600 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.ebeWO5 + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + cd geopandas-1.0.1 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(packaging)' + echo 'python3dist(pip) >= 19' + '[' -f pyproject.toml ']' + echo '(python3dist(tomli) if python3-devel < 3.11)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + mkdir -p /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + echo -n + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + VALAFLAGS=-g + RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Clink-arg=-specs=/usr/lib/rpm/redhat/redhat-package-notes --cap-lints=warn' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + LT_SYS_LIBRARY_PATH=/usr/lib64: + CC=gcc + CXX=g++ + TMPDIR=/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + RPM_TOXENV=py313 + HOSTNAME=rpmbuild + /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject-wheeldir --output 
/builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-buildrequires Handling setuptools>=61.0.0 from build-system.requires Requirement satisfied: setuptools>=61.0.0 (installed: setuptools 74.1.3) Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running egg_info writing geopandas.egg-info/PKG-INFO writing dependency_links to geopandas.egg-info/dependency_links.txt writing requirements to geopandas.egg-info/requires.txt writing top-level names to geopandas.egg-info/top_level.txt reading manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file 'geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running dist_info writing geopandas.egg-info/PKG-INFO writing dependency_links to geopandas.egg-info/dependency_links.txt writing requirements to geopandas.egg-info/requires.txt writing top-level names to geopandas.egg-info/top_level.txt reading manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file 'geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg creating '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas-1.0.1.dist-info' Handling numpy >=1.22 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: numpy >=1.22 (installed: numpy 1.26.4) Handling pyogrio >=0.7.2 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: pyogrio >=0.7.2 (installed: pyogrio 0.10.0) Handling packaging from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: packaging (installed: packaging 24.2) Handling pandas >=1.4.0 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: pandas >=1.4.0 (installed: pandas 2.2.1) Handling pyproj >=3.3.0 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: pyproj >=3.3.0 (installed: pyproj 3.7.0) Handling shapely >=2.0.0 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: shapely >=2.0.0 (installed: shapely 2.0.6) Handling psycopg-binary >=3.1.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: psycopg-binary >=3.1.0 ; extra == 'all' Handling SQLAlchemy >=1.3 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: SQLAlchemy >=1.3 ; extra == 'all' Handling geopy ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: geopy ; extra == 'all' Handling matplotlib >=3.5.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: matplotlib >=3.5.0 ; extra == 'all' Handling mapclassify ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: mapclassify ; extra == 'all' Handling xyzservices ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien 
requirement: xyzservices ; extra == 'all' Handling folium ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: folium ; extra == 'all' Handling GeoAlchemy2 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: GeoAlchemy2 ; extra == 'all' Handling pyarrow >=8.0.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pyarrow >=8.0.0 ; extra == 'all' Handling pytest >=3.1.0 ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest >=3.1.0 ; extra == 'dev' Handling pytest-cov ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest-cov ; extra == 'dev' Handling pytest-xdist ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest-xdist ; extra == 'dev' Handling codecov ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: codecov ; extra == 'dev' Handling black ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: black ; extra == 'dev' Handling pre-commit ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pre-commit ; extra == 'dev' + cat /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-buildrequires + rm -rfv geopandas-1.0.1.dist-info/ removed 'geopandas-1.0.1.dist-info/top_level.txt' removed 'geopandas-1.0.1.dist-info/METADATA' removed 'geopandas-1.0.1.dist-info/LICENSE.txt' removed directory 'geopandas-1.0.1.dist-info/' + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-geopandas-1.0.1-2.fc42.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. 
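
The return code 11 from rpmbuild -br is not a failure: it is the signal that %generate_buildrequires emitted BuildRequires that rpmbuild itself does not resolve. Mock writes them into the *.buildreqs.nosrc.rpm, installs whatever is missing in the chroot (on the first pass pyogrio >=0.7.2, pyproj >=3.3.0 and shapely >=2.0.0 were unsatisfied), and repeats the stage until every generated requirement is present, which is what this second pass confirms. A minimal way to inspect and drive the same loop by hand, assuming a local mock installation and an illustrative chroot name, looks like this:

    # List the BuildRequires that the %generate_buildrequires stage emitted
    rpm -qp --requires python-geopandas-1.0.1-2.fc42.buildreqs.nosrc.rpm

    # Let mock run the install-and-retry loop itself (chroot name is illustrative)
    mock -r fedora-42-s390x --rebuild python-geopandas-1.0.1-2.fc42.src.rpm
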
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target s390x /builddir/build/SPECS/python-geopandas.spec'], chrootPath='/var/lib/mock/f42-build-55024301-6531854/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=201600uid=1000gid=425user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'e03016ded2c24d52b2a57dcecadef146', '-D', '/var/lib/mock/f42-build-55024301-6531854/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target s390x /builddir/build/SPECS/python-geopandas.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: s390x Building for target s390x setting SOURCE_DATE_EPOCH=1731801600 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.IepYht + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + cd geopandas-1.0.1 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(packaging)' + echo 'python3dist(pip) >= 19' + '[' -f pyproject.toml ']' + echo '(python3dist(tomli) if python3-devel < 3.11)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + mkdir -p /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + echo -n + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 
-Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + VALAFLAGS=-g + RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Clink-arg=-specs=/usr/lib/rpm/redhat/redhat-package-notes --cap-lints=warn' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + LT_SYS_LIBRARY_PATH=/usr/lib64: + CC=gcc + CXX=g++ + TMPDIR=/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + RPM_TOXENV=py313 + HOSTNAME=rpmbuild + /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject-wheeldir --output /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-buildrequires Handling setuptools>=61.0.0 from build-system.requires Requirement satisfied: setuptools>=61.0.0 (installed: setuptools 74.1.3) Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running egg_info writing geopandas.egg-info/PKG-INFO writing dependency_links to geopandas.egg-info/dependency_links.txt writing requirements to geopandas.egg-info/requires.txt writing top-level names to geopandas.egg-info/top_level.txt reading manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file 'geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running dist_info writing geopandas.egg-info/PKG-INFO writing dependency_links to geopandas.egg-info/dependency_links.txt writing requirements to geopandas.egg-info/requires.txt writing top-level names to geopandas.egg-info/top_level.txt reading 
manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file 'geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg creating '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas-1.0.1.dist-info' Handling numpy >=1.22 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: numpy >=1.22 (installed: numpy 1.26.4) Handling pyogrio >=0.7.2 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: pyogrio >=0.7.2 (installed: pyogrio 0.10.0) Handling packaging from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: packaging (installed: packaging 24.2) Handling pandas >=1.4.0 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: pandas >=1.4.0 (installed: pandas 2.2.1) Handling pyproj >=3.3.0 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: pyproj >=3.3.0 (installed: pyproj 3.7.0) Handling shapely >=2.0.0 from hook generated metadata: Requires-Dist (geopandas) Requirement satisfied: shapely >=2.0.0 (installed: shapely 2.0.6) Handling psycopg-binary >=3.1.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: psycopg-binary >=3.1.0 ; extra == 'all' Handling SQLAlchemy >=1.3 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: SQLAlchemy >=1.3 ; extra == 'all' Handling geopy ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: geopy ; extra == 'all' Handling matplotlib >=3.5.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: matplotlib >=3.5.0 ; extra == 'all' Handling mapclassify ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: mapclassify ; extra == 'all' Handling xyzservices ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: xyzservices ; extra == 'all' Handling folium ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: folium ; extra == 'all' Handling GeoAlchemy2 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: GeoAlchemy2 ; extra == 'all' Handling pyarrow >=8.0.0 ; extra == 'all' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pyarrow >=8.0.0 ; extra == 'all' Handling pytest >=3.1.0 ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest >=3.1.0 ; extra == 'dev' Handling pytest-cov ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest-cov ; extra == 'dev' Handling pytest-xdist ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: pytest-xdist ; extra == 'dev' Handling codecov ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: codecov ; extra == 'dev' Handling black ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: black ; extra == 'dev' Handling pre-commit ; extra == 'dev' from hook generated metadata: Requires-Dist (geopandas) Ignoring alien requirement: 
pre-commit ; extra == 'dev' + cat /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-buildrequires + rm -rfv geopandas-1.0.1.dist-info/ removed 'geopandas-1.0.1.dist-info/top_level.txt' removed 'geopandas-1.0.1.dist-info/METADATA' removed 'geopandas-1.0.1.dist-info/LICENSE.txt' removed directory 'geopandas-1.0.1.dist-info/' + RPM_EC=0 ++ jobs -p + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.2oEQAZ + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Clink-arg=-specs=/usr/lib/rpm/redhat/redhat-package-notes --cap-lints=warn' + export RUSTFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd geopandas-1.0.1 + mkdir -p /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 
-fasynchronous-unwind-tables -fstack-clash-protection ' + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + VALAFLAGS=-g + RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Clink-arg=-specs=/usr/lib/rpm/redhat/redhat-package-notes --cap-lints=warn' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + LT_SYS_LIBRARY_PATH=/usr/lib64: + CC=gcc + CXX=g++ + TMPDIR=/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_wheel.py /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject-wheeldir Processing /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1 Preparing metadata (pyproject.toml): started Running command Preparing metadata (pyproject.toml) Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running dist_info creating /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info writing /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info/PKG-INFO writing dependency_links to /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info/dependency_links.txt writing requirements to /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info/requires.txt writing top-level names to /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info/top_level.txt writing manifest file '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info/SOURCES.txt' reading manifest file '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg creating 
'/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-modern-metadata-3x9mczrv/geopandas-1.0.1.dist-info' Preparing metadata (pyproject.toml): finished with status 'done' Building wheels for collected packages: geopandas Building wheel for geopandas (pyproject.toml): started Running command Building wheel for geopandas (pyproject.toml) Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg running bdist_wheel running build running build_py Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg creating build creating build/lib creating build/lib/geopandas copying geopandas/__init__.py -> build/lib/geopandas copying geopandas/_compat.py -> build/lib/geopandas copying geopandas/_config.py -> build/lib/geopandas copying geopandas/_decorator.py -> build/lib/geopandas copying geopandas/_version.py -> build/lib/geopandas copying geopandas/array.py -> build/lib/geopandas copying geopandas/base.py -> build/lib/geopandas copying geopandas/conftest.py -> build/lib/geopandas copying geopandas/explore.py -> build/lib/geopandas copying geopandas/geodataframe.py -> build/lib/geopandas copying geopandas/geoseries.py -> build/lib/geopandas copying geopandas/plotting.py -> build/lib/geopandas copying geopandas/sindex.py -> build/lib/geopandas copying geopandas/testing.py -> build/lib/geopandas creating build/lib/geopandas/datasets copying geopandas/datasets/__init__.py -> build/lib/geopandas/datasets creating build/lib/geopandas/io copying geopandas/io/__init__.py -> build/lib/geopandas/io copying geopandas/io/_geoarrow.py -> build/lib/geopandas/io copying geopandas/io/_pyarrow_hotfix.py -> build/lib/geopandas/io copying geopandas/io/arrow.py -> build/lib/geopandas/io copying geopandas/io/file.py -> build/lib/geopandas/io copying geopandas/io/sql.py -> build/lib/geopandas/io copying geopandas/io/util.py -> build/lib/geopandas/io creating build/lib/geopandas/tests copying geopandas/tests/__init__.py -> build/lib/geopandas/tests copying geopandas/tests/test_api.py -> build/lib/geopandas/tests copying geopandas/tests/test_array.py -> build/lib/geopandas/tests copying geopandas/tests/test_compat.py -> build/lib/geopandas/tests copying geopandas/tests/test_config.py -> build/lib/geopandas/tests copying geopandas/tests/test_crs.py -> build/lib/geopandas/tests copying geopandas/tests/test_datasets.py -> build/lib/geopandas/tests copying geopandas/tests/test_decorator.py -> build/lib/geopandas/tests copying geopandas/tests/test_dissolve.py -> build/lib/geopandas/tests copying geopandas/tests/test_explore.py -> build/lib/geopandas/tests copying geopandas/tests/test_extension_array.py -> build/lib/geopandas/tests copying geopandas/tests/test_geocode.py -> build/lib/geopandas/tests copying geopandas/tests/test_geodataframe.py -> build/lib/geopandas/tests copying geopandas/tests/test_geom_methods.py -> build/lib/geopandas/tests copying geopandas/tests/test_geoseries.py -> build/lib/geopandas/tests copying geopandas/tests/test_merge.py -> build/lib/geopandas/tests copying geopandas/tests/test_op_output_types.py -> build/lib/geopandas/tests copying geopandas/tests/test_overlay.py -> build/lib/geopandas/tests copying 
geopandas/tests/test_pandas_methods.py -> build/lib/geopandas/tests copying geopandas/tests/test_plotting.py -> build/lib/geopandas/tests copying geopandas/tests/test_show_versions.py -> build/lib/geopandas/tests copying geopandas/tests/test_sindex.py -> build/lib/geopandas/tests copying geopandas/tests/test_testing.py -> build/lib/geopandas/tests copying geopandas/tests/test_types.py -> build/lib/geopandas/tests copying geopandas/tests/util.py -> build/lib/geopandas/tests creating build/lib/geopandas/tools copying geopandas/tools/__init__.py -> build/lib/geopandas/tools copying geopandas/tools/_random.py -> build/lib/geopandas/tools copying geopandas/tools/_show_versions.py -> build/lib/geopandas/tools copying geopandas/tools/clip.py -> build/lib/geopandas/tools copying geopandas/tools/geocoding.py -> build/lib/geopandas/tools copying geopandas/tools/hilbert_curve.py -> build/lib/geopandas/tools copying geopandas/tools/overlay.py -> build/lib/geopandas/tools copying geopandas/tools/sjoin.py -> build/lib/geopandas/tools copying geopandas/tools/util.py -> build/lib/geopandas/tools creating build/lib/geopandas/io/tests copying geopandas/io/tests/__init__.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/generate_legacy_storage_files.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/test_arrow.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/test_file.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/test_file_geom_types_drivers.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/test_geoarrow.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/test_infer_schema.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/test_pickle.py -> build/lib/geopandas/io/tests copying geopandas/io/tests/test_sql.py -> build/lib/geopandas/io/tests creating build/lib/geopandas/tools/tests copying geopandas/tools/tests/__init__.py -> build/lib/geopandas/tools/tests copying geopandas/tools/tests/test_clip.py -> build/lib/geopandas/tools/tests copying geopandas/tools/tests/test_hilbert_curve.py -> build/lib/geopandas/tools/tests copying geopandas/tools/tests/test_random.py -> build/lib/geopandas/tools/tests copying geopandas/tools/tests/test_sjoin.py -> build/lib/geopandas/tools/tests copying geopandas/tools/tests/test_tools.py -> build/lib/geopandas/tools/tests running egg_info writing geopandas.egg-info/PKG-INFO writing dependency_links to geopandas.egg-info/dependency_links.txt writing requirements to geopandas.egg-info/requires.txt writing top-level names to geopandas.egg-info/top_level.txt reading manifest file 'geopandas.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE.txt' writing manifest file 'geopandas.egg-info/SOURCES.txt' Failed to load config from /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject.toml: 'versioneer' Try to load it from setup.cfg creating build/lib/geopandas/tests/data copying geopandas/tests/data/null_geom.geojson -> build/lib/geopandas/tests/data UPDATING build/lib/geopandas/_version.py set build/lib/geopandas/_version.py to '1.0.1' installing to build/bdist.linux-s390x/wheel running install running install_lib creating build/bdist.linux-s390x creating build/bdist.linux-s390x/wheel creating build/bdist.linux-s390x/wheel/geopandas copying build/lib/geopandas/__init__.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/_compat.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/_config.py -> 
build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/_decorator.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/_version.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/array.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/base.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/conftest.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/explore.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/geodataframe.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/geoseries.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/plotting.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/sindex.py -> build/bdist.linux-s390x/wheel/./geopandas copying build/lib/geopandas/testing.py -> build/bdist.linux-s390x/wheel/./geopandas creating build/bdist.linux-s390x/wheel/geopandas/datasets copying build/lib/geopandas/datasets/__init__.py -> build/bdist.linux-s390x/wheel/./geopandas/datasets creating build/bdist.linux-s390x/wheel/geopandas/io copying build/lib/geopandas/io/__init__.py -> build/bdist.linux-s390x/wheel/./geopandas/io copying build/lib/geopandas/io/_geoarrow.py -> build/bdist.linux-s390x/wheel/./geopandas/io copying build/lib/geopandas/io/_pyarrow_hotfix.py -> build/bdist.linux-s390x/wheel/./geopandas/io copying build/lib/geopandas/io/arrow.py -> build/bdist.linux-s390x/wheel/./geopandas/io copying build/lib/geopandas/io/file.py -> build/bdist.linux-s390x/wheel/./geopandas/io copying build/lib/geopandas/io/sql.py -> build/bdist.linux-s390x/wheel/./geopandas/io copying build/lib/geopandas/io/util.py -> build/bdist.linux-s390x/wheel/./geopandas/io creating build/bdist.linux-s390x/wheel/geopandas/io/tests copying build/lib/geopandas/io/tests/__init__.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/generate_legacy_storage_files.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/test_arrow.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/test_file.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/test_file_geom_types_drivers.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/test_geoarrow.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/test_infer_schema.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/test_pickle.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests copying build/lib/geopandas/io/tests/test_sql.py -> build/bdist.linux-s390x/wheel/./geopandas/io/tests creating build/bdist.linux-s390x/wheel/geopandas/tests copying build/lib/geopandas/tests/__init__.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_api.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_array.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_compat.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_config.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_crs.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying 
build/lib/geopandas/tests/test_datasets.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_decorator.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_dissolve.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_explore.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_extension_array.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_geocode.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_geodataframe.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_geom_methods.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_geoseries.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_merge.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_op_output_types.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_overlay.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_pandas_methods.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_plotting.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_show_versions.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_sindex.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_testing.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/test_types.py -> build/bdist.linux-s390x/wheel/./geopandas/tests copying build/lib/geopandas/tests/util.py -> build/bdist.linux-s390x/wheel/./geopandas/tests creating build/bdist.linux-s390x/wheel/geopandas/tests/data copying build/lib/geopandas/tests/data/null_geom.geojson -> build/bdist.linux-s390x/wheel/./geopandas/tests/data creating build/bdist.linux-s390x/wheel/geopandas/tools copying build/lib/geopandas/tools/__init__.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/_random.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/_show_versions.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/clip.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/geocoding.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/hilbert_curve.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/overlay.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/sjoin.py -> build/bdist.linux-s390x/wheel/./geopandas/tools copying build/lib/geopandas/tools/util.py -> build/bdist.linux-s390x/wheel/./geopandas/tools creating build/bdist.linux-s390x/wheel/geopandas/tools/tests copying build/lib/geopandas/tools/tests/__init__.py -> build/bdist.linux-s390x/wheel/./geopandas/tools/tests copying build/lib/geopandas/tools/tests/test_clip.py -> build/bdist.linux-s390x/wheel/./geopandas/tools/tests copying build/lib/geopandas/tools/tests/test_hilbert_curve.py -> build/bdist.linux-s390x/wheel/./geopandas/tools/tests copying build/lib/geopandas/tools/tests/test_random.py -> build/bdist.linux-s390x/wheel/./geopandas/tools/tests 
copying build/lib/geopandas/tools/tests/test_sjoin.py -> build/bdist.linux-s390x/wheel/./geopandas/tools/tests copying build/lib/geopandas/tools/tests/test_tools.py -> build/bdist.linux-s390x/wheel/./geopandas/tools/tests running install_egg_info Copying geopandas.egg-info to build/bdist.linux-s390x/wheel/./geopandas-1.0.1-py3.13.egg-info running install_scripts creating build/bdist.linux-s390x/wheel/geopandas-1.0.1.dist-info/WHEEL creating '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir/pip-wheel-2h1tg757/.tmp-2dn5f4hf/geopandas-1.0.1-py3-none-any.whl' and adding 'build/bdist.linux-s390x/wheel' to it adding 'geopandas/__init__.py' adding 'geopandas/_compat.py' adding 'geopandas/_config.py' adding 'geopandas/_decorator.py' adding 'geopandas/_version.py' adding 'geopandas/array.py' adding 'geopandas/base.py' adding 'geopandas/conftest.py' adding 'geopandas/explore.py' adding 'geopandas/geodataframe.py' adding 'geopandas/geoseries.py' adding 'geopandas/plotting.py' adding 'geopandas/sindex.py' adding 'geopandas/testing.py' adding 'geopandas/datasets/__init__.py' adding 'geopandas/io/__init__.py' adding 'geopandas/io/_geoarrow.py' adding 'geopandas/io/_pyarrow_hotfix.py' adding 'geopandas/io/arrow.py' adding 'geopandas/io/file.py' adding 'geopandas/io/sql.py' adding 'geopandas/io/util.py' adding 'geopandas/io/tests/__init__.py' adding 'geopandas/io/tests/generate_legacy_storage_files.py' adding 'geopandas/io/tests/test_arrow.py' adding 'geopandas/io/tests/test_file.py' adding 'geopandas/io/tests/test_file_geom_types_drivers.py' adding 'geopandas/io/tests/test_geoarrow.py' adding 'geopandas/io/tests/test_infer_schema.py' adding 'geopandas/io/tests/test_pickle.py' adding 'geopandas/io/tests/test_sql.py' adding 'geopandas/tests/__init__.py' adding 'geopandas/tests/test_api.py' adding 'geopandas/tests/test_array.py' adding 'geopandas/tests/test_compat.py' adding 'geopandas/tests/test_config.py' adding 'geopandas/tests/test_crs.py' adding 'geopandas/tests/test_datasets.py' adding 'geopandas/tests/test_decorator.py' adding 'geopandas/tests/test_dissolve.py' adding 'geopandas/tests/test_explore.py' adding 'geopandas/tests/test_extension_array.py' adding 'geopandas/tests/test_geocode.py' adding 'geopandas/tests/test_geodataframe.py' adding 'geopandas/tests/test_geom_methods.py' adding 'geopandas/tests/test_geoseries.py' adding 'geopandas/tests/test_merge.py' adding 'geopandas/tests/test_op_output_types.py' adding 'geopandas/tests/test_overlay.py' adding 'geopandas/tests/test_pandas_methods.py' adding 'geopandas/tests/test_plotting.py' adding 'geopandas/tests/test_show_versions.py' adding 'geopandas/tests/test_sindex.py' adding 'geopandas/tests/test_testing.py' adding 'geopandas/tests/test_types.py' adding 'geopandas/tests/util.py' adding 'geopandas/tests/data/null_geom.geojson' adding 'geopandas/tools/__init__.py' adding 'geopandas/tools/_random.py' adding 'geopandas/tools/_show_versions.py' adding 'geopandas/tools/clip.py' adding 'geopandas/tools/geocoding.py' adding 'geopandas/tools/hilbert_curve.py' adding 'geopandas/tools/overlay.py' adding 'geopandas/tools/sjoin.py' adding 'geopandas/tools/util.py' adding 'geopandas/tools/tests/__init__.py' adding 'geopandas/tools/tests/test_clip.py' adding 'geopandas/tools/tests/test_hilbert_curve.py' adding 'geopandas/tools/tests/test_random.py' adding 'geopandas/tools/tests/test_sjoin.py' adding 'geopandas/tools/tests/test_tools.py' adding 'geopandas-1.0.1.dist-info/LICENSE.txt' adding 
'geopandas-1.0.1.dist-info/METADATA' adding 'geopandas-1.0.1.dist-info/WHEEL' adding 'geopandas-1.0.1.dist-info/top_level.txt' adding 'geopandas-1.0.1.dist-info/RECORD' removing build/bdist.linux-s390x/wheel Building wheel for geopandas (pyproject.toml): finished with status 'done' Created wheel for geopandas: filename=geopandas-1.0.1-py3-none-any.whl size=322800 sha256=60310433d7b2bedda680eceb2507ab4c92f105d612e286975903fc9725467a99 Stored in directory: /builddir/.cache/pip/wheels/76/3a/00/7ae3278c84826b499d3bd99e0c7a96f611a083949a026f74dc Successfully built geopandas + RPM_EC=0 ++ jobs -p + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.AB03Il + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + '[' /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT '!=' / ']' + rm -rf /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT ++ dirname /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT + mkdir -p /builddir/build/BUILD/python-geopandas-1.0.1-build + mkdir /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Clink-arg=-specs=/usr/lib/rpm/redhat/redhat-package-notes --cap-lints=warn' + export RUSTFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd geopandas-1.0.1 ++ ls /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject-wheeldir/geopandas-1.0.1-py3-none-any.whl ++ xargs basename --multiple ++ sed -E 's/([^-]+)-([^-]+)-.+\.whl/\1==\2/' + specifier=geopandas==1.0.1 + '[' -z geopandas==1.0.1 ']' + 
TMPDIR=/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir + /usr/bin/python3 -m pip install --root /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT --prefix /usr --no-deps --disable-pip-version-check --progress-bar off --verbose --ignore-installed --no-warn-script-location --no-index --no-cache-dir --find-links /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject-wheeldir geopandas==1.0.1 Using pip 24.3.1 from /usr/lib/python3.13/site-packages/pip (python 3.13) Looking in links: /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/pyproject-wheeldir Processing ./pyproject-wheeldir/geopandas-1.0.1-py3-none-any.whl Installing collected packages: geopandas Successfully installed geopandas-1.0.1 + '[' -d /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/bin ']' + rm -f /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-ghost-distinfo + site_dirs=() + '[' -d /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages ']' + site_dirs+=("/usr/lib/python3.13/site-packages") + '[' /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib64/python3.13/site-packages '!=' /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages ']' + '[' -d /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib64/python3.13/site-packages ']' + for site_dir in ${site_dirs[@]} + for distinfo in /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT$site_dir/*.dist-info + echo '%ghost /usr/lib/python3.13/site-packages/geopandas-1.0.1.dist-info' + sed -i s/pip/rpm/ /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas-1.0.1.dist-info/INSTALLER + PYTHONPATH=/usr/lib/rpm/redhat + /usr/bin/python3 -B /usr/lib/rpm/redhat/pyproject_preprocess_record.py --buildroot /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT --record /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas-1.0.1.dist-info/RECORD --output /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-record + rm -fv /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas-1.0.1.dist-info/RECORD removed '/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas-1.0.1.dist-info/RECORD' + rm -fv /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas-1.0.1.dist-info/REQUESTED removed '/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas-1.0.1.dist-info/REQUESTED' ++ wc -l /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-ghost-distinfo ++ cut -f1 '-d ' + lines=1 + '[' 1 -ne 1 ']' + RPM_FILES_ESCAPE=4.19 + /usr/bin/python3 /usr/lib/rpm/redhat/pyproject_save_files.py --output-files /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-files --output-modules /builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-modules --buildroot /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT --sitelib /usr/lib/python3.13/site-packages --sitearch /usr/lib64/python3.13/site-packages --python-version 3.13 --pyproject-record 
/builddir/build/BUILD/python-geopandas-1.0.1-build/python-geopandas-1.0.1-2.fc42.s390x-pyproject-record --prefix /usr geopandas + /usr/bin/find-debuginfo -j3 --strict-build-id -m -i --build-id-seed 1.0.1-2.fc42 --unique-debug-suffix -1.0.1-2.fc42.s390x --unique-debug-src-base python-geopandas-1.0.1-2.fc42.s390x --run-dwz --dwz-low-mem-die-limit 10000000 --dwz-max-die-limit 50000000 -S debugsourcefiles.list /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1 find-debuginfo: starting Extracting debug info from 0 files Creating .debug symlinks for symlinks to ELF files find: ‘debug’: No such file or directory find-debuginfo: done + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/redhat/brp-ldconfig + /usr/lib/rpm/brp-compress + /usr/lib/rpm/redhat/brp-strip-lto /usr/bin/strip + /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip + /usr/lib/rpm/check-rpaths + /usr/lib/rpm/redhat/brp-mangle-shebangs + /usr/lib/rpm/brp-remove-la-files + env /usr/lib/rpm/redhat/brp-python-bytecompile '' 1 0 -j3 Bytecompiling .py files below /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13 using python3.13 + /usr/lib/rpm/redhat/brp-python-hardlink + /usr/bin/add-determinism --brp -j3 /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/datasets/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/generate_legacy_storage_files.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_infer_schema.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_infer_schema.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_pickle.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_pickle.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_sql.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_arrow.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_sql.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_arrow.cpython-313.opt-1.pyc: replacing with normalized version 
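The brp-python-bytecompile step above compiles the installed sources with python3.13, and add-determinism then normalizes the resulting .pyc files so that repeated builds yield byte-identical payloads; the per-file normalization log continues below. The exact options used by the RPM helpers are not visible in the log, but a minimal sketch of producing comparable bytecode by hand, assuming a CPython 3.9+ interpreter and the site-packages tree from this buildroot, could look like:

    import compileall
    import py_compile

    # Hash-based invalidation (PEP 552) stores a source hash instead of an
    # mtime/size pair, removing one common source of non-reproducible output.
    # optimize=[0, 1] writes both the plain *.pyc and the *.opt-1.pyc variants
    # that appear throughout the listing.
    compileall.compile_dir(
        "BUILDROOT/usr/lib/python3.13/site-packages/geopandas",  # assumed path
        optimize=[0, 1],
        invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH,
        quiet=1,
    )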
/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_file_geom_types_drivers.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_file.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_geoarrow.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/_geoarrow.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_geoarrow.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/_pyarrow_hotfix.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/tests/__pycache__/test_file.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/_geoarrow.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/file.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/arrow.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/file.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/sql.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_api.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/io/__pycache__/util.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_api.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_config.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_config.cpython-313.opt-1.pyc: replacing with normalized version 
/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_crs.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_crs.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_datasets.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_datasets.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_decorator.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_decorator.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_compat.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_compat.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_array.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_dissolve.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_dissolve.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_array.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_explore.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_explore.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_op_output_types.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_op_output_types.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geodataframe.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_extension_array.cpython-313.pyc: replacing with normalized version 
/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geocode.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geocode.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_extension_array.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_show_versions.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_show_versions.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_overlay.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_overlay.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_pandas_methods.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_sindex.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_testing.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geodataframe.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_testing.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_types.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_types.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/util.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/util.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_sindex.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_pandas_methods.cpython-313.opt-1.pyc: replacing with normalized version 
/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_plotting.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geom_methods.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geom_methods.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geoseries.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_merge.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_merge.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_geoseries.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_clip.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_hilbert_curve.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_hilbert_curve.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_clip.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_random.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_random.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tests/__pycache__/test_plotting.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_tools.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_tools.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents 
/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/_random.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/_show_versions.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_sjoin.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/clip.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/tests/__pycache__/test_sjoin.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/geocoding.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/hilbert_curve.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/util.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/_version.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/_compat.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/overlay.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/tools/__pycache__/sjoin.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/_config.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/_decorator.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/explore.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/geodataframe.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/array.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/base.cpython-313.pyc: rewriting with normalized contents 
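Each "rewriting with normalized contents" or "replacing with normalized version" line records add-determinism touching one compiled file. What exactly gets rewritten is not shown here, but the PEP 552 header of a CPython .pyc is where the timestamp-versus-hash invalidation data lives, and it is easy to inspect. A small sketch, with the path picked only as an example from the listing:

    import struct

    def describe_pyc_header(path):
        # PEP 552 layout: 4-byte magic, 4-byte flags (little-endian),
        # then 8 bytes of either mtime+size or a source hash.
        with open(path, "rb") as f:
            magic = f.read(4)
            flags = struct.unpack("<I", f.read(4))[0]
            tail = f.read(8)
        kind = "hash-based" if flags & 0x01 else "timestamp-based"
        print(path, magic.hex(), kind, tail.hex())

    describe_pyc_header(
        "BUILDROOT/usr/lib/python3.13/site-packages/geopandas/"
        "__pycache__/_compat.cpython-313.pyc"  # example path from the listing
    )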
/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/geoseries.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/conftest.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/sindex.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/plotting.cpython-313.pyc: rewriting with normalized contents /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/testing.cpython-313.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/testing.cpython-313.opt-1.pyc: replacing with normalized version /builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages/geopandas/__pycache__/geoseries.cpython-313.opt-1.pyc: replacing with normalized version Scanned 21 directories and 219 files, processed 110 inodes, 110 modified (78 replaced + 32 rewritten), 0 unsupported format, 0 errors Reading /builddir/build/BUILD/python-geopandas-1.0.1-build/SPECPARTS/rpm-debuginfo.specpart Executing(%check): /bin/sh -e /var/tmp/rpm-tmp.LC8QRL + umask 022 + cd /builddir/build/BUILD/python-geopandas-1.0.1-build + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules ' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Clink-arg=-specs=/usr/lib/rpm/redhat/redhat-package-notes --cap-lints=warn' + export RUSTFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld 
-specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd geopandas-1.0.1 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=z13 -mtune=z14 -fasynchronous-unwind-tables -fstack-clash-protection ' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes ' + PATH=/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/bin:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/sbin + PYTHONPATH=/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib64/python3.13/site-packages:/builddir/build/BUILD/python-geopandas-1.0.1-build/BUILDROOT/usr/lib/python3.13/site-packages + PYTHONDONTWRITEBYTECODE=1 + PYTEST_ADDOPTS=' --ignore=/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/.pyproject-builddir' + PYTEST_XDIST_AUTO_NUM_WORKERS=3 + /usr/bin/pytest -ra geopandas -m 'not web' ============================= test session starts ============================== platform linux -- Python 3.13.0, pytest-8.3.3, pluggy-1.5.0 rootdir: /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1 configfile: pyproject.toml plugins: hypothesis-6.104.2, xdist-3.6.1 collected 2566 items / 8 deselected / 1 skipped / 2558 selected geopandas/io/tests/test_arrow.py .....................F.FF.F...F.F.FFFF. [ 1%] ..F............F..sF........Fs............FFFFFFFFFFFFFFFFFF.FFFF....... [ 4%] ................. [ 5%] geopandas/io/tests/test_file.py ........................................ [ 6%] ..ss....ssss....ss....ssss.............................................. [ 9%] .......................................................x...x..x...x..... [ 12%] ...................................................s.................... [ 15%] ..............s.........ss. [ 16%] geopandas/io/tests/test_file_geom_types_drivers.py ..................... [ 16%] ........................................................................ [ 19%] ................... [ 20%] geopandas/io/tests/test_geoarrow.py FFFFFFFFFFFF........................ [ 21%] .............ssssssssssss................sssssssssssss. [ 24%] geopandas/io/tests/test_infer_schema.py ....................... [ 24%] geopandas/io/tests/test_pickle.py sss. [ 25%] geopandas/io/tests/test_sql.py ssssssss..sssssssssssssssssssssssss [ 26%] geopandas/tests/test_api.py . [ 26%] geopandas/tests/test_array.py .......................................... [ 28%] ........................................................s.....ss..... [ 30%] geopandas/tests/test_compat.py ......... [ 31%] geopandas/tests/test_config.py ... [ 31%] geopandas/tests/test_crs.py ............................................ [ 32%] .............................. [ 34%] geopandas/tests/test_datasets.py .... [ 34%] geopandas/tests/test_decorator.py .... [ 34%] geopandas/tests/test_dissolve.py .....s............ [ 35%] geopandas/tests/test_extension_array.py ................................ [ 36%] ........................................................................ 
[ 39%] xx....................................................x................. [ 42%] .................................................................sssssss [ 44%] ssssssssssssss.......................................................... [ 47%] ..................ss............................................ss...... [ 50%] .....................................................ssssssssss......... [ 53%] .....................ss [ 54%] geopandas/tests/test_geocode.py ........ [ 54%] geopandas/tests/test_geodataframe.py ................................... [ 55%] ..................F..................................................... [ 58%] ............ [ 59%] geopandas/tests/test_geom_methods.py ................................... [ 60%] ........................................................................ [ 63%] ..............................sss.................. [ 65%] geopandas/tests/test_geoseries.py ...................................... [ 66%] ........s........................... [ 68%] geopandas/tests/test_merge.py ........ [ 68%] geopandas/tests/test_op_output_types.py .....x.......................... [ 69%] . [ 69%] geopandas/tests/test_overlay.py ........................s.............s. [ 71%] ........................................................................ [ 74%] ............... [ 74%] geopandas/tests/test_pandas_methods.py ..............................XX. [ 76%] ................. [ 76%] geopandas/tests/test_plotting.py ....................................... [ 78%] ................s.s...................................sssssssssss.s... [ 81%] geopandas/tests/test_show_versions.py .... [ 81%] geopandas/tests/test_sindex.py .....s...........s....................... [ 82%] .................................s.......x.............................. [ 85%] .........x........................................... [ 87%] geopandas/tests/test_testing.py ........ [ 87%] geopandas/tests/test_types.py ............... [ 88%] geopandas/tools/tests/test_clip.py ..................................... [ 90%] .................................................... [ 92%] geopandas/tools/tests/test_hilbert_curve.py ...... [ 92%] geopandas/tools/tests/test_random.py .......... [ 92%] geopandas/tools/tests/test_sjoin.py .................................... [ 94%] ........................................................................ [ 96%] ..............xs....................................................... [ 99%] geopandas/tools/tests/test_tools.py ........ [100%] =================================== FAILURES =================================== _____________ test_pandas_parquet_roundtrip2[naturalearth_lowres] ______________ test_dataset = 'naturalearth_lowres' tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_pandas_parquet_roundtrip20') request = > @pytest.mark.parametrize( "test_dataset", ["naturalearth_lowres", "naturalearth_cities", "nybb_filename"] ) def test_pandas_parquet_roundtrip2(test_dataset, tmpdir, request): path = request.getfixturevalue(test_dataset) df = DataFrame(read_file(path).drop(columns=["geometry"])) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename) pq_df = pd_read_parquet(filename) > assert_frame_equal(df, pq_df) geopandas/io/tests/test_arrow.py:387: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E AssertionError: DataFrame.iloc[:, 0] (column name="pop_est") are different E E DataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError ________________ test_pandas_parquet_roundtrip2[nybb_filename] _________________ test_dataset = 'nybb_filename' tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_pandas_parquet_roundtrip22') request = > @pytest.mark.parametrize( "test_dataset", ["naturalearth_lowres", "naturalearth_cities", "nybb_filename"] ) def test_pandas_parquet_roundtrip2(test_dataset, tmpdir, request): path = request.getfixturevalue(test_dataset) df = DataFrame(read_file(path).drop(columns=["geometry"])) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename) pq_df = pd_read_parquet(filename) > assert_frame_equal(df, pq_df) geopandas/io/tests/test_arrow.py:387: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: DataFrame.iloc[:, 2] (column name="Shape_Leng") are different E E DataFrame.iloc[:, 2] (column name="Shape_Leng") values are different (100.0 %) E [index]: [0, 1, 2, 3, 4] E [left]: [330470.010332, 896344.047763, 741080.523166, 359299.096471, 464392.991824] E [right]: [nan, nan, nan, nan, nan] E At positional index 0, first diff: 330470.010332 != nan testing.pyx:173: AssertionError _________________ test_roundtrip[parquet-naturalearth_lowres] __________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_roundtrip_parquet_natural0') file_format = (, ) test_dataset = 'naturalearth_lowres' request = > @pytest.mark.parametrize( "test_dataset", ["naturalearth_lowres", "naturalearth_cities", "nybb_filename"] ) def test_roundtrip(tmpdir, file_format, test_dataset, request): """Writing to parquet should not raise errors, and should not alter original GeoDataFrame """ path = request.getfixturevalue(test_dataset) reader, writer = file_format df = read_file(path) orig = df.copy() filename = os.path.join(str(tmpdir), "test.pq") writer(df, filename) assert os.path.exists(filename) # make sure that the original data frame is unaltered assert_geodataframe_equal(df, orig) # make sure that we can roundtrip the data frame pq_df = reader(filename) assert isinstance(pq_df, GeoDataFrame) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:416: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] 
E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError ____________________ test_roundtrip[parquet-nybb_filename] _____________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_roundtrip_parquet_nybb_fi0') file_format = (, ) test_dataset = 'nybb_filename' request = > @pytest.mark.parametrize( "test_dataset", ["naturalearth_lowres", "naturalearth_cities", "nybb_filename"] ) def test_roundtrip(tmpdir, file_format, test_dataset, request): """Writing to parquet should not raise errors, and should not alter original GeoDataFrame """ path = request.getfixturevalue(test_dataset) reader, writer = file_format df = read_file(path) orig = df.copy() filename = os.path.join(str(tmpdir), "test.pq") writer(df, filename) assert os.path.exists(filename) # make sure that the original data frame is unaltered assert_geodataframe_equal(df, orig) # make sure that we can roundtrip the data frame pq_df = reader(filename) assert isinstance(pq_df, GeoDataFrame) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:416: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 2] (column name="Shape_Leng") are different E E GeoDataFrame.iloc[:, 2] (column name="Shape_Leng") values are different (100.0 %) E [index]: [0, 1, 2, 3, 4] E [left]: [330470.010332, 896344.047763, 741080.523166, 359299.096471, 464392.991824] E [right]: [nan, nan, nan, nan, nan] E At positional index 0, first diff: 330470.010332 != nan testing.pyx:173: AssertionError _____________________________ test_index[parquet] ______________________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_index_parquet_0') file_format = (, ) naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' def test_index(tmpdir, file_format, naturalearth_lowres): """Setting index=`True` should preserve index in output, and setting index=`False` should drop index from output. """ reader, writer = file_format df = read_file(naturalearth_lowres).set_index("iso_a3") filename = os.path.join(str(tmpdir), "test_with_index.pq") writer(df, filename, index=True) pq_df = reader(filename) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:430: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [FJI, TZA, ESH, CAN, USA, KAZ, UZB, PNG, IDN, ARG, CHL, COD, SOM, KEN, SDN, TCD, HTI, DOM, RUS, BHS, FLK, NOR, GRL, ATF, TLS, ZAF, LSO, MEX, URY, BRA, BOL, PER, COL, PAN, CRI, NIC, HND, SLV, GTM, BLZ, VEN, GUY, SUR, FRA, ECU, PRI, JAM, CUB, ZWE, BWA, NAM, SEN, MLI, MRT, BEN, NER, NGA, CMR, TGO, GHA, CIV, GIN, GNB, LBR, SLE, BFA, CAF, COG, GAB, GNQ, ZMB, MWI, MOZ, SWZ, AGO, BDI, ISR, LBN, MDG, PSE, GMB, TUN, DZA, JOR, ARE, QAT, KWT, IRQ, OMN, VUT, KHM, THA, LAO, MMR, VNM, PRK, KOR, MNG, IND, BGD, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError __________________________ test_column_order[parquet] __________________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_column_order_parquet_0') file_format = (, ) naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' def test_column_order(tmpdir, file_format, naturalearth_lowres): """The order of columns should be preserved in the output.""" reader, writer = file_format df = read_file(naturalearth_lowres) df = df.set_index("iso_a3") df["geom2"] = df.geometry.representative_point() table = _geopandas_to_arrow(df) custom_column_order = [ "iso_a3", "geom2", "pop_est", "continent", "name", "geometry", "gdp_md_est", ] table = table.select(custom_column_order) if reader is read_parquet: filename = os.path.join(str(tmpdir), "test_column_order.pq") pq.write_table(table, filename) else: filename = os.path.join(str(tmpdir), "test_column_order.feather") feather.write_feather(table, filename) result = reader(filename) assert list(result.columns) == custom_column_order[1:] > assert_geodataframe_equal(result, df[custom_column_order[1:]]) geopandas/io/tests/test_arrow.py:466: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [FJI, TZA, ESH, CAN, USA, KAZ, UZB, PNG, IDN, ARG, CHL, COD, SOM, KEN, SDN, TCD, HTI, DOM, RUS, BHS, FLK, NOR, GRL, ATF, TLS, ZAF, LSO, MEX, URY, BRA, BOL, PER, COL, PAN, CRI, NIC, HND, SLV, GTM, BLZ, VEN, GUY, SUR, FRA, ECU, PRI, JAM, CUB, ZWE, BWA, NAM, SEN, MLI, MRT, BEN, NER, NGA, CMR, TGO, GHA, CIV, GIN, GNB, LBR, SLE, BFA, CAF, COG, GAB, GNQ, ZMB, MWI, MOZ, SWZ, AGO, BDI, ISR, LBN, MDG, PSE, GMB, TUN, DZA, JOR, ARE, QAT, KWT, IRQ, OMN, VUT, KHM, THA, LAO, MMR, VNM, PRK, KOR, MNG, IND, BGD, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E At positional index 128, first diff: nan != 619896.0 testing.pyx:173: AssertionError _______________________ test_parquet_compression[snappy] _______________________ compression = 'snappy' tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_parquet_compression_snapp0') naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' @pytest.mark.parametrize("compression", ["snappy", "gzip", "brotli", None]) def test_parquet_compression(compression, tmpdir, naturalearth_lowres): """Using compression options should not raise errors, and should return identical GeoDataFrame. """ df = read_file(naturalearth_lowres) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, compression=compression) pq_df = read_parquet(filename) assert isinstance(pq_df, GeoDataFrame) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:482: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] 
E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError ________________________ test_parquet_compression[gzip] ________________________ compression = 'gzip' tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_parquet_compression_gzip_0') naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' @pytest.mark.parametrize("compression", ["snappy", "gzip", "brotli", None]) def test_parquet_compression(compression, tmpdir, naturalearth_lowres): """Using compression options should not raise errors, and should return identical GeoDataFrame. """ df = read_file(naturalearth_lowres) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, compression=compression) pq_df = read_parquet(filename) assert isinstance(pq_df, GeoDataFrame) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:482: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError _______________________ test_parquet_compression[brotli] _______________________ compression = 'brotli' tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_parquet_compression_brotl0') naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' @pytest.mark.parametrize("compression", ["snappy", "gzip", "brotli", None]) def test_parquet_compression(compression, tmpdir, naturalearth_lowres): """Using compression options should not raise errors, and should return identical GeoDataFrame. """ df = read_file(naturalearth_lowres) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, compression=compression) pq_df = read_parquet(filename) assert isinstance(pq_df, GeoDataFrame) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:482: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError ________________________ test_parquet_compression[None] ________________________ compression = None tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_parquet_compression_None_0') naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' @pytest.mark.parametrize("compression", ["snappy", "gzip", "brotli", None]) def test_parquet_compression(compression, tmpdir, naturalearth_lowres): """Using compression options should not raise errors, and should return identical GeoDataFrame. """ df = read_file(naturalearth_lowres) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, compression=compression) pq_df = read_parquet(filename) assert isinstance(pq_df, GeoDataFrame) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:482: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] 
E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError ___________________ test_parquet_multiple_geom_cols[parquet] ___________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_parquet_multiple_geom_col0') file_format = (, ) naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' def test_parquet_multiple_geom_cols(tmpdir, file_format, naturalearth_lowres): """If multiple geometry columns are present when written to parquet, they should all be returned as such when read from parquet. """ reader, writer = file_format df = read_file(naturalearth_lowres) df["geom2"] = df.geometry.copy() filename = os.path.join(str(tmpdir), "test.pq") writer(df, filename) assert os.path.exists(filename) pq_df = reader(filename) assert isinstance(pq_df, GeoDataFrame) > assert_geodataframe_equal(df, pq_df) geopandas/io/tests/test_arrow.py:522: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError __________________________ test_missing_crs[parquet] ___________________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_missing_crs_parquet_0') file_format = (, ) naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' def test_missing_crs(tmpdir, file_format, naturalearth_lowres): """If CRS is `None`, it should be properly handled and remain `None` when read from parquet`. """ reader, writer = file_format df = read_file(naturalearth_lowres) df.geometry.array.crs = None filename = os.path.join(str(tmpdir), "test.pq") writer(df, filename) pq_df = reader(filename) assert pq_df.crs is None > assert_geodataframe_equal(df, pq_df, check_crs=True) geopandas/io/tests/test_arrow.py:691: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E At positional index 128, first diff: 619896.0 != nan testing.pyx:173: AssertionError _______________________________ test_fsspec_url ________________________________ naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' def test_fsspec_url(naturalearth_lowres): fsspec = pytest.importorskip("fsspec") import fsspec.implementations.memory class MyMemoryFileSystem(fsspec.implementations.memory.MemoryFileSystem): # Simple fsspec filesystem that adds a required keyword. # Attempting to use this filesystem without the keyword will raise an exception. def __init__(self, is_set, *args, **kwargs): self.is_set = is_set super().__init__(*args, **kwargs) fsspec.register_implementation("memory", MyMemoryFileSystem, clobber=True) memfs = MyMemoryFileSystem(is_set=True) df = read_file(naturalearth_lowres) with memfs.open("data.parquet", "wb") as f: df.to_parquet(f) result = read_parquet("memory://data.parquet", storage_options={"is_set": True}) > assert_geodataframe_equal(result, df) geopandas/io/tests/test_arrow.py:737: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (8.47458 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] 
E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E At positional index 128, first diff: nan != 619896.0 testing.pyx:173: AssertionError ______________________________ test_write_iso_wkb ______________________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_write_iso_wkb0') @pytest.mark.skipif(shapely.geos_version < (3, 10, 0), reason="requires GEOS>=3.10") def test_write_iso_wkb(tmpdir): gdf = geopandas.GeoDataFrame( geometry=geopandas.GeoSeries.from_wkt(["POINT Z (1 2 3)"]) ) gdf.to_parquet(tmpdir / "test.parquet") from pyarrow.parquet import read_table table = read_table(tmpdir / "test.parquet") wkb = table["geometry"][0].as_py().hex() # correct ISO flavor > assert wkb == "01e9030000000000000000f03f00000000000000400000000000000840" E AssertionError: assert '00000003e93f...8000000000000' == '01e903000000...0000000000840' E E - 01e9030000000000000000f03f00000000000000400000000000000840 E + 00000003e93ff000000000000040000000000000004008000000000000 geopandas/io/tests/test_arrow.py:833: AssertionError _______________________ test_read_versioned_file[0.1.0] ________________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_v0.1.0.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_v0.1.0.parquet': Parquet file size is 6951 bytes, smaller than the size reported by footer's (3826581504bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: version = '0.1.0' @pytest.mark.parametrize("version", ["0.1.0", "0.4.0", "1.0.0-beta.1"]) def test_read_versioned_file(version): """ Verify that files for different metadata spec versions can be read created for each supported version: # small dummy test dataset (not naturalearth_lowres, as this can change over time) from shapely.geometry import box, MultiPolygon df = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5,5)], crs="EPSG:4326", ) df.to_feather(DATA_PATH / 'arrow' / f'test_data_v{METADATA_VERSION}.feather') df.to_parquet(DATA_PATH / 'arrow' / f'test_data_v{METADATA_VERSION}.parquet') """ expected = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5, 5)], crs="EPSG:4326", ) df = geopandas.read_feather(DATA_PATH / "arrow" / f"test_data_v{version}.feather") assert_geodataframe_equal(df, expected, check_crs=True) > df = geopandas.read_parquet(DATA_PATH / "arrow" / f"test_data_v{version}.parquet") geopandas/io/tests/test_arrow.py:915: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 6951 bytes, smaller than the size reported by footer's (3826581504bytes) pyarrow/error.pxi:91: ArrowInvalid _______________________ test_read_versioned_file[0.4.0] ________________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_v0.4.0.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. 
TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_v0.4.0.parquet': Parquet file size is 6803 bytes, smaller than the size reported by footer's (1343553536bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: version = '0.4.0' @pytest.mark.parametrize("version", ["0.1.0", "0.4.0", "1.0.0-beta.1"]) def test_read_versioned_file(version): """ Verify that files for different metadata spec versions can be read created for each supported version: # small dummy test dataset (not naturalearth_lowres, as this can change over time) from shapely.geometry import box, MultiPolygon df = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5,5)], crs="EPSG:4326", ) df.to_feather(DATA_PATH / 'arrow' / f'test_data_v{METADATA_VERSION}.feather') df.to_parquet(DATA_PATH / 'arrow' / f'test_data_v{METADATA_VERSION}.parquet') """ expected = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5, 5)], crs="EPSG:4326", ) df = geopandas.read_feather(DATA_PATH / "arrow" / f"test_data_v{version}.feather") assert_geodataframe_equal(df, expected, check_crs=True) > df = geopandas.read_parquet(DATA_PATH / "arrow" / f"test_data_v{version}.parquet") geopandas/io/tests/test_arrow.py:915: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Parquet file size is 6803 bytes, smaller than the size reported by footer's (1343553536bytes) pyarrow/error.pxi:91: ArrowInvalid ____________________ test_read_versioned_file[1.0.0-beta.1] ____________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_v1.0.0-beta.1.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_v1.0.0-beta.1.parquet': Parquet file size is 7755 bytes, smaller than the size reported by footer's (135856128bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: version = '1.0.0-beta.1' @pytest.mark.parametrize("version", ["0.1.0", "0.4.0", "1.0.0-beta.1"]) def test_read_versioned_file(version): """ Verify that files for different metadata spec versions can be read created for each supported version: # small dummy test dataset (not naturalearth_lowres, as this can change over time) from shapely.geometry import box, MultiPolygon df = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5,5)], crs="EPSG:4326", ) df.to_feather(DATA_PATH / 'arrow' / f'test_data_v{METADATA_VERSION}.feather') df.to_parquet(DATA_PATH / 'arrow' / f'test_data_v{METADATA_VERSION}.parquet') """ expected = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5, 5)], crs="EPSG:4326", ) df = geopandas.read_feather(DATA_PATH / "arrow" / f"test_data_v{version}.feather") assert_geodataframe_equal(df, expected, check_crs=True) > df = geopandas.read_parquet(DATA_PATH / "arrow" / f"test_data_v{version}.parquet") geopandas/io/tests/test_arrow.py:915: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: 
in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 7755 bytes, smaller than the size reported by footer's (135856128bytes) pyarrow/error.pxi:91: ArrowInvalid _____________________________ test_read_gdal_files _____________________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_gdal350.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/test_data_gdal350.parquet': Parquet file size is 3584 bytes, smaller than the size reported by footer's (3171418112bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: def test_read_gdal_files(): """ Verify that files written by GDAL can be read by geopandas. 
Since it is currently not yet straightforward to install GDAL with Parquet/Arrow enabled in our conda setup, we are testing with some generated files included in the repo (using GDAL 3.5.0): # small dummy test dataset (not naturalearth_lowres, as this can change over time) from shapely.geometry import box, MultiPolygon df = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5,5)], crs="EPSG:4326", ) df.to_file("test_data.gpkg", GEOMETRY_NAME="geometry") and then the gpkg file is converted to Parquet/Arrow with: $ ogr2ogr -f Parquet -lco FID= test_data_gdal350.parquet test_data.gpkg $ ogr2ogr -f Arrow -lco FID= -lco GEOMETRY_ENCODING=WKB test_data_gdal350.arrow test_data.gpkg Repeated for GDAL 3.9 which adds a bbox covering column: $ ogr2ogr -f Parquet -lco FID= test_data_gdal390.parquet test_data.gpkg """ # noqa: E501 pytest.importorskip("pyproj") expected = geopandas.GeoDataFrame( {"col_str": ["a", "b"], "col_int": [1, 2], "col_float": [0.1, 0.2]}, geometry=[MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3)]), box(4, 4, 5, 5)], crs="EPSG:4326", ) > df = geopandas.read_parquet(DATA_PATH / "arrow" / "test_data_gdal350.parquet") geopandas/io/tests/test_arrow.py:948: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 3584 bytes, smaller than the size reported by footer's (3171418112bytes) pyarrow/error.pxi:91: ArrowInvalid ____________________ test_parquet_read_partitioned_dataset _____________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_parquet_read_partitioned_0') naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' def test_parquet_read_partitioned_dataset(tmpdir, naturalearth_lowres): # we don't yet explicitly support this (in writing), but for Parquet it # works for reading (by relying on pyarrow.read_table) df = read_file(naturalearth_lowres) # manually create partitioned dataset basedir = tmpdir / "partitioned_dataset" basedir.mkdir() df[:100].to_parquet(basedir / "data1.parquet") df[100:].to_parquet(basedir / "data2.parquet") result = read_parquet(basedir) > assert_geodataframe_equal(result, df) geopandas/io/tests/test_arrow.py:978: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/testing.py:351: in assert_geodataframe_equal assert_frame_equal( testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E AssertionError: GeoDataFrame.iloc[:, 0] (column name="pop_est") are different E E GeoDataFrame.iloc[:, 0] (column name="pop_est") values are different (23.16384 %) E [index]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ...] E [left]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, 4974986.0, 299882.0, 16486542.0, 69625582.0, nan, nan, nan, nan, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] E [right]: [889953.0, 58005463.0, 603253.0, 37589262.0, 328239523.0, 18513930.0, 33580650.0, 8776109.0, 270625568.0, 44938712.0, 18952038.0, 86790567.0, 10192317.3, 52573973.0, 42813238.0, 15946876.0, 11263077.0, 10738958.0, 144373535.0, 389482.0, 3398.0, 5347896.0, 56225.0, 140.0, 1293119.0, 58558270.0, 2125268.0, 127575529.0, 3461734.0, 211049527.0, 11513100.0, 32510453.0, 50339443.0, 4246439.0, 5047561.0, 6545502.0, 9746117.0, 6453553.0, 16604026.0, 390353.0, 28515829.0, 782766.0, 581363.0, 67059887.0, 17373662.0, 3193694.0, 2948279.0, 11333483.0, 14645468.0, 2303697.0, 2494530.0, 16296364.0, 19658031.0, 4525696.0, 11801151.0, 23310715.0, 200963599.0, 25876380.0, 8082366.0, 30417856.0, 25716544.0, 12771246.0, 1920922.0, 4937374.0, 7813215.0, 20321378.0, 4745185.0, 5380508.0, 2172579.0, 1355986.0, 17861030.0, 18628747.0, 30366036.0, 1148130.0, 31825295.0, 11530580.0, 9053300.0, 6855713.0, 26969307.0, 4685306.0, 2347706.0, 11694719.0, 43053054.0, 10101694.0, 9770529.0, 2832067.0, 4207083.0, 39309783.0, 4974986.0, 299882.0, 16486542.0, 69625582.0, 7169455.0, 54045420.0, 96462106.0, 25666161.0, 51709098.0, 3225167.0, 1366417754.0, 163046161.0, ...] 
E At positional index 64, first diff: nan != 7813215.0 testing.pyx:173: AssertionError _________________ test_parquet_read_partitioned_dataset_fsspec _________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_parquet_read_partitioned_1') naturalearth_lowres = '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/data/naturalearth_lowres/naturalearth_lowres.shp' def test_parquet_read_partitioned_dataset_fsspec(tmpdir, naturalearth_lowres): fsspec = pytest.importorskip("fsspec") df = read_file(naturalearth_lowres) # manually create partitioned dataset > memfs = fsspec.filesystem("memory") geopandas/io/tests/test_arrow.py:987: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib/python3.13/site-packages/fsspec/registry.py:303: in filesystem return cls(**storage_options) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ cls = <class 'geopandas.io.tests.test_arrow.test_fsspec_url.<locals>.MyMemoryFileSystem'> args = (), kwargs = {}, extra_tokens = () token = 'd141fd632814284963f634b9da42d306', skip = False def __call__(cls, *args, **kwargs): kwargs = apply_config(cls, kwargs) extra_tokens = tuple( getattr(cls, attr, None) for attr in cls._extra_tokenize_attributes ) token = tokenize( cls, cls._pid, threading.get_ident(), *args, *extra_tokens, **kwargs ) skip = kwargs.pop("skip_instance_cache", False) if os.getpid() != cls._pid: cls._cache.clear() cls._pid = os.getpid() if not skip and cls.cachable and token in cls._cache: cls._latest = token return cls._cache[token] else: > obj = super().__call__(*args, **kwargs) E TypeError: test_fsspec_url.<locals>.MyMemoryFileSystem.__init__() missing 1 required positional argument: 'is_set' /usr/lib/python3.13/site-packages/fsspec/spec.py:81: TypeError ______________________ test_read_parquet_geoarrow[point] _______________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-point-encoding_native.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ???
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-point-encoding_native.parquet': Parquet file size is 1835 bytes, smaller than the size reported by footer's (2869166080bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: geometry_type = 'point' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_read_parquet_geoarrow(geometry_type): > result = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_native.parquet" ) geopandas/io/tests/test_arrow.py:1003: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 1835 bytes, smaller than the size reported by footer's (2869166080bytes) pyarrow/error.pxi:91: ArrowInvalid ____________________ test_read_parquet_geoarrow[linestring] ____________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-linestring-encoding_native.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-linestring-encoding_native.parquet': Parquet file size is 2023 bytes, smaller than the size reported by footer's (1124401152bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: geometry_type = 'linestring' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_read_parquet_geoarrow(geometry_type): > result = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_native.parquet" ) geopandas/io/tests/test_arrow.py:1003: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 2023 bytes, smaller than the size reported by footer's (1124401152bytes) pyarrow/error.pxi:91: ArrowInvalid _____________________ test_read_parquet_geoarrow[polygon] ______________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-polygon-encoding_native.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-polygon-encoding_native.parquet': Parquet file size is 2197 bytes, smaller than the size reported by footer's (2718236672bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: geometry_type = 'polygon' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_read_parquet_geoarrow(geometry_type): > result = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_native.parquet" ) geopandas/io/tests/test_arrow.py:1003: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 2197 bytes, smaller than the size reported by footer's (2718236672bytes) pyarrow/error.pxi:91: ArrowInvalid ____________________ test_read_parquet_geoarrow[multipoint] ____________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipoint-encoding_native.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipoint-encoding_native.parquet': Parquet file size is 2027 bytes, smaller than the size reported by footer's (990183424bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: geometry_type = 'multipoint' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_read_parquet_geoarrow(geometry_type): > result = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_native.parquet" ) geopandas/io/tests/test_arrow.py:1003: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 2027 bytes, smaller than the size reported by footer's (990183424bytes) pyarrow/error.pxi:91: ArrowInvalid _________________ test_read_parquet_geoarrow[multilinestring] __________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multilinestring-encoding_native.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multilinestring-encoding_native.parquet': Parquet file size is 2218 bytes, smaller than the size reported by footer's (3523543040bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: geometry_type = 'multilinestring' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_read_parquet_geoarrow(geometry_type): > result = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_native.parquet" ) geopandas/io/tests/test_arrow.py:1003: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 2218 bytes, smaller than the size reported by footer's (3523543040bytes) pyarrow/error.pxi:91: ArrowInvalid ___________________ test_read_parquet_geoarrow[multipolygon] ___________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipolygon-encoding_native.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipolygon-encoding_native.parquet': Parquet file size is 2421 bytes, smaller than the size reported by footer's (956694528bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: geometry_type = 'multipolygon' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_read_parquet_geoarrow(geometry_type): > result = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_native.parquet" ) geopandas/io/tests/test_arrow.py:1003: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 2421 bytes, smaller than the size reported by footer's (956694528bytes) pyarrow/error.pxi:91: ArrowInvalid ________________________ test_geoarrow_roundtrip[point] ________________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-point-encoding_wkb.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
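The repeated ArrowInvalid failures above, and the identical ones in the test_geoarrow_roundtrip cases that follow, all report an implausibly large "size reported by footer" for test files of only one or two kilobytes. As a diagnostic aside (not part of the build or of the geopandas test suite), the sketch below shows how the Parquet footer length of one of these files could be checked by hand, assuming local access to the unpacked test data. A Parquet file ends with a 4-byte little-endian metadata length followed by the magic bytes PAR1; notably, every oversized value in this log, byte-swapped as a 32-bit integer, becomes a plausible footer length smaller than its file, which is consistent with an endianness mix-up somewhere in the stack on this big-endian s390x target rather than with truncated test files.

# Hypothetical diagnostic sketch: inspect the Parquet footer length by hand.
import struct
from pathlib import Path

# One of the files from the failures above (path assumes the unpacked source tree).
path = Path("geopandas/io/tests/data/arrow/geoparquet/data-point-encoding_native.parquet")
data = path.read_bytes()
assert data[-4:] == b"PAR1", "not a Parquet file"

(le_len,) = struct.unpack("<I", data[-8:-4])  # spec-correct: little-endian uint32
(be_len,) = struct.unpack(">I", data[-8:-4])  # same four bytes misread as big-endian
print(f"file size: {len(data)}, footer length (LE): {le_len}, misread (BE): {be_len}")
# Worked example of the arithmetic: if the stored length were 1195 (bytes AB 04 00 00),
# reading those bytes in big-endian order gives 2869166080 -- exactly the value
# reported for data-point-encoding_native.parquet in the first failure above.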
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-point-encoding_wkb.parquet': Parquet file size is 1398 bytes, smaller than the size reported by footer's (2818768896bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: tmp_path = PosixPath('/tmp/pytest-of-mockbuild/pytest-0/test_geoarrow_roundtrip_point_0') geometry_type = 'point' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_geoarrow_roundtrip(tmp_path, geometry_type): > df = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_wkb.parquet" ) geopandas/io/tests/test_arrow.py:1024: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 1398 bytes, smaller than the size reported by footer's (2818768896bytes) pyarrow/error.pxi:91: ArrowInvalid _____________________ test_geoarrow_roundtrip[linestring] ______________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-linestring-encoding_wkb.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-linestring-encoding_wkb.parquet': Parquet file size is 1474 bytes, smaller than the size reported by footer's (3322085376bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: tmp_path = PosixPath('/tmp/pytest-of-mockbuild/pytest-0/test_geoarrow_roundtrip_linest0') geometry_type = 'linestring' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_geoarrow_roundtrip(tmp_path, geometry_type): > df = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_wkb.parquet" ) geopandas/io/tests/test_arrow.py:1024: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 1474 bytes, smaller than the size reported by footer's (3322085376bytes) pyarrow/error.pxi:91: ArrowInvalid _______________________ test_geoarrow_roundtrip[polygon] _______________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-polygon-encoding_wkb.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-polygon-encoding_wkb.parquet': Parquet file size is 1861 bytes, smaller than the size reported by footer's (721682432bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: tmp_path = PosixPath('/tmp/pytest-of-mockbuild/pytest-0/test_geoarrow_roundtrip_polygo0') geometry_type = 'polygon' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_geoarrow_roundtrip(tmp_path, geometry_type): > df = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_wkb.parquet" ) geopandas/io/tests/test_arrow.py:1024: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 1861 bytes, smaller than the size reported by footer's (721682432bytes) pyarrow/error.pxi:91: ArrowInvalid _____________________ test_geoarrow_roundtrip[multipoint] ______________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipoint-encoding_wkb.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipoint-encoding_wkb.parquet': Parquet file size is 1622 bytes, smaller than the size reported by footer's (3926065152bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: tmp_path = PosixPath('/tmp/pytest-of-mockbuild/pytest-0/test_geoarrow_roundtrip_multip0') geometry_type = 'multipoint' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_geoarrow_roundtrip(tmp_path, geometry_type): > df = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_wkb.parquet" ) geopandas/io/tests/test_arrow.py:1024: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 1622 bytes, smaller than the size reported by footer's (3926065152bytes) pyarrow/error.pxi:91: ArrowInvalid ___________________ test_geoarrow_roundtrip[multilinestring] ___________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multilinestring-encoding_wkb.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multilinestring-encoding_wkb.parquet': Parquet file size is 1805 bytes, smaller than the size reported by footer's (704905216bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: tmp_path = PosixPath('/tmp/pytest-of-mockbuild/pytest-0/test_geoarrow_roundtrip_multil0') geometry_type = 'multilinestring' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_geoarrow_roundtrip(tmp_path, geometry_type): > df = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_wkb.parquet" ) geopandas/io/tests/test_arrow.py:1024: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 1805 bytes, smaller than the size reported by footer's (704905216bytes) pyarrow/error.pxi:91: ArrowInvalid ____________________ test_geoarrow_roundtrip[multipolygon] _____________________ path = PosixPath('/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipolygon-encoding_wkb.parquet') filesystem = None def _read_parquet_schema_and_metadata(path, filesystem): """ Opening the Parquet file/dataset a first time to get the schema and metadata. TODO: we should look into how we can reuse opened dataset for reading the actual data, to avoid discovering the dataset twice (problem right now is that the ParquetDataset interface doesn't allow passing the filters on read) """ import pyarrow from pyarrow import parquet kwargs = {} if Version(pyarrow.__version__) < Version("15.0.0"): kwargs = dict(use_legacy_dataset=False) try: > schema = parquet.ParquetDataset(path, filesystem=filesystem, **kwargs).schema geopandas/io/arrow.py:653: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:1329: in __init__ [fragment], schema=schema or fragment.physical_schema, pyarrow/_dataset.pyx:1431: in pyarrow._dataset.Fragment.physical_schema.__get__ ??? pyarrow/error.pxi:154: in pyarrow.lib.pyarrow_internal_check_status ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? 
E pyarrow.lib.ArrowInvalid: Could not open Parquet input source '/builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/data/arrow/geoparquet/data-multipolygon-encoding_wkb.parquet': Parquet file size is 2276 bytes, smaller than the size reported by footer's (2734948352bytes) pyarrow/error.pxi:91: ArrowInvalid During handling of the above exception, another exception occurred: tmp_path = PosixPath('/tmp/pytest-of-mockbuild/pytest-0/test_geoarrow_roundtrip_multip1') geometry_type = 'multipolygon' @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) def test_geoarrow_roundtrip(tmp_path, geometry_type): > df = geopandas.read_parquet( DATA_PATH / "arrow" / "geoparquet" / f"data-{geometry_type}-encoding_wkb.parquet" ) geopandas/io/tests/test_arrow.py:1024: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ geopandas/io/arrow.py:751: in _read_parquet schema, metadata = _read_parquet_schema_and_metadata(path, filesystem) geopandas/io/arrow.py:655: in _read_parquet_schema_and_metadata schema = parquet.read_schema(path, filesystem=filesystem) /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:2311: in read_schema file = ParquetFile( /usr/lib64/python3.13/site-packages/pyarrow/parquet/core.py:317: in __init__ self.reader.open( pyarrow/_parquet.pyx:1480: in pyarrow._parquet.ParquetReader.open ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E pyarrow.lib.ArrowInvalid: Parquet file size is 2276 bytes, smaller than the size reported by footer's (2734948352bytes) pyarrow/error.pxi:91: ArrowInvalid ______________________ test_to_parquet_bbox_values[Point] ______________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_to_parquet_bbox_values_Po0') geometry = expected_bbox = {'xmax': 1.0, 'xmin': 1.0, 'ymax': 3.0, 'ymin': 3.0} @pytest.mark.parametrize( "geometry, expected_bbox", [ (Point(1, 3), {"xmin": 1.0, "ymin": 3.0, "xmax": 1.0, "ymax": 3.0}), ( LineString([(1, 1), (3, 3)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( Polygon([(2, 1), (1, 2), (2, 3), (3, 2)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3), box(4, 4, 5, 5)]), {"xmin": 0.0, "ymin": 0.0, "xmax": 5.0, "ymax": 5.0}, ), ], ids=["Point", "LineString", "Polygon", "Multipolygon"], ) def test_to_parquet_bbox_values(tmpdir, geometry, expected_bbox): # check bbox bounds being written for different geometry types. import pyarrow.parquet as pq df = GeoDataFrame(data=[[1, 2]], columns=["a", "b"], geometry=[geometry]) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, write_covering_bbox=True) result = pq.read_table(filename).to_pandas() > assert result["bbox"][0] == expected_bbox E AssertionError: assert {'xmax': None... 
'ymin': None} == {'xmax': 1.0,..., 'ymin': 3.0} E E Differing items: E {'xmin': None} != {'xmin': 1.0} E {'xmax': None} != {'xmax': 1.0} E {'ymin': None} != {'ymin': 3.0} E {'ymax': None} != {'ymax': 3.0} E Use -v to get more diff geopandas/io/tests/test_arrow.py:1092: AssertionError ___________________ test_to_parquet_bbox_values[LineString] ____________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_to_parquet_bbox_values_Li0') geometry = expected_bbox = {'xmax': 3.0, 'xmin': 1.0, 'ymax': 3.0, 'ymin': 1.0} @pytest.mark.parametrize( "geometry, expected_bbox", [ (Point(1, 3), {"xmin": 1.0, "ymin": 3.0, "xmax": 1.0, "ymax": 3.0}), ( LineString([(1, 1), (3, 3)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( Polygon([(2, 1), (1, 2), (2, 3), (3, 2)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3), box(4, 4, 5, 5)]), {"xmin": 0.0, "ymin": 0.0, "xmax": 5.0, "ymax": 5.0}, ), ], ids=["Point", "LineString", "Polygon", "Multipolygon"], ) def test_to_parquet_bbox_values(tmpdir, geometry, expected_bbox): # check bbox bounds being written for different geometry types. import pyarrow.parquet as pq df = GeoDataFrame(data=[[1, 2]], columns=["a", "b"], geometry=[geometry]) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, write_covering_bbox=True) result = pq.read_table(filename).to_pandas() > assert result["bbox"][0] == expected_bbox E AssertionError: assert {'xmax': None... 'ymin': None} == {'xmax': 3.0,..., 'ymin': 1.0} E E Differing items: E {'xmin': None} != {'xmin': 1.0} E {'xmax': None} != {'xmax': 3.0} E {'ymin': None} != {'ymin': 1.0} E {'ymax': None} != {'ymax': 3.0} E Use -v to get more diff geopandas/io/tests/test_arrow.py:1092: AssertionError _____________________ test_to_parquet_bbox_values[Polygon] _____________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_to_parquet_bbox_values_Po1') geometry = expected_bbox = {'xmax': 3.0, 'xmin': 1.0, 'ymax': 3.0, 'ymin': 1.0} @pytest.mark.parametrize( "geometry, expected_bbox", [ (Point(1, 3), {"xmin": 1.0, "ymin": 3.0, "xmax": 1.0, "ymax": 3.0}), ( LineString([(1, 1), (3, 3)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( Polygon([(2, 1), (1, 2), (2, 3), (3, 2)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3), box(4, 4, 5, 5)]), {"xmin": 0.0, "ymin": 0.0, "xmax": 5.0, "ymax": 5.0}, ), ], ids=["Point", "LineString", "Polygon", "Multipolygon"], ) def test_to_parquet_bbox_values(tmpdir, geometry, expected_bbox): # check bbox bounds being written for different geometry types. import pyarrow.parquet as pq df = GeoDataFrame(data=[[1, 2]], columns=["a", "b"], geometry=[geometry]) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, write_covering_bbox=True) result = pq.read_table(filename).to_pandas() > assert result["bbox"][0] == expected_bbox E AssertionError: assert {'xmax': None... 
'ymin': None} == {'xmax': 3.0,..., 'ymin': 1.0} E E Differing items: E {'xmin': None} != {'xmin': 1.0} E {'xmax': None} != {'xmax': 3.0} E {'ymin': None} != {'ymin': 1.0} E {'ymax': None} != {'ymax': 3.0} E Use -v to get more diff geopandas/io/tests/test_arrow.py:1092: AssertionError __________________ test_to_parquet_bbox_values[Multipolygon] ___________________ tmpdir = local('/tmp/pytest-of-mockbuild/pytest-0/test_to_parquet_bbox_values_Mu0') geometry = expected_bbox = {'xmax': 5.0, 'xmin': 0.0, 'ymax': 5.0, 'ymin': 0.0} @pytest.mark.parametrize( "geometry, expected_bbox", [ (Point(1, 3), {"xmin": 1.0, "ymin": 3.0, "xmax": 1.0, "ymax": 3.0}), ( LineString([(1, 1), (3, 3)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( Polygon([(2, 1), (1, 2), (2, 3), (3, 2)]), {"xmin": 1.0, "ymin": 1.0, "xmax": 3.0, "ymax": 3.0}, ), ( MultiPolygon([box(0, 0, 1, 1), box(2, 2, 3, 3), box(4, 4, 5, 5)]), {"xmin": 0.0, "ymin": 0.0, "xmax": 5.0, "ymax": 5.0}, ), ], ids=["Point", "LineString", "Polygon", "Multipolygon"], ) def test_to_parquet_bbox_values(tmpdir, geometry, expected_bbox): # check bbox bounds being written for different geometry types. import pyarrow.parquet as pq df = GeoDataFrame(data=[[1, 2]], columns=["a", "b"], geometry=[geometry]) filename = os.path.join(str(tmpdir), "test.pq") df.to_parquet(filename, write_covering_bbox=True) result = pq.read_table(filename).to_pandas() > assert result["bbox"][0] == expected_bbox E AssertionError: assert {'xmax': None... 'ymin': None} == {'xmax': 5.0,..., 'ymin': 0.0} E E Differing items: E {'xmin': None} != {'xmin': 0.0} E {'xmax': None} != {'xmax': 5.0} E {'ymin': None} != {'ymin': 0.0} E {'ymax': None} != {'ymax': 5.0} E Use -v to get more diff geopandas/io/tests/test_arrow.py:1092: AssertionError ______________________ test_geoarrow_export[WKB-point-xy] ______________________ geometry_type = 'point', dim = 'xy', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and 
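In the test_to_parquet_bbox_values failures above, the covering "bbox" struct written by to_parquet(write_covering_bbox=True) is read back with every field as None instead of the geometry bounds. The short sketch below is not taken from the build; assuming a working geopandas/pyarrow stack, it only illustrates where the expected_bbox values in the parametrization come from (the per-geometry bounds), so the all-None result can be read as the bbox statistics being lost somewhere between write and read-back on this target.

# Hypothetical sketch: the values the "bbox" struct column is expected to carry.
import geopandas
from shapely.geometry import Point

df = geopandas.GeoDataFrame({"a": [1], "b": [2]}, geometry=[Point(1, 3)])
minx, miny, maxx, maxy = df.geometry.iloc[0].bounds  # shapely bounds tuple
expected_bbox = {"xmin": minx, "ymin": miny, "xmax": maxx, "ymax": maxy}
print(expected_bbox)  # {'xmin': 1.0, 'ymin': 3.0, 'xmax': 1.0, 'ymax': 3.0}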
geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[0000000001403E0000000000004024000000000000,00000000017FF80000000000007FF8000000000000,00000000017FF80000000000007FF8000000000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[01010000000000000000003E400000000000002440,0101000000000000000000F87F000000000000F87F,0101000000000000000000F87F000000000000F87F]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not 
equal: E E @@ -0, +0 @@ E -0000000001403E0000000000004024000000000000 E -00000000017FF80000000000007FF8000000000000 E -00000000017FF80000000000007FF8000000000000 E +01010000000000000000003E400000000000002440 E +0101000000000000000000F87F000000000000F87F E +0101000000000000000000F87F000000000000F87F geopandas/io/tests/test_geoarrow.py:115: AssertionError _____________________ test_geoarrow_export[WKB-point-xyz] ______________________ geometry_type = 'point', dim = 'xyz', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[00000003E9403E0000000000004024...00000003E97FF80000000000007FF80000000000007FF8000000000000,00000003E97FF80000000000007FF80000000000007FF8000000000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[01E90300000000000000003E400000...01E9030000000000000000F87F000000000000F87F000000000000F87F,01E9030000000000000000F87F000000000000F87F000000000000F87F]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = 
right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000003E9403E00000000000040240000000000004044000000000000 E -00000003E97FF80000000000007FF80000000000007FF8000000000000 E -00000003E97FF80000000000007FF80000000000007FF8000000000000 E +01E90300000000000000003E4000000000000024400000000000004440 E +01E9030000000000000000F87F000000000000F87F000000000000F87F E +01E9030000000000000000F87F000000000000F87F000000000000F87F geopandas/io/tests/test_geoarrow.py:115: AssertionError ___________________ test_geoarrow_export[WKB-linestring-xy] ____________________ geometry_type = 'linestring', dim = 'xy', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): 
base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[000000000200000003403E00000000...0240000000000004024000000000000403E00000000000040440000000000004044000000000000,000000000200000000,000000000200000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[010200000003000000000000000000...00000000000244000000000000024400000000000003E4000000000000044400000000000004440,010200000000000000,010200000000000000]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, 
right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -000000000200000003403E00000000000040240000000000004024000000000000403E00000000000040440000000000004044000000000000 E -000000000200000000 E -000000000200000000 E +0102000000030000000000000000003E40000000000000244000000000000024400000000000003E4000000000000044400000000000004440 E +010200000000000000 E +010200000000000000 geopandas/io/tests/test_geoarrow.py:115: AssertionError ___________________ test_geoarrow_export[WKB-linestring-xyz] ___________________ geometry_type = 'linestring', dim = 'xyz', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[00000003EA00000003403E00000000...03E0000000000004044000000000000404400000000000040440000000000004054000000000000,00000003EA00000000,00000003EA00000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[01EA03000003000000000000000000...000000000003E400000000000004440000000000000444000000000000044400000000000005440,01EA03000000000000,01EA03000000000000]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000003EA00000003403E000000000000402400000000000040440000000000004024000000000000403E0000000000004044000000000000404400000000000040440000000000004054000000000000 E -00000003EA00000000 E -00000003EA00000000 E +01EA030000030000000000000000003E400000000000002440000000000000444000000000000024400000000000003E400000000000004440000000000000444000000000000044400000000000005440 E 
+01EA03000000000000 E +01EA03000000000000 geopandas/io/tests/test_geoarrow.py:115: AssertionError _____________________ test_geoarrow_export[WKB-polygon-xy] _____________________ geometry_type = 'polygon', dim = 'xy', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4]] geometry: [[0000000003000000010000000540...041800000000000403E00000000000040340000000000004034000000000000403E000000000000,000000000300000000,000000000300000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4]] geometry: [[0103000000010000000500000000...0000000008041400000000000003E40000000000000344000000000000034400000000000003E40,010300000000000000,010300000000000000]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = 
pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000000030000000100000005403E0000000000004024000000000000404400000000000040440000000000004034000000000000404400000000000040240000000000004034000000000000403E0000000000004024000000000000 E -000000000300000002000000054041800000000000402400000000000040468000000000004046800000000000402E00000000000040440000000000004024000000000000403400000000000040418000000000004024000000000000000000044034000000000000403E00000000000040418000000000004041800000000000403E00000000000040340000000000004034000000000000403E000000000000 E -000000000300000000 E -000000000300000000 E +010300000001000000050000000000000000003E4000000000000024400000000000004440000000000000444000000000000034400000000000004440000000000000244000000000000034400000000000003E400000000000002440 E +0103000000020000000500000000000000008041400000000000002440000000000080464000000000008046400000000000002E40000000000000444000000000000024400000000000003440000000000080414000000000000024400400000000000000000034400000000000003E40000000000080414000000000008041400000000000003E40000000000000344000000000000034400000000000003E40 E +010300000000000000 E +010300000000000000 geopandas/io/tests/test_geoarrow.py:115: AssertionError ____________________ test_geoarrow_export[WKB-polygon-xyz] _____________________ geometry_type = 'polygon', dim = 'xyz', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", 
marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4]] geometry: [[00000003EB000000010000000540...03400000000000040490000000000004034000000000000403E0000000000004049000000000000,00000003EB00000000,00000003EB00000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4]] geometry: [[01EB030000010000000500000000...000000000003440000000000000494000000000000034400000000000003E400000000000004940,01EB03000000000000,01EB03000000000000]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = 
pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000003EB0000000100000005403E0000000000004024000000000000404400000000000040440000000000004044000000000000405400000000000040340000000000004044000000000000404E00000000000040240000000000004034000000000000403E000000000000403E00000000000040240000000000004044000000000000 E -00000003EB0000000200000005404180000000000040240000000000004046800000000000404680000000000040468000000000004056800000000000402E0000000000004044000000000000404B80000000000040240000000000004034000000000000403E000000000000404180000000000040240000000000004046800000000000000000044034000000000000403E0000000000004049000000000000404180000000000040418000000000004051800000000000403E000000000000403400000000000040490000000000004034000000000000403E0000000000004049000000000000 E -00000003EB00000000 E -00000003EB00000000 E +01EB03000001000000050000000000000000003E4000000000000024400000000000004440000000000000444000000000000044400000000000005440000000000000344000000000000044400000000000004E40000000000000244000000000000034400000000000003E400000000000003E4000000000000024400000000000004440 E +01EB03000002000000050000000000000000804140000000000000244000000000008046400000000000804640000000000080464000000000008056400000000000002E4000000000000044400000000000804B40000000000000244000000000000034400000000000003E400000000000804140000000000000244000000000008046400400000000000000000034400000000000003E4000000000000049400000000000804140000000000080414000000000008051400000000000003E400000000000003440000000000000494000000000000034400000000000003E400000000000004940 E +01EB03000000000000 E +01EB03000000000000 geopandas/io/tests/test_geoarrow.py:115: AssertionError ___________________ test_geoarrow_export[WKB-multipoint-xy] ____________________ geometry_type = 'multipoint', dim = 'xy', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 
3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4,5]] geometry: [[00000000040000000100000000...00001403400000000000040340000000000000000000001403E0000000000004024000000000000,000000000400000000,000000000400000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4,5]] geometry: [[01040000000100000001010000...000000000000000003440000000000000344001010000000000000000003E400000000000002440,010400000000000000,010400000000000000]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), 
pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -0000000004000000010000000001403E0000000000004024000000000000 E -00000000040000000400000000014024000000000000404400000000000000000000014044000000000000403E0000000000000000000001403400000000000040340000000000000000000001403E0000000000004024000000000000 E -00000000040000000400000000014024000000000000404400000000000000000000014044000000000000403E0000000000000000000001403400000000000040340000000000000000000001403E0000000000004024000000000000 E -000000000400000000 E -000000000400000000 E +01040000000100000001010000000000000000003E400000000000002440 E +010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003E4001010000000000000000003440000000000000344001010000000000000000003E400000000000002440 E +010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003E4001010000000000000000003440000000000000344001010000000000000000003E400000000000002440 E +010400000000000000 E +010400000000000000 geopandas/io/tests/test_geoarrow.py:115: AssertionError ___________________ test_geoarrow_export[WKB-multipoint-xyz] ___________________ geometry_type = 'multipoint', dim = 'xyz', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = 
GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[00000003EC0000000100000003E940...000000003E940340000000000004034000000000000404400000000000000000003E9403E00000000000040240000000000004044000000000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[01EC0300000100000001E903000000...001E903000000000000000034400000000000003440000000000000444001E90300000000000000003E4000000000000024400000000000004440]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( 
left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000003EC0000000100000003E9403E00000000000040240000000000004044000000000000 E -00000003EC0000000400000003E940240000000000004044000000000000404900000000000000000003E94044000000000000403E000000000000405180000000000000000003E940340000000000004034000000000000404400000000000000000003E9403E00000000000040240000000000004044000000000000 E -00000003EC0000000400000003E940240000000000004044000000000000404900000000000000000003E94044000000000000403E000000000000405180000000000000000003E940340000000000004034000000000000404400000000000000000003E9403E00000000000040240000000000004044000000000000 E +01EC0300000100000001E90300000000000000003E4000000000000024400000000000004440 E +01EC0300000400000001E903000000000000000024400000000000004440000000000000494001E903000000000000000044400000000000003E40000000000080514001E903000000000000000034400000000000003440000000000000444001E90300000000000000003E4000000000000024400000000000004440 E +01EC0300000400000001E903000000000000000024400000000000004440000000000000494001E903000000000000000044400000000000003E40000000000080514001E903000000000000000034400000000000003440000000000000444001E90300000000000000003E4000000000000024400000000000004440 geopandas/io/tests/test_geoarrow.py:115: AssertionError _________________ test_geoarrow_export[WKB-multilinestring-xy] _________________ geometry_type = 'multilinestring', dim = 'xy', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, 
interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4]] geometry: [[0000000005000000010000000002...03E00000000000040440000000000004034000000000000403E0000000000004024000000000000,000000000500000000,000000000500000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4]] geometry: [[0105000000010000000102000000...000000000003E40000000000000444000000000000034400000000000003E400000000000002440,010500000000000000,010500000000000000]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = 
pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -000000000500000001000000000200000003403E00000000000040240000000000004024000000000000403E00000000000040440000000000004044000000000000 E -00000000050000000200000000020000000340240000000000004024000000000000403400000000000040340000000000004024000000000000404400000000000000000000020000000440440000000000004044000000000000403E000000000000403E00000000000040440000000000004034000000000000403E0000000000004024000000000000 E -000000000500000000 E -000000000500000000 E +0105000000010000000102000000030000000000000000003E40000000000000244000000000000024400000000000003E4000000000000044400000000000004440 E +010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003E400000000000003E40000000000000444000000000000034400000000000003E400000000000002440 E +010500000000000000 E +010500000000000000 geopandas/io/tests/test_geoarrow.py:115: AssertionError ________________ test_geoarrow_export[WKB-multilinestring-xyz] _________________ geometry_type = 'multilinestring', dim = 'xyz', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: 
int32 geometry: binary ---- row_number: [[1,2]] geometry: [[00000003ED0000000100000003EA0000...00000404E00000000000040440000000000004034000000000000404E000000000000403E00000000000040240000000000004044000000000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2]] geometry: [[01ED0300000100000001EA0300000300...03E400000000000004E40000000000000444000000000000034400000000000004E400000000000003E4000000000000024400000000000004440]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000003ED0000000100000003EA00000003403E000000000000402400000000000040440000000000004024000000000000403E0000000000004044000000000000404400000000000040440000000000004054000000000000 E 
-00000003ED0000000200000003EA0000000340240000000000004024000000000000403400000000000040340000000000004034000000000000404400000000000040240000000000004044000000000000404900000000000000000003EA00000004404400000000000040440000000000004054000000000000403E000000000000403E000000000000404E00000000000040440000000000004034000000000000404E000000000000403E00000000000040240000000000004044000000000000 E +01ED0300000100000001EA030000030000000000000000003E400000000000002440000000000000444000000000000024400000000000003E400000000000004440000000000000444000000000000044400000000000005440 E +01ED0300000200000001EA0300000300000000000000000024400000000000002440000000000000344000000000000034400000000000003440000000000000444000000000000024400000000000004440000000000000494001EA030000040000000000000000004440000000000000444000000000000054400000000000003E400000000000003E400000000000004E40000000000000444000000000000034400000000000004E400000000000003E4000000000000024400000000000004440 geopandas/io/tests/test_geoarrow.py:115: AssertionError __________________ test_geoarrow_export[WKB-multipolygon-xy] ___________________ geometry_type = 'multipolygon', dim = 'xy', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3,4,5]] geometry: [[00000000060000000100000000...02E00000000000040340000000000004039000000000000403E0000000000004034000000000000,000000000600000000,000000000600000000]] right = pyarrow.Table 
row_number: int32 geometry: binary ---- row_number: [[1,2,3,4,5]] geometry: [[01060000000100000001030000...000000000002E40000000000000344000000000000039400000000000003E400000000000003440,010600000000000000,010600000000000000]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000000060000000100000000030000000100000005403E0000000000004024000000000000404400000000000040440000000000004034000000000000404400000000000040240000000000004034000000000000403E0000000000004024000000000000 E -00000000060000000200000000030000000100000004403E00000000000040340000000000004046800000000000404400000000000040240000000000004044000000000000403E000000000000403400000000000000000000030000000100000005402E0000000000004014000000000000404400000000000040240000000000004024000000000000403400000000000040140000000000004024000000000000402E0000000000004014000000000000 E 
-0000000006000000020000000003000000010000000440440000000000004044000000000000403400000000000040468000000000004046800000000000403E0000000000004044000000000000404400000000000000000000030000000200000006403400000000000040418000000000004024000000000000403E00000000000040240000000000004024000000000000403E0000000000004014000000000000404680000000000040340000000000004034000000000000404180000000000000000004403E00000000000040340000000000004034000000000000402E00000000000040340000000000004039000000000000403E0000000000004034000000000000 E -000000000600000000 E -000000000600000000 E +010600000001000000010300000001000000050000000000000000003E4000000000000024400000000000004440000000000000444000000000000034400000000000004440000000000000244000000000000034400000000000003E400000000000002440 E +010600000002000000010300000001000000040000000000000000003E40000000000000344000000000008046400000000000004440000000000000244000000000000044400000000000003E400000000000003440010300000001000000050000000000000000002E4000000000000014400000000000004440000000000000244000000000000024400000000000003440000000000000144000000000000024400000000000002E400000000000001440 E +01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003E4000000000000044400000000000004440010300000002000000060000000000000000003440000000000080414000000000000024400000000000003E40000000000000244000000000000024400000000000003E4000000000000014400000000000804640000000000000344000000000000034400000000000804140040000000000000000003E40000000000000344000000000000034400000000000002E40000000000000344000000000000039400000000000003E400000000000003440 E +010600000000000000 E +010600000000000000 geopandas/io/tests/test_geoarrow.py:115: AssertionError __________________ test_geoarrow_export[WKB-multipolygon-xyz] __________________ geometry_type = 'multipolygon', dim = 'xyz', geometry_encoding = 'WKB' interleaved = None @pytest.mark.skipif( shapely.geos_version < (3, 9, 0), reason="Checking for empty is buggy with GEOS<3.9", ) # an old GEOS is installed in the CI builds with the defaults channel @pytest.mark.parametrize( "dim", [ "xy", pytest.param( "xyz", marks=pytest.mark.skipif( shapely.geos_version < (3, 10, 0), reason="Cannot write 3D geometries with GEOS<3.10", ), ), ], ) @pytest.mark.parametrize( "geometry_type", ["point", "linestring", "polygon", "multipoint", "multilinestring", "multipolygon"], ) @pytest.mark.parametrize( "geometry_encoding, interleaved", [("WKB", None), ("geoarrow", True), ("geoarrow", False)], ids=["WKB", "geoarrow-interleaved", "geoarrow-separated"], ) def test_geoarrow_export(geometry_type, dim, geometry_encoding, interleaved): base_path = DATA_PATH / "geoarrow" suffix = geometry_type + ("_z" if dim == "xyz" else "") # Read the example data df = feather.read_feather(base_path / f"example-{suffix}-wkb.arrow") df["geometry"] = GeoSeries.from_wkb(df["geometry"]) df["row_number"] = df["row_number"].astype("int32") df = GeoDataFrame(df) df.geometry.array.crs = None # Read the expected data if geometry_encoding == "WKB": filename = f"example-{suffix}-wkb.arrow" else: filename = f"example-{suffix}{'-interleaved' if interleaved else ''}.arrow" expected = feather.read_table(base_path / filename) # GeoDataFrame -> Arrow Table result = pa_table( df.to_arrow(geometry_encoding=geometry_encoding, interleaved=interleaved) ) # remove the "pandas" metadata result = result.replace_schema_metadata(None) mask_nonempty = None if ( geometry_encoding == "WKB" and dim == "xyz" and 
geometry_type.startswith("multi") ): # for collections with z dimension, drop the empties because those don't # roundtrip correctly to WKB # (https://github.com/libgeos/geos/issues/888) mask_nonempty = pa.array(np.asarray(~df.geometry.is_empty)) result = result.filter(mask_nonempty) expected = expected.filter(mask_nonempty) > assert_table_equal(result, expected) geopandas/io/tests/test_geoarrow.py:186: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ left = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[00000003EE0000000100000003EB00...000004041800000000000403400000000000040390000000000004046800000000000403E00000000000040340000000000004049000000000000]] right = pyarrow.Table row_number: int32 geometry: binary ---- row_number: [[1,2,3]] geometry: [[01EE0300000100000001EB03000001...02E4000000000008041400000000000003440000000000000394000000000008046400000000000003E4000000000000034400000000000004940]] check_metadata = True def assert_table_equal(left, right, check_metadata=True): geom_type = left["geometry"].type # in case of Points (directly the inner fixed_size_list or struct type) # -> there are NaNs for empties -> we need to compare them separately # and then fill, because pyarrow.Table.equals considers NaNs as not equal if pa.types.is_fixed_size_list(geom_type): left_values = left["geometry"].chunk(0).values right_values = right["geometry"].chunk(0).values assert pc.is_nan(left_values).equals(pc.is_nan(right_values)) left_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(left_values, pc.is_nan(left_values), 0.0), type=left["geometry"].type, ) right_geoms = pa.FixedSizeListArray.from_arrays( pc.replace_with_mask(right_values, pc.is_nan(right_values), 0.0), type=right["geometry"].type, ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) elif pa.types.is_struct(geom_type): left_arr = left["geometry"].chunk(0) right_arr = right["geometry"].chunk(0) for i in range(left_arr.type.num_fields): assert pc.is_nan(left_arr.field(i)).equals(pc.is_nan(right_arr.field(i))) left_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( left_arr.field(i), pc.is_nan(left_arr.field(i)), 0.0 ) for i in range(left_arr.type.num_fields) ], fields=list(left["geometry"].type), ) right_geoms = pa.StructArray.from_arrays( [ pc.replace_with_mask( right_arr.field(i), pc.is_nan(right_arr.field(i)), 0.0 ) for i in range(right_arr.type.num_fields) ], fields=list(right["geometry"].type), ) left = left.set_column(1, left.schema.field("geometry"), left_geoms) right = right.set_column(1, right.schema.field("geometry"), right_geoms) if left.equals(right, check_metadata=check_metadata): return if not left.schema.equals(right.schema): raise AssertionError( "Schema not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema, right.schema ) ) if check_metadata: if not left.schema.equals(right.schema, check_metadata=True): if not left.schema.metadata == right.schema.metadata: raise AssertionError( "Metadata not equal\nLeft:\n{0}\nRight:\n{1}".format( left.schema.metadata, right.schema.metadata ) ) for col in left.schema.names: assert left.schema.field(col).equals( right.schema.field(col), check_metadata=True ) for col in left.column_names: a_left = pa.concat_arrays(left.column(col).chunks) a_right = pa.concat_arrays(right.column(col).chunks) if not a_left.equals(a_right): > raise AssertionError( "Column '{0}' not equal:\n{1}".format(col, 
a_left.diff(a_right)) ) E AssertionError: Column 'geometry' not equal: E E @@ -0, +0 @@ E -00000003EE0000000100000003EB0000000100000005403E0000000000004024000000000000404400000000000040440000000000004044000000000000405400000000000040340000000000004044000000000000404E00000000000040240000000000004034000000000000403E000000000000403E00000000000040240000000000004044000000000000 E -00000003EE0000000200000003EB0000000100000004403E00000000000040340000000000004049000000000000404680000000000040440000000000004055400000000000402400000000000040440000000000004049000000000000403E0000000000004034000000000000404900000000000000000003EB0000000100000005402E0000000000004014000000000000403400000000000040440000000000004024000000000000404900000000000040240000000000004034000000000000403E00000000000040140000000000004024000000000000402E000000000000402E00000000000040140000000000004034000000000000 E -00000003EE0000000200000003EB00000001000000044044000000000000404400000000000040540000000000004034000000000000404680000000000040504000000000004046800000000000403E0000000000004052C0000000000040440000000000004044000000000000405400000000000000000003EB000000020000000640340000000000004041800000000000404B8000000000004024000000000000403E0000000000004044000000000000402400000000000040240000000000004034000000000000403E0000000000004014000000000000404180000000000040468000000000004034000000000000405040000000000040340000000000004041800000000000404B80000000000000000004403E000000000000403400000000000040490000000000004034000000000000402E0000000000004041800000000000403400000000000040390000000000004046800000000000403E00000000000040340000000000004049000000000000 E +01EE0300000100000001EB03000001000000050000000000000000003E4000000000000024400000000000004440000000000000444000000000000044400000000000005440000000000000344000000000000044400000000000004E40000000000000244000000000000034400000000000003E400000000000003E4000000000000024400000000000004440 E +01EE0300000200000001EB03000001000000040000000000000000003E40000000000000344000000000000049400000000000804640000000000000444000000000004055400000000000002440000000000000444000000000000049400000000000003E400000000000003440000000000000494001EB03000001000000050000000000000000002E4000000000000014400000000000003440000000000000444000000000000024400000000000004940000000000000244000000000000034400000000000003E40000000000000144000000000000024400000000000002E400000000000002E4000000000000014400000000000003440 E +01EE0300000200000001EB030000010000000400000000000000000044400000000000004440000000000000544000000000000034400000000000804640000000000040504000000000008046400000000000003E400000000000C0524000000000000044400000000000004440000000000000544001EB0300000200000006000000000000000000344000000000008041400000000000804B4000000000000024400000000000003E4000000000000044400000000000002440000000000000244000000000000034400000000000003E4000000000000014400000000000804140000000000080464000000000000034400000000000405040000000000000344000000000008041400000000000804B40040000000000000000003E400000000000003440000000000000494000000000000034400000000000002E4000000000008041400000000000003440000000000000394000000000008046400000000000003E4000000000000034400000000000004940 geopandas/io/tests/test_geoarrow.py:115: AssertionError __________________________ TestDataFrame.test_to_wkb ___________________________ self = def test_to_wkb(self): wkbs0 = [ ( # POINT (0 0) b"\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" ), ( # POINT (1 1) b"\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00" 
b"\x00\xf0?\x00\x00\x00\x00\x00\x00\xf0?" ), ] wkbs1 = [ ( # POINT (2 2) b"\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00@\x00\x00\x00\x00\x00\x00\x00@" ), ( # POINT (3 3) b"\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x08@\x00\x00\x00\x00\x00\x00\x08@" ), ] gs0 = GeoSeries.from_wkb(wkbs0) gs1 = GeoSeries.from_wkb(wkbs1) gdf = GeoDataFrame({"geom_col0": gs0, "geom_col1": gs1}) expected_df = pd.DataFrame({"geom_col0": wkbs0, "geom_col1": wkbs1}) > assert_frame_equal(expected_df, gdf.to_wkb()) geopandas/tests/test_geodataframe.py:1001: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ testing.pyx:55: in pandas._libs.testing.assert_almost_equal ??? _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > ??? E AssertionError: DataFrame.iloc[:, 0] (column name="geom_col0") are different E E DataFrame.iloc[:, 0] (column name="geom_col0") values are different (100.0 %) E [index]: [0, 1] E [left]: [b'\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', b'\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0?\x00\x00\x00\x00\x00\x00\xf0?'] E [right]: [b'\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', b'\x00\x00\x00\x00\x01?\xf0\x00\x00\x00\x00\x00\x00?\xf0\x00\x00\x00\x00\x00\x00'] E At positional index 0, first diff: b'\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' != b'\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' testing.pyx:173: AssertionError =============================== warnings summary =============================== geopandas/io/tests/test_file.py::test_read_file_datetime_invalid[pyogrio-gpkg] /usr/lib64/python3.13/site-packages/pyogrio/raw.py:198: RuntimeWarning: Invalid content for record 3 in column date: 9999-99-99T00:00:00.000 return ogr_read( geopandas/io/tests/test_file.py::test_read_file_datetime_invalid[pyogrio-geojson] geopandas/io/tests/test_file.py::test_read_file_datetime_out_of_bounds_ns[pyogrio-geojson] geopandas/io/tests/test_file.py::test_read_file_datetime_mixed_offsets /usr/lib64/python3.13/site-packages/pyogrio/geopandas.py:662: UserWarning: 'crs' was not provided. The output dataset will not have projection information defined and may not be usable in other systems. write( geopandas/io/tests/test_file.py::test_to_file_column_len[pyogrio] /usr/lib64/python3.13/site-packages/pyogrio/raw.py:723: RuntimeWarning: Normalized/laundered field name: '0123456789A' to '0123456789' ogr_write( geopandas/io/tests/test_file.py::test_list_layers /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/test_file.py:1424: UserWarning: Geometry is in a geographic CRS. Results from 'buffer' are likely incorrect. Use 'GeoSeries.to_crs()' to re-project geometries to a projected CRS before this operation. df_points.set_geometry(df_points.buffer(1)).to_file(tempfilename, layer="buffered") geopandas/io/tests/test_file.py::test_list_layers /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/test_file.py:1425: UserWarning: Geometry is in a geographic CRS. Results from 'buffer' are likely incorrect. Use 'GeoSeries.to_crs()' to re-project geometries to a projected CRS before this operation. 
=============================== warnings summary ===============================
geopandas/io/tests/test_file.py::test_read_file_datetime_invalid[pyogrio-gpkg]
  /usr/lib64/python3.13/site-packages/pyogrio/raw.py:198: RuntimeWarning: Invalid content for record 3 in column date: 9999-99-99T00:00:00.000
    return ogr_read(

geopandas/io/tests/test_file.py::test_read_file_datetime_invalid[pyogrio-geojson]
geopandas/io/tests/test_file.py::test_read_file_datetime_out_of_bounds_ns[pyogrio-geojson]
geopandas/io/tests/test_file.py::test_read_file_datetime_mixed_offsets
  /usr/lib64/python3.13/site-packages/pyogrio/geopandas.py:662: UserWarning: 'crs' was not provided. The output dataset will not have projection information defined and may not be usable in other systems.
    write(

geopandas/io/tests/test_file.py::test_to_file_column_len[pyogrio]
  /usr/lib64/python3.13/site-packages/pyogrio/raw.py:723: RuntimeWarning: Normalized/laundered field name: '0123456789A' to '0123456789'
    ogr_write(

geopandas/io/tests/test_file.py::test_list_layers
  /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/test_file.py:1424: UserWarning: Geometry is in a geographic CRS. Results from 'buffer' are likely incorrect. Use 'GeoSeries.to_crs()' to re-project geometries to a projected CRS before this operation.
    df_points.set_geometry(df_points.buffer(1)).to_file(tempfilename, layer="buffered")

geopandas/io/tests/test_file.py::test_list_layers
  /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/io/tests/test_file.py:1425: UserWarning: Geometry is in a geographic CRS. Results from 'buffer' are likely incorrect. Use 'GeoSeries.to_crs()' to re-project geometries to a projected CRS before this operation.
    df_points.set_geometry(df_points.buffer(2).boundary).to_file(

geopandas/tests/test_extension_array.py::TestGetitem::test_getitem_series_integer_with_missing_raises[integer-array]
  /usr/lib64/python3.13/site-packages/pandas/tests/extension/base/getitem.py:276: FutureWarning: Series.__getitem__ treating keys as positions is deprecated. In a future version, integer keys will always be treated as labels (consistent with DataFrame behavior). To access a value by position, use `ser.iloc[pos]`
    ser[idx]

geopandas/tests/test_extension_array.py::TestSetitem::test_setitem_integer_with_missing_raises[list-True]
  /usr/lib64/python3.13/site-packages/pandas/tests/extension/base/setitem.py:227: FutureWarning: Series.__getitem__ treating keys as positions is deprecated. In a future version, integer keys will always be treated as labels (consistent with DataFrame behavior). To access a value by position, use `ser.iloc[pos]`
    arr[idx] = arr[0]

geopandas/tests/test_extension_array.py::TestMissing::test_fillna_limit_backfill
  /usr/lib64/python3.13/site-packages/pandas/tests/extension/base/missing.py:107: FutureWarning: Series.fillna with 'method' is deprecated and will raise in a future version. Use obj.ffill() or obj.bfill() instead.
    result = pd.Series(arr).fillna(method="backfill", limit=2)

geopandas/tests/test_merge.py::TestMerging::test_concat_axis0_unaligned_cols
geopandas/tests/test_merge.py::TestMerging::test_concat_axis0_unaligned_cols
  /builddir/build/BUILD/python-geopandas-1.0.1-build/geopandas-1.0.1/geopandas/tests/test_merge.py:137: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame
  See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
    partial_none_case.iloc[0] = None

-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
=================================== XPASSES ====================================
=========================== short test summary info ============================
SKIPPED [1] geopandas/tests/test_explore.py:14: could not import 'folium': No module named 'folium'
SKIPPED [1] geopandas/io/tests/test_arrow.py:703: Feather only supported for pyarrow >= 0.17
SKIPPED [1] geopandas/io/tests/test_arrow.py:836: tests GEOS<3.10
SKIPPED [8] geopandas/io/tests/test_file.py:213: Driver corresponding to ext .shp doesn't support dt fields
SKIPPED [4] geopandas/io/tests/test_file.py:213: Driver corresponding to ext doesn't support dt fields
SKIPPED [1] geopandas/io/tests/test_file.py:920: test for fiona < 1.9
SKIPPED [1] geopandas/io/tests/test_file.py:1327: Fiona >= 1.9 supports metadata
SKIPPED [1] geopandas/io/tests/test_file.py:1400: test for pyogrio not installed
SKIPPED [1] geopandas/io/tests/test_file.py:1410: test for fiona not installed
SKIPPED [12] geopandas/io/tests/test_geoarrow.py:363: could not import 'geoarrow.pyarrow': No module named 'geoarrow'
SKIPPED [12] geopandas/io/tests/test_geoarrow.py:479: could not import 'geoarrow.pyarrow': No module named 'geoarrow'
SKIPPED [1] geopandas/io/tests/test_geoarrow.py:500: could not import 'geoarrow.pyarrow': No module named 'geoarrow'
SKIPPED [3] geopandas/io/tests/test_pickle.py:34: shapely 2.0/pygeos-based unpickling currently only works for shapely-2.0/pygeos-written files
SKIPPED [1] geopandas/io/tests/test_sql.py:230: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:244: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:257: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:268: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:284: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:298: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:311: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:321: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:357: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:372: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:388: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:404: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:420: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:440: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:458: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:489: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:512: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:535: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:562: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:589: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:611: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:645: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:669: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:714: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:730: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:747: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:765: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:782: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:798: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:814: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:825: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:844: Cannot connect with postgresql database
SKIPPED [1] geopandas/io/tests/test_sql.py:865: Cannot connect with postgresql database
SKIPPED [1] geopandas/tests/test_array.py:937: pyproj installed
SKIPPED [1] geopandas/tests/test_array.py:972: pyproj installed
SKIPPED [1] geopandas/tests/test_array.py:982: pyproj installed
SKIPPED [1] geopandas/tests/test_dissolve.py:104: warning for pandas 1.5.x
SKIPPED [20] ../../../../../usr/lib64/python3.13/site-packages/pandas/tests/extension/base/reduce.py:123: geometry is not numeric dtype
SKIPPED [1] geopandas/tests/test_extension_array.py:488: boolean reduce (any/all) tested in test_pandas_methods
SKIPPED [1] geopandas/tests/test_extension_array.py:526: not applicable
SKIPPED [1] geopandas/tests/test_extension_array.py:530: not applicable
SKIPPED [2] ../../../../../usr/lib64/python3.13/site-packages/pandas/tests/extension/base/methods.py:443: does not support diff
SKIPPED [2] geopandas/tests/test_extension_array.py:577: searchsorted not supported
SKIPPED [1] geopandas/tests/test_extension_array.py:581: Not yet implemented
SKIPPED [1] geopandas/tests/test_extension_array.py:585: addition not supported
SKIPPED [1] geopandas/tests/test_extension_array.py:589: Not yet implemented
SKIPPED [1] geopandas/tests/test_extension_array.py:595: Min/max not supported
SKIPPED [1] geopandas/tests/test_extension_array.py:599: Min/max not supported
SKIPPED [1] geopandas/tests/test_extension_array.py:603: Min/max not supported
SKIPPED [1] geopandas/tests/test_extension_array.py:607: Min/max not supported
SKIPPED [1] geopandas/tests/test_extension_array.py:611: Min/max not supported
SKIPPED [2] ../../../../../usr/lib64/python3.13/site-packages/pandas/tests/extension/base/io.py:12: Not yet implemented
SKIPPED [3] geopandas/tests/test_geom_methods.py:1918: could not import 'pointpats': No module named 'pointpats'
SKIPPED [1] geopandas/tests/test_geoseries.py:500: pyproj installed
SKIPPED [1] geopandas/tests/test_overlay.py:260: Skipped
SKIPPED [1] geopandas/tests/test_overlay.py:338: Difference uses columns from one df only.
SKIPPED [1] geopandas/tests/test_plotting.py:1064: array-like style_kwds not supported for mixed geometry types (#1379)
SKIPPED [1] geopandas/tests/test_plotting.py:1083: array-like style_kwds not supported for mixed geometry types (#1379)
SKIPPED [12] ../../../../../usr/lib64/python3.13/site-packages/matplotlib/testing/compare.py:282: Don't know how to convert .pdf files to png
SKIPPED [1] geopandas/tests/test_sindex.py:74: append removed in pandas 2.0
SKIPPED [1] geopandas/tests/test_sindex.py: unconditional skip
SKIPPED [1] geopandas/tests/test_sindex.py:474: Test for 'dwithin'-incompatible versions of GEOS
SKIPPED [1] geopandas/tools/tests/test_sjoin.py:947: Not implemented
XFAIL geopandas/io/tests/test_file.py::test_empty_crs[fiona-GPKG-.gpkg] - reason: GPKG is read with Undefined geographic SRS.
XFAIL geopandas/io/tests/test_file.py::test_empty_crs[fiona-None-.gpkg] - reason: GPKG is read with Undefined geographic SRS.
XFAIL geopandas/io/tests/test_file.py::test_empty_crs[pyogrio-GPKG-.gpkg] - reason: GPKG is read with Undefined geographic SRS.
XFAIL geopandas/io/tests/test_file.py::test_empty_crs[pyogrio-None-.gpkg] - reason: GPKG is read with Undefined geographic SRS.
XFAIL geopandas/tests/test_extension_array.py::TestGetitem::test_getitem_series_integer_with_missing_raises[list] - Tries label-based and raises KeyError; in some cases raises when calling np.asarray
XFAIL geopandas/tests/test_extension_array.py::TestGetitem::test_getitem_series_integer_with_missing_raises[integer-array] - Tries label-based and raises KeyError; in some cases raises when calling np.asarray
XFAIL geopandas/tests/test_extension_array.py::TestSetitem::test_setitem_integer_with_missing_raises[list-True] - GH-31948
XFAIL geopandas/tests/test_op_output_types.py::test_loc_add_row[geom] - pre-regression behaviour only works for geometry col geometry
XFAIL geopandas/tests/test_sindex.py::TestShapelyInterface::test_query_sorting[False-expected1] - reason: rtree results are known to be unordered, see https://github.com/geopandas/geopandas/issues/1337 Expected: [0, 1, 2] Got: [1, 0, 2]
XFAIL geopandas/tests/test_sindex.py::TestShapelyInterface::test_query_bulk_sorting[False-expected1] - reason: rtree results are known to be unordered, see https://github.com/geopandas/geopandas/issues/1337 Expected: [[0, 0, 0], [0, 1, 2]] Got: [[0, 0, 0], [1, 0, 2]]
XFAIL geopandas/tools/tests/test_sjoin.py::TestSpatialJoinNYBB::test_no_overlapping_geometry
XPASS geopandas/tests/test_pandas_methods.py::test_drop_duplicates_series
XPASS geopandas/tests/test_pandas_methods.py::test_drop_duplicates_frame
FAILED geopandas/io/tests/test_arrow.py::test_pandas_parquet_roundtrip2[naturalearth_lowres]
FAILED geopandas/io/tests/test_arrow.py::test_pandas_parquet_roundtrip2[nybb_filename]
FAILED geopandas/io/tests/test_arrow.py::test_roundtrip[parquet-naturalearth_lowres]
FAILED geopandas/io/tests/test_arrow.py::test_roundtrip[parquet-nybb_filename]
FAILED geopandas/io/tests/test_arrow.py::test_index[parquet] - AssertionError...
FAILED geopandas/io/tests/test_arrow.py::test_column_order[parquet] - Asserti...
FAILED geopandas/io/tests/test_arrow.py::test_parquet_compression[snappy] - A...
FAILED geopandas/io/tests/test_arrow.py::test_parquet_compression[gzip] - Ass...
FAILED geopandas/io/tests/test_arrow.py::test_parquet_compression[brotli] - A...
FAILED geopandas/io/tests/test_arrow.py::test_parquet_compression[None] - Ass...
FAILED geopandas/io/tests/test_arrow.py::test_parquet_multiple_geom_cols[parquet]
FAILED geopandas/io/tests/test_arrow.py::test_missing_crs[parquet] - Assertio...
FAILED geopandas/io/tests/test_arrow.py::test_fsspec_url - AssertionError: Ge...
FAILED geopandas/io/tests/test_arrow.py::test_write_iso_wkb - AssertionError:...
FAILED geopandas/io/tests/test_arrow.py::test_read_versioned_file[0.1.0] - py...
FAILED geopandas/io/tests/test_arrow.py::test_read_versioned_file[0.4.0] - py...
FAILED geopandas/io/tests/test_arrow.py::test_read_versioned_file[1.0.0-beta.1]
FAILED geopandas/io/tests/test_arrow.py::test_read_gdal_files - pyarrow.lib.A...
FAILED geopandas/io/tests/test_arrow.py::test_parquet_read_partitioned_dataset
FAILED geopandas/io/tests/test_arrow.py::test_parquet_read_partitioned_dataset_fsspec
FAILED geopandas/io/tests/test_arrow.py::test_read_parquet_geoarrow[point] - ...
FAILED geopandas/io/tests/test_arrow.py::test_read_parquet_geoarrow[linestring]
FAILED geopandas/io/tests/test_arrow.py::test_read_parquet_geoarrow[polygon]
FAILED geopandas/io/tests/test_arrow.py::test_read_parquet_geoarrow[multipoint]
FAILED geopandas/io/tests/test_arrow.py::test_read_parquet_geoarrow[multilinestring]
FAILED geopandas/io/tests/test_arrow.py::test_read_parquet_geoarrow[multipolygon]
FAILED geopandas/io/tests/test_arrow.py::test_geoarrow_roundtrip[point] - pya...
FAILED geopandas/io/tests/test_arrow.py::test_geoarrow_roundtrip[linestring]
FAILED geopandas/io/tests/test_arrow.py::test_geoarrow_roundtrip[polygon] - p...
FAILED geopandas/io/tests/test_arrow.py::test_geoarrow_roundtrip[multipoint]
FAILED geopandas/io/tests/test_arrow.py::test_geoarrow_roundtrip[multilinestring]
FAILED geopandas/io/tests/test_arrow.py::test_geoarrow_roundtrip[multipolygon]
FAILED geopandas/io/tests/test_arrow.py::test_to_parquet_bbox_values[Point]
FAILED geopandas/io/tests/test_arrow.py::test_to_parquet_bbox_values[LineString]
FAILED geopandas/io/tests/test_arrow.py::test_to_parquet_bbox_values[Polygon]
FAILED geopandas/io/tests/test_arrow.py::test_to_parquet_bbox_values[Multipolygon]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-point-xy]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-point-xyz]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-linestring-xy]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-linestring-xyz]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-polygon-xy]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-polygon-xyz]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-multipoint-xy]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-multipoint-xyz]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-multilinestring-xy]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-multilinestring-xyz]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-multipolygon-xy]
FAILED geopandas/io/tests/test_geoarrow.py::test_geoarrow_export[WKB-multipolygon-xyz]
FAILED geopandas/tests/test_geodataframe.py::TestDataFrame::test_to_wkb - Ass...
= 49 failed, 2352 passed, 145 skipped, 8 deselected, 11 xfailed, 2 xpassed, 12 warnings in 73.75s (0:01:13) =
RPM build errors:
error: Bad exit status from /var/tmp/rpm-tmp.LC8QRL (%check)
    Bad exit status from /var/tmp/rpm-tmp.LC8QRL (%check)
Child return code was: 1
EXCEPTION: [Error('Command failed: \n # /usr/bin/systemd-nspawn -q -M e03016ded2c24d52b2a57dcecadef146 -D /var/lib/mock/f42-build-55024301-6531854/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf --bind=/dev/btrfs-control --bind=/dev/mapper/control --bind=/dev/fuse --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --console=pipe --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin \'--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"\' \'--setenv=PS1= \\s-\\v\\$ \' --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c \'/usr/bin/rpmbuild -ba --noprep --noclean --target s390x /builddir/build/SPECS/python-geopandas.spec\'\n', 1)]
Traceback (most recent call last):
  File "/usr/lib/python3.12/site-packages/mockbuild/trace_decorator.py", line 93, in trace
    result = func(*args, **kw)
             ^^^^^^^^^^^^^^^^^
  File "/usr/lib/python3.12/site-packages/mockbuild/util.py", line 612, in do_with_status
    raise exception.Error("Command failed: \n # %s\n%s" % (cmd_pretty(command, env), output), child.returncode)
mockbuild.exception.Error: Command failed:
 # /usr/bin/systemd-nspawn -q -M e03016ded2c24d52b2a57dcecadef146 -D /var/lib/mock/f42-build-55024301-6531854/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.wpzx6wld:/etc/resolv.conf --bind=/dev/btrfs-control --bind=/dev/mapper/control --bind=/dev/fuse --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --console=pipe --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin '--setenv=PROMPT_COMMAND=printf "\033]0;\007"' '--setenv=PS1= \s-\v\$ ' --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c '/usr/bin/rpmbuild -ba --noprep --noclean --target s390x /builddir/build/SPECS/python-geopandas.spec'