Mock Version: 5.0
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-scrapy.spec'], chrootPath='/var/lib/mock/f40-build-2322268-58639/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=864000uid=996gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueprintOutput=False)
Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-scrapy.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False
Building target platforms: noarch
Building for target noarch
setting SOURCE_DATE_EPOCH=1706227200
Wrote: /builddir/build/SRPMS/python-scrapy-2.10.1-3.fc40.src.rpm
Child return code was: 0
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-scrapy.spec'], chrootPath='/var/lib/mock/f40-build-2322268-58639/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=864000uid=996gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueprintOutput=False)
Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-scrapy.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False
Building target platforms: noarch
Building for target noarch
setting SOURCE_DATE_EPOCH=1706227200
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.RyZMBy
+ umask 022
+ cd /builddir/build/BUILD
+ cd /builddir/build/BUILD
+ rm -rf scrapy-2.10.1
+ /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/Scrapy-2.10.1.tar.gz
+ STATUS=0
+ '[' 0 -ne 0 ']'
+ cd scrapy-2.10.1
+ rm -rf /builddir/build/BUILD/scrapy-2.10.1-SPECPARTS
+ /usr/bin/mkdir -p /builddir/build/BUILD/scrapy-2.10.1-SPECPARTS
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
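SOURCE_DATE_EPOCH pins file timestamps so the build is reproducible; rpmbuild derives it from the latest spec changelog entry. A minimal Python sketch to decode the value logged above:

    from datetime import datetime, timezone

    # 1706227200 is the SOURCE_DATE_EPOCH set by rpmbuild above
    print(datetime.fromtimestamp(1706227200, tz=timezone.utc).isoformat())
    # -> 2024-01-26T00:00:00+00:00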
+ RPM_EC=0
++ jobs -p
+ exit 0
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.ZvNem7
+ umask 022
+ cd /builddir/build/BUILD
+ CFLAGS='-O2 -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Werror=implicit-function-declaration -Werror=implicit-int -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer '
+ export CFLAGS
+ CXXFLAGS='-O2 -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer '
+ export CXXFLAGS
+ FFLAGS='-O2 -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -I/usr/lib/gfortran/modules '
+ export FFLAGS
+ FCFLAGS='-O2 -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -I/usr/lib/gfortran/modules '
+ export FCFLAGS
+ VALAFLAGS=-g
+ export VALAFLAGS
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes -Clink-arg=-Wl,-z,relro -Clink-arg=-Wl,-z,now --cap-lints=warn'
+ export RUSTFLAGS
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ export LDFLAGS
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ export LT_SYS_LIBRARY_PATH
+ CC=gcc
+ export CC
+ CXX=g++
+ export CXX
+ cd scrapy-2.10.1
+ CFLAGS='-O2 -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Werror=implicit-function-declaration -Werror=implicit-int -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer '
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ /usr/bin/python3 setup.py build '--executable=/usr/bin/python3 -sP'
running build
running build_py
creating build
creating build/lib
creating build/lib/scrapy
copying scrapy/responsetypes.py -> build/lib/scrapy
copying scrapy/robotstxt.py -> build/lib/scrapy
copying scrapy/shell.py -> build/lib/scrapy
copying scrapy/__main__.py -> build/lib/scrapy
copying scrapy/cmdline.py -> build/lib/scrapy
copying scrapy/exporters.py -> build/lib/scrapy
copying scrapy/signals.py -> build/lib/scrapy
copying scrapy/interfaces.py -> build/lib/scrapy
copying scrapy/resolver.py -> build/lib/scrapy
copying scrapy/dupefilters.py -> build/lib/scrapy
copying scrapy/crawler.py -> build/lib/scrapy
copying scrapy/item.py -> build/lib/scrapy
copying scrapy/spiderloader.py -> build/lib/scrapy
copying scrapy/signalmanager.py -> build/lib/scrapy
copying scrapy/link.py -> build/lib/scrapy
copying scrapy/exceptions.py -> build/lib/scrapy
copying scrapy/squeues.py -> build/lib/scrapy
copying scrapy/logformatter.py -> build/lib/scrapy
copying scrapy/extension.py -> build/lib/scrapy
copying scrapy/pqueues.py -> build/lib/scrapy
copying scrapy/mail.py -> build/lib/scrapy
copying scrapy/middleware.py -> build/lib/scrapy
copying scrapy/__init__.py -> build/lib/scrapy
copying scrapy/statscollectors.py -> build/lib/scrapy
copying scrapy/addons.py -> build/lib/scrapy
creating build/lib/scrapy/linkextractors
copying scrapy/linkextractors/lxmlhtml.py -> build/lib/scrapy/linkextractors
copying scrapy/linkextractors/__init__.py -> build/lib/scrapy/linkextractors
creating build/lib/scrapy/extensions
copying scrapy/extensions/httpcache.py -> build/lib/scrapy/extensions
copying scrapy/extensions/memdebug.py -> build/lib/scrapy/extensions
copying scrapy/extensions/logstats.py -> build/lib/scrapy/extensions
copying scrapy/extensions/telnet.py -> build/lib/scrapy/extensions
copying scrapy/extensions/spiderstate.py -> build/lib/scrapy/extensions
copying scrapy/extensions/closespider.py -> build/lib/scrapy/extensions
copying scrapy/extensions/postprocessing.py -> build/lib/scrapy/extensions
copying scrapy/extensions/corestats.py -> build/lib/scrapy/extensions
copying scrapy/extensions/__init__.py -> build/lib/scrapy/extensions
copying scrapy/extensions/memusage.py -> build/lib/scrapy/extensions
copying scrapy/extensions/throttle.py -> build/lib/scrapy/extensions
copying scrapy/extensions/feedexport.py -> build/lib/scrapy/extensions
copying scrapy/extensions/statsmailer.py -> build/lib/scrapy/extensions
copying scrapy/extensions/debug.py -> build/lib/scrapy/extensions
creating build/lib/scrapy/utils
copying scrapy/utils/ssl.py -> build/lib/scrapy/utils
copying scrapy/utils/signal.py -> build/lib/scrapy/utils
copying scrapy/utils/misc.py -> build/lib/scrapy/utils
copying scrapy/utils/test.py -> build/lib/scrapy/utils
copying scrapy/utils/template.py -> build/lib/scrapy/utils
copying scrapy/utils/ftp.py -> build/lib/scrapy/utils
copying scrapy/utils/request.py -> build/lib/scrapy/utils
copying scrapy/utils/benchserver.py -> build/lib/scrapy/utils
copying scrapy/utils/serialize.py -> build/lib/scrapy/utils
copying scrapy/utils/defer.py -> build/lib/scrapy/utils
copying scrapy/utils/sitemap.py -> build/lib/scrapy/utils
copying scrapy/utils/deprecate.py -> build/lib/scrapy/utils
copying scrapy/utils/response.py -> build/lib/scrapy/utils
copying scrapy/utils/url.py -> build/lib/scrapy/utils
copying scrapy/utils/ossignal.py -> build/lib/scrapy/utils
copying scrapy/utils/reactor.py -> build/lib/scrapy/utils
copying scrapy/utils/testproc.py -> build/lib/scrapy/utils
copying scrapy/utils/curl.py -> build/lib/scrapy/utils
copying scrapy/utils/versions.py -> build/lib/scrapy/utils
copying scrapy/utils/engine.py -> build/lib/scrapy/utils
copying scrapy/utils/httpobj.py -> build/lib/scrapy/utils
copying scrapy/utils/testsite.py -> build/lib/scrapy/utils
copying scrapy/utils/boto.py -> build/lib/scrapy/utils
copying scrapy/utils/asyncgen.py -> build/lib/scrapy/utils
copying scrapy/utils/conf.py -> build/lib/scrapy/utils
copying scrapy/utils/iterators.py -> build/lib/scrapy/utils
copying scrapy/utils/project.py -> build/lib/scrapy/utils
copying scrapy/utils/spider.py -> build/lib/scrapy/utils
copying scrapy/utils/gz.py -> build/lib/scrapy/utils
copying scrapy/utils/__init__.py -> build/lib/scrapy/utils
copying scrapy/utils/console.py -> build/lib/scrapy/utils
copying scrapy/utils/display.py -> build/lib/scrapy/utils
copying scrapy/utils/trackref.py -> build/lib/scrapy/utils
copying scrapy/utils/python.py -> build/lib/scrapy/utils
copying scrapy/utils/log.py -> build/lib/scrapy/utils
copying scrapy/utils/datatypes.py -> build/lib/scrapy/utils
copying scrapy/utils/decorators.py -> build/lib/scrapy/utils
copying scrapy/utils/job.py -> build/lib/scrapy/utils
creating build/lib/scrapy/commands
copying scrapy/commands/shell.py -> build/lib/scrapy/commands
copying scrapy/commands/bench.py -> build/lib/scrapy/commands
copying scrapy/commands/version.py -> build/lib/scrapy/commands
copying scrapy/commands/runspider.py -> build/lib/scrapy/commands
copying scrapy/commands/list.py -> build/lib/scrapy/commands
copying scrapy/commands/fetch.py -> build/lib/scrapy/commands
copying scrapy/commands/startproject.py -> build/lib/scrapy/commands
copying scrapy/commands/crawl.py -> build/lib/scrapy/commands
copying scrapy/commands/edit.py -> build/lib/scrapy/commands
copying scrapy/commands/view.py -> build/lib/scrapy/commands
copying scrapy/commands/settings.py -> build/lib/scrapy/commands
copying scrapy/commands/__init__.py -> build/lib/scrapy/commands
copying scrapy/commands/genspider.py -> build/lib/scrapy/commands
copying scrapy/commands/check.py -> build/lib/scrapy/commands
copying scrapy/commands/parse.py -> build/lib/scrapy/commands
creating build/lib/scrapy/selector
copying scrapy/selector/__init__.py -> build/lib/scrapy/selector
copying scrapy/selector/unified.py -> build/lib/scrapy/selector
creating build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/robotstxt.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/httpcache.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/decompression.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/retry.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/redirect.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/downloadtimeout.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/ajaxcrawl.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/httpauth.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/useragent.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/__init__.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/stats.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/httpproxy.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/cookies.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/defaultheaders.py -> build/lib/scrapy/downloadermiddlewares
copying scrapy/downloadermiddlewares/httpcompression.py -> build/lib/scrapy/downloadermiddlewares
creating build/lib/scrapy/contracts
copying scrapy/contracts/default.py -> build/lib/scrapy/contracts
copying scrapy/contracts/__init__.py -> build/lib/scrapy/contracts
creating build/lib/scrapy/http
copying scrapy/http/common.py -> build/lib/scrapy/http
copying scrapy/http/headers.py -> build/lib/scrapy/http
copying scrapy/http/__init__.py -> build/lib/scrapy/http
copying scrapy/http/cookies.py -> build/lib/scrapy/http
creating build/lib/scrapy/spiders
copying scrapy/spiders/sitemap.py -> build/lib/scrapy/spiders
copying scrapy/spiders/init.py -> build/lib/scrapy/spiders
copying scrapy/spiders/crawl.py -> build/lib/scrapy/spiders
copying scrapy/spiders/feed.py -> build/lib/scrapy/spiders
copying scrapy/spiders/__init__.py -> build/lib/scrapy/spiders
creating build/lib/scrapy/settings
copying scrapy/settings/__init__.py -> build/lib/scrapy/settings
copying scrapy/settings/default_settings.py -> build/lib/scrapy/settings
creating build/lib/scrapy/spidermiddlewares
copying scrapy/spidermiddlewares/referer.py -> build/lib/scrapy/spidermiddlewares
copying scrapy/spidermiddlewares/offsite.py -> build/lib/scrapy/spidermiddlewares
copying scrapy/spidermiddlewares/httperror.py -> build/lib/scrapy/spidermiddlewares
copying scrapy/spidermiddlewares/__init__.py -> build/lib/scrapy/spidermiddlewares
copying scrapy/spidermiddlewares/urllength.py -> build/lib/scrapy/spidermiddlewares
copying scrapy/spidermiddlewares/depth.py -> build/lib/scrapy/spidermiddlewares
creating build/lib/scrapy/core
copying scrapy/core/spidermw.py -> build/lib/scrapy/core
copying scrapy/core/engine.py -> build/lib/scrapy/core
copying scrapy/core/scheduler.py -> build/lib/scrapy/core
copying scrapy/core/__init__.py -> build/lib/scrapy/core
copying scrapy/core/scraper.py -> build/lib/scrapy/core
creating build/lib/scrapy/loader
copying scrapy/loader/common.py -> build/lib/scrapy/loader
copying scrapy/loader/__init__.py -> build/lib/scrapy/loader
copying scrapy/loader/processors.py -> build/lib/scrapy/loader
creating build/lib/scrapy/pipelines
copying scrapy/pipelines/files.py -> build/lib/scrapy/pipelines
copying scrapy/pipelines/images.py -> build/lib/scrapy/pipelines
copying scrapy/pipelines/media.py -> build/lib/scrapy/pipelines
copying scrapy/pipelines/__init__.py -> build/lib/scrapy/pipelines
creating build/lib/scrapy/http/response
copying scrapy/http/response/html.py -> build/lib/scrapy/http/response
copying scrapy/http/response/text.py -> build/lib/scrapy/http/response
copying scrapy/http/response/xml.py -> build/lib/scrapy/http/response
copying scrapy/http/response/__init__.py -> build/lib/scrapy/http/response
creating build/lib/scrapy/http/request
copying scrapy/http/request/form.py -> build/lib/scrapy/http/request
copying scrapy/http/request/rpc.py -> build/lib/scrapy/http/request
copying scrapy/http/request/__init__.py -> build/lib/scrapy/http/request
copying scrapy/http/request/json_request.py -> build/lib/scrapy/http/request
creating build/lib/scrapy/core/downloader
copying scrapy/core/downloader/tls.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/contextfactory.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/webclient.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/middleware.py -> build/lib/scrapy/core/downloader
copying scrapy/core/downloader/__init__.py -> build/lib/scrapy/core/downloader
creating build/lib/scrapy/core/http2
copying scrapy/core/http2/protocol.py -> build/lib/scrapy/core/http2
copying scrapy/core/http2/__init__.py -> build/lib/scrapy/core/http2
copying scrapy/core/http2/agent.py -> build/lib/scrapy/core/http2
copying scrapy/core/http2/stream.py -> build/lib/scrapy/core/http2
creating build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/file.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/ftp.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/http10.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/http.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/http11.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/http2.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/__init__.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/s3.py -> build/lib/scrapy/core/downloader/handlers
copying scrapy/core/downloader/handlers/datauri.py -> build/lib/scrapy/core/downloader/handlers
running egg_info
creating Scrapy.egg-info
writing Scrapy.egg-info/PKG-INFO
writing dependency_links to Scrapy.egg-info/dependency_links.txt
writing entry points to Scrapy.egg-info/entry_points.txt
writing requirements to Scrapy.egg-info/requires.txt
writing top-level names to Scrapy.egg-info/top_level.txt
writing manifest file 'Scrapy.egg-info/SOURCES.txt'
reading manifest file 'Scrapy.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
warning: no files found matching 'INSTALL'
warning: no files found matching 'requirements-*.txt'
warning: no files found matching 'license.txt' under directory 'scrapy'
no previously-included directories found matching 'docs/build'
warning: no files found matching '*' under directory 'bin'
warning: no previously-included files matching '__pycache__' found anywhere in distribution
adding license file 'LICENSE'
adding license file 'AUTHORS'
warning: no previously-included files matching '*.py[cod]' found anywhere in distribution
writing manifest file 'Scrapy.egg-info/SOURCES.txt'
/usr/lib/python3.12/site-packages/setuptools/command/build_py.py:204: _Warning: Package 'scrapy.templates.project' is absent from the `packages` configuration.
!!

        ********************************************************************************
        ############################
        # Package would be ignored #
        ############################
        Python recognizes 'scrapy.templates.project' as an importable package[^1],
        but it is absent from setuptools' `packages` configuration.

        This leads to an ambiguous overall configuration. If you want to distribute
        this package, please make sure that 'scrapy.templates.project' is explicitly
        added to the `packages` configuration field.

        Alternatively, you can also rely on setuptools' discovery methods
        (for example by using `find_namespace_packages(...)`/`find_namespace:`
        instead of `find_packages(...)`/`find:`).

        You can read more about "package discovery" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html

        If you don't want 'scrapy.templates.project' to be distributed and are
        already explicitly excluding 'scrapy.templates.project' via
        `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
        you can try to use `exclude_package_data`, or `include-package-data=False` in
        combination with a more fine grained `package-data` configuration.

        You can read more about "package data files" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/datafiles.html

        [^1]: For Python, any directory (with suitable naming) can be imported,
              even if it does not contain any `.py` files.
              On the other hand, currently there is no concept of package data
              directory, all directories are treated like packages.

        ********************************************************************************

!!
  check.warn(importable)
/usr/lib/python3.12/site-packages/setuptools/command/build_py.py:204: _Warning: Package 'scrapy.templates.project.module' is absent from the `packages` configuration.
!!
        ********************************************************************************
        ############################
        # Package would be ignored #
        ############################
        Python recognizes 'scrapy.templates.project.module' as an importable package[^1],
        but it is absent from setuptools' `packages` configuration.

        This leads to an ambiguous overall configuration. If you want to distribute
        this package, please make sure that 'scrapy.templates.project.module' is
        explicitly added to the `packages` configuration field.

        Alternatively, you can also rely on setuptools' discovery methods
        (for example by using `find_namespace_packages(...)`/`find_namespace:`
        instead of `find_packages(...)`/`find:`).

        You can read more about "package discovery" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html

        If you don't want 'scrapy.templates.project.module' to be distributed and are
        already explicitly excluding 'scrapy.templates.project.module' via
        `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
        you can try to use `exclude_package_data`, or `include-package-data=False` in
        combination with a more fine grained `package-data` configuration.

        You can read more about "package data files" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/datafiles.html

        [^1]: For Python, any directory (with suitable naming) can be imported,
              even if it does not contain any `.py` files.
              On the other hand, currently there is no concept of package data
              directory, all directories are treated like packages.

        ********************************************************************************

!!
  check.warn(importable)
/usr/lib/python3.12/site-packages/setuptools/command/build_py.py:204: _Warning: Package 'scrapy.templates.project.module.spiders' is absent from the `packages` configuration.
!!

        ********************************************************************************
        ############################
        # Package would be ignored #
        ############################
        Python recognizes 'scrapy.templates.project.module.spiders' as an importable package[^1],
        but it is absent from setuptools' `packages` configuration.

        This leads to an ambiguous overall configuration. If you want to distribute
        this package, please make sure that 'scrapy.templates.project.module.spiders'
        is explicitly added to the `packages` configuration field.

        Alternatively, you can also rely on setuptools' discovery methods
        (for example by using `find_namespace_packages(...)`/`find_namespace:`
        instead of `find_packages(...)`/`find:`).

        You can read more about "package discovery" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html

        If you don't want 'scrapy.templates.project.module.spiders' to be distributed
        and are already explicitly excluding 'scrapy.templates.project.module.spiders'
        via `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
        you can try to use `exclude_package_data`, or `include-package-data=False` in
        combination with a more fine grained `package-data` configuration.

        You can read more about "package data files" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/datafiles.html

        [^1]: For Python, any directory (with suitable naming) can be imported,
              even if it does not contain any `.py` files.
              On the other hand, currently there is no concept of package data
              directory, all directories are treated like packages.
        ********************************************************************************

!!
  check.warn(importable)
/usr/lib/python3.12/site-packages/setuptools/command/build_py.py:204: _Warning: Package 'scrapy.templates.spiders' is absent from the `packages` configuration.
!!

        ********************************************************************************
        ############################
        # Package would be ignored #
        ############################
        Python recognizes 'scrapy.templates.spiders' as an importable package[^1],
        but it is absent from setuptools' `packages` configuration.

        This leads to an ambiguous overall configuration. If you want to distribute
        this package, please make sure that 'scrapy.templates.spiders' is explicitly
        added to the `packages` configuration field.

        Alternatively, you can also rely on setuptools' discovery methods
        (for example by using `find_namespace_packages(...)`/`find_namespace:`
        instead of `find_packages(...)`/`find:`).

        You can read more about "package discovery" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html

        If you don't want 'scrapy.templates.spiders' to be distributed and are
        already explicitly excluding 'scrapy.templates.spiders' via
        `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
        you can try to use `exclude_package_data`, or `include-package-data=False` in
        combination with a more fine grained `package-data` configuration.

        You can read more about "package data files" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/datafiles.html

        [^1]: For Python, any directory (with suitable naming) can be imported,
              even if it does not contain any `.py` files.
              On the other hand, currently there is no concept of package data
              directory, all directories are treated like packages.

        ********************************************************************************

!!
  check.warn(importable)
copying scrapy/VERSION -> build/lib/scrapy
copying scrapy/mime.types -> build/lib/scrapy
creating build/lib/scrapy/templates
creating build/lib/scrapy/templates/project
copying scrapy/templates/project/scrapy.cfg -> build/lib/scrapy/templates/project
creating build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/__init__.py -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/items.py.tmpl -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/middlewares.py.tmpl -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/pipelines.py.tmpl -> build/lib/scrapy/templates/project/module
copying scrapy/templates/project/module/settings.py.tmpl -> build/lib/scrapy/templates/project/module
creating build/lib/scrapy/templates/project/module/spiders
copying scrapy/templates/project/module/spiders/__init__.py -> build/lib/scrapy/templates/project/module/spiders
creating build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/basic.tmpl -> build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/crawl.tmpl -> build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/csvfeed.tmpl -> build/lib/scrapy/templates/spiders
copying scrapy/templates/spiders/xmlfeed.tmpl -> build/lib/scrapy/templates/spiders
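The four "Package would be ignored" warnings above fire because the scrapy/templates/... directories are importable on disk but are not listed in the `packages` configuration. A minimal sketch of the alternative the warning itself suggests (the project's real setup.py differs; names here are illustrative):

    # Hypothetical setup.py excerpt: find_namespace_packages() also picks up
    # data-only directories such as scrapy/templates/*, silencing the warnings.
    from setuptools import setup, find_namespace_packages

    setup(
        name="Scrapy",
        packages=find_namespace_packages(include=["scrapy", "scrapy.*"]),
        include_package_data=True,  # template files themselves come via MANIFEST.in
    )

The warnings are harmless here: build_py still copies the template files, as the "copying scrapy/templates/..." lines above show.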
++ pwd
+ PYTHONPATH=/builddir/build/BUILD/scrapy-2.10.1
+ make -C docs html
make: Entering directory '/builddir/build/BUILD/scrapy-2.10.1/docs'
mkdir -p build/html build/doctrees
sphinx-build -b html -d build/doctrees -D latex_elements.papersize= . build/html
Running Sphinx v7.2.6
/builddir/build/BUILD/scrapy-2.10.1/docs/conf.py:230: SyntaxWarning: invalid escape sequence '\d'
  "http://localhost:\d+",
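The SyntaxWarning comes from Python 3.12's stricter treatment of string literals: `\d` is not a recognized escape sequence, so CPython now warns at compile time. Since the string at docs/conf.py:230 is a regular-expression pattern, the usual fix is a raw string:

    pattern = "http://localhost:\d+"   # SyntaxWarning: invalid escape sequence '\d'
    pattern = r"http://localhost:\d+"  # raw string: the backslash reaches the regex intact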
loading intersphinx inventory from https://www.attrs.org/en/stable/objects.inv...
loading intersphinx inventory from https://coverage.readthedocs.io/en/stable/objects.inv...
loading intersphinx inventory from https://cryptography.io/en/latest/objects.inv...
loading intersphinx inventory from https://cssselect.readthedocs.io/en/latest/objects.inv...
loading intersphinx inventory from https://itemloaders.readthedocs.io/en/latest/objects.inv...
loading intersphinx inventory from https://docs.pytest.org/en/latest/objects.inv...
loading intersphinx inventory from https://docs.python.org/3/objects.inv...
loading intersphinx inventory from https://www.sphinx-doc.org/en/master/objects.inv...
loading intersphinx inventory from https://tox.wiki/en/latest/objects.inv...
loading intersphinx inventory from https://docs.twisted.org/en/stable/api/objects.inv...
loading intersphinx inventory from https://docs.twisted.org/en/stable/objects.inv...
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://www.sphinx-doc.org/en/master/objects.inv' not fetchable due to : HTTPSConnectionPool(host='www.sphinx-doc.org', port=443): Max retries exceeded with url: /en/master/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
loading intersphinx inventory from https://w3lib.readthedocs.io/en/latest/objects.inv...
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://coverage.readthedocs.io/en/stable/objects.inv' not fetchable due to : HTTPSConnectionPool(host='coverage.readthedocs.io', port=443): Max retries exceeded with url: /en/stable/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://tox.wiki/en/latest/objects.inv' not fetchable due to : HTTPSConnectionPool(host='tox.wiki', port=443): Max retries exceeded with url: /en/latest/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://cryptography.io/en/latest/objects.inv' not fetchable due to : HTTPSConnectionPool(host='cryptography.io', port=443): Max retries exceeded with url: /en/latest/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://docs.pytest.org/en/latest/objects.inv' not fetchable due to : HTTPSConnectionPool(host='docs.pytest.org', port=443): Max retries exceeded with url: /en/latest/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://cssselect.readthedocs.io/en/latest/objects.inv' not fetchable due to : HTTPSConnectionPool(host='cssselect.readthedocs.io', port=443): Max retries exceeded with url: /en/latest/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://www.attrs.org/en/stable/objects.inv' not fetchable due to : HTTPSConnectionPool(host='www.attrs.org', port=443): Max retries exceeded with url: /en/stable/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://itemloaders.readthedocs.io/en/latest/objects.inv' not fetchable due to : HTTPSConnectionPool(host='itemloaders.readthedocs.io', port=443): Max retries exceeded with url: /en/latest/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://docs.python.org/3/objects.inv' not fetchable due to : HTTPSConnectionPool(host='docs.python.org', port=443): Max retries exceeded with url: /3/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://w3lib.readthedocs.io/en/latest/objects.inv' not fetchable due to : HTTPSConnectionPool(host='w3lib.readthedocs.io', port=443): Max retries exceeded with url: /en/latest/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://docs.twisted.org/en/stable/api/objects.inv' not fetchable due to : HTTPSConnectionPool(host='docs.twisted.org', port=443): Max retries exceeded with url: /en/stable/api/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
WARNING: failed to reach any of the inventories with the following issues:
intersphinx inventory 'https://docs.twisted.org/en/stable/objects.inv' not fetchable due to : HTTPSConnectionPool(host='docs.twisted.org', port=443): Max retries exceeded with url: /en/stable/objects.inv (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -3] Temporary failure in name resolution'))
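Every intersphinx fetch fails with the same DNS error because mock runs the build with networking unshared (unshare_net=True in the ENTER lines above), so these warnings are expected and non-fatal. For a deliberately offline build, conf.py can point intersphinx at locally installed inventory files instead; a hedged sketch (the local path is illustrative, not what this package uses):

    # Hypothetical conf.py excerpt for offline docs builds: the second tuple
    # element overrides the fetch location with a local objects.inv file.
    intersphinx_mapping = {
        "python": ("https://docs.python.org/3",
                   "/usr/share/doc/python3-docs/html/objects.inv"),
    }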
building [mo]: targets for 0 po files that are out of date
writing output...
building [html]: targets for 52 source files that are out of date
updating environment: [new config] 52 added, 0 changed, 0 removed
reading sources... [ 2%] README
reading sources... [ 4%] contributing
reading sources... [ 6%] faq
reading sources... [ 8%] index
reading sources... [ 10%] intro/examples
reading sources... [ 12%] intro/install
reading sources... [ 13%] intro/overview
reading sources... [ 15%] intro/tutorial
reading sources... [ 17%] news
reading sources... [ 19%] topics/addons
reading sources... [ 21%] topics/api
reading sources... [ 23%] topics/architecture
reading sources... [ 25%] topics/asyncio
reading sources... [ 27%] topics/autothrottle
reading sources... [ 29%] topics/benchmarking
reading sources... [ 31%] topics/broad-crawls
reading sources... [ 33%] topics/commands
reading sources... [ 35%] topics/components
reading sources... [ 37%] topics/contracts
reading sources... [ 38%] topics/coroutines
reading sources... [ 40%] topics/debug
reading sources... [ 42%] topics/deploy
reading sources... [ 44%] topics/developer-tools
reading sources... [ 46%] topics/djangoitem
reading sources... [ 48%] topics/downloader-middleware
WARNING: autodoc: failed to import method 'crawler.Crawler.stop' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import class 'crawler.CrawlerRunner' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import class 'crawler.CrawlerProcess' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import function 'settings.get_settings_priority' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import class 'settings.Settings' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import class 'settings.BaseSettings' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import module 'signalmanager' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import function 'defer.deferred_to_future' from module 'scrapy.utils'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import function 'defer.maybe_deferred_to_future' from module 'scrapy.utils'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import function 'downloadermiddlewares.retry.get_retry_request' from module 'scrapy'; the following exception was raised: No module named 'parsel'
WARNING: autodoc: failed to import class 'robotstxt.RobotParser' from module 'scrapy'; the following exception was raised: No module named 'parsel'

Extension error (scrapydocs):
Handler for event 'doctree-read' threw an exception (exception: Next node is not a target)
make: *** [Makefile:33: build] Error 2
make: Leaving directory '/builddir/build/BUILD/scrapy-2.10.1/docs'
error: Bad exit status from /var/tmp/rpm-tmp.ZvNem7 (%build)

RPM build errors:
    Bad exit status from /var/tmp/rpm-tmp.ZvNem7 (%build)
Child return code was: 1
EXCEPTION: [Error('Command failed: \n # bash --login -c /usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-scrapy.spec\n', 1)]
Traceback (most recent call last):
  File "/usr/lib/python3.11/site-packages/mockbuild/trace_decorator.py", line 93, in trace
    result = func(*args, **kw)
             ^^^^^^^^^^^^^^^^^
  File "/usr/lib/python3.11/site-packages/mockbuild/util.py", line 597, in do_with_status
    raise exception.Error("Command failed: \n # %s\n%s" % (command, output), child.returncode)
mockbuild.exception.Error: Command failed:
 # bash --login -c /usr/bin/rpmbuild -bb --noclean --target noarch --nodeps /builddir/build/SPECS/python-scrapy.spec
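The failure chain is visible in the warnings above: Sphinx autodoc imports scrapy, scrapy imports parsel at import time, and parsel is not installed in the buildroot, so every autodoc directive fails; the scrapydocs extension then crashes on the resulting incomplete doctree ("Next node is not a target"), which fails make and the %build section. The straightforward fix is to add the missing dependency to the build environment (for an RPM build, a BuildRequires on the python3-parsel package). If the docs had to build without it, autodoc can stub the import instead; a minimal conf.py sketch:

    # Hypothetical conf.py workaround: autodoc replaces listed modules with
    # mock objects, so `import parsel` inside scrapy no longer raises ImportError.
    autodoc_mock_imports = ["parsel"]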