 .github/workflows/release.yml |  75
 MANIFEST.in                   |  14
 py_build.py                   | 147
 pyproject.toml                |  59
 setup.py                      | 145
 5 files changed, 181 insertions(+), 259 deletions(-)
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index b229664f..b8be4db0 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,8 +7,15 @@ on:
  workflow_dispatch:

jobs:
-  build-sdist:
+  build-and-deploy:
    runs-on: ubuntu-latest
+    environment: release
+
+    permissions:
+      id-token: write # Used to authenticate to PyPI via OIDC
+
+      contents: write # Used to publish the GitHub release
+
    steps:
    - name: Checkout code
      uses: actions/checkout@v4
@@ -34,70 +41,26 @@ jobs:
        make man
        make sdist

+    - name: Build wheel
+      run: make wheel
+
    - name: Output dist file info
      run: |
+        sha512sum dist/*
        tar -ztf dist/*.tar.gz | sort

-    - name: Upload sdist artifact
-      uses: actions/upload-artifact@v4
-      with:
-        name: dist-sdist
-        path: dist/*.tar.gz
-
-
-  build-wheel:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        arch: [x86_64, aarch64]
-
-    steps:
-    - name: Checkout code
-      uses: actions/checkout@v4
-
-    - name: Set up QEMU
-      uses: docker/setup-qemu-action@v3
-      with:
-        platforms: ${{matrix.arch}}
-
-    - name: Build wheels
-      uses: joerick/cibuildwheel@v2.16.5
-      with:
-        output-dir: dist
-      env:
-        CIBW_ARCHS_LINUX: ${{matrix.arch}}
-
-    - name: Upload wheel artifacts
-      uses: actions/upload-artifact@v4
-      with:
-        name: dist-wheel-${{matrix.arch}}
-        path: dist/*.whl
-
-
-  deploy:
-    if: startsWith(github.ref, 'refs/tags/')
-    needs: ["build-sdist", "build-wheel"]
-    runs-on: ubuntu-latest
-    environment: release
-
-    permissions:
-      id-token: write # Used to authenticate to PyPI via OIDC
-
-      contents: write # Used to authenticate github release publish
-
-    steps:
-    - uses: actions/download-artifact@v4
+    - uses: actions/upload-artifact@v4
      with:
-        pattern: dist-*
-        path: dist
-        merge-multiple: true
+        name: results
+        path: dist/*

-    - name: Publish a Python distribution to PyPI
+    - name: publish
      uses: pypa/gh-action-pypi-publish@release/v1
+      if: startsWith(github.ref, 'refs/tags/')

    - name: Create GitHub release
      uses: softprops/action-gh-release@v1
+      if: startsWith(github.ref, 'refs/tags/')
      with:
        files: dist/*.tar.gz
        fail_on_unmatched_files: true
@@ -105,7 +68,7 @@ jobs:
  build-and-push-docker-image:
    if: startsWith(github.ref, 'refs/tags/')
-    needs: ["deploy"]
+    needs: ["build-and-deploy"]
    runs-on: ubuntu-latest
    environment: release
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 15fbcc7a..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,14 +0,0 @@
-include LICENSE *.py *.rst
-include tox.ini pyproject.toml .coveragerc .pylintrc
-recursive-include bin *
-recursive-include contrib *
-recursive-include data *
-recursive-include doc *
-recursive-exclude doc/generated *
-recursive-exclude doc/man/pkgcheck *
-recursive-include requirements *
-recursive-include src *
-recursive-include testdata *
-recursive-include tests *
-recursive-include tree-sitter-bash *
-global-exclude *.so *.pyc *.pyo __pycache__
diff --git a/py_build.py b/py_build.py
new file mode 100644
index 00000000..5199fdd1
--- /dev/null
+++ b/py_build.py
@@ -0,0 +1,147 @@
+import sys
+from collections import defaultdict
+from functools import partial
+from contextlib import contextmanager
+from pathlib import Path
+from textwrap import dedent
+
+from flit_core import buildapi
+
+
+@contextmanager
+def sys_path():
+    orig_path = sys.path[:]
+    sys.path.insert(0, str(Path.cwd() / "src"))
+    try:
+        yield
+    finally:
+        sys.path = orig_path
+
+
+def write_verinfo(cleanup_files):
+    from snakeoil.version import get_git_version
+
+    cleanup_files.append(path := Path.cwd() / "src/pkgcheck/_verinfo.py")
+    path.parent.mkdir(parents=True, exist_ok=True)
+    print(f"generating version info: {path}")
+    path.write_text(f"version_info={get_git_version(Path.cwd())!r}")
+
+
+def write_const(cleanup_files):
+    cleanup_files.append(path := Path.cwd() / "src/pkgcheck/_const.py")
+    print(f"writing path constants to {path}")
+    with path.open("w") as f:
+        path.chmod(0o644)
+        f.write(
+            dedent(
+                """\
+                from os.path import abspath, exists, join
+                import sys
+                INSTALL_PREFIX = abspath(sys.prefix)
+                if not exists(join(INSTALL_PREFIX, 'lib/pkgcore')):
+                    INSTALL_PREFIX = abspath(sys.base_prefix)
+                DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcheck')
+                """
+            )
+        )
+
+
+def write_objects(cleanup_files):
+    cleanup_files.append(path := Path.cwd() / "src/pkgcheck/_objects.py")
+    print(f"writing objects to {path}")
+
+    class _kls:
+        def __init__(self, module):
+            self.module = module
+
+        def __repr__(self):
+            return self.module
+
+    with sys_path():
+        from pkgcheck import objects
+
+    modules = defaultdict(set)
+    objs = defaultdict(list)
+    for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
+        for name, cls in getattr(objects, obj).items():
+            parent, module = cls.__module__.rsplit(".", 1)
+            modules[parent].add(module)
+            objs[obj].append((name, _kls(f"{module}.{name}")))
+
+    keywords = tuple(objs["KEYWORDS"])
+    checks = tuple(objs["CHECKS"])
+    reporters = tuple(objs["REPORTERS"])
+
+    with path.open("w") as f:
+        path.chmod(0o644)
+        for k, v in sorted(modules.items()):
+            f.write(f"from {k} import {', '.join(sorted(v))}\n")
+        f.write(
+            dedent(
+                f"""\
+                KEYWORDS = {keywords}
+                CHECKS = {checks}
+                REPORTERS = {reporters}
+                """
+            )
+        )
+
+
+def write_files(cleanup_files):
+    with sys_path():
+        from pkgcheck import base, objects
+        from pkgcheck.addons import caches
+
+    (dst := Path.cwd() / "data/share/pkgcheck").mkdir(parents=True, exist_ok=True)
+
+    print("Generating available scopes")
+    cleanup_files.append(path := dst / "scopes")
+    path.write_text("\n".join(base.scopes) + "\n")
+
+    print("Generating available cache types")
+    cache_objs = caches.CachedAddon.caches.values()
+    cleanup_files.append(path := dst / "caches")
+    path.write_text("\n".join(x.type for x in cache_objs) + "\n")
+
+    for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
+        print(f"Generating {obj.lower()} list")
+        cleanup_files.append(path := dst / obj.lower())
+        path.write_text("\n".join(getattr(objects, obj)) + "\n")
+
+
+def prepare_pkgcheck(callback, only_version: bool):
+    cleanup_files = []
+    try:
+        write_verinfo(cleanup_files)
+        if not only_version:
+            write_const(cleanup_files)
+            write_objects(cleanup_files)
+            write_files(cleanup_files)
+
+        return callback()
+    finally:
+        for path in cleanup_files:
+            try:
+                path.unlink()
+            except OSError:
+                pass
+
+
+def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+    """Builds a wheel, places it in wheel_directory"""
+    callback = partial(buildapi.build_wheel, wheel_directory, config_settings, metadata_directory)
+    return prepare_pkgcheck(callback, only_version=False)
+
+
+def build_editable(wheel_directory, config_settings=None, metadata_directory=None):
+    """Builds an "editable" wheel, places it in wheel_directory"""
+    callback = partial(
+        buildapi.build_editable, wheel_directory, config_settings, metadata_directory
+    )
+    return prepare_pkgcheck(callback, only_version=True)
+
+
+def build_sdist(sdist_directory, config_settings=None):
+    """Builds an sdist, places it in sdist_directory"""
+    callback = partial(buildapi.build_sdist, sdist_directory, config_settings)
+    return prepare_pkgcheck(callback, only_version=True)
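
The hooks above implement the standard PEP 517 interface, so they can be exercised directly from a Python prompt as well as by a build frontend. A minimal sketch (not part of the change), assuming it is run from the root of a pkgcheck checkout with flit_core and snakeoil installed:

    # Illustrative only: drive the in-tree backend the way a PEP 517 frontend would.
    from pathlib import Path
    import py_build

    Path("dist").mkdir(exist_ok=True)          # hooks expect the output directory to exist
    sdist_name = py_build.build_sdist("dist")  # writes _verinfo.py, builds the tarball, cleans up
    wheel_name = py_build.build_wheel("dist")  # also generates _const.py, _objects.py and the data files
    print(sdist_name, wheel_name)              # PEP 517 hooks return the built artifact's file name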
diff --git a/pyproject.toml b/pyproject.toml
index 020ea1ea..fd0eb616 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,6 @@
[build-system]
requires = [
- "setuptools >= 61.0.0",
- "wheel",
+ "flit_core >=3.8,<4",
# repeat all normal runtime dependencies here
"chardet",
"lazy-object-proxy",
@@ -12,7 +11,8 @@ requires = [
"snakeoil~=0.10.7",
"pkgcore~=0.12.25",
]
-build-backend = "setuptools.build_meta"
+build-backend = "py_build"
+backend-path = ["."]
[project]
name = "pkgcheck"
@@ -48,7 +48,6 @@ dependencies = [
"tree-sitter-bash>=0.21.0",
"snakeoil~=0.10.7",
"pkgcore~=0.12.25",
- "setuptools; python_version >= '3.12'",
]
[project.optional-dependencies]
@@ -74,11 +73,19 @@ Source = "https://github.com/pkgcore/pkgcheck"
[project.scripts]
pkgcheck = "pkgcheck.scripts.__init__:main"

-[tool.setuptools]
-zip-safe = false
+[tool.flit.external-data]
+directory = "data"

-[tool.setuptools.dynamic]
-version = {attr = "pkgcheck.__version__"}
+[tool.flit.sdist]
+include = [
+    "Makefile", "py_build.py", "tox.ini",
+    "NEWS.rst", "doc", "tests", "testdata", "contrib",
+    "build/sphinx/man/pkgcheck.1",
+]
+exclude = [
+    ".github/", ".gitignore",
+    "doc/api/", "doc/generated/", "doc/_build/",
+]

[tool.black]
line-length = 100
@@ -87,39 +94,3 @@ line-length = 100
minversion = "6.0"
addopts = "-vv -ra -l"
testpaths = ["tests"]
-
-[tool.cibuildwheel]
-build = "cp310-*"
-build-frontend = "build"
-
-before-build = [
- "rm -f {project}/src/pkgcheck/bash/lang.so"
-]
-test-requires = ["pytest", "requests"]
-test-command = "pytest {project}/tests"
-
-# glibc 2.24
-manylinux-x86_64-image = "manylinux_2_28"
-manylinux-i686-image = "manylinux_2_28"
-manylinux-aarch64-image = "manylinux_2_28"
-manylinux-ppc64le-image = "manylinux_2_28"
-manylinux-s390x-image = "manylinux_2_28"
-test-skip = "*-*linux_{aarch64,ppc64le,s390x}"
-
-[[tool.cibuildwheel.overrides]]
-select = "*-musllinux*"
-before-all = [
- "apk add --no-cache git bash libxml2-dev libxslt-dev",
-]
-
-[[tool.cibuildwheel.overrides]]
-select = "*-manylinux*"
-before-all = [
- # "apt-get update",
- # "apt-get install -y git libxml2-dev libxslt-dev",
- "yum update -y --nogpgcheck",
- "yum install -y --nogpgcheck git libxslt-devel libxml2-devel",
-
- "curl -L \"https://github.com/robxu9/bash-static/releases/download/5.2.015-1.2.3-2/bash-linux-$(uname -m)\" -o /usr/local/bin/bash",
- "chmod +x /usr/local/bin/bash",
-]
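
With build-backend = "py_build" and backend-path = ["."], any PEP 517 frontend imports the in-tree backend from the project root before building. A sketch of what that looks like through the `build` package (assuming it is installed); `python -m build` does the same thing from the command line:

    # Sketch: the frontend resolves backend-path, imports py_build, then calls its hooks.
    from build import ProjectBuilder

    builder = ProjectBuilder(".")            # "." = pkgcheck source checkout
    print(builder.build("sdist", "dist/"))   # path of the generated .tar.gz
    print(builder.build("wheel", "dist/"))   # path of the generated .whl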
diff --git a/setup.py b/setup.py
deleted file mode 100644
index c71b019e..00000000
--- a/setup.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import logging
-import sys
-from collections import defaultdict
-from contextlib import contextmanager
-from pathlib import Path
-from textwrap import dedent
-
-from setuptools import setup
-from setuptools.command.install import install as orig_install
-from setuptools.command.sdist import sdist as orig_sdist
-
-
-@contextmanager
-def sys_path():
-    orig_path = sys.path[:]
-    sys.path.insert(0, str(Path.cwd() / "src"))
-    try:
-        yield
-    finally:
-        sys.path = orig_path
-
-
-class install(orig_install):
-    def finalize_options(self):
-        """Force platlib install since non-python libraries are included."""
-        super().finalize_options()
-        self.install_lib = self.install_platlib
-
-    def run(self):
-        super().run()
-        self.write_obj_lists()
-        self.generate_files()
-
-        self.copy_tree("data", self.install_data)
-
-    def write_obj_lists(self):
-        """Generate config file of keyword, check, and other object lists."""
-        (base_dir := Path(self.install_lib) / "pkgcheck").mkdir(parents=True, exist_ok=True)
-        objects_path = base_dir / "_objects.py"
-        const_path = base_dir / "_const.py"
-        verinfo_path = base_dir / "_verinfo.py"
-
-        # hack to drop quotes on modules in generated files
-        class _kls:
-            def __init__(self, module):
-                self.module = module
-
-            def __repr__(self):
-                return self.module
-
-        with sys_path():
-            from pkgcheck import objects
-
-        modules = defaultdict(set)
-        objs = defaultdict(list)
-        for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
-            for name, cls in getattr(objects, obj).items():
-                parent, module = cls.__module__.rsplit(".", 1)
-                modules[parent].add(module)
-                objs[obj].append((name, _kls(f"{module}.{name}")))
-
-        keywords = tuple(objs["KEYWORDS"])
-        checks = tuple(objs["CHECKS"])
-        reporters = tuple(objs["REPORTERS"])
-
-        logging.info(f"writing objects to {objects_path!r}")
-        with objects_path.open("w") as f:
-            objects_path.chmod(0o644)
-            for k, v in sorted(modules.items()):
-                f.write(f"from {k} import {', '.join(sorted(v))}\n")
-            f.write(
-                dedent(
-                    f"""\
-                    KEYWORDS = {keywords}
-                    CHECKS = {checks}
-                    REPORTERS = {reporters}
-                    """
-                )
-            )
-
-        logging.info(f"writing path constants to {const_path!r}")
-        with const_path.open("w") as f:
-            const_path.chmod(0o644)
-            f.write(
-                dedent(
-                    """\
-                    from os.path import abspath, exists, join
-                    import sys
-                    INSTALL_PREFIX = abspath(sys.prefix)
-                    if not exists(join(INSTALL_PREFIX, 'lib/pkgcore')):
-                        INSTALL_PREFIX = abspath(sys.base_prefix)
-                    DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcheck')
-                    """
-                )
-            )
-
-        logging.info("generating version info")
-        from snakeoil.version import get_git_version
-
-        verinfo_path.write_text(f"version_info={get_git_version(Path(__file__).parent)!r}")
-
-    def generate_files(self):
-        with sys_path():
-            from pkgcheck import base, objects
-            from pkgcheck.addons import caches
-
-        (dst := Path(self.install_data) / "share/pkgcheck").mkdir(parents=True, exist_ok=True)
-
-        logging.info("Generating available scopes")
-        (dst / "scopes").write_text("\n".join(base.scopes) + "\n")
-
-        logging.info("Generating available cache types")
-        cache_objs = caches.CachedAddon.caches.values()
-        (dst / "caches").write_text("\n".join(x.type for x in cache_objs) + "\n")
-
-        for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
-            logging.info(f"Generating {obj.lower()} list")
-            (dst / obj.lower()).write_text("\n".join(getattr(objects, obj)) + "\n")
-
-
-class sdist(orig_sdist):
-    def make_release_tree(self, base_dir, files):
-        super().make_release_tree(base_dir, files)
-        base_dir = Path(base_dir)
-
-        if (man_page := Path(__file__).parent / "build/sphinx/man/pkgcheck.1").exists():
-            (base_dir / "man").mkdir(parents=True, exist_ok=True)
-            self.copy_file(
-                man_page, base_dir / "man/pkgcheck.1", preserve_mode=False, preserve_times=False
-            )
-
-        logging.info("generating version info")
-        from snakeoil.version import get_git_version
-
-        (base_dir / "src/pkgcheck/_verinfo.py").write_text(
-            f"version_info={get_git_version(Path(__file__).parent)!r}"
-        )
-
-
-setup(
-    cmdclass={
-        "install": install,
-        "sdist": sdist,
-    }
-)