@VOID001
Last active December 23, 2018 15:54
Static archlinuxcn package dependency check
#!/usr/bin/python
import os
import yaml
import importlib.util
import pyalpm
import pycman.config
import logging
import re
import shlex
import subprocess
os.system("clear")
repo_root = "/home/void001/packager/repo"
logging.basicConfig(level=logging.INFO, datefmt='%m-%d %H:%M', format='%(asctime)s %(levelname)-8s %(message)s')
logger = logging.getLogger(__name__)
dep_pass = {}

def dependency_check(pkg_name="", level=0):
    """Static dependency check for a given repo.

    build_prefix might be extra, multilib, ...; archlinuxcn is not a recommended prefix.
    """
    logger.info(" " * level + "Checking dependency for package {}".format(pkg_name))
    # exec_pre_build_hook()
    pkgbuild_path = os.path.join(repo_root, pkg_name, "PKGBUILD")
    # We assume the PKGBUILD is already downloaded (this function runs after the pre_build hook)
    lilac_yaml_path = os.path.join(repo_root, pkg_name, "lilac.yaml")
    lilac_py_path = os.path.join(repo_root, pkg_name, "lilac.py")
    required_dep = []
    build_prefix = "extra-x86_64"
    cn_dep = []
    err_dep = []
    try:
        open(lilac_py_path, "r").close()
    except FileNotFoundError:
        logger.info(" " * level + "Manual package not managed by lilac, skipped")
        logger.info(" " * level + "Check PASS for {}".format(pkg_name))
        return err_dep
    # Read the PKGBUILD
    required_dep = get_deps(pkgbuild_path)
    # The inline dep-parsing code below was replaced by get_deps()
    # try:
    #     f = open(pkgbuild_path, "r")
    # except Exception as e:
    #     logger.error("error opening PKGBUILD: {}".format(e))
    #     return -1
    # for line in f:
    #     if line.startswith("depends=") or line.startswith("makedepends="):
    #         line = line.lstrip("depends=(").lstrip("makedepends=(").rstrip(")\n")
    #         required_dep.extend(line.split(' '))
    # for i, v in enumerate(required_dep):
    #     required_dep[i] = required_dep[i].lstrip("'").rstrip("'").lstrip("\"").rstrip("\"")
    # Now that we have the required deps, loop through the list and check each dependency
    try:
        with open(lilac_yaml_path, "r") as lilac_yaml_file:
            ymlobj = yaml.safe_load(lilac_yaml_file)
        if "depends" in ymlobj:
            cn_dep.extend(ymlobj.get("depends"))
        if "build_prefix" in ymlobj:
            build_prefix = ymlobj.get("build_prefix")
    except FileNotFoundError:
        logger.warning(" " * level + "This package does not contain a lilac.yaml file, needs update!")
    spec = importlib.util.spec_from_file_location(pkg_name + ".pkg", lilac_py_path)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    try:
        assert mod.depends
        cn_dep = mod.depends
    except Exception:
        pass
    try:
        assert mod.build_prefix
        build_prefix = mod.build_prefix
        logger.debug("Build Prefix: {}".format(mod.build_prefix))
    except Exception:
        pass
    logger.debug("Required dep:\t{}".format(required_dep))
    logger.debug("CN dep:\t\t{}".format(cn_dep))
    # We have enough information now, do the overall check
    # str.rstrip() strips a character set, not a suffix, so drop the "-x86_64" suffix with a regex
    conf_path = os.path.join("/usr/share/devtools/", "pacman-" + re.sub(r"-x86_64$", "", build_prefix) + ".conf")
    handle = pycman.config.init_with_config(conf_path)
    syncdbs = handle.get_syncdbs()  # type: pyalpm.DB
    for pkg_item in required_dep:
        ok = False
        if dep_pass.get(pkg_name):
            # This package was already verified in an earlier call, skip re-checking its deps
            continue
        for db in syncdbs:
            logger.debug("Try Dep {} in repo {}".format(pkg_item, db.name))
            pkg = db.get_pkg(pkg_item)  # type: pyalpm.Package
            if pkg is not None:
                logger.debug("Dep {} satisfied because it is in repo {}".format(pkg, db.name))
                ok = True
                break
        if not ok:
            if pkg_item not in cn_dep:
                err_dep.append(pkg_item)
                continue
            chklist = dependency_check(pkg_item, level + 1)
            if len(chklist) > 0:
                err_dep.append(pkg_item)
                continue
    if len(err_dep) > 0:
        logger.error("Error Deps: {}".format(err_dep))
        logger.info(" " * level + "Check NOT pass for {}".format(pkg_name))
        return err_dep
    dep_pass[pkg_name] = True
    logger.info(" " * level + "Check PASS for {}".format(pkg_name))
    return err_dep

# TODO: maybe we will use printsrcinfo to generate SRCINFO for each pkg in the future :(
def get_deps(pkgbuild_path):
    """Extract depends/makedepends from a PKGBUILD by sourcing it in a clean zsh and parsing `set` output."""
    deps = []
    cmd = shlex.split("env -i zsh -c 'source {} 2>/dev/null && set'".format(pkgbuild_path))
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, encoding="UTF-8", stderr=None)
    for line in proc.stdout:
        (k, _, v) = str(line).partition("=")
        if v != "":
            if k.startswith("depends") or k.startswith("makedepends"):  # TODO: skip i686 arch, only keep x86_64
                v = v.lstrip("(").rstrip(")\n")  # type: str
                v = v.split(" ")
                logger.debug("[get_deps] {}={}".format(k, v))
                for i in v:
                    i = i.strip("'\"")  # drop any quoting, as the old inline parser did
                    if i != "" and i not in deps:
                        deps.append(i)
    proc.communicate()
    return deps

# dependency_check("mingw-w64-gcc")
all_pkg = next(os.walk("/home/void001/packager/repo_test_check"))[1]
for pkg in all_pkg:
    if pkg.startswith("."):
        continue
    dependency_check(pkg)
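
The TODO above get_deps() mentions switching to printsrcinfo. A minimal sketch of that idea, assuming makepkg is installed and the package directory (not the PKGBUILD path) is passed in; get_deps_srcinfo, its parameter name, and the decision to keep only plain and x86_64 entries are illustrative additions, not part of the gist:

import re
import subprocess

# Hypothetical alternative to get_deps(): parse `makepkg --printsrcinfo` output instead of sourcing the PKGBUILD
def get_deps_srcinfo(pkg_dir):
    deps = []
    out = subprocess.run(["makepkg", "--printsrcinfo"], cwd=pkg_dir,
                         stdout=subprocess.PIPE, encoding="UTF-8").stdout
    for line in out.splitlines():
        key, _, value = line.strip().partition(" = ")
        # keep plain and x86_64-specific depends/makedepends, skip i686-only keys
        if key in ("depends", "makedepends", "depends_x86_64", "makedepends_x86_64"):
            name = re.split(r"[<>=]", value)[0]  # drop any version constraint
            if name and name not in deps:
                deps.append(name)
    return deps

The .SRCINFO format already splits architecture-specific dependencies into depends_x86_64 / depends_i686 keys, which would also cover the TODO about skipping i686 entries.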
VOID001 commented Dec 23, 2018

Processing dependencies with version expressions is not supported yet.
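
A possible direction for that (a rough sketch only, not tested against this script): split the dependency string into a name and a version constraint before the db.get_pkg() lookup, then compare the candidate's version with pyalpm.vercmp. The helper names split_dep and dep_satisfied_by are made up for illustration:

import re
import pyalpm

# Sketch: handle deps like "python>=3.7" by separating name, operator and version
def split_dep(dep):
    m = re.match(r"^([^<>=]+)(<=|>=|<|>|=)?(.*)$", dep)
    return m.group(1), m.group(2), m.group(3)

def dep_satisfied_by(pkg, dep):
    name, op, ver = split_dep(dep)
    if pkg is None or pkg.name != name:
        return False
    if not op:          # no version constraint, a name match is enough
        return True
    cmp = pyalpm.vercmp(pkg.version, ver)
    return {"<": cmp < 0, "<=": cmp <= 0, "=": cmp == 0,
            ">=": cmp >= 0, ">": cmp > 0}[op]

dependency_check() would then look up db.get_pkg(name) with the bare name and call dep_satisfied_by() on the result instead of treating the whole string as a package name.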
