#!/usr/bin/env python
# Version 3.0.1

import os
import sys
import re
import subprocess
import gettext
from git import Repo, Git
import configparser
import glob
import shutil
import optparse
import fileinput
import time

PKGHOME = "/data/dev/pkg_repo/packages"
SRCHOME = "/data/dev/pkg_repo/sources"
SRCPKGHOME = "/data/dev/pkg_repo/src_packages"

# See what git branch we're working under
git_repo = Git()
branches = git_repo.branch()
# The current branch is the one marked with '*'; keep only its name.
git_branch = branches.split('*')[1].lstrip(' ').split('\n')[0]

if git_branch == "testing":
    SFIX = "-testing"
elif git_branch == "master":
    SFIX = ""
else:
    print("Can't determine which git branch is in use!")
    sys.exit(2)
#print("Git Branch suffix is:",SFIX)


def commandline(makepkg_cmd):
    #print(makepkg_cmd)
    cli_dict = {}
    cli_list = []
    clparser = optparse.OptionParser()
    clparser.add_option("-b", "--bump", action="store_true", default=False,
                        help="Increase package release one unit.")
    clparser.add_option("--pkglist", action="store",
                        help="List of packages to compile")
    clparser.add_option("--config", action="store",
                        help="Use an alternate config file instead of the /etc/makepkg.conf default.")
    clparser.add_option("-g", "--geninteg", action="store_true", default=False,
                        help="For each source file in the source array of PKGBUILD, download the file if required and generate integrity checks.")
    clparser.add_option("--rmold", action="store_true", default=False,
                        help="BETA: Remove old src and software packages from repos. Use with caution. False positives may occur (i.e. nvidia pkgs)!")
    (options, args) = clparser.parse_args()

    options1 = ['config', 'pkglist']
    options2 = ['bump', 'geninteg', 'rmold']
    for o in options1:
        cmd1 = getattr(options, o)
        if o != 'p':
            if cmd1 is not None:
                cli_dict['--' + o] = cmd1
        elif cmd1 is not None:
            cli_dict['-' + o] = cmd1
    for o in options2:
        cmd2 = getattr(options, o)
        if cmd2 is True:
            cli_list.append('--' + o)

    # Create makepkg command
    makepkg_cmd.extend(cli_list)
    for k in list(cli_dict.keys()):
        v = cli_dict.get(k)
        makepkg_cmd.append(k)
        makepkg_cmd.append(v)

    # Remove bump option from makepkg command if it exists
    if "--bump" in makepkg_cmd:
        makepkg_cmd.remove("--bump")
    # Remove rmold option from makepkg command if it exists
    if "--rmold" in makepkg_cmd:
        makepkg_cmd.remove("--rmold")
    # Remove "--geninteg" option (if it exists) from makepkg command.
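# Illustrative example of what commandline() produces (hypothetical invocation,
# assuming this file is run as mp.py):
#   ./mp.py --bump --rmold --config /etc/makepkg.conf
# would yield roughly:
#   cli_list    == ['--bump', '--rmold']
#   cli_dict    == {'--config': '/etc/makepkg.conf'}
#   makepkg_cmd == ['makepkg', '--config', '/etc/makepkg.conf']
# (--bump, --rmold and --geninteg are stripped from makepkg_cmd because mp.py
# handles them itself rather than passing them through to makepkg.)
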
    # Checking for *sums is done automatically by mp.py
    if "--geninteg" in makepkg_cmd:
        makepkg_cmd.remove("--geninteg")

    #print("Makepkg Command:",makepkg_cmd)
    return makepkg_cmd, cli_list, cli_dict


class Packagefile(object):
    def __init__(self, cli_dict, cli_list, makepkg_cmd):
        self.failure = 0
        self.attempt = 0
        self.REPO = "none"
        self.DOCROOT = ""
        self.pkgfile = "none"
        self.pkgbase = ""
        self.pkgname = ""
        self.pkgver = ""
        self.pkgrel = ""
        self.pkglist = []
        self.epoch = ""
        self.arch = ""
        self.CARCH = ""
        self.PKGDEST = ""
        self.SRCDEST = ""
        self.SRCPKGDEST = ""
        self.TOTALPKG = ""
        self.GZPKG = ""
        self.XZPKG = ""
        self.ZSTPKG = ""
        self.repolist = ["core", "extra", "linhes", "chroot-devel", "mv-core", "xmpl", "local"]
        self.mydir = os.getcwd()
        self.variables = ['pkgbase', 'pkgname', 'pkgver', 'pkgrel', 'arch', 'epoch',
                          'depends', 'makedepends', 'makedepends_x86_64', 'checkdepends']
        self.pkgrel_incremented = False
        self.makepkg_cmd = makepkg_cmd
        self.cli_list = cli_list
        self.cli_dict = cli_dict
        self.pkg_dependencies = []
        self.makepkg_dependencies = []
        self.check_dependencies = []
        self.backupdir = "/tmp/backup/"
        self.compile_status = "Pending"
        self.backup_pkgdir = ""
        self.pkg_dep_available = []
        self.pkg_dep_not_in_repo = []

        if "-p" in cli_dict:
            self.pkgfile = cli_dict["-p"]
        else:
            self.pkgfile = "PKGBUILD"
        self.updateINIT()

    def set_srcpkg(self):
        if self.pkgbase:
            self.SRCPKG = self.pkgbase + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + ".src.tar.gz"
        else:
            self.SRCPKG = self.pkglist[0] + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + ".src.tar.gz"
        #print "Changed " + self.pkgname + " release to " + str(self.pkgrel)

    def updateINIT(self):
        # Check what file will be used for our PKGBUILD
        # Does the file exist?
        if not os.path.isfile(self.pkgfile):
            print("ERROR in function config_file: Can't find ", self.pkgfile, "!")
            sys.exit(2)
        else:
            # Loop over contents to get our variables
            # Use bash to do it because PKGBUILDs are very loose with their format
            for item in self.variables:
                v = subprocess.Popen(['/bin/bash', '-c',
                                      'source ' + self.pkgfile + '; echo ${' + item + '[@]}'],
                                     stdout=subprocess.PIPE)
                value = v.communicate()[0].strip(b'\n')
                value = value.decode('utf-8')
                if item == "pkgbase":
                    self.pkgbase = value
                elif item == "pkgname":
                    self.pkgname = value
                    self.pkglist = list(value.split())
                elif item == "pkgver":
                    self.pkgver = value
                elif item == "pkgrel":
                    self.pkgrel = value
                elif item == "epoch":
                    if value == "0":
                        value = ""
                    if value:
                        self.epoch = "%s:" % value
                elif item == "arch":
                    self.arch = value
                elif item == "depends":
                    self.pkg_dependencies = value.split()
                elif item == "makedepends":
                    self.makepkg_dependencies = self.makepkg_dependencies + value.split()
                elif item == "makedepends_x86_64":
                    self.makepkg_dependencies = self.makepkg_dependencies + value.split()
                elif item == "checkdepends":
                    self.check_dependencies = value.split()
        self.set_srcpkg()

        # Get needed makepkg.conf variables
        mpkg = "/etc/makepkg.conf"
        f = open(mpkg, "r")
        # Read file contents into memory
        makepkg_contents = f.readlines()
        f.close()

        self.PKGDEST = PKGHOME
        self.SRCDEST = SRCHOME
        self.SRCPKGDEST = SRCPKGHOME

        # Loop over contents to get our variables
        for mp_lines in makepkg_contents:
            if mp_lines.strip().startswith("CARCH"):
                self.CARCH = mp_lines.partition("=")[2].strip('\n').replace('\"', '')
                if self.arch != 'any':
                    self.arch = self.CARCH
            if mp_lines.strip().startswith("PKGDEST"):
                self.PKGDEST = mp_lines.partition("=")[2].strip('\n')

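    # Illustrative note on updateINIT() above: it sources the PKGBUILD in bash
    # and echoes each variable as a flattened array.  For a hypothetical
    # PKGBUILD containing
    #   pkgname=('foo' 'foo-docs')
    #   pkgver=1.2.3
    # the subprocess would return "foo foo-docs" for pkgname (split into
    # self.pkglist) and "1.2.3" for pkgver.
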
    def print_vars(self):
        print("####################################################")
        print("")
        print("    Config File:", self.pkgfile)
        print("    pkgbase:", self.pkgbase)
        print("    pkgname:", self.pkgname)
        print("    pkglist:", self.pkglist)
        print("    epoch:", self.epoch)
        print("    pkgver:", self.pkgver)
        print("    pkgrel:", self.pkgrel)
        print("    arch:", self.arch)
        print("    Source package:", self.SRCPKG)
        print("    CARCH is:", self.CARCH)
        print("    Architecture is:", self.arch)
        print("    Repository : ", self.REPO)
        print("    DOCROOT : ", self.DOCROOT)
        print("    Current directory: ", self.mydir)
        #print("    Backup directory: ", self.get_backupdir())
        print("    Package destination:", self.PKGDEST)
        print("    Source destination:", self.SRCDEST)
        print("    Dependencies: ", self.pkg_dependencies)
        print("    Make Dependencies: ", self.makepkg_dependencies)
        print("    Package compile status: ", self.get_compile_status())
        print("")
        print("####################################################")

    def getname(self):
        return self.pkgname

    def getfailure(self):
        return self.failure

    def getattempts(self):
        return self.attempt

    def getdir(self):
        return self.mydir

    def get_unmet_compile(self):
        return self.pkg_dep_not_in_repo

    def get_unmet_install(self):
        return self.pkg_dep_available

    def incrementcompile(self):
        self.attempt = self.attempt + 1

    def get_backupdir(self):
        return self.backup_pkgdir

    def get_compile_status(self):
        return self.compile_status

    def find_repo(self):
        # Create a list with the directory names of our current directory tree
        dir_tree = os.path.dirname(self.mydir).split("/")
        # Loop through the dir_tree to see if we can find our repo
        for item in self.repolist:
            if item not in dir_tree:
                continue
            else:
                repo_name = item
                if repo_name == "extra":
                    self.REPO = "extra" + SFIX
                elif repo_name == "core":
                    self.REPO = "core" + SFIX
                elif repo_name == "linhes":
                    self.REPO = "linhes" + SFIX
                elif repo_name == "xmpl":
                    self.REPO = "local"
                elif repo_name not in self.repolist:
                    print("ERROR in function find_repo: Cannot determine repository!")
                    sys.exit(2)
                else:
                    self.REPO = repo_name

        # Ensure our DOCROOT exists and if not, create it
        self.DOCROOT = "/data/dev/pkg_repo/" + self.CARCH + "/" + self.REPO
        #print("DOCROOT:",self.DOCROOT)
        if os.path.exists(self.DOCROOT):
            print("")
            #print("INFO: Repository is",self.REPO)
        else:
            try:
                os.mkdir(self.DOCROOT, 0o755)
                print("INFO: ", self.DOCROOT, "directory created. Repository is", self.REPO)
            except Exception as e:
                print("ERROR: Creating DOCROOT failed: ", e)
                pass

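    # Illustrative example for find_repo() above: with CARCH "x86_64" and a
    # checkout living under the "core" repo directory on the testing branch,
    # REPO becomes "core-testing" and DOCROOT becomes
    # /data/dev/pkg_repo/x86_64/core-testing.
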
    def increase_pkgrel(self):
        if not "--bump" in self.cli_list:
            return
        print("- Incrementing pkgrel for %s" % self.pkgname)
        # Backup the original pkgfile
        shutil.copy2(self.pkgfile, self.pkgfile + ".old")
        # Let's increase the pkgrel
        for line in fileinput.input(self.pkgfile, inplace=1):
            if line.strip().startswith("pkgrel"):
                self.pkgrel = line.partition("=")[2].strip('\n')
                # Add 1 to pkgrel
                new_pkgrel = int(self.pkgrel) + 1
                line = line.replace("pkgrel=" + self.pkgrel, "pkgrel=" + str(new_pkgrel))
                self.pkgrel = str(new_pkgrel)
            sys.stdout.write(line)
        print("    pkgrel:", self.pkgrel)
        self.set_srcpkg()
        self.pkgrel_incremented = True

    def clearSUMS(self):
        print("    Clearing out old checksums...")
        filename = self.pkgfile
        file_contents = []
        f = open(filename, "r")
        lineIter = iter(f)
        for aLine in lineIter:
            if (aLine.strip().startswith("md5sums") or
                    aLine.strip().startswith("sha1sums") or
                    aLine.strip().startswith("sha256sums") or
                    aLine.strip().startswith("sha224sums") or
                    aLine.strip().startswith("sha384sums") or
                    aLine.strip().startswith("sha512sums") or
                    aLine.strip().startswith("b2sums") or
                    aLine.strip().startswith("cksums")):
                if aLine.strip().endswith(")"):
                    continue
                else:
                    # Skip the remaining lines of a multi-line checksum array
                    for aModule in lineIter:
                        if aModule.strip().endswith(")"):
                            break
            else:
                file_contents.append(aLine)
        f.close()

        try:
            f = open(filename, "w")
        except:
            outtext = "Couldn't open file for writing: %s" % filename
            print(outtext)
            return False
        for line in file_contents:
            f.write(line)
        f.close()

    def updateSUMS(self):
        print("- Checking checksums...")
        if "--geninteg" in self.cli_list:
            self.clearSUMS()
        sums = []
        pkgfile_contents = []
        # Open pkgfile for reading
        f = open(self.pkgfile, 'r')
        pkgfile_contents = f.readlines()
        f.close()
        # Iterate through the lines looking for a match pattern
        for line in pkgfile_contents:
            check = re.compile('cksums|sha1sums|sha224sums|sha256sums|sha384sums|sha512sums|md5sums|b2sums')
            sums = check.match(line)
            if sums:
                print("    Found checksums in PKGBUILD")
                break
        if not sums:
            print("    Adding checksums...")
            # If no matches are found, append md5sums to the end of the pkgfile
            p = open(self.pkgfile, 'a')
            sumsgen = subprocess.Popen(["makepkg", "-g"], stdout=subprocess.PIPE).communicate()[0]
            sumsgen = sumsgen.decode('utf-8')
            p.writelines(sumsgen)
            p.close()

    def make_package(self):
        if self.arch == 'any':
            self.CARCH = 'any'
        # Creates both binary and source package
        print("- Making package...")
        retcode = subprocess.call(["sudo", "ccm", "s"])
        if retcode != 0:
            self.compile_status = "Failed: %s" % retcode
            self.failure = self.failure + 1
            print("    ERROR: ccm failed with return code ", retcode)
            if self.pkgrel_incremented:
                if os.path.isfile(self.pkgfile + '.old'):
                    shutil.move(self.pkgfile + '.old', self.pkgfile)
                    print("    Reverted pkgrel increment.")
            return False
        elif "--nobuild" in self.makepkg_cmd:
            self.compile_status = "Failed: nobuild"
            return False
        else:
            print("-------------Making source package-------------")
            retcode = subprocess.call(["makepkg", "--force", "--holdver", "--source"])
            if retcode != 0:
                self.compile_status = "Failed source: %s" % retcode
                self.failure = self.failure + 1
                print("ERROR: Source package creation failed with return code", retcode)
                sys.exit(2)
        print("=============FINISHED CREATING PKG=============")
        self.compile_status = "Success"
        return True

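    # Illustrative example of the artifact names handled by update_database()
    # and update_srcrepo() below (hypothetical package "foo" 1.2.3-4 on x86_64,
    # no epoch):
    #   binary:  foo-1.2.3-4-x86_64.pkg.tar.zst  (.xz and .gz are also accepted)
    #   source:  foo-1.2.3-4.src.tar.gz
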
    def update_database(self):
        print("- Copying files and updating database")
        # pkgname could be a list of several pkgs. Since bash array format is
        # loose, let bash parse the pkgname(s) first, then return a list for us.
        self.updateINIT()
        if self.arch == 'any':
            self.CARCH = 'any'
        for i in self.pkglist:
            print("    Package name:", i)
            self.GZPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.gz"
            self.XZPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.xz"
            self.ZSTPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.zst"
            #print(self.GZPKG)
            #print(self.XZPKG)
            #print(self.ZSTPKG)
            if os.path.isfile(self.ZSTPKG):
                self.TOTALPKG = self.ZSTPKG
            elif os.path.isfile(self.XZPKG):
                self.TOTALPKG = self.XZPKG
            elif os.path.isfile(self.GZPKG):
                self.TOTALPKG = self.GZPKG
            else:
                print("")
                print("    ERROR in function update_database: Couldn't find the new package:", self.TOTALPKG)
                sys.exit(2)

            print("    Copying " + self.TOTALPKG + " to " + self.PKGDEST)
            if shutil.copy2(self.TOTALPKG, self.PKGDEST):
                os.remove(self.mydir + "/" + self.TOTALPKG)

            # Remove old package(s) from local copy
            #print i + "-" + self.epoch + '(pkgver|[\d.]+)' + "-" + '(?!pkgrel)'
            oldpkgcheck = re.compile(re.escape(i) + "-" + self.epoch + r'(pkgver|[\d.]+)' + "-" + '(?!pkgrel)')
            dirlist = os.listdir(self.DOCROOT)
            for n in dirlist:
                if n.startswith(i):
                    if not oldpkgcheck.search(n):
                        print(n, "does not match")
                        continue
                    else:
                        OLDPKG = glob.glob(oldpkgcheck.search(n).group() + "*.pkg.tar.*")
                        print("    OLDPKG =", OLDPKG)
                        if OLDPKG:
                            for DELPKG in OLDPKG:
                                if "--rmold" in self.cli_list:
                                    print("Deleting old package:", DELPKG)
                                    os.remove(DELPKG)
                                    #subprocess.call(["repo-remove", self.DOCROOT + "/" + self.REPO + ".db.tar.gz", DELPKG])
                                # Remove any symlinks to old packages
                                # We make it conditional on "--force" because force will overwrite
                                # an existing package and we want the symlink to stay, pointing to
                                # the newly built package with the same pkgrel.
                                if "--force" not in self.makepkg_cmd:
                                    if os.path.islink(self.mydir + "/" + DELPKG):
                                        os.remove(self.mydir + "/" + DELPKG)

            # Copy in new package
            print("    Updating " + self.DOCROOT + " with " + self.TOTALPKG)
            print("    Copying " + self.PKGDEST + "/" + self.TOTALPKG)
            shutil.copy2(self.PKGDEST + "/" + self.TOTALPKG, self.DOCROOT)
            print("    Creating symlink " + self.PKGDEST + "/" + self.TOTALPKG)
            if os.path.islink(self.mydir + "/" + self.TOTALPKG):
                os.remove(self.mydir + "/" + self.TOTALPKG)
            os.symlink(self.DOCROOT + "/" + self.TOTALPKG, self.mydir + "/" + self.TOTALPKG)
            subprocess.call(["repo-add", self.DOCROOT + "/" + self.REPO + ".db.tar.gz",
                             self.DOCROOT + "/" + self.TOTALPKG])
            #print("    Updating pacman database")
            #subprocess.call(["pacman","-Syyyyy" ])

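    # Illustrative note on the oldpkgcheck pattern used in update_database()
    # above and update_srcrepo() below: for a hypothetical package "foo" with
    # no epoch, the compiled expression is foo-(pkgver|[\d.]+)-(?!pkgrel);
    # searching "foo-1.2.3-4-x86_64.pkg.tar.zst" yields the prefix "foo-1.2.3-",
    # which is then globbed to collect older builds for optional removal.
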
    def update_srcrepo(self):
        print("- Updating source file repository")
        print("    SRCPKG:", self.SRCPKG)
        OLDSRCPKG = ""
        if not os.path.isfile(self.SRCPKGDEST + "/" + self.SRCPKG):
            print("ERROR in function update_srcrepo: Couldn't find the new package", self.SRCPKG)
            sys.exit(2)
        i = self.pkglist[0]
        oldpkgcheck = re.compile(re.escape(i) + "-" + self.epoch + r'(pkgver|[\d.]+)' + "-" + '(?!pkgrel)')
        dirlist = os.listdir(self.SRCPKGDEST + "/")
        if self.pkgbase:
            # Remove old src package(s) from local copy
            if "--rmold" in self.cli_list:
                for n in dirlist:
                    if n.startswith(self.pkgbase):
                        if not oldpkgcheck.search(n):
                            print(n, "does not match")
                            continue
                        else:
                            OLDSRCPKG = glob.glob(oldpkgcheck.search(n).group() + "*.src.tar.gz")
        else:
            # Remove old src package(s) from local copy
            if "--rmold" in self.cli_list:
                for n in dirlist:
                    if n.startswith(self.pkgname):
                        if not oldpkgcheck.search(n):
                            print(n, "does not match")
                            continue
                        else:
                            OLDSRCPKG = glob.glob(oldpkgcheck.search(n).group() + "*.src.tar.gz")
        if OLDSRCPKG:
            print("OLDSRCPKG =", OLDSRCPKG)
            for DELSRCPKG in OLDSRCPKG:
                print("Deleting old source package", DELSRCPKG)
                os.remove(DELSRCPKG)
        print("    Copying new source package to", self.SRCPKGDEST + "/" + self.REPO + "/" + self.SRCPKG)
        if shutil.copy2(self.SRCPKGDEST + "/" + self.SRCPKG, self.SRCPKGDEST + "/" + self.REPO + "/"):
            os.remove(self.SRCPKGDEST + "/" + self.SRCPKG)

    def cleanup(self):
        os.chdir(self.mydir)
        if os.path.isfile(self.pkgfile + '.old'):
            os.remove(self.pkgfile + '.old')
            print("Removed temporary backup file", self.pkgfile + '.old')
        # Remove src.tar.gz so it stops cluttering up git
        if os.path.islink(self.SRCPKG):
            os.unlink(self.SRCPKG)
        # Remove src dir if empty
        if os.path.exists(self.mydir + "/src") and not os.path.isfile(self.mydir + "/src"):
            if not os.listdir(self.mydir + "/src"):
                os.rmdir(self.mydir + "/src")
        # Remove any old -build and -package log files
        os.chdir(self.mydir)
        build_log = self.pkgname + "-" + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + "-build.log"
        pkg_log = self.pkgname + "-" + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + "-package.log"
        all_logs = glob.glob("*.log*")
        #print "All Logs:",all_logs
        saved_logs = []
        # Save the logrotate files
        saved_logs = glob.glob("*.logrotate*")
        #if os.path.isfile(build_log):
        #    saved_logs.append(build_log)
        #if os.path.isfile(pkg_log):
        #    saved_logs.append(pkg_log)
        #print ("Saved Logs:",saved_logs)
        for log in all_logs:
            if log not in saved_logs:
                os.remove(log)
        pass

    def check_for_changelog(self, pkgdir):
        filename = "%s/__changelog" % pkgdir
        return os.path.exists(filename)

def main():
    pkg_not_found = []
    pkg_objdict = {}
    pkg_list = []

    global makepkg_cmd
    global cli_list
    global cli_dict
    makepkg_cmd = ['makepkg']
    makepkg_cmd, cli_list, cli_dict = commandline(makepkg_cmd)

    try:
        pkg_list = cli_dict['--pkglist'].split(',')
    except KeyError:
        packagefile = None
        packagefile = Packagefile(cli_dict, cli_list, makepkg_cmd)
        pkg_objdict[packagefile.getname()] = packagefile
        pkg_list = [packagefile.getname()]
        packagefile.find_repo()
        packagefile.increase_pkgrel()
        packagefile.print_vars()
        packagefile.updateSUMS()
        if packagefile.make_package():
            packagefile.update_database()
            packagefile.update_srcrepo()
        packagefile.cleanup()
        pkg_objdict[packagefile.getname()] = packagefile

    update_pkg = []
    success_pkg = []
    failed_compile = []
    failed_update = []
    # Create lists
    for k, v in pkg_objdict.items():
        #print k
        #print v.print_vars()
        if v.get_compile_status().strip() == "Success":
            success_pkg.append(k)
        else:
            temp = "%s --- %s (%s)" % (k, v.get_compile_status(), v.getattempts())
            failed_compile.append(temp)

    print("\n\n\n\n")
    print("#######################################################")
    print("\n")
    print("Couldn't find these packages:")
    print("----------------------------")
    print(pkg_not_found)
    print("\n")
    print("Failed to compile these packages:")
    print("----------------------------")
    print(failed_compile)
    print("\n")
    print("Successfully compiled:")
    print("---------------------------")
    print(success_pkg)


if __name__ == "__main__":
    main()
    print("--------------------------")
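
# Illustrative invocations (hypothetical; run from a package directory that
# contains a PKGBUILD, on the master or testing branch of the repo checkout):
#   ./mp.py                  # build the package and update the repo database
#   ./mp.py --bump           # additionally increment pkgrel before building
#   ./mp.py --bump --rmold   # also delete older binary/source packages
#   ./mp.py -g               # regenerate the checksum arrays in the PKGBUILD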