author  James Meyer <james.meyer@operamail.com>  2012-09-05 18:30:57 (GMT)
committer  James Meyer <james.meyer@operamail.com>  2012-09-05 18:30:57 (GMT)
commit  d1ca6cf2c742c7051c1684ae170ac2885772312a (patch)
tree  b612b1a0c5bfbe1dc1e95c4eab286e9cd2e8abeb
parent  3002904ada68c272b0dbe5fc62be2f833c9f2630 (diff)
download  linhes_dev-d1ca6cf2c742c7051c1684ae170ac2885772312a.zip
mpv2.py enhanced version of mp.py
-rwxr-xr-x  build_tools/bin/mpv2.py  1044
1 files changed, 1044 insertions, 0 deletions
diff --git a/build_tools/bin/mpv2.py b/build_tools/bin/mpv2.py
new file mode 100755
index 0000000..bcbbfec
--- /dev/null
+++ b/build_tools/bin/mpv2.py
@@ -0,0 +1,1044 @@
+#!/usr/bin/env python2
+# Version 0.8.0
+import pprint
+import os
+import sys
+import re
+import subprocess
+import gettext
+from git import Repo,Git
+import ConfigParser
+import glob
+import shutil
+import optparse
+import fileinput
+import parched
+import time
+
+PKGHOME = "/data/pkg_repo/packages"
+SRCPKGHOME = "/data/pkg_repo/src_packages"
+ARCHABS = "/var/abs"
+
+# See what git branch we're working under
+git_repo = Git()
+branches = git_repo.branch()
+git_branch = branches.split('*')[1].split('\n')[0].strip()
+
+if git_branch == "testing":
+ SFIX = "-testing"
+elif git_branch == "master":
+ SFIX = ""
+else:
+ print "Can't determine which git branch is in use!"
+ sys.exit(2)
+print "Git Branch suffix is:",SFIX
+
+
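+# commandline() parses the wrapper-specific flags alongside the standard makepkg
+# options, removes the wrapper-only ones (--bump, --rmold, --md5, --recurse,
+# --abs, --pkgdir, --pkglist, plus --geninteg) from the final makepkg command,
+# and returns that command together with the parsed flag list and dict.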
+def commandline(makepkg_cmd):
+ #print makepkg_cmd
+ cli_dict = {}
+ cli_list = []
+
+ clparser = optparse.OptionParser()
+ clparser.add_option("--asroot", action="store_true", default=True, help="Allow makepkg to run as root.")
+ clparser.add_option("-A", "--ignorearch", action="store_true", default=False, help="Ignore a missing or incomplete arch field in the build script.")
+ clparser.add_option("-b", "--bump", action="store_true", default=False, help="Increase package release one unit.")
+ clparser.add_option("-5", "--md5", action="store_true", default=False, help="Clear out old md5 sums")
+ clparser.add_option("--abs", action="store_true", default=False, help="update packages from abs")
+ clparser.add_option("--recurse", action="store_true", default=False, help="Recursively search for all dependencies")
+ clparser.add_option("--pkgdir", action="store", help="pkg dir to cd into.")
+ clparser.add_option("--pkglist", action="store", help="List of packages to compile")
+ clparser.add_option("-c", "--clean", action="store_true", default=False, help="Clean up leftover work files and directories after a successful build. ")
+ clparser.add_option("-C", "--cleancache", action="store_true", default=False, help="Removes all cached source files from the directory specified in SRCDEST in makepkg.conf")
+ clparser.add_option("--config", action="store", help="Use an alternate config file instead of the /etc/makepkg.conf default.")
+ clparser.add_option("-d", "--nodeps", action="store_true", default=False, help="Do not perform any dependency checks.")
+ clparser.add_option("-e", "--noextract", action="store_true", default=False, help="Do not extract source files; use whatever source already exists in the src/ directory.")
+ clparser.add_option("-f", "--force", action="store_true", default=False, help="This allows a built package to be overwritten.")
+ clparser.add_option("--forcever", action="store_true", default=False, help="This is a hidden option that should not be used unless you really know what you are doing.")
+ clparser.add_option("-g", "--geninteg", action="store_true", default=False, help="For each source file in the source array of PKGBUILD, download the file if required and generate integrity checks.")
+ clparser.add_option("--skipinteg", action="store_true", default=False, help="Do not perform any integrity checks, just print a warning instead.")
+ clparser.add_option("--holdver", action="store_true", default=True, help="Prevents makepkg from automatically bumping the pkgver to the latest revision number in the package's development tree.")
+ clparser.add_option("--nohold", action="store_false", dest="holdver", help="Automatically bump the pkgver to the latest revision number in the package's development tree.")
+ clparser.add_option("-i", "--install", action="store_true", default=False, help="Install or upgrade the package after a successful build.")
+ clparser.add_option("-L", "--log", action="store_true", default=True, help="Enable makepkg build logging.")
+ clparser.add_option("-m", "--nocolor", action="store_true", default=False, help="Disable color in output messages.")
+ clparser.add_option("-o", "--nobuild", action="store_true", default=False, help="Download and extract files only, but do not build them.")
+ clparser.add_option("-p", action="store", help="Read the package script buildscript instead of the PKGBUILD default.", metavar='/path/to/buildscript')
+ clparser.add_option("-r", "--rmdeps", action="store_true", default=False, help="Upon successful build, remove any dependencies installed by makepkg during dependency auto-resolution and installation when using -s.")
+ clparser.add_option("-R", "--repackage", action="store_true", default=False, help="Repackage contents of the package without rebuilding the package.")
+ clparser.add_option("-s", "--syncdeps", action="store_true", default=False, help="Install missing dependencies using pacman.")
+ clparser.add_option("--allsource", action="store_true", default=False, help="Do not actually build the package, but build a source-only tarball that includes all sources, including those that are normally download via makepkg.")
+ clparser.add_option("--source", action="store_true", default=False, help="Do not actually build the package, but build a source-only tarball that does not include sources that can be fetched via a download URL.")
+ clparser.add_option("--pkg", action="store", help="Only build listed packages from a split package.")
+ clparser.add_option("--noconfirm", action="store_true", default=False, help="(Passed to pacman) Prevent pacman from waiting for user input before proceeding with operations.")
+ clparser.add_option("--noprogressbar", action="store_true", default=False, help="(Passed to pacman) Prevent pacman from displaying a progress bar.")
+ clparser.add_option("--rmold", action="store_true", default=False, help="BETA: Remove old src and software packages from repos. Use with caution. False positives may occur (i.e. nvidia pkgs)!")
+
+ (options, args) = clparser.parse_args()
+
+ options1 = ['config', 'p', 'pkg', 'pkgdir', 'pkglist']
+ options2 = ['asroot', 'ignorearch', 'bump', 'clean', 'cleancache', 'nodeps',
+ 'noextract', 'force', 'forcever', 'geninteg', 'skipinteg', 'holdver',
+ 'install', 'log', 'nocolor', 'nobuild', 'rmdeps', 'repackage',
+ 'syncdeps', 'allsource', 'source', 'noconfirm', 'noprogressbar',
+ 'rmold', 'md5','recurse','abs' ]
+
+ for o in options1:
+     cmd1 = getattr(options, o)
+     if o != 'p':
+         if cmd1 is not None:
+             cli_dict['--'+o] = cmd1
+     elif cmd1 is not None:
+         cli_dict['-'+o] = cmd1
+ for o in options2:
+     cmd2 = getattr(options, o)
+     if cmd2 is True:
+         cli_list.append('--'+o)
+
+ # Create makepkg command
+ makepkg_cmd.extend(cli_list)
+ for k in cli_dict.keys():
+ v = cli_dict.get(k)
+ makepkg_cmd.append(k)
+ makepkg_cmd.append(v)
+
+ # Remove bump option from makepkg command if it exists
+ if "--bump" in makepkg_cmd:
+ makepkg_cmd.remove("--bump")
+ # Remove rmold option from makepkg command if it exists
+ if "--rmold" in makepkg_cmd:
+ makepkg_cmd.remove("--rmold")
+ # Remove "--geninteg" option (if it exists) from makepkg command.
+ # Checking for *sums is done automatically by mpv2.py
+ if "--geninteg" in makepkg_cmd:
+ makepkg_cmd.remove("--geninteg")
+
+ if "--md5" in makepkg_cmd:
+ makepkg_cmd.remove("--md5")
+
+ if "--recurse" in makepkg_cmd:
+ makepkg_cmd.remove("--recurse")
+
+ if "--pkgdir" in makepkg_cmd:
+ makepkg_cmd.remove("--pkgdir")
+ makepkg_cmd.remove(cli_dict["--pkgdir"])
+
+ if "--pkglist" in makepkg_cmd:
+ makepkg_cmd.remove("--pkglist")
+ makepkg_cmd.remove(cli_dict["--pkglist"])
+
+ if "--abs" in makepkg_cmd:
+ makepkg_cmd.remove("--abs")
+
+
+ print "Makepkg Command:",makepkg_cmd
+ return makepkg_cmd, cli_list, cli_dict
+
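+ # Illustrative example (not exhaustive): invoking this script as
+ #   mpv2.py --bump --rmold
+ # from a package directory yields roughly
+ #   makepkg_cmd = ['makepkg', '--asroot', '--holdver', '--log']
+ #   cli_list    = ['--asroot', '--bump', '--holdver', '--log', '--rmold']
+ #   cli_dict    = {}
+ # i.e. --bump/--rmold steer this wrapper, while makepkg only sees its own flags.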
+
+
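+# Packagefile models one PKGBUILD directory: it sources the PKGBUILD to pull
+# pkgname/pkgver/pkgrel/depends, reads CARCH and PKGDEST from /etc/makepkg.conf,
+# and drives the build, repository update and cleanup steps for that package.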
+class Packagefile(object):
+ def __init__(self,cli_dict,cli_list,makepkg_cmd):
+ self.failure=0
+ self.attempt=0
+ self.REPO = "none"
+ self.DOCROOT = ""
+ self.pkgfile = "none"
+ self.pkgbase = ""
+ self.pkgname = ""
+ self.pkgver = ""
+ self.pkgrel = ""
+ self.pkglist=[]
+ self.epoch = ""
+ self.arch = ""
+ self.CARCH = ""
+ self.PKGDEST = ""
+ self.TOTALPKG = ""
+ self.GZPKG = ""
+ self.XZPKG = ""
+ self.repolist=["core", "extra",
+ "chroot-devel", "mv-core", "xmpl", "local"]
+ self.mydir = os.getcwd()
+ self.variables = ['pkgbase','pkgname','pkgver','pkgrel','arch','epoch','depends','makedepends','checkdepends']
+ self.pkgrel_incremented = False
+ self.makepkg_cmd = makepkg_cmd
+ self.cli_list = cli_list
+ self.cli_dict = cli_dict
+ self.pkg_dependencies = []
+ self.makepkg_dependencies = []
+ self.check_dependencies = []
+ self.backupdir="/tmp/backup/"
+ self.updateABS = "Update waiting or not requested"
+ self.compile_status = "Pending"
+ self.backup_pkgdir = ""
+ self.pkg_dep_available = []
+ self.pkg_dep_not_in_repo = []
+ if "-p" in cli_dict:
+ self.pkgfile = cli_dict["-p"]
+ else:
+ self.pkgfile = "PKGBUILD"
+
+ self.updateINIT()
+
+
+
+ def set_srcpkg(self):
+ if self.pkgbase:
+ self.SRCPKG = self.pkgbase + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + ".src.tar.gz"
+ else:
+ self.SRCPKG = self.pkglist[0] + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + ".src.tar.gz"
+ #print "Changed " + self.pkgname + " release to " + str(self.pkgrel)
+
+ def updateINIT(self):
+ # Check what file will be used for our PKGBUILD
+
+ # Does the file exist?
+ if not os.path.isfile(self.pkgfile):
+ print "ERROR in function config_file: Can't find ",self.pkgfile,"!"
+ sys.exit(2)
+ else:
+ # Loop over contents to get our variables
+ # Use bash to do it because PKGBUILDs are very loose with their format
+ for item in self.variables:
+ v = subprocess.Popen(['/bin/bash','-c', 'source ' +
+ self.pkgfile +
+ '; echo ${' + item + '[@]}'],
+ stdout = subprocess.PIPE,)
+ value = v.communicate()[0].strip('\n')
+ if item == "pkgbase":
+ self.pkgbase = value
+ elif item == "pkgname":
+ self.pkgname = value
+ self.pkglist = list(value.split())
+ elif item == "pkgver":
+ self.pkgver = value
+ elif item == "pkgrel":
+ self.pkgrel = value
+ elif item == "epoch":
+ if value:
+ self.epoch = "%s:" %value
+ elif item == "arch":
+ self.arch = value
+ elif item == "depends":
+ self.pkg_dependencies = value.split()
+
+ elif item == "makedepends":
+ self.makepkg_dependencies = value.split()
+ elif item == "checkdepends":
+ self.check_dependencies = value.split()
+
+ self.set_srcpkg()
+ # Get needed makepkg.conf variables
+ mpkg="/etc/makepkg.conf"
+ f=open(mpkg,"r")
+ # Read file contents into memory
+ makepkg_contents=f.readlines()
+ f.close()
+ # Loop over contents to get our variables
+ for mp_lines in makepkg_contents:
+ if mp_lines.strip().startswith("CARCH"):
+ self.CARCH = mp_lines.partition("=")[2].strip('\n').replace('\"','')
+ if self.arch != 'any':
+ self.arch = self.CARCH
+ if mp_lines.strip().startswith("PKGDEST"):
+ self.PKGDEST = mp_lines.partition("=")[2].strip('\n')
+
+
+
+ def print_vars(self):
+ print "####################################################"
+ print ""
+ print " Config File:",self.pkgfile
+ print " pkgbase:",self.pkgbase
+ print " pkgname:",self.pkgname
+ print " pkglist:",self.pkglist
+ print " pkgver:",self.pkgver
+ print " pkgrel:",self.pkgrel
+ print " epoch:" ,self.epoch
+ print " arch:",self.arch
+ print " Source package:",self.SRCPKG
+ print " CARCH is:",self.CARCH
+ print " Package destination:",self.PKGDEST
+ print " Architecture is:",self.arch
+ print " Repository : ", self.REPO
+ print " DOCROOT : ", self.DOCROOT
+ print " Current directory: ", self.mydir
+ print " Backup direcotory: ", self.get_backupdir()
+ print " Dependencies: ", self.pkg_dependencies
+ print " Make Dependencies: ", self.makepkg_dependencies
+ print " Unmet Dependencies - compile:" , self.pkg_dep_not_in_repo
+ print " Unmet Dependencies - available:" , self.pkg_dep_available
+ print " Update ABS Status: " ,self.get_ABS_status()
+ print " Package compile status: ", self.get_compile_status()
+ print ""
+ print "####################################################"
+
+ def getname(self):
+ return self.pkgname
+
+ def getfailure(self):
+ return self.failure
+
+ def getattempts(self):
+ return self.attempt
+
+ def getdir(self):
+ return self.mydir
+
+ def get_unmet_compile(self):
+ return self.pkg_dep_not_in_repo
+
+ def get_unmet_install(self):
+ return self.pkg_dep_available
+
+ def incrementcompile(self):
+ self.attempt = self.attempt + 1
+
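+ # Split the combined depends/makedepends/checkdepends into packages that are
+ # available from a repository ("pacman -Ss" finds them) and packages that must
+ # be built first; "pacman -T" filters out what is already installed.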
+ def calculate_unmet_depends(self):
+ pkg_not_installed = []
+ pkg_not_in_repo = []
+ pkg_available = []
+ val=''
+ command = [ "pacman" , "-T" ]
+ pkg_list= self.pkg_dependencies + self.makepkg_dependencies + self.check_dependencies
+
+ if len(pkg_list) > 0:
+ command = command + pkg_list
+ print " Checking for dependencies against installed packages"
+ #print command
+ pkg_not_installed = subprocess.Popen(command, stdout = subprocess.PIPE,).communicate()[0].strip().split('\n')
+ while val in pkg_not_installed:
+ pkg_not_installed.remove(val)
+
+ for i in pkg_list:
+ split_package = i.replace('>',' ').replace('<',' ').replace('=',' ').split()
+
+ if len(split_package) > 1:
+ split_package_name = split_package[0]
+ package_version = split_package[1]
+ else:
+ split_package_name = split_package[0]
+ package_version = None
+
+ pkgregext="^%s$" %split_package_name
+ command = ["pacman" , "-Ss" , pkgregext ]
+ #print " Checking for dependencies in repository: %s" %i
+ p = subprocess.Popen(command, stdout = subprocess.PIPE,)
+ p.wait()
+ rc = p.returncode
+ if rc == 0 :
+ if i in pkg_not_installed:
+ print "- Dependency found in repository, but not installed: %s" %i
+ pkg_available.append(split_package_name)
+ else:
+ print "- Dependency not found, it must be built: %s" %i
+ pkg_not_in_repo.append(split_package_name)
+
+ self.pkg_dep_available = pkg_available
+ self.pkg_dep_not_in_repo = pkg_not_in_repo
+
+
+ return
+
+ def get_ABS_status(self):
+ return self.updateABS
+
+ def get_backupdir(self):
+ return self.backup_pkgdir
+
+ def get_compile_status(self):
+ return self.compile_status
+
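+ # Work out the target repository from the directory path (core/extra/xmpl/...)
+ # and the git branch suffix; e.g. a package under .../extra/<pkg> on the
+ # testing branch ends up in /data/pkg_repo/<CARCH>/extra-testing.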
+ def find_repo(self):
+ # Create a list with the directory names of our current directory tree
+ dir_tree = os.path.dirname(self.mydir).split("/")
+ # Loop through the dir_tree to see if we can find our repo
+ for item in self.repolist:
+ if item not in dir_tree:
+ continue
+ else:
+ repo_name = item
+ if repo_name == "extra":
+ self.REPO = "extra" + SFIX
+ elif repo_name == "core":
+ self.REPO = "core" + SFIX
+ elif repo_name == "xmpl":
+ self.REPO = "local"
+ elif repo_name not in self.repolist:
+ print "ERROR in function find_repo: Cannot determine repository!"
+ sys.exit(2)
+ else:
+ self.REPO = repo_name
+ # Ensure our DOCROOT exists and if not, create it
+ self.DOCROOT = "/data/pkg_repo/" + self.CARCH + "/" + self.REPO
+ print "DOCROOT:",self.DOCROOT
+ if os.path.exists(self.DOCROOT):
+ print "INFO: Repository is",self.REPO
+ else:
+ try:
+ os.mkdir(self.DOCROOT,0755)
+ print "INFO: ",self.DOCROOT,"directory created. Repository is",self.REPO
+ except:
+ print " Creating DOCROOT failed"
+ pass
+
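+ # Refuse to build a package that already exists in the other core/extra repo
+ # (pacman -Sl | cut | grep -w) unless --force was given.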
+ def dup_check(self):
+ print "- Checking if package is a duplicate in core or extra:"
+ #if REPO in ("local", "mv-core", "xmpl", "chroot-devel"):
+ # return
+ for tmp_repo in ("core", "extra"):
+ if tmp_repo + SFIX != self.REPO:
+ p1 = subprocess.Popen(["pacman", "-Sl", tmp_repo + SFIX], stdout=subprocess.PIPE, stderr=subprocess.STDOUT,)
+ p2 = subprocess.Popen(["cut", "-d", " ", "-f", "2"], stdin=p1.stdout, stdout=subprocess.PIPE,)
+ p3 = subprocess.Popen(["grep","-w" , self.pkgname], stdin=p2.stdout, stdout = subprocess.PIPE,)
+ #output = p3.communicate()[0].strip('\n')
+ output = p3.communicate()[0].strip('\n')
+ #print tmp_repo + SFIX
+ #print output
+ #print "###############"
+
+ if output == self.pkgname:
+ if "--force" not in self.cli_list:
+ print "#######################################"
+ print self.pkgname," already exists in ",tmp_repo + SFIX
+ print
+ print "Use --force to overwite"
+ print "#######################################"
+ sys.exit(2)
+ else:
+ print "Force detected! Making package regardless!"
+ print "#######################################"
+ else:
+ if not output:
+ print " Package is not a duplicate"
+ return
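+
+ # --bump: back up the PKGBUILD and bump pkgrel by one; the backup is restored
+ # if the build later fails (see make_package).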
+ def increase_pkgrel(self):
+ if not "--bump" in self.cli_list:
+ return
+ print "- Incrementing pkgrel for %s" %self.pkgname
+ # Backup the original pkgfile
+ shutil.copy2(self.pkgfile, self.pkgfile + ".old")
+ # Let's increase the pkgrel
+ for line in fileinput.input(self.pkgfile, inplace=1):
+ if line.strip().startswith("pkgrel"):
+ self.pkgrel = line.partition("=")[2].strip('\n')
+ # Add 1 to pkgrel
+ new_pkgrel = int(self.pkgrel) + 1
+ line = line.replace("pkgrel=" + self.pkgrel, "pkgrel=" + str(new_pkgrel))
+ self.pkgrel = str(new_pkgrel)
+ sys.stdout.write(line)
+ print " pkgrel:",self.pkgrel
+ self.set_srcpkg()
+ self.pkgrel_incremented = True
+
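+ # clearMD5() strips existing checksum arrays from the PKGBUILD so that
+ # updateMD5() will regenerate them with "makepkg -g".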
+ def clearMD5(self):
+ print " Clearing out old md5sums"
+ sumlist=['sha1sums', 'sha256sums', 'sha384sums', 'sha512sums', 'md5sums']
+ filename=self.pkgfile
+ file_contents=[]
+ f = open(filename,"r")
+ lineIter= iter(f)
+ for aLine in lineIter:
+ if aLine.strip().startswith("md5sum"):
+ if aLine.strip().endswith(")"):
+ continue
+ else:
+ for aModule in lineIter:
+ if aModule.strip().endswith(")"):
+ break
+ else:
+ file_contents.append(aLine)
+ f.close()
+ try:
+ f = open(filename,"w")
+ except:
+ outtext="Couldn't open file for writing: %s" %filename
+ print outtext
+ return False
+ for line in file_contents:
+ f.write(line)
+ f.close()
+
+ def updateMD5(self):
+ print "- Checking MD5 sums"
+ if "--md5" in self.cli_list:
+ self.clearMD5()
+
+ sums = []
+ pkgfile_contents=[]
+
+ # Open pkgfile for reading
+ f = open(self.pkgfile, 'r')
+ pkgfile_contents = f.readlines()
+ f.close()
+
+
+ # Iterate through the lines looking for a match pattern
+ for line in pkgfile_contents:
+ check = re.compile('sha1sums|sha256sums|sha384sums|sha512sums|md5sums')
+ sums = check.match(line)
+ if sums:
+ break
+ if not sums:
+ print " Adding md5 sums"
+ # If no matches are found, append md5sums to the end of the pkgfile
+ p = open(self.pkgfile, 'a')
+ md5gen = subprocess.Popen(["makepkg", "--asroot", "-g"], stdout = subprocess.PIPE,).communicate()[0]
+ p.writelines(md5gen)
+ p.close()
+
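+ # Run the assembled makepkg command, then build the matching source tarball.
+ # On failure the pkgrel bump (if any) is rolled back and the failure counted.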
+ def make_package(self):
+ if self.arch == 'any':
+ self.CARCH = 'any'
+ #creates both binary and source package
+ print "- Making package"
+ retcode = subprocess.call(self.makepkg_cmd)
+ if retcode != 0:
+ self.compile_status = "Failed: %s" %retcode
+ self.failure = self.failure + 1
+ print " ERROR: makepkg failed with return code ",retcode
+ if self.pkgrel_incremented:
+ if os.path.isfile(self.pkgfile + '.old'):
+ shutil.move(self.pkgfile + '.old', self.pkgfile)
+ print " Reverted pkgrel increment."
+ return False
+ elif "--nobuild" in self.makepkg_cmd:
+ self.compile_status = "Failed: nobuild"
+ return False
+ else:
+ print "- Making source package"
+ retcode = subprocess.call(["makepkg", "--force", "--holdver", "--asroot", "--source"])
+ if retcode != 0:
+ self.compile_status = "Failed source: %s" %retcode
+ self.failure = self.failure + 1
+ print "ERROR: Source package creation failed with return code",retcode
+ sys.exit(2)
+ print "=============FINSHED CREATING PKG=================="
+ self.compile_status = "Success"
+ return True
+
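+ # Copy the freshly built package(s) into DOCROOT, optionally delete old
+ # versions (--rmold), re-add them to the repo database with repo-add and
+ # refresh pacman's view of the repositories.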
+ def update_database(self):
+ print "- Copying files and updating database"
+ # pkgname could be a list of several pkgs. Since bash array format is
+ # loose, let bash parse the pkgname(s) first, then return a list for us.
+ os.chdir(self.DOCROOT)
+
+ print
+ print " INFO: Changed working dir to",self.DOCROOT
+ print
+
+ for i in self.pkglist:
+ print " Package name:",i
+
+ self.GZPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.gz"
+ self.XZPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.xz"
+ print self.GZPKG
+ print self.XZPKG
+
+ if os.path.isfile(self.PKGDEST + "/" + self.XZPKG):
+ self.TOTALPKG = self.XZPKG
+ elif os.path.isfile(self.PKGDEST + "/" + self.GZPKG):
+ self.TOTALPKG = self.GZPKG
+ else:
+ print ""
+ print "!!!!!!!!!!!!!!!!!"
+ print " ERROR in function update_repo: Couldn't find the new package",self.PKGDEST + "/" + self.TOTALPKG
+ sys.exit(2)
+
+ print " Package file:",self.TOTALPKG
+ # Remove old package(s) from local copy
+ oldpkgcheck = re.compile( i + "-" + self.epoch + '(pkgver|[\d.]+)' + "-" + '(?!pkgrel)' )
+ dirlist = os.listdir(self.DOCROOT)
+ for n in dirlist:
+ if n.startswith(i):
+ if not oldpkgcheck.search(n):
+ print n,"does not match"
+ continue
+ else:
+ OLDPKG = glob.glob(oldpkgcheck.search(n).group() + "*.pkg.tar.?z")
+ print "OLDPKG =",OLDPKG
+ if OLDPKG:
+ for DELPKG in OLDPKG:
+ if "--rmold" in self.cli_list:
+ print "Deleting old package:",DELPKG
+ os.remove(DELPKG)
+ # Remove any symlinks to old packages
+ # We make it conditional on "--force" because force will overwrite
+ # an existing package and we want the symlink to stay, pointing to
+ # the newly built package with the same pkgrel.
+ if "--force" not in self.makepkg_cmd:
+ if os.path.islink(self.mydir + "/" + DELPKG):
+ os.remove(self.mydir + "/" + DELPKG)
+ # Copy in new package
+ print " Updating " + self.DOCROOT + " with " + self.TOTALPKG
+ print " Copying " + self.PKGDEST + "/" + self.TOTALPKG
+ shutil.copy2(self.PKGDEST + "/" + self.TOTALPKG, self.DOCROOT)
+ subprocess.call(["repo-add", self.DOCROOT+ "/" + self.REPO + ".db.tar.gz", self.DOCROOT + "/" + self.TOTALPKG])
+ print " Updating pacman database"
+ subprocess.call(["pacman","-Syyyyy" ])
+
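+ # Copy the new .src.tar.gz into the per-repo source tree, optionally pruning
+ # older source packages when --rmold was given.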
+ def update_srcrepo(self):
+ print "- Updating source file repoistory"
+ print " SRCPKG:",self.SRCPKG
+ OLDSRCPKG=""
+ os.chdir(SRCPKGHOME + "/" + self.REPO)
+ print " INFO: Changed working dir to",SRCPKGHOME + "/" + self.REPO
+
+ if not os.path.isfile(SRCPKGHOME + "/" + self.SRCPKG):
+ print "ERROR in function update_srcrepo: Couldn't find the new package",SRCPKGHOME + "/" + self.SRCPKG
+ sys.exit(2)
+
+ i = self.pkglist[0]
+ oldpkgcheck = re.compile( i + "-" + self.epoch + '(pkgver|[\d.]+)' + "-" + '(?!pkgrel)' )
+ dirlist = os.listdir(SRCPKGHOME + "/" + self.REPO)
+ if self.pkgbase:
+ # Remove old src package(s) from local copy
+ if "--rmold" in self.cli_list:
+ for n in dirlist:
+ if n.startswith(self.pkgbase):
+ if not oldpkgcheck.search(n):
+ print n,"does not match"
+ continue
+ else:
+ OLDSRCPKG = glob.glob(oldpkgcheck.search(n).group() + "*.src.tar.gz")
+ else:
+ # Remove old src package(s) from local copy
+ if "--rmold" in self.cli_list:
+ for n in dirlist:
+ if n.startswith(self.pkgname):
+ if not oldpkgcheck.search(n):
+ print n,"does not match"
+ continue
+ else:
+ OLDSRCPKG = glob.glob(oldpkgcheck.search(n).group() + "*.src.tar.gz")
+ if OLDSRCPKG:
+ print "OLDSRCPKG =",OLDSRCPKG
+ for DELSRCPKG in OLDSRCPKG:
+ print "Deleting old source package",DELSRCPKG
+ os.remove(DELSRCPKG)
+ print "Copying new source package to",SRCPKGHOME + "/" + self.REPO + "/" + self.SRCPKG
+ shutil.copy2(SRCPKGHOME + "/" + self.SRCPKG, SRCPKGHOME + "/" + self.REPO + "/")
+
+
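+ # Remove the temporary PKGBUILD backup, the src.tar.gz symlink and any stale
+ # build/package logs, keeping only the logs for the version just built.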
+ def cleanup(self):
+ os.chdir(self.mydir)
+ if os.path.isfile(self.pkgfile + '.old'):
+ os.remove(self.pkgfile + '.old')
+ print "Removed temporary backup file",self.pkgfile + '.old'
+ #remove src.tar.gz so it stops cluttering up git
+ if os.path.islink(self.SRCPKG):
+ os.unlink(self.SRCPKG)
+ # Remove any old -build and -package log files
+ os.chdir(self.mydir)
+ build_log = self.pkgname + "-" + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + "-build.log"
+ pkg_log = self.pkgname + "-" + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + "-package.log"
+ all_logs = glob.glob("*.log*")
+ #print "All Logs:",all_logs
+ saved_logs = []
+ #save the logrotate files
+ saved_logs = glob.glob("*.logrotate.d")
+ if os.path.isfile(build_log):
+ saved_logs.append(build_log)
+ if os.path.isfile(pkg_log):
+ saved_logs.append(pkg_log)
+ #print "Saved Logs:",saved_logs
+ for log in all_logs:
+ if log not in saved_logs:
+ os.remove(log)
+ pass
+
+ def findPKGdir(self,directory, spackage_list):
+ #"""
+ # Searches for PKGBUILDS with name recursively in directory.
+ # Returns False if the file was not found.
+ # This is used by promote and update
+ #"""
+ print "\n Searching for %s in %s" %(spackage_list,directory)
+ for root, dirs, files in os.walk(directory):
+ if "PKGBUILD" in files:
+ try:
+ currentfile=root+"/PKGBUILD"
+ package = parched.PKGBUILD(currentfile)
+ pkgname = package.name
+ pkgversion = str(package.version)
+ for spackage in spackage_list:
+ if type(pkgname) == str:
+ pkgname = package.name.strip()
+ if pkgname == spackage:
+ print " Found package in %s" %root
+ return root
+ if type(pkgname) == list:
+ pkgname = package.name
+ if spackage in pkgname:
+ print " Found package in %s" %root
+ return root
+
+ except Exception, reason:
+ #print reason
+ pass
+ return False
+
+ def check_for_changelog(self, pkgdir):
+ filename = "%s/__changelog" %pkgdir
+ return os.path.exists(filename)
+
+
+ def rsync_files(self,src_dir,dest_dir,rsync_flags="ar"):
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir)
+ if os.path.exists(dest_dir):
+ try:
+ cmd="rsync -%s --exclude src --exclude pkg %s/ %s" %(rsync_flags,src_dir,dest_dir )
+ # print cmd
+ retcode = subprocess.call(cmd, shell=True)
+ if retcode == 0 :
+ outtext=" Copy was a succeses"
+ print outtext
+ else:
+ outtext="- Copying Failed"
+ print outtext
+ return False
+ except OSError, e:
+ outtext="- Copying Failed: %s " %(e)
+ print outtext
+ return False
+ return True
+ else:
+ outtext = "- Dest dir %s does not exist" %(dest_dir)
+ print outtext
+ return False
+
+
+ def backupAction(self):
+ current_time=time.strftime("%Y-%m-%d_%H-%M-%S")
+ src_dir=self.mydir
+ last_bit_of_path=src_dir.split("/")[-1]
+ dest_dir="%s/%s-%s" %(self.backupdir,last_bit_of_path,current_time)
+ self.backup_pkgdir = dest_dir
+ if self.rsync_files(src_dir,dest_dir):
+ outText=" Backup of %s was a success: %s" %(last_bit_of_path,dest_dir)
+ else:
+ outText="- Backup failed"
+ print outText
+
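+ # With --abs: look for a newer PKGBUILD for this package under /var/abs and,
+ # unless a __changelog file exists or the PKGBUILDs are identical, back up the
+ # current directory and rsync the ABS version over it.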
+ def update_pkg_from_archabs(self):
+ if "--abs" in self.cli_list:
+ print " Searching if ABS has an updated PKGBUILD"
+ dest_dir = self.mydir
+ has_changelog = self.check_for_changelog(dest_dir)
+ if has_changelog :
+ outText = " Will not update, because changelog was detected: %s" %self.pkgname
+ print outText
+ self.updateABS="Failed, Changelog detected"
+ return False
+ #search /var/abs for pkg
+ src_dir = self.findPKGdir(ARCHABS, self.pkglist)
+ if src_dir:
+ #Check for diff in PKGBUILD HERE
+ diffpkg1 = "%s/PKGBUILD" %dest_dir
+ diffpkg2 = "%s/PKGBUILD" %src_dir
+ command = ["diff" , diffpkg1, diffpkg2 ]
+ p = subprocess.Popen(command, stdout = subprocess.PIPE,)
+ p.wait()
+ rc = p.returncode
+ if rc == 0 :
+ print " Will not update, PKGBUILDS look to be the same"
+ self.updateABS=" Failed, PKGBUILD are the same"
+ return False
+ self.backupAction()
+ print " Updated pkg found, started copy from %s" %src_dir
+ rsync_flags="ar --delete"
+ if self.rsync_files(src_dir,dest_dir,rsync_flags):
+ outText=" Update of %s was a success: %s" %(self.pkgname,src_dir)
+ self.updateABS = "Success"
+ self.updateINIT()
+ else:
+ outText="- Update failed"
+ self.updateABS = "Failed"
+ print outText
+ else:
+ outtext="- Search did not package %s in ABS" %self.pkgname
+ print outtext
+ self.updateABS = "Failed, did not find package"
+ return False
+ return True
+ else:
+ outtext = "- ABS update not requested"
+ print outtext
+ self.updateABS = "Update not requested"
+ return False
+
+ def install_available_deps(self):
+ print " Installing available dependencies from repo"
+ self.calculate_unmet_depends()
+ for pkg in self.pkg_dep_available:
+ print " " + pkg
+ command=["pacman", "-S" ,"--noconfirm" , pkg]
+ p = subprocess.Popen(command, stdout = subprocess.PIPE,)
+ p.wait()
+
+
+ def uninstall_available_deps(self):
+ print " Removing installed deps"
+ for pkg in self.pkg_dep_available:
+ print " " + pkg
+ command=["pacman", "-R" ,"--noconfirm" , pkg]
+ p = subprocess.Popen(command, stdout = subprocess.PIPE,)
+ p.wait()
+
+
+def search_forpkg(directory, spackage):
+ #"""
+ # Searches for PKGBUILDS with name recursively in directory.
+ # Returns False if the file was not found.
+ # This is used by promote and update
+ #"""
+ pkgname = None
+ pkgname_list = None
+ pkgversion = None
+ pkgFound=False
+
+ split_package = spackage.replace('>',' ').replace('<',' ').replace('=',' ').split()
+
+ if len(split_package) > 1:
+ split_package_name = split_package[0]
+ package_version = split_package[1]
+ else:
+ split_package_name = split_package[0]
+ package_version = None
+ #directory="/data/linhes_pkgbuild/abs/core/ddcxinfo"
+ print " Searching for %s in %s" %(spackage,directory)
+ for root, dirs, files in os.walk(directory):
+ if "PKGBUILD" in files:
+ try:
+ currentfile=root+"/PKGBUILD"
+ package = parched.PKGBUILD(currentfile)
+ if type(package.name) is str:
+ pkgname = str(package.name).strip()
+ if type(package.name) is list:
+ pkgname_list = package.name
+
+ pkgname = str(package.name).strip()
+ #pkgbase = package.pkgbase
+ pkgversion = str(package.version)
+
+ if ( pkgname == split_package_name ) :
+ pkgFound = True
+ elif ( pkgname_list and split_package_name in pkgname_list ):
+ pkgFound = True
+
+ if pkgFound:
+ return root
+
+ except Exception, reason:
+ #print reason
+ pass
+ print "- Failed to find %s" %spackage
+ return False
+
+
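+# process_pkg() is the per-package driver: for each requested package it
+# optionally refreshes from ABS, resolves dependencies (recursing into unmet
+# build deps when --recurse is set, with failure/attempt counters as guards),
+# builds the package and updates the binary and source repositories.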
+def process_pkg(pkg_list,pkg_objdict,pkg_not_found,calling_pkg,recursive_failure):
+ LHABS="/data/linhes_pkgbuild/abs"
+
+ #makepkg_cmd = ['makepkg']
+ #makepkg_cmd, cli_list, cli_dict = commandline(makepkg_cmd)
+
+ if recursive_failure == True :
+ return pkg_list,pkg_objdict,pkg_not_found,recursive_failure
+
+
+ print " Updating pacman database"
+ subprocess.call(["pacman","-Syyyyy" ])
+ for pkg in pkg_list:
+ #skip over blank ones
+
+ if pkg == '':
+ continue
+ pkgdir=''
+ packagefile = None
+ if pkg in pkg_not_found:
+ print "***********"
+ print " pkg not found, recursive_failure detected"
+ print "***********"
+ recursive_failure = True
+ break
+
+ for k, v in pkg_objdict.iteritems():
+ #print k
+ #print v
+ if k == pkg:
+ print " Found package in working list."
+ pkgdir = v.getdir()
+ packagefile = v
+
+ if pkgdir == '' :
+ pkgdir = search_forpkg(LHABS, pkg)
+
+ if pkgdir:
+ print "\n\n"
+ print "$-------------------start of new pkg----------------------$"
+ print " %s , called by %s " %(pkg,calling_pkg)
+ cwd = os.getcwd()
+ os.chdir(pkgdir)
+ #print os.getcwd()
+ if packagefile == None:
+ packagefile = Packagefile(cli_dict,cli_list,makepkg_cmd)
+ else:
+ packagefile.incrementcompile()
+ print "##########################################"
+ print "* Number of times failed compile %s" %packagefile.getfailure()
+ print "* Number of times seen for compile %s" %packagefile.getattempts()
+ print "##########################################"
+
+ if packagefile.getfailure() > 1:
+ recursive_failure = True
+ print "recursive failure detected, breaking out"
+ return pkg_list , pkg_objdict, pkg_not_found , recursive_failure
+
+ if packagefile.getattempts() > 3:
+ recursive_failure = True
+ print "recursive attempts detected, breaking out"
+ return pkg_list , pkg_objdict, pkg_not_found , recursive_failure
+
+
+ #do update here
+ packagefile.update_pkg_from_archabs()
+ packagefile.calculate_unmet_depends()
+ list_of_unmet_to_compile = packagefile.get_unmet_compile()
+ list_of_unmet_to_install = packagefile.get_unmet_install()
+ print "* compile these"
+ print list_of_unmet_to_compile
+
+ print "* install these"
+ print list_of_unmet_to_install
+ #print len(list_of_unmet_to_compile)
+ if len(list_of_unmet_to_compile) > 0 :
+
+ #check if recursive right here
+ if "--recurse" in cli_list:
+ print " Couldn't compile not all dependencies are met"
+ pkg_objdict[packagefile.getname()]=packagefile
+ recursive_failure=process_pkg(list_of_unmet_to_compile,pkg_objdict,pkg_not_found,pkg,recursive_failure)[3]
+ if recursive_failure:
+ break
+ print "* continue_working_on_pkg all deps found"
+ else:
+ print " Skipping recursive dependencies build"
+ packagefile.find_repo()
+ packagefile.dup_check()
+ packagefile.increase_pkgrel()
+ packagefile.updateMD5()
+ packagefile.print_vars()
+ packagefile.install_available_deps()
+ if packagefile.make_package():
+ packagefile.update_database()
+ packagefile.update_srcrepo()
+ packagefile.cleanup()
+ packagefile.uninstall_available_deps()
+ pkg_objdict[packagefile.getname()]=packagefile
+ os.chdir(cwd)
+ #pass
+
+ else:
+ print " Couldn't find package in LinHES ABS: %s" %pkg
+ pkg_not_found.append(pkg)
+
+ return pkg_list , pkg_objdict, pkg_not_found , recursive_failure
+
+def main():
+ #pkg_list=['bac', 'system-templates', 'xscreensaver']
+ #pkg_list=['bac', 'system-templates', 'udisks' ]
+ #pkg_list=['imagemagick']
+ #pkg_list=['fluxbox']
+ #pkg_list=['libarchive']
+ #pkg_list=['gnupg']
+
+
+
+ pkg_not_found=[]
+ pkg_objdict={}
+ pkg_list=[]
+
+
+ global makepkg_cmd
+ global cli_list
+ global cli_dict
+
+ makepkg_cmd = ['makepkg']
+ makepkg_cmd, cli_list, cli_dict = commandline(makepkg_cmd)
+
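+ # Build the comma-separated --pkglist if one was given; otherwise fall back
+ # to the package in the current working directory.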
+ try:
+ pkg_list = cli_dict['--pkglist'].split(',')
+ except:
+ packagefile = None
+ packagefile = Packagefile(cli_dict,cli_list,makepkg_cmd)
+ pkg_objdict[packagefile.getname()]=packagefile
+ pkg_list=[packagefile.getname()]
+
+ #send the list of packages to compile and watch the magic happen
+ #print cli_list
+ #print cli_dict
+ #sys.exit(1)
+ pkg_list, pkg_objdict, pkg_not_found,recursive_failure = process_pkg(pkg_list,
+ pkg_objdict,
+ pkg_not_found,
+ 'Initialloop',False)
+
+
+ update_pkg = []
+ success_pkg = []
+ failed_compile = []
+ failed_update = []
+
+ #create lists
+ for k, v in pkg_objdict.iteritems():
+ #print k
+ #print v.print_vars()
+ if v.get_compile_status().strip() == "Success":
+ success_pkg.append(k)
+ else:
+ temp = "%s --- %s (%s)" %(k,v.get_compile_status(),v.getattempts())
+ failed_compile.append(temp)
+
+ if v.get_ABS_status() == "Success":
+ temp = "%s --- Backup location: %s" %(k,v.get_backupdir())
+ update_pkg.append(temp)
+ elif v.get_ABS_status() != 'Update waiting or not requested':
+ temp = "%s --- %s" %(k,v.get_ABS_status())
+ failed_update.append(temp)
+
+ print "\n\n\n\n"
+ print "#######################################################"
+
+ print "\n"
+ print "Successful updated from ABS:"
+ print "----------------------------"
+ pprint.pprint(update_pkg)
+
+ print "\n"
+ print "Failed updated from ABS:"
+ print "----------------------------"
+ pprint.pprint(failed_update)
+
+
+ print "\n"
+ print "Couldn't find these packages:"
+ print "----------------------------"
+ pprint.pprint(pkg_not_found)
+
+ print "\n"
+ print "Successful compiled :"
+ print "---------------------------"
+ pprint.pprint(success_pkg)
+
+ print "\n"
+ print "Failed to compile these packages:"
+ print "----------------------------"
+ pprint.pprint( failed_compile)
+
+if __name__ == "__main__":
+ main()
+ print "--------------------------"
+
+#libsig
+#libxml++ dep search and compile
+#
\ No newline at end of file