summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorBritney Fransen <brfransen@gmail.com>2022-11-10 21:42:34 (GMT)
committerBritney Fransen <brfransen@gmail.com>2022-11-10 21:42:34 (GMT)
commit384f17fbd123dfcf68f35a764df157a8ace08d16 (patch)
treec6b8548f16775c3c7699ec11a0a2af2236424974
parent05fcbd3195c30dfcfb40f4e7e409ff3698a1d099 (diff)
downloadlinhes_pkgbuild-384f17fbd123dfcf68f35a764df157a8ace08d16.zip
linhes_pkgbuild-384f17fbd123dfcf68f35a764df157a8ace08d16.tar.gz
linhes_pkgbuild-384f17fbd123dfcf68f35a764df157a8ace08d16.tar.bz2
linhes-dev: tools to build packages for linhes
-rw-r--r--linhes/linhes-dev/PKGBUILD19
-rwxr-xr-xlinhes/linhes-dev/lhsync.sh126
-rwxr-xr-xlinhes/linhes-dev/mp.py620
-rwxr-xr-xlinhes/linhes-dev/setup_lhdev.sh33
-rwxr-xr-xlinhes/linhes-dev/update_db_repo.sh22
5 files changed, 820 insertions, 0 deletions
diff --git a/linhes/linhes-dev/PKGBUILD b/linhes/linhes-dev/PKGBUILD
new file mode 100644
index 0000000..f6cb75b
--- /dev/null
+++ b/linhes/linhes-dev/PKGBUILD
@@ -0,0 +1,19 @@
# PKGBUILD for linhes-dev: installs the LinHES developer helper scripts
# (lhsync.sh, mp.py, setup_lhdev.sh, update_db_repo.sh) into /usr/bin.
pkgname=linhes-dev
pkgver=9.0
pkgrel=1
pkgdesc="Scripts to develop LinHES"
arch=('x86_64')
license=('GPL2')
# Runtime tools the scripts call: ccm (clean-chroot-manager) and git/GitPython
# for mp.py, rclone for lhsync.sh, yaah for AUR builds.
depends=('clean-chroot-manager' 'git' 'python-gitpython' 'rclone' 'yaah')
url="http://linhes.org/"
source=('lhsync.sh' 'mp.py' 'setup_lhdev.sh' 'update_db_repo.sh')

# Install every source script, executable, into /usr/bin.
package() {
  cd ${srcdir}
  install -d ${pkgdir}/usr/bin
  install -D -m755 * ${pkgdir}/usr/bin/
}
# NOTE(review): regenerate these (e.g. with updpkgsums) whenever a script changes.
sha256sums=('d781ffb509282d03f40612556a40455fc6ee58b4b3fc60e5457c90cacd65f99c'
            'fc2cb4e6ed3db09202abe2eb2ac5056cc92a19e26f33a29558e65f5c51e8026f'
            'b5d12503757471be72fa20fb86a0b1563916b4db85048dcd78b49eaade3db989'
            '6c3adaea49b7a0bb1baa4ed2d2b1d9f9fbab2f78b6e7ec1e8bedf4abecda25ba')
diff --git a/linhes/linhes-dev/lhsync.sh b/linhes/linhes-dev/lhsync.sh
new file mode 100755
index 0000000..e8cd8b1
--- /dev/null
+++ b/linhes/linhes-dev/lhsync.sh
@@ -0,0 +1,126 @@
#!/bin/bash

#This script will create and synchronize a local package mirror with the repository
#(as defined below) on linhes.org, and will also update the database.
#This script uses a shared account on linhes.org.
#DO NOT change the account name and don't ask for the password,
#instead setup ssh keys and run ssh-agent.

# Pull in CARCH (and friends) from the system makepkg configuration.
if [ -e /etc/makepkg.conf ]
then
    . /etc/makepkg.conf
else
    # Warn on stderr but keep going; CARCH gets a fallback below.
    echo "Couldn't find /etc/makepkg.conf" >&2
fi

# Fallback: without CARCH every repo path below would collapse to
# ".../pkg_repo/" — default to the machine architecture instead.
CARCH=${CARCH:-$(uname -m)}

LOCAL_DIR=/data/dev
REMOTE_DIR=/srv/www/repo
PKGROOT=$LOCAL_DIR/pkg_repo/$CARCH
+
#######################################
# Sync one package directory with linhes.org via rclone bisync and,
# optionally, rebuild and push the repo database.
# Arguments:
#   $1 - remote directory (on linhes.org)
#   $2 - local directory
#   $3 - repo name (for messages and update_db_repo.sh)
#   $4 - optional mode: resync | force | update_db
# Exits the script with status 1 when rclone fails.
#######################################
function sync_dirs {
    REMOTE=$1
    LOCAL=$2
    echo "--------------------------------------------------------"
    echo " Syncing $3 packages with linhes.org"
    echo "--------------------------------------------------------"
    echo "Remote: $REMOTE"
    echo "Local: $LOCAL"
    case "$4" in
        resync)
            # --resync: re-establish the bisync baseline (remote wins)
            rclone bisync ":sftp,host=linhes.org,user=reposync:$REMOTE" "$LOCAL" --verbose --resync --copy-links
            ;;
        force)
            # --force: proceed even when rclone detects too many deletes
            rclone bisync ":sftp,host=linhes.org,user=reposync:$REMOTE" "$LOCAL" --verbose --force --copy-links
            ;;
        *)
            rclone bisync ":sftp,host=linhes.org,user=reposync:$REMOTE" "$LOCAL" --verbose --copy-links
            ;;
    esac
    # Capture the rclone exit status explicitly before any other command runs.
    rc=$?

    if [ "$rc" -ne 0 ]
    then
        echo "############################################"
        echo "## ERRORS OCCURED ##"
        echo "############################################"
        exit 1
    fi

    #update the local database
    if [ "$4" == "update_db" ]
    then
        echo "--------"
        echo "Updating the whole package db..."
        echo "--------"
        update_db_repo.sh "$LOCAL" "$3"

        echo "--------"
        echo "Pushing the package db to linhes.org"
        echo "--------"
        rclone bisync ":sftp,host=linhes.org,user=reposync:$REMOTE" "$LOCAL" --verbose --copy-links
    fi

    echo "--------------------------------------------------------"
    echo " Finished syncing $3 packages"
    echo "--------------------------------------------------------"
}
+
#######################################
# Sync a source-package directory with linhes.org via rclone bisync.
# Arguments:
#   $1 - repo name (linhes | linhes-testing)
#   $2 - optional mode: resync | force (anything else = normal bisync)
#######################################
function source_sync () {
    echo "--------------------------------------------------------"
    echo " Syncing $1 sources"
    echo "--------------------------------------------------------"
    REMOTE_SRC=$REMOTE_DIR/src_packages/$1/
    LOCAL_SRC=$LOCAL_DIR/pkg_repo/src_packages/$1/
    echo "Remote: $REMOTE_SRC"
    # Bug fix: the local path was printed with a "Remote:" label.
    echo "Local: $LOCAL_SRC"
    case "$2" in
        resync)
            rclone bisync ":sftp,host=linhes.org,user=reposync:$REMOTE_SRC" "$LOCAL_SRC" --verbose --resync --copy-links
            ;;
        force)
            rclone bisync ":sftp,host=linhes.org,user=reposync:$REMOTE_SRC" "$LOCAL_SRC" --verbose --force --copy-links
            ;;
        *)
            rclone bisync ":sftp,host=linhes.org,user=reposync:$REMOTE_SRC" "$LOCAL_SRC" --verbose --copy-links
            ;;
    esac
    echo "--------------------------------------------------------"
    # Bug fix: this banner read "$3" but the function only receives two args.
    echo " Finished syncing $1 sources"
    echo "--------------------------------------------------------"
}
+
# Refresh the local pacman sync databases so freshly pushed packages
# become visible to pacman.
pacman_sync() {
    echo "running 'pacman -Sy' to sync repos"
    sudo pacman -Sy
}
+
+#---------------------------------------------------------------
+case $1 in
+ testing)
+ sync_dirs $REMOTE_DIR/$CARCH/linhes-testing/ $PKGROOT/linhes-testing/ linhes-testing $2
+ source_sync linhes-testing $2
+ pacman_sync
+ ;;
+ release)
+ sync_dirs $REMOTE_DIR/$CARCH/linhes/ $PKGROOT/linhes/ linhes $2
+ source_sync linhes $2
+ pacman_sync
+ ;;
+ source)
+ if [ x = x$2 ]
+ then
+ echo "Missing source repository [testing|release]"
+ exit 1
+ fi
+ source_sync $1 $2
+ pacman_sync
+ ;;
+ *)
+ echo "Invalid Options"
+ echo "lhsync.sh (testing|release|source[testing|release]) (update_db|resync|force)"
+ echo
+ echo "force: force rclone to sync when too many deletes are detected"
+ echo "resync: overwrites the local package and source repos with ones from linhes.org"
+ echo "update_db: recreate the db files in the repo and syncs to linhes.org"
+ echo "EXAMPLE: lhsync.sh testing update_db <= will sync the testing repo with linhes.org update the local database and resync with linhes.org"
+ ;;
+esac
diff --git a/linhes/linhes-dev/mp.py b/linhes/linhes-dev/mp.py
new file mode 100755
index 0000000..3cd5c84
--- /dev/null
+++ b/linhes/linhes-dev/mp.py
@@ -0,0 +1,620 @@
#!/usr/bin/env python
# Version 3.0.0
# mp.py - build the package described by the PKGBUILD in the current
# directory and publish the result into the local LinHES repositories.
import os
import sys
import re
import subprocess
import gettext
from git import Repo,Git
import configparser
import glob
import shutil
import optparse
import fileinput
import time
# NOTE(review): gettext, configparser, time and Repo appear unused in this
# file — confirm before removing.

# Fixed locations of the local package / source / src-package repositories.
PKGHOME = "/data/dev/pkg_repo/packages"
SRCHOME = "/data/dev/pkg_repo/sources"
SRCPKGHOME = "/data/dev/pkg_repo/src_packages"

# See what git branch we're working under
git_repo = Git()
branches = git_repo.branch()
# `git branch` marks the checked-out branch with '*'; take the name after it.
git_branch = branches.split('*')[1].lstrip(' ')

# The "testing" branch publishes into the *-testing repos; "master" into the
# release repos. Any other branch is an error: abort before touching anything.
if git_branch == "testing":
    SFIX = "-testing"
elif git_branch == "master":
    SFIX = ""
else:
    print("Can't determine which git branch is in use!")
    sys.exit(2)
#print("Git Branch suffix is:",SFIX)
+
+
def commandline(makepkg_cmd):
    """Parse mp.py's command line and build the makepkg invocation.

    Args:
        makepkg_cmd: list to extend, normally ['makepkg'].

    Returns:
        (makepkg_cmd, cli_list, cli_dict) where cli_list holds the boolean
        flags seen (e.g. '--bump') and cli_dict maps value options
        (e.g. '--pkglist') to their arguments. Options that are internal to
        mp.py (--bump, --rmold, --geninteg) are recorded in cli_list but
        stripped from makepkg_cmd, since makepkg does not understand them.
    """
    cli_dict = {}
    cli_list = []

    clparser = optparse.OptionParser()
    clparser.add_option("-b", "--bump", action="store_true", default=False, help="Increase package release one unit.")
    clparser.add_option("--pkglist", action="store", help="List of packages to compile")
    clparser.add_option("--config", action="store", help="Use an alternate config file instead of the /etc/makepkg.conf default.")
    clparser.add_option("-g", "--geninteg", action="store_true", default=False, help="For each source file in the source array of PKGBUILD, download the file if required and generate integrity checks.")
    clparser.add_option("--rmold", action="store_true", default=False, help="BETA: Remove old src and software packages from repos. Use with caution. False positives may occur (i.e. nvidia pkgs)!")

    (options, args) = clparser.parse_args()

    # Options that carry a value vs. plain boolean flags.
    value_options = ['config', 'pkglist']
    flag_options = ['bump', 'geninteg', 'rmold']

    for name in value_options:
        # getattr() replaces the original eval('options.'+name) — same
        # behavior, no dynamic code execution.
        value = getattr(options, name)
        if value is not None:
            cli_dict['--' + name] = value
    for name in flag_options:
        if getattr(options, name):
            cli_list.append('--' + name)

    # Assemble the makepkg command from the flags and valued options.
    makepkg_cmd.extend(cli_list)
    for opt, value in cli_dict.items():
        makepkg_cmd.append(opt)
        makepkg_cmd.append(value)

    # Strip the mp.py-only options; checksum handling (--geninteg) is done
    # by mp.py itself, not forwarded to makepkg.
    for internal in ("--bump", "--rmold", "--geninteg"):
        if internal in makepkg_cmd:
            makepkg_cmd.remove(internal)

    #print("Makepkg Command:",makepkg_cmd)
    return makepkg_cmd, cli_list, cli_dict
+
+
+
class Packagefile(object):
    """State and operations for building one PKGBUILD and publishing the
    result into the local LinHES package, source and src-package repos."""

    def __init__(self,cli_dict,cli_list,makepkg_cmd):
        # Build bookkeeping: failed steps and compile attempts.
        self.failure=0
        self.attempt=0
        # Repository name and its on-disk document root, filled by find_repo().
        self.REPO = "none"
        self.DOCROOT = ""
        self.pkgfile = "none"
        # PKGBUILD fields, filled by updateINIT().
        self.pkgbase = ""
        self.pkgname = ""
        self.pkgver = ""
        self.pkgrel = ""
        self.pkglist=[]
        self.epoch = ""
        self.arch = ""
        self.CARCH = ""
        # Destination directories (defaults come from the module constants).
        self.PKGDEST = ""
        self.SRCDEST = ""
        self.SRCPKGDEST = ""
        # Candidate built-package filenames per compression format.
        self.TOTALPKG = ""
        self.GZPKG = ""
        self.XZPKG = ""
        self.ZSTPKG = ""
        # Directory-name markers used by find_repo() to classify the package.
        self.repolist=["core", "extra", "linhes",
            "chroot-devel", "mv-core", "xmpl", "local"]
        self.mydir = os.getcwd()
        # PKGBUILD variables that updateINIT() extracts via bash.
        self.variables = ['pkgbase','pkgname','pkgver','pkgrel','arch','epoch','depends','makedepends','makedepends_x86_64','checkdepends']
        self.pkgrel_incremented = False
        self.makepkg_cmd = makepkg_cmd
        self.cli_list = cli_list
        self.cli_dict = cli_dict
        self.pkg_dependencies = []
        self.makepkg_dependencies = []
        self.check_dependencies = []
        self.backupdir="/tmp/backup/"
        self.compile_status = "Pending"
        self.backup_pkgdir = ""
        self.pkg_dep_available = []
        self.pkg_dep_not_in_repo = []
        # NOTE(review): commandline() never produces a '-p' key, so this
        # branch looks dead and pkgfile is always "PKGBUILD" — confirm.
        if "-p" in cli_dict:
            self.pkgfile = cli_dict["-p"]
        else:
            self.pkgfile = "PKGBUILD"

        # Parse the PKGBUILD and makepkg.conf immediately.
        self.updateINIT()
+
+
+ def set_srcpkg(self):
+ if self.pkgbase:
+ self.SRCPKG = self.pkgbase + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + ".src.tar.gz"
+ else:
+ self.SRCPKG = self.pkglist[0] + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + ".src.tar.gz"
+ #print "Changed " + self.pkgname + " release to " + str(self.pkgrel)
+
+
    def updateINIT(self):
        """Parse the PKGBUILD (via bash) and /etc/makepkg.conf, populating the
        pkg* fields, dependency lists, CARCH and the destination directories.
        Exits with status 2 when the PKGBUILD cannot be found."""
        # Check what file will be used for our PKGBUILD

        # Does the file exist?
        if not os.path.isfile(self.pkgfile):
            print("ERROR in function config_file: Can't find ",self.pkgfile,"!")
            sys.exit(2)
        else:
            # Loop over contents to get our variables
            # Use bash to do it because PKGBUILDs are very loose with their format
            for item in self.variables:
                # Source the PKGBUILD in a bash child and echo the variable;
                # ${var[@]} flattens arrays into a space-separated line.
                v = subprocess.Popen(['/bin/bash','-c', 'source ' +
                                      self.pkgfile +
                                      '; echo ${' + item + '[@]}'],
                                     stdout = subprocess.PIPE,)
                value = v.communicate()[0].strip(b'\n')
                value = value.decode('utf-8')
                if item == "pkgbase":
                    self.pkgbase = value
                elif item == "pkgname":
                    # pkgname may be a bash array (split package).
                    self.pkgname = value
                    self.pkglist = list(value.split())
                elif item == "pkgver":
                    self.pkgver = value
                elif item == "pkgrel":
                    self.pkgrel = value
                elif item == "epoch":
                    if value:
                        self.epoch = "%s:" %value
                elif item == "arch":
                    self.arch = value
                elif item == "depends":
                    self.pkg_dependencies = value.split()

                elif item == "makedepends":
                    self.makepkg_dependencies = self.makepkg_dependencies + value.split()
                elif item == "makedepends_x86_64":
                    self.makepkg_dependencies = self.makepkg_dependencies + value.split()
                elif item == "checkdepends":
                    self.check_dependencies = value.split()

        # Derive the source-package filename from the fields just read.
        self.set_srcpkg()
        # Get needed makepkg.conf variables
        mpkg="/etc/makepkg.conf"
        f=open(mpkg,"r")
        # Read file contents into memory
        makepkg_contents=f.readlines()
        f.close()

        # Defaults; PKGDEST may be overridden by makepkg.conf below.
        self.PKGDEST = PKGHOME
        self.SRCDEST = SRCHOME
        self.SRCPKGDEST = SRCPKGHOME
        # Loop over contents to get our variables
        for mp_lines in makepkg_contents:
            if mp_lines.strip().startswith("CARCH"):
                self.CARCH = mp_lines.partition("=")[2].strip('\n').replace('\"','')
                # 'any' packages keep their arch; everything else follows CARCH.
                if self.arch != 'any':
                    self.arch = self.CARCH
            if mp_lines.strip().startswith("PKGDEST"):
                self.PKGDEST = mp_lines.partition("=")[2].strip('\n')
+
+
+
+ def print_vars(self):
+ print("####################################################")
+ print("")
+ print(" Config File:",self.pkgfile)
+ print(" pkgbase:",self.pkgbase)
+ print(" pkgname:",self.pkgname)
+ print(" pkglist:",self.pkglist)
+ print(" epoch:" ,self.epoch)
+ print(" pkgver:",self.pkgver)
+ print(" pkgrel:",self.pkgrel)
+ print(" arch:",self.arch)
+ print(" Source package:",self.SRCPKG)
+ print(" CARCH is:",self.CARCH)
+ print(" Architecture is:",self.arch)
+ print(" Repository : ", self.REPO)
+ print(" DOCROOT : ", self.DOCROOT)
+ print(" Current directory: ", self.mydir)
+ #print(" Backup directory: ", self.get_backupdir())
+ print(" Package destination:",self.PKGDEST)
+ print(" Source destination:",self.SRCDEST)
+ print(" Dependencies: ", self.pkg_dependencies)
+ print(" Make Dependencies: ", self.makepkg_dependencies)
+ print(" Package compile status: ", self.get_compile_status())
+ print("")
+ print("####################################################")
+
+ def getname(self):
+ return self.pkgname
+
+ def getfailure(self):
+ return self.failure
+
+ def getattempts(self):
+ return self.attempt
+
+ def getdir(self):
+ return self.mydir
+
+ def get_unmet_compile(self):
+ return self.pkg_dep_not_in_repo
+
+ def get_unmet_install(self):
+ return self.pkg_dep_available
+
+ def incrementcompile(self):
+ self.attempt = self.attempt + 1
+
+ def get_backupdir(self):
+ return self.backup_pkgdir
+
+ def get_compile_status(self):
+ return self.compile_status
+
+ def find_repo(self):
+ # Create a list with the directory names of our current directory tree
+ dir_tree = os.path.dirname(self.mydir).split("/")
+ # Loop through the dir_tree to see if we can find our repo
+ for item in self.repolist:
+ if item not in dir_tree:
+ continue
+ else:
+ repo_name = item
+ if repo_name == "extra":
+ self.REPO = "extra" + SFIX
+ elif repo_name == "core":
+ self.REPO = "core" + SFIX
+ elif repo_name == "linhes":
+ self.REPO = "linhes" + SFIX
+ elif repo_name == "xmpl":
+ self.REPO = "local"
+ elif repo_name not in repolist:
+ print("ERROR in function find_repo: Cannot determine repository!")
+ sys.exit(2)
+ else:
+ self.REPO = repo_name
+ # Ensure our DOCROOT exists and if not, create it
+ self.DOCROOT = "/data/dev/pkg_repo/" + self.CARCH + "/" + self.REPO
+ #print("DOCROOT:",self.DOCROOT)
+ if os.path.exists(self.DOCROOT):
+ print("")
+ #print("INFO: Repository is",self.REPO)
+ else:
+ try:
+ os.mkdir(self.DOCROOT,0o755)
+ print("INFO: ",self.DOCROOT,"directory created. Repository is",self.REPO)
+ except Exception as e:
+ print("ERROR: Creating DOCROOT failed: ", e)
+ pass
+
    def increase_pkgrel(self):
        """Bump pkgrel by one in the PKGBUILD file, in place.

        No-op unless --bump was given. Keeps a <pkgfile>.old backup so a
        failed build can revert, and sets self.pkgrel_incremented."""
        if not "--bump" in self.cli_list:
            return
        print("- Incrementing pkgrel for %s" %self.pkgname)
        # Backup the original pkgfile
        shutil.copy2(self.pkgfile, self.pkgfile + ".old")
        # Let's increase the pkgrel
        # fileinput with inplace=1 redirects stdout into the file, so every
        # line must be written back, modified or not.
        for line in fileinput.input(self.pkgfile, inplace=1):
            if line.strip().startswith("pkgrel"):
                self.pkgrel = line.partition("=")[2].strip('\n')
                # Add 1 to pkgrel
                new_pkgrel = int(self.pkgrel) + 1
                line = line.replace("pkgrel=" + self.pkgrel, "pkgrel=" + str(new_pkgrel))
                self.pkgrel = str(new_pkgrel)
            sys.stdout.write(line)
        print(" pkgrel:",self.pkgrel)
        # The source-package name embeds pkgrel, so recompute it.
        self.set_srcpkg()
        self.pkgrel_incremented = True
+
    def clearSUMS(self):
        """Strip every checksum array (md5sums/sha*sums/b2sums/cksums) from
        the PKGBUILD so fresh sums can be appended by updateSUMS().

        Returns False when the file cannot be reopened for writing."""
        print(" Clearing out old checksums...")
        filename=self.pkgfile
        file_contents=[]
        f = open(filename,"r")
        # A single shared iterator lets the inner loop consume the
        # continuation lines of a multi-line checksum array.
        lineIter= iter(f)
        for aLine in lineIter:
            if (aLine.strip().startswith("md5sums") or aLine.strip().startswith("sha1sums") or
                aLine.strip().startswith("sha256sums") or aLine.strip().startswith("sha224sums") or
                aLine.strip().startswith("sha384sums") or aLine.strip().startswith("sha512sums") or
                aLine.strip().startswith("b2sums") or aLine.strip().startswith("cksums")):
                if aLine.strip().endswith(")"):
                    # Single-line checksum array: just drop it.
                    continue
                else:
                    # Multi-line array: skip lines until the closing ')'.
                    for aModule in lineIter:
                        if aModule.strip().endswith(")"):
                            break
            else:
                # Keep every non-checksum line.
                file_contents.append(aLine)
        f.close()
        try:
            f = open(filename,"w")
        except:
            outtext="Couldn't open file for writing: %s" %filename
            print(outtext)
            return False
        for line in file_contents:
            f.write(line)
        f.close()
+
    def updateSUMS(self):
        """Ensure the PKGBUILD contains checksum arrays.

        With --geninteg the old sums are cleared first; if no checksum line
        is found afterwards, `makepkg -g` output is appended to the file."""
        print("- Checking checksums...")
        if "--geninteg" in self.cli_list:
            self.clearSUMS()

        sums = []
        pkgfile_contents=[]

        # Open pkgfile for reading
        f = open(self.pkgfile, 'r')
        pkgfile_contents = f.readlines()
        f.close()

        # Iterate through the lines looking for a match pattern
        for line in pkgfile_contents:
            check = re.compile('cksums|sha1sums|sha224sums|sha256sums|sha384sums|sha512sums|md5sums|b2sums')
            # match() anchors at the line start, so indented sum
            # continuation lines do not count.
            sums = check.match(line)
            if sums:
                print(" Found checksums in PKGBUILD")
                break
        if not sums:
            print(" Adding checksums...")
            # If no matches are found, append md5sums to the end of the pkgfile
            p = open(self.pkgfile, 'a')
            sumsgen = subprocess.Popen(["makepkg", "-g"], stdout = subprocess.PIPE,).communicate()[0]
            sumsgen = sumsgen.decode('utf-8')
            p.writelines(sumsgen)
            p.close()
+
    def make_package(self):
        """Build the binary package with ccm and then the source package.

        Returns True on success, False when ccm fails (reverting a --bump
        pkgrel increment) or when --nobuild was requested. Exits with
        status 2 when source-package creation fails."""
        if self.arch == 'any':
            self.CARCH = 'any'
        #creates both binary and source package
        print("- Making package...")
        # ccm = clean-chroot-manager; 's' builds in the chroot.
        retcode = subprocess.call(["sudo", "ccm", "s"])
        if retcode != 0:
            self.compile_status = "Failed: %s" %retcode
            self.failure = self.failure + 1
            print(" ERROR: ccm failed with return code ",retcode)
            # Roll back the pkgrel bump so a rebuild starts from the
            # original release number.
            if self.pkgrel_incremented:
                if os.path.isfile(self.pkgfile + '.old'):
                    shutil.move(self.pkgfile + '.old', self.pkgfile)
                    print(" Reverted pkgrel increment.")
            return False
        elif "--nobuild" in self.makepkg_cmd:
            self.compile_status = "Failed: nobuild"
            return False
        else:
            print("-------------Making source package-------------")
            retcode = subprocess.call(["makepkg", "--force", "--holdver", "--source"])
            if retcode != 0:
                self.compile_status = "Failed source: %s" %retcode
                self.failure = self.failure + 1
                print("ERROR: Source package creation failed with return code",retcode)
                sys.exit(2)
            print("=============FINISHED CREATING PKG=============")
            self.compile_status = "Success"
            return True
+
+ def update_database(self):
+ print("- Copying files and updating database")
+ # pkgname could be a list of several pkgs. Since bash array format is
+ # loose, let bash parse the pkgname(s) first, then return a list for us.
+ self.updateINIT()
+ if self.arch == 'any':
+ self.CARCH = 'any'
+
+ for i in self.pkglist:
+ print(" Package name:",i)
+
+ self.GZPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.gz"
+ self.XZPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.xz"
+ self.ZSTPKG = i + "-" + self.epoch + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + ".pkg.tar.zst"
+ #print(self.GZPKG)
+ #print(self.XZPKG)
+ #print(self.ZSTPKG)
+
+ if os.path.isfile(self.ZSTPKG):
+ self.TOTALPKG = self.ZSTPKG
+ elif os.path.isfile(self.XZPKG):
+ self.TOTALPKG = self.XZPKG
+ elif os.path.isfile(self.GZPKG):
+ self.TOTALPKG = GZPKG
+ else:
+ print("")
+ print("!!!!!!!!!!!!!!!!!")
+ print(" ERROR in function update_repo: Couldn't find the new package",self.TOTALPKG)
+ sys.exit(2)
+
+ print(" Copying " + self.TOTALPKG + " to " + self.PKGDEST)
+ if shutil.copy2(self.TOTALPKG, self.PKGDEST):
+ os.remove(self.mydir + "/" + self.TOTALPKG)
+
+ # Remove old package(s) from local copy
+ #print i + "-" + self.epoch + '(pkgver|[\d.]+)' + "-" + '(?!pkgrel)'
+ oldpkgcheck = re.compile( re.escape(i) + "-" + self.epoch + '(pkgver|[\d.]+)' + "-" + '(?!pkgrel)' )
+
+ dirlist = os.listdir(self.DOCROOT)
+ for n in dirlist:
+ if n.startswith(i):
+ if not oldpkgcheck.search(n):
+ print(n,"does not match")
+ continue
+ else:
+ OLDPKG = glob.glob(oldpkgcheck.search(n).group() + "*.pkg.tar.*")
+ print(" OLDPKG =",OLDPKG)
+ if OLDPKG:
+ for DELPKG in OLDPKG:
+ if "--rmold" in self.cli_list:
+ print("Deleting old package:",DELPKG)
+ os.remove(DELPKG)
+ #subprocess.call(["repo-remove", self.DOCROOT+ "/" + self.REPO + ".db.tar.gz", DELPKG])
+ # Remove any symlinks to old packages
+ # We make it conditional on "--force" because force will overwrite
+ # an existing package and we want the symlink to stay, pointing to
+ # the newly built package with the same pkgrel.
+ if "--force" not in self.makepkg_cmd:
+ if os.path.islink(self.mydir + "/" + DELPKG):
+ os.remove(self.mydir + "/" + DELPKG)
+ # Copy in new package
+ print(" Updating " + self.DOCROOT + " with " + self.TOTALPKG)
+ print(" Copying " + self.PKGDEST + "/" + self.TOTALPKG)
+ shutil.copy2(self.PKGDEST + "/" + self.TOTALPKG, self.DOCROOT)
+ print(" Creating symlink " + self.PKGDEST + "/" + self.TOTALPKG)
+ if os.path.islink(self.mydir + "/" + self.TOTALPKG):
+ os.remove(self.mydir + "/" + self.TOTALPKG)
+ os.symlink(self.DOCROOT + "/" + self.TOTALPKG, self.mydir + "/" + self.TOTALPKG)
+ subprocess.call(["repo-add", self.DOCROOT+ "/" + self.REPO + ".db.tar.gz", self.DOCROOT + "/" + self.TOTALPKG])
+ #print(" Updating pacman database")
+ #subprocess.call(["pacman","-Syyyyy" ])
+
    def update_srcrepo(self):
        """Move the freshly made source package into the per-repo
        src_packages directory, optionally deleting old versions (--rmold).

        Exits with status 2 when the new source package is missing."""
        print("- Updating source file repository")
        print(" SRCPKG:",self.SRCPKG)
        OLDSRCPKG=""

        if not os.path.isfile(self.SRCPKGDEST + "/" +self.SRCPKG):
            print("ERROR in function update_repo: Couldn't find the new package",self.SRCPKG)
            sys.exit(2)

        # Match "<name>-<epoch><version>-" with a release other than the
        # current one (same pattern as update_database).
        i = self.pkglist[0]
        oldpkgcheck = re.compile( re.escape(i) + "-" + self.epoch + '(pkgver|[\d.]+)' + "-" + '(?!pkgrel)' )
        dirlist = os.listdir(self.SRCPKGDEST + "/" )
        if self.pkgbase:
            # Remove old src package(s) from local copy
            if "--rmold" in self.cli_list:
                for n in dirlist:
                    if n.startswith(self.pkgbase):
                        if not oldpkgcheck.search(n):
                            print(n,"does not match")
                            continue
                        else:
                            # NOTE(review): glob.glob() here is relative to the
                            # current working directory, not SRCPKGDEST —
                            # confirm this matches the intended layout.
                            OLDSRCPKG = glob.glob(oldpkgcheck.search(n).group() + "*.src.tar.gz")
        else:
            # Remove old src package(s) from local copy
            if "--rmold" in self.cli_list:
                for n in dirlist:
                    if n.startswith(self.pkgname):
                        if not oldpkgcheck.search(n):
                            print(n,"does not match")
                            continue
                        else:
                            OLDSRCPKG = glob.glob(oldpkgcheck.search(n).group() + "*.src.tar.gz")
        if OLDSRCPKG:
            print("OLDSRCPKG =",OLDSRCPKG)
            for DELSRCPKG in OLDSRCPKG:
                print("Deleting old source package",DELSRCPKG)
                os.remove(DELSRCPKG)

        # Move (copy then delete) the new source package into the repo dir.
        print(" Copying new source package to",self.SRCPKGDEST + "/" + self.REPO + "/" + self.SRCPKG)
        if shutil.copy2(self.SRCPKGDEST + "/" + self.SRCPKG, self.SRCPKGDEST + "/" + self.REPO + "/"):
            os.remove(self.SRCPKGDEST + "/" + self.SRCPKG)
+
+
    def cleanup(self):
        """Tidy the build directory: drop the .old PKGBUILD backup, the
        src.tar.gz symlink, an empty src/ dir, and stale *.log files
        (keeping *.logrotate* files)."""
        os.chdir(self.mydir)
        if os.path.isfile(self.pkgfile + '.old'):
            os.remove(self.pkgfile + '.old')
            print("Removed temporary backup file",self.pkgfile + '.old')
        #remove src.tar.gz so it stops cluttering up git
        if os.path.islink(self.SRCPKG):
            os.unlink(self.SRCPKG)
        #remove src dir if empty
        if os.path.exists(self.mydir + "/src") and not os.path.isfile(self.mydir + "/src"):
            if not os.listdir(self.mydir + "/src"):
                os.rmdir(self.mydir + "/src")
        # Remove any old -build and -package log files
        os.chdir(self.mydir)
        # NOTE(review): build_log/pkg_log are computed but no longer kept —
        # the commented-out lines below used to preserve them; confirm.
        build_log = self.pkgname + "-" + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + "-build.log"
        pkg_log = self.pkgname + "-" + self.pkgver + "-" + self.pkgrel + "-" + self.CARCH + "-package.log"
        all_logs = glob.glob("*.log*")
        #print "All Logs:",all_logs
        saved_logs = []
        #save the logrotate files
        saved_logs = glob.glob("*.logrotate*")
        #if os.path.isfile(build_log):
        #    saved_logs.append(build_log)
        #if os.path.isfile(pkg_log):
        #    saved_logs.append(pkg_log)
        #print ("Saved Logs:",saved_logs)
        for log in all_logs:
            if log not in saved_logs:
                os.remove(log)
        pass
+
+
+ def check_for_changelog(self, pkgdir):
+ filename = "%s/__changelog" %pkgdir
+ return os.path.exists(filename)
+
+
+
def main():
    """Drive a single package build: parse options, build, publish, and
    print a summary of successes and failures."""
    pkg_not_found=[]
    pkg_objdict={}
    pkg_list=[]

    global makepkg_cmd
    global cli_list
    global cli_dict

    makepkg_cmd = ['makepkg']
    makepkg_cmd, cli_list, cli_dict = commandline(makepkg_cmd)

    # NOTE(review): when --pkglist IS supplied the try branch succeeds and
    # `packagefile` is never created, so every call below raises NameError.
    # The multi-package path looks unfinished — confirm before using it.
    try:
        pkg_list = cli_dict['--pkglist'].split(',')
    except:
        packagefile = None
        packagefile = Packagefile(cli_dict,cli_list,makepkg_cmd)
        pkg_objdict[packagefile.getname()]=packagefile
        pkg_list=[packagefile.getname()]


    # Build and publish the package in the current directory.
    packagefile.find_repo()
    packagefile.increase_pkgrel()
    packagefile.print_vars()
    packagefile.updateSUMS()
    if packagefile.make_package():
        packagefile.update_database()
        packagefile.update_srcrepo()
    packagefile.cleanup()
    pkg_objdict[packagefile.getname()]=packagefile

    update_pkg = []
    success_pkg = []
    failed_compile = []
    failed_update = []

    #create lists
    for k, v in pkg_objdict.items():
        #print k
        #print v.print_vars()
        if v.get_compile_status().strip() == "Success":
            success_pkg.append(k)
        else:
            temp = "%s --- %s (%s)" %(k,v.get_compile_status(),v.getattempts())
            failed_compile.append(temp)

    # Final report.
    print("\n\n\n\n")
    print("#######################################################")

    print("\n")
    print("Couldn't find these packages:")
    print("----------------------------")
    print(pkg_not_found)

    print("\n")
    print("Failed to compile these packages:")
    print("----------------------------")
    print( failed_compile)

    print("\n")
    print("Successful compiled :")
    print("---------------------------")
    print(success_pkg)
+
# Entry point: build the package in the current working directory.
if __name__ == "__main__":
    main()
    print("--------------------------")
diff --git a/linhes/linhes-dev/setup_lhdev.sh b/linhes/linhes-dev/setup_lhdev.sh
new file mode 100755
index 0000000..df034c5
--- /dev/null
+++ b/linhes/linhes-dev/setup_lhdev.sh
@@ -0,0 +1,33 @@
#!/bin/bash
# One-time setup of a LinHES development box: creates the local repo
# directory tree, configures clean-chroot-manager and makepkg, and prints
# instructions for checking out the pkgbuild repository.

echo "Creating repo directories..."
sudo mkdir -p /data/dev/
sudo chmod a+w /data/dev/
mkdir -p /data/dev/LH_buildroot
mkdir -p /data/dev/pkg_repo/x86_64/linhes
mkdir -p /data/dev/pkg_repo/x86_64/linhes-testing
mkdir -p /data/dev/pkg_repo/packages
mkdir -p /data/dev/pkg_repo/sources
mkdir -p /data/dev/pkg_repo/src_packages/linhes
mkdir -p /data/dev/pkg_repo/src_packages/linhes-testing


# Bug fix: the closing quote sat before the trailing "..." in this message.
echo -e "\n\nSetup clean-chroot-manager (ccm) and edit ~/.config/clean-chroot-manager.conf..."
# 'ccm l' creates the default config on first run; then point the chroot
# at the LH_buildroot created above.
sudo ccm l
sed -i 's|CHROOTPATH64=.*|CHROOTPATH64="/data/dev/LH_buildroot"|' ~/.config/clean-chroot-manager.conf
sudo ccm c

echo -e "\n\nChanging setting in /etc/makepkg.conf..."
# Using '|' as the sed delimiter avoids escaping every '/' in the paths.
#sudo sed -i 's|#PKGDEST=.*|PKGDEST=/data/dev/pkg_repo/packages|' /etc/makepkg.conf
sudo sed -i 's|#SRCDEST=.*|SRCDEST=/data/dev/pkg_repo/sources|' /etc/makepkg.conf
sudo sed -i 's|#SRCPKGDEST=.*|SRCPKGDEST=/data/dev/pkg_repo/src_packages|' /etc/makepkg.conf
sudo sed -i 's|#PACKAGER=.*|PACKAGER="LinHESDEV <LinHESDev@linhes.org>"|' /etc/makepkg.conf


# clone the git linhes_pkgbuild
# Checkout the repo. You will need to have been given a dev account on
# linhes.org. This involves creating a public ssh key and supplying it to
# one of the linhes.org admins. The key is created using ssh-keygen. The
# resulting public key will be stored in .ssh/id_rsa.pub.
# Bug fix: "toone" -> "to one" in the user-facing message.
echo -e "\n\nCheckout the repo manually. You will need to be given a dev account on linhes.org. This involves creating a public ssh key and supplying it to one of the linhes.org admins. The key is created using ssh-keygen. The resulting public key will be stored in .ssh/id_rsa.pub."
echo -e "EXAMPLE: \ncd /data/dev\ngit clone git@linhes.org:linhes_pkgbuild\n\n"
diff --git a/linhes/linhes-dev/update_db_repo.sh b/linhes/linhes-dev/update_db_repo.sh
new file mode 100755
index 0000000..b277a1f
--- /dev/null
+++ b/linhes/linhes-dev/update_db_repo.sh
@@ -0,0 +1,22 @@
#!/bin/bash
# Tiny util to regenerate the pacman database for a repo on the LOCAL machine.
#
# Usage: update_db_repo.sh <directory> <repo>

if [ $# -ne 2 ]
then
    echo "incorrect number of args"
    echo "update_db_repo.sh directory repo"
    echo "EX: update_db_repo.sh /data/dev/pkg_repo/x86_64/linhes-testing linhes-testing"
    exit 1
fi


# Abort rather than rebuild a db in the wrong directory if the cd fails.
cd "$1" || exit 1
REPO=$2
echo " --------------------------"
echo "updating database for $REPO"
echo " --------------------------"
#pwd
# Forcefully remove the old db so the rebuild starts clean.
rm -f "$REPO.db.tar.gz"
repo-add -q "$REPO.db.tar.gz" *.pkg.tar.*
cd -