Reduce legacy pungi script to gather phase only (#1792)
This reduces the legacy 'pungi' script to only its gather phase and removes the related code in gather.py. The gather phase is still used in the yum path through phases/gather/methods/method_deps.py, so it cannot be removed entirely until all users of that path are gone, but we can at least drop the non-Koji support for creating install trees, ISOs and repos.

Merges: https://pagure.io/pungi/pull-request/1793
Signed-off-by: Adam Williamson <awilliam@redhat.com>
parent c8fe99b1aa
commit 3bc35a9a27
4 changed files with 39 additions and 974 deletions
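
For reference, the surviving gather-only flow that phases/gather/methods/method_deps.py still depends on amounts to: build a pungi Config, parse the kickstart, construct pungi.gather.Pungi, then gather and list (or download) packages under the yum lock. Below is a minimal sketch of that call sequence, mirroring the retained CLI code in this diff; the destdir, cachedir and kickstart path are illustrative placeholders, and method_deps.py is not claimed to drive it in exactly this way.

    import pungi.config
    import pungi.gather
    import pungi.ks

    # Illustrative configuration; a real caller fills these from its own options.
    config = pungi.config.Config()
    config.set("pungi", "destdir", "/srv/compose")        # hypothetical output dir
    config.set("pungi", "cachedir", "/var/cache/pungi")   # package cache location
    config.set("pungi", "arch", "x86_64")

    # Kickstart defining the repos and package manifest (placeholder path).
    ksparser = pungi.ks.get_ksparser(ks_path="/path/to/compose.ks")

    gatherer = pungi.gather.Pungi(config, ksparser)
    with gatherer.yumlock:
        gatherer._inityum()   # initialize the yum object
        gatherer.gather()     # resolve the package set from the kickstart
        for entry in gatherer.list_packages():
            print("RPM: %s" % entry["path"])
        # or gatherer.downloadPackages() to actually fetch the resolved set

In the compose pipeline itself, PungiWrapper (last file in this diff) builds the equivalent pungi command line and configuration rather than calling these methods directly.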
@@ -61,7 +61,6 @@ class Config(SafeConfigParser):
|
|||
self.set("pungi", "destdir", os.getcwd())
|
||||
self.set("pungi", "workdirbase", "/work")
|
||||
self.set("pungi", "bugurl", "https://bugzilla.redhat.com")
|
||||
self.set("pungi", "cdsize", "695.0")
|
||||
self.set("pungi", "debuginfo", "True")
|
||||
self.set("pungi", "alldeps", "True")
|
||||
self.set("pungi", "isfinal", "False")
|
||||
pungi/gather.py (712 changed lines)
@@ -16,22 +16,16 @@
|
|||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
|
||||
import lockfile
|
||||
import urlgrabber.progress
|
||||
import yum
|
||||
from productmd.common import SortedConfigParser
|
||||
import ConfigParser
|
||||
|
||||
import arch as arch_module
|
||||
import multilib_yum as multilib
|
||||
import pungi.util
|
||||
from pungi.wrappers.createrepo import CreaterepoWrapper
|
||||
|
||||
|
||||
class ReentrantYumLock(object):
|
||||
|
|
@@ -347,7 +341,6 @@ class Pungi(PungiBase):
|
|||
self.is_sources = not self.config.getboolean("pungi", "nosource")
|
||||
self.is_debuginfo = not self.config.getboolean("pungi", "nodebuginfo")
|
||||
self.is_resolve_deps = self.config.getboolean("pungi", "resolve_deps")
|
||||
self.is_nomacboot = self.config.getboolean("pungi", "nomacboot")
|
||||
|
||||
self.fulltree_excludes = set(self.ksparser.handler.fulltree_excludes)
|
||||
|
||||
|
|
@@ -1529,24 +1522,6 @@ class Pungi(PungiBase):
|
|||
|
||||
# pungi.util._doRunCommand(compsfilter, self.logger)
|
||||
|
||||
@yumlocked
|
||||
def downloadSRPMs(self):
|
||||
"""Cycle through the list of srpms and
|
||||
find the package objects for them, Then download them."""
|
||||
|
||||
# do the downloads
|
||||
self._downloadPackageList(self.srpm_po_list, os.path.join("source", "SRPMS"))
|
||||
|
||||
@yumlocked
|
||||
def downloadDebuginfo(self):
|
||||
"""Cycle through the list of debuginfo rpms and
|
||||
download them."""
|
||||
|
||||
# do the downloads
|
||||
self._downloadPackageList(
|
||||
self.debuginfo_po_list, os.path.join(self.tree_arch, "debug")
|
||||
)
|
||||
|
||||
def _list_packages(self, po_list):
|
||||
"""Cycle through the list of packages and return their paths."""
|
||||
result = []
|
||||
|
|
@@ -1608,690 +1583,3 @@ class Pungi(PungiBase):
|
|||
def list_debuginfo(self):
|
||||
"""Cycle through the list of DEBUGINFO RPMs and return their paths."""
|
||||
return self._list_packages(self.debuginfo_po_list)
|
||||
|
||||
def _size_packages(self, po_list):
|
||||
return sum([po.size for po in po_list if po.repoid not in self.lookaside_repos])
|
||||
|
||||
def size_packages(self):
|
||||
return self._size_packages(self.po_list)
|
||||
|
||||
def size_srpms(self):
|
||||
return self._size_packages(self.srpm_po_list)
|
||||
|
||||
def size_debuginfo(self):
|
||||
return self._size_packages(self.debuginfo_po_list)
|
||||
|
||||
def writeinfo(self, line):
|
||||
"""Append a line to the infofile in self.infofile"""
|
||||
f = open(self.infofile, "a+")
|
||||
f.write(line.strip() + "\n")
|
||||
f.close()
|
||||
|
||||
def mkrelative(self, subfile):
|
||||
"""Return the relative path for 'subfile' underneath the version dir."""
|
||||
|
||||
basedir = os.path.join(self.destdir, self.config.get("pungi", "version"))
|
||||
if subfile.startswith(basedir):
|
||||
return subfile.replace(basedir + os.path.sep, "")
|
||||
|
||||
def _makeMetadata(
|
||||
self,
|
||||
path,
|
||||
cachedir,
|
||||
comps=False,
|
||||
repoview=False,
|
||||
repoviewtitle=False,
|
||||
baseurl=False,
|
||||
output=False,
|
||||
basedir=False,
|
||||
update=True,
|
||||
compress_type=None,
|
||||
):
|
||||
"""Create repodata and repoview."""
|
||||
|
||||
# Define outputdir
|
||||
if output:
|
||||
outputdir = output
|
||||
else:
|
||||
outputdir = path
|
||||
|
||||
# Define revision if SOURCE_DATE_EPOCH exists in env
|
||||
if "SOURCE_DATE_EPOCH" in os.environ:
|
||||
revision = os.environ["SOURCE_DATE_EPOCH"]
|
||||
else:
|
||||
revision = None
|
||||
|
||||
createrepo_wrapper = CreaterepoWrapper(createrepo_c=True)
|
||||
createrepo = createrepo_wrapper.get_createrepo_cmd(
|
||||
directory=path,
|
||||
update=update,
|
||||
outputdir=outputdir,
|
||||
unique_md_filenames=True,
|
||||
database=True,
|
||||
groupfile=comps,
|
||||
basedir=basedir,
|
||||
baseurl=baseurl,
|
||||
revision=revision,
|
||||
compress_type=compress_type,
|
||||
)
|
||||
|
||||
self.logger.info("Making repodata")
|
||||
pungi.util._doRunCommand(createrepo, self.logger)
|
||||
|
||||
if repoview:
|
||||
# setup the repoview call
|
||||
repoview = ["/usr/bin/repoview"]
|
||||
repoview.append("--quiet")
|
||||
|
||||
repoview.append("--state-dir")
|
||||
repoview.append(os.path.join(cachedir, "repoviewcache"))
|
||||
|
||||
if repoviewtitle:
|
||||
repoview.append("--title")
|
||||
repoview.append(repoviewtitle)
|
||||
|
||||
repoview.append(path)
|
||||
|
||||
# run the command
|
||||
pungi.util._doRunCommand(repoview, self.logger)
|
||||
|
||||
def doCreaterepo(self, comps=True):
|
||||
"""Run createrepo to generate repodata in the tree."""
|
||||
compsfile = None
|
||||
if comps:
|
||||
compsfile = os.path.join(
|
||||
self.workdir,
|
||||
"%s-%s-comps.xml"
|
||||
% (
|
||||
self.config.get("pungi", "family"),
|
||||
self.config.get("pungi", "version"),
|
||||
),
|
||||
)
|
||||
|
||||
# setup the cache dirs
|
||||
for target in ["createrepocache", "repoviewcache"]:
|
||||
pungi.util._ensuredir(
|
||||
os.path.join(self.config.get("pungi", "cachedir"), target),
|
||||
self.logger,
|
||||
force=True,
|
||||
)
|
||||
|
||||
repoviewtitle = "%s %s - %s" % (
|
||||
self.config.get("pungi", "family"),
|
||||
self.config.get("pungi", "version"),
|
||||
self.tree_arch,
|
||||
)
|
||||
|
||||
cachedir = self.config.get("pungi", "cachedir")
|
||||
compress_type = self.config.get("pungi", "compress_type")
|
||||
|
||||
# setup the createrepo call
|
||||
self._makeMetadata(
|
||||
self.topdir,
|
||||
cachedir,
|
||||
compsfile,
|
||||
repoview=True,
|
||||
repoviewtitle=repoviewtitle,
|
||||
compress_type=compress_type,
|
||||
)
|
||||
|
||||
# create repodata for debuginfo
|
||||
if self.config.getboolean("pungi", "debuginfo"):
|
||||
path = os.path.join(self.archdir, "debug")
|
||||
if not os.path.isdir(path):
|
||||
self.logger.debug("No debuginfo for %s" % self.tree_arch)
|
||||
return
|
||||
self._makeMetadata(
|
||||
path, cachedir, repoview=False, compress_type=compress_type
|
||||
)
|
||||
|
||||
def _shortenVolID(self):
|
||||
"""shorten the volume id to make sure its under 32 characters"""
|
||||
|
||||
substitutions = {
|
||||
"Workstation": "WS",
|
||||
"Server": "S",
|
||||
"Cloud": "C",
|
||||
"Alpha": "A",
|
||||
"Beta": "B",
|
||||
"TC": "T",
|
||||
}
|
||||
if self.config.get("pungi", "variant"):
|
||||
name = "%s-%s" % (
|
||||
self.config.get("pungi", "family"),
|
||||
self.config.get("pungi", "variant"),
|
||||
)
|
||||
else:
|
||||
name = self.config.get("pungi", "family")
|
||||
version = self.config.get("pungi", "version")
|
||||
arch = self.tree_arch
|
||||
|
||||
for k, v in substitutions.iteritems():
|
||||
if k in name:
|
||||
name = name.replace(k, v)
|
||||
if k in version:
|
||||
version = version.replace(k, v)
|
||||
volid = "%s-%s-%s" % (name, version, arch)
|
||||
if len(volid) > 32:
|
||||
raise RuntimeError("Volume ID %s is longer than 32 characters" % volid)
|
||||
else:
|
||||
return volid
|
||||
|
||||
def doBuildinstall(self):
|
||||
"""Run lorax on the tree."""
|
||||
|
||||
cmd = ["lorax"]
|
||||
cmd.extend(["--workdir", self.workdir])
|
||||
cmd.extend(
|
||||
[
|
||||
"--logfile",
|
||||
os.path.join(
|
||||
self.config.get("pungi", "destdir"),
|
||||
"logs/lorax-%s.log" % (self.config.get("pungi", "arch")),
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
try:
|
||||
# Convert url method to a repo
|
||||
self.ksparser.handler.repo.methodToRepo()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
for repo in self.ksparser.handler.repo.repoList:
|
||||
if repo.mirrorlist:
|
||||
# The not bool() thing is because pykickstart is yes/no on
|
||||
# whether to ignore groups, but yum is a yes/no on whether to
|
||||
# include groups. Awkward.
|
||||
repo.mirrorlist = yum.parser.varReplace(
|
||||
repo.mirrorlist, self.ayum.conf.yumvar
|
||||
)
|
||||
cmd.extend(["--mirrorlist", repo.mirrorlist])
|
||||
else:
|
||||
repo.baseurl = yum.parser.varReplace(
|
||||
repo.baseurl, self.ayum.conf.yumvar
|
||||
)
|
||||
cmd.extend(["--source", repo.baseurl])
|
||||
|
||||
# Add the repo in the destdir to our yum object
|
||||
cmd.extend(["--source", "file://%s" % self.topdir])
|
||||
cmd.extend(["--product", self.config.get("pungi", "family")])
|
||||
cmd.extend(["--version", self.config.get("pungi", "version")])
|
||||
cmd.extend(
|
||||
[
|
||||
"--release",
|
||||
"%s %s"
|
||||
% (
|
||||
self.config.get("pungi", "family"),
|
||||
self.config.get("pungi", "version"),
|
||||
),
|
||||
]
|
||||
)
|
||||
if self.config.get("pungi", "variant"):
|
||||
cmd.extend(["--variant", self.config.get("pungi", "variant")])
|
||||
cmd.extend(["--bugurl", self.config.get("pungi", "bugurl")])
|
||||
if self.config.getboolean("pungi", "isfinal"):
|
||||
cmd.append("--isfinal")
|
||||
cmd.extend(["--volid", self._shortenVolID()])
|
||||
|
||||
# on ppc64 we need to tell lorax to only use ppc64 packages so that
|
||||
# the media will run on all 64 bit ppc boxes
|
||||
if self.tree_arch == "ppc64":
|
||||
cmd.extend(["--buildarch", "ppc64"])
|
||||
elif self.tree_arch == "ppc64le":
|
||||
cmd.extend(["--buildarch", "ppc64le"])
|
||||
|
||||
# Only supported mac hardware is x86 make sure we only enable mac
|
||||
# support on arches that need it
|
||||
if self.tree_arch in ["x86_64"] and not self.is_nomacboot:
|
||||
cmd.append("--macboot")
|
||||
else:
|
||||
cmd.append("--nomacboot")
|
||||
|
||||
try:
|
||||
cmd.extend(["--conf", self.config.get("lorax", "conf_file")])
|
||||
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
|
||||
pass
|
||||
|
||||
try:
|
||||
cmd.extend(["--installpkgs", self.config.get("lorax", "installpkgs")])
|
||||
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
|
||||
pass
|
||||
|
||||
if self.rootfs_size != "False":
|
||||
cmd.extend(["--rootfs-size", self.rootfs_size])
|
||||
|
||||
# Allow the output directory to exist.
|
||||
cmd.append("--force")
|
||||
|
||||
# MUST be last in the list
|
||||
cmd.append(self.topdir)
|
||||
|
||||
self.logger.info(" ".join(cmd))
|
||||
pungi.util._doRunCommand(cmd, self.logger)
|
||||
|
||||
# write out the tree data for snake
|
||||
self.writeinfo("tree: %s" % self.mkrelative(self.topdir))
|
||||
|
||||
# Write out checksums for verifytree
|
||||
# First open the treeinfo file so that we can config parse it
|
||||
treeinfofile = os.path.join(self.topdir, ".treeinfo")
|
||||
|
||||
try:
|
||||
treefile = open(treeinfofile, "r")
|
||||
except IOError:
|
||||
self.logger.error("Could not read .treeinfo file: %s" % treefile)
|
||||
sys.exit(1)
|
||||
|
||||
# Create a ConfigParser object out of the contents so that we can
|
||||
# write it back out later and not worry about formatting
|
||||
treeinfo = SortedConfigParser()
|
||||
treeinfo.readfp(treefile)
|
||||
treefile.close()
|
||||
treeinfo.add_section("checksums")
|
||||
|
||||
# Create a function to use with os.path.walk to sum the files
|
||||
# basepath is used to make the sum output relative
|
||||
sums = []
|
||||
|
||||
def getsum(basepath, dir, files):
|
||||
for file in files:
|
||||
path = os.path.join(dir, file)
|
||||
# don't bother summing directories. Won't work.
|
||||
if os.path.isdir(path):
|
||||
continue
|
||||
sum = pungi.util._doCheckSum(path, "sha256", self.logger)
|
||||
outpath = path.replace(basepath, "")
|
||||
sums.append((outpath, sum))
|
||||
|
||||
# Walk the os/images path to get sums of all the files
|
||||
os.path.walk(os.path.join(self.topdir, "images"), getsum, self.topdir + "/")
|
||||
|
||||
# Capture PPC images
|
||||
if self.tree_arch in ["ppc", "ppc64", "ppc64le"]:
|
||||
os.path.walk(os.path.join(self.topdir, "ppc"), getsum, self.topdir + "/")
|
||||
|
||||
# Get a checksum of repomd.xml since it has within it sums for other files
|
||||
repomd = os.path.join(self.topdir, "repodata", "repomd.xml")
|
||||
sum = pungi.util._doCheckSum(repomd, "sha256", self.logger)
|
||||
sums.append((os.path.join("repodata", "repomd.xml"), sum))
|
||||
|
||||
# Now add the sums, and write the config out
|
||||
try:
|
||||
treefile = open(treeinfofile, "w")
|
||||
except IOError:
|
||||
self.logger.error("Could not open .treeinfo for writing: %s" % treefile)
|
||||
sys.exit(1)
|
||||
|
||||
for path, sum in sums:
|
||||
treeinfo.set("checksums", path, sum)
|
||||
|
||||
# Extract name of kernel images
|
||||
pr = re.compile("images-(.*)")
|
||||
images = []
|
||||
for img in treeinfo.sections():
|
||||
if pr.match(img):
|
||||
images.append(pr.match(img).group(1))
|
||||
|
||||
# Extract information from pre-productmd treeinfos 'general' section
|
||||
name = treeinfo.get("general", "family")
|
||||
version = treeinfo.get("general", "version")
|
||||
arch = treeinfo.get("general", "arch")
|
||||
platforms = ",".join(images)
|
||||
timestamp = int(float(treeinfo.get("general", "timestamp")))
|
||||
|
||||
# Set/modify 'general' section
|
||||
treeinfo.set("general", "variant", name)
|
||||
treeinfo.set("general", "timestamp", timestamp)
|
||||
treeinfo.set("general", "packagedir", "Packages")
|
||||
treeinfo.set("general", "repository", ".")
|
||||
treeinfo.set("general", "platforms", platforms)
|
||||
|
||||
# Add 'header' section
|
||||
treeinfo.add_section("header")
|
||||
treeinfo.set("header", "version", "1.0")
|
||||
|
||||
# Add 'release' section
|
||||
treeinfo.add_section("release")
|
||||
treeinfo.set("release", "name", name)
|
||||
treeinfo.set("release", "short", name)
|
||||
treeinfo.set("release", "version", version)
|
||||
|
||||
# Add 'tree' section
|
||||
treeinfo.add_section("tree")
|
||||
treeinfo.set("tree", "arch", arch)
|
||||
treeinfo.set("tree", "build_timestamp", timestamp)
|
||||
treeinfo.set("tree", "platforms", platforms)
|
||||
treeinfo.set("tree", "variants", name)
|
||||
|
||||
# Add 'variant-VARIANTNAME' section
|
||||
variant_section_name = "variant-" + name
|
||||
treeinfo.add_section(variant_section_name)
|
||||
treeinfo.set(variant_section_name, "id", name)
|
||||
treeinfo.set(variant_section_name, "name", name)
|
||||
treeinfo.set(variant_section_name, "packages", "Packages")
|
||||
treeinfo.set(variant_section_name, "repository", ".")
|
||||
treeinfo.set(variant_section_name, "type", "variant")
|
||||
treeinfo.set(variant_section_name, "uid", name)
|
||||
|
||||
treeinfo.write(treefile)
|
||||
treefile.close()
|
||||
|
||||
def doGetRelnotes(self):
|
||||
"""Get extra files from packages in the tree to put in the topdir of
|
||||
the tree."""
|
||||
docsdir = os.path.join(self.workdir, "docs")
|
||||
relnoterpms = self.config.get("pungi", "relnotepkgs").split()
|
||||
|
||||
fileres = []
|
||||
for pattern in self.config.get("pungi", "relnotefilere").split():
|
||||
fileres.append(re.compile(pattern))
|
||||
|
||||
dirres = []
|
||||
for pattern in self.config.get("pungi", "relnotedirre").split():
|
||||
dirres.append(re.compile(pattern))
|
||||
|
||||
pungi.util._ensuredir(
|
||||
docsdir,
|
||||
self.logger,
|
||||
force=self.config.getboolean("pungi", "force"),
|
||||
clean=True,
|
||||
)
|
||||
|
||||
# Expload the packages we list as relnote packages
|
||||
pkgs = os.listdir(
|
||||
os.path.join(self.topdir, self.config.get("pungi", "product_path"))
|
||||
)
|
||||
|
||||
rpm2cpio = ["/usr/bin/rpm2cpio"]
|
||||
cpio = ["cpio", "-imud"]
|
||||
|
||||
for pkg in pkgs:
|
||||
pkgname = pkg.rsplit("-", 2)[0]
|
||||
for relnoterpm in relnoterpms:
|
||||
if pkgname == relnoterpm:
|
||||
extraargs = [
|
||||
os.path.join(
|
||||
self.topdir, self.config.get("pungi", "product_path"), pkg
|
||||
)
|
||||
]
|
||||
try:
|
||||
p1 = subprocess.Popen(
|
||||
rpm2cpio + extraargs, cwd=docsdir, stdout=subprocess.PIPE
|
||||
)
|
||||
(out, err) = subprocess.Popen(
|
||||
cpio,
|
||||
cwd=docsdir,
|
||||
stdin=p1.stdout,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
).communicate()
|
||||
except Exception:
|
||||
self.logger.error("Got an error from rpm2cpio")
|
||||
self.logger.error(err)
|
||||
raise
|
||||
|
||||
if out:
|
||||
self.logger.debug(out)
|
||||
|
||||
# Walk the tree for our files
|
||||
for dirpath, dirname, filelist in os.walk(docsdir):
|
||||
for filename in filelist:
|
||||
for regex in fileres:
|
||||
if regex.match(filename) and not os.path.exists(
|
||||
os.path.join(self.topdir, filename)
|
||||
):
|
||||
self.logger.info("Linking release note file %s" % filename)
|
||||
pungi.util._link(
|
||||
os.path.join(dirpath, filename),
|
||||
os.path.join(self.topdir, filename),
|
||||
self.logger,
|
||||
force=self.config.getboolean("pungi", "force"),
|
||||
)
|
||||
self.common_files.append(filename)
|
||||
|
||||
# Walk the tree for our dirs
|
||||
for dirpath, dirname, filelist in os.walk(docsdir):
|
||||
for directory in dirname:
|
||||
for regex in dirres:
|
||||
if regex.match(directory) and not os.path.exists(
|
||||
os.path.join(self.topdir, directory)
|
||||
):
|
||||
self.logger.info("Copying release note dir %s" % directory)
|
||||
shutil.copytree(
|
||||
os.path.join(dirpath, directory),
|
||||
os.path.join(self.topdir, directory),
|
||||
)
|
||||
|
||||
def _doIsoChecksum(self, path, csumfile):
|
||||
"""Simple function to wrap creating checksums of iso files."""
|
||||
|
||||
try:
|
||||
checkfile = open(csumfile, "a")
|
||||
except IOError:
|
||||
self.logger.error("Could not open checksum file: %s" % csumfile)
|
||||
|
||||
self.logger.info("Generating checksum of %s" % path)
|
||||
checksum = pungi.util._doCheckSum(path, "sha256", self.logger)
|
||||
if checksum:
|
||||
checkfile.write(
|
||||
"SHA256 (%s) = %s\n"
|
||||
% (os.path.basename(path), checksum.replace("sha256:", ""))
|
||||
)
|
||||
else:
|
||||
self.logger.error("Failed to generate checksum for %s" % checkfile)
|
||||
sys.exit(1)
|
||||
checkfile.close()
|
||||
|
||||
def doCreateIsos(self):
|
||||
"""Create iso of the tree."""
|
||||
|
||||
if self.tree_arch.startswith("arm"):
|
||||
self.logger.info("ARCH: arm, not doing doCreateIsos().")
|
||||
return
|
||||
|
||||
ppcbootinfo = "/usr/share/lorax/config_files/ppc"
|
||||
|
||||
pungi.util._ensuredir(
|
||||
self.isodir,
|
||||
self.logger,
|
||||
force=self.config.getboolean("pungi", "force"),
|
||||
clean=True,
|
||||
) # This is risky...
|
||||
|
||||
# setup the base command
|
||||
mkisofs = ["/usr/bin/xorriso", "-as", "mkisofs"]
|
||||
mkisofs.extend(
|
||||
[
|
||||
"-v",
|
||||
"-U",
|
||||
"-J",
|
||||
"--joliet-long",
|
||||
"-R",
|
||||
"-T",
|
||||
"-m",
|
||||
"repoview",
|
||||
"-m",
|
||||
"boot.iso",
|
||||
]
|
||||
) # common mkisofs flags
|
||||
|
||||
x86bootargs = [
|
||||
"-b",
|
||||
"isolinux/isolinux.bin",
|
||||
"-c",
|
||||
"isolinux/boot.cat",
|
||||
"-no-emul-boot",
|
||||
"-boot-load-size",
|
||||
"4",
|
||||
"-boot-info-table",
|
||||
]
|
||||
|
||||
efibootargs = [
|
||||
"-eltorito-alt-boot",
|
||||
"-e",
|
||||
"images/efiboot.img",
|
||||
"-no-emul-boot",
|
||||
]
|
||||
|
||||
macbootargs = [
|
||||
"-eltorito-alt-boot",
|
||||
"-e",
|
||||
"images/macboot.img",
|
||||
"-no-emul-boot",
|
||||
]
|
||||
|
||||
ia64bootargs = ["-b", "images/boot.img", "-no-emul-boot"]
|
||||
|
||||
ppcbootargs = [
|
||||
"-part",
|
||||
"-hfs",
|
||||
"-r",
|
||||
"-l",
|
||||
"-sysid",
|
||||
"PPC",
|
||||
"-no-desktop",
|
||||
"-allow-multidot",
|
||||
"-chrp-boot",
|
||||
]
|
||||
|
||||
ppcbootargs.append("-map")
|
||||
ppcbootargs.append(os.path.join(ppcbootinfo, "mapping"))
|
||||
|
||||
ppcbootargs.append("-hfs-bless") # must be last
|
||||
|
||||
isohybrid = ["/usr/bin/isohybrid"]
|
||||
isohybrid.extend(["--id", "42"])
|
||||
|
||||
# Check the size of the tree
|
||||
# This size checking method may be bunk, accepting patches...
|
||||
if not self.tree_arch == "source":
|
||||
treesize = int(
|
||||
subprocess.Popen(
|
||||
mkisofs + ["-print-size", "-quiet", self.topdir],
|
||||
stdout=subprocess.PIPE,
|
||||
).communicate()[0]
|
||||
)
|
||||
else:
|
||||
srcdir = os.path.join(
|
||||
self.config.get("pungi", "destdir"),
|
||||
self.config.get("pungi", "version"),
|
||||
self.config.get("pungi", "variant"),
|
||||
"source",
|
||||
"SRPMS",
|
||||
)
|
||||
|
||||
treesize = int(
|
||||
subprocess.Popen(
|
||||
mkisofs + ["-print-size", "-quiet", srcdir], stdout=subprocess.PIPE
|
||||
).communicate()[0]
|
||||
)
|
||||
# Size returned is 2KiB clusters or some such. This translates that to MiB.
|
||||
treesize = treesize * 2048 / 1024 / 1024
|
||||
|
||||
if treesize > 700: # we're larger than a 700meg CD
|
||||
isoname = "%s-DVD-%s-%s.iso" % (
|
||||
self.config.get("pungi", "iso_basename"),
|
||||
self.tree_arch,
|
||||
self.config.get("pungi", "version"),
|
||||
)
|
||||
else:
|
||||
isoname = "%s-%s-%s.iso" % (
|
||||
self.config.get("pungi", "iso_basename"),
|
||||
self.tree_arch,
|
||||
self.config.get("pungi", "version"),
|
||||
)
|
||||
|
||||
isofile = os.path.join(self.isodir, isoname)
|
||||
|
||||
# setup the extra mkisofs args
|
||||
extraargs = []
|
||||
|
||||
if self.tree_arch == "i386" or self.tree_arch == "x86_64":
|
||||
extraargs.extend(x86bootargs)
|
||||
if self.tree_arch == "x86_64":
|
||||
extraargs.extend(efibootargs)
|
||||
isohybrid.append("-u")
|
||||
if (not self.is_nomacboot) and os.path.exists(
|
||||
os.path.join(self.topdir, "images", "macboot.img")
|
||||
):
|
||||
extraargs.extend(macbootargs)
|
||||
isohybrid.append("-m")
|
||||
elif self.tree_arch == "ia64":
|
||||
extraargs.extend(ia64bootargs)
|
||||
elif self.tree_arch.startswith("ppc"):
|
||||
extraargs.extend(ppcbootargs)
|
||||
extraargs.append(os.path.join(self.topdir, "ppc/mac"))
|
||||
elif self.tree_arch.startswith("aarch64"):
|
||||
extraargs.extend(efibootargs)
|
||||
|
||||
# NOTE: if this doesn't match what's in the bootloader config, the
|
||||
# image won't be bootable!
|
||||
extraargs.append("-V")
|
||||
extraargs.append(self._shortenVolID())
|
||||
|
||||
extraargs.extend(["-o", isofile])
|
||||
|
||||
isohybrid.append(isofile)
|
||||
|
||||
if not self.tree_arch == "source":
|
||||
extraargs.append(self.topdir)
|
||||
else:
|
||||
extraargs.append(os.path.join(self.archdir, "SRPMS"))
|
||||
|
||||
if self.config.get("pungi", "no_dvd") == "False":
|
||||
# run the command
|
||||
pungi.util._doRunCommand(mkisofs + extraargs, self.logger)
|
||||
|
||||
# Run isohybrid on the iso as long as its not the source iso
|
||||
if os.path.exists("/usr/bin/isohybrid") and not self.tree_arch == "source":
|
||||
pungi.util._doRunCommand(isohybrid, self.logger)
|
||||
|
||||
# implant md5 for mediacheck on all but source arches
|
||||
if not self.tree_arch == "source":
|
||||
pungi.util._doRunCommand(
|
||||
["/usr/bin/implantisomd5", isofile], self.logger
|
||||
)
|
||||
|
||||
# shove the checksum into a file
|
||||
csumfile = os.path.join(
|
||||
self.isodir,
|
||||
"%s-%s-%s-CHECKSUM"
|
||||
% (
|
||||
self.config.get("pungi", "iso_basename"),
|
||||
self.config.get("pungi", "version"),
|
||||
self.tree_arch,
|
||||
),
|
||||
)
|
||||
# Write a line about what checksums are used.
|
||||
# sha256sum is magic...
|
||||
file = open(csumfile, "w")
|
||||
file.write("# The image checksum(s) are generated with sha256sum.\n")
|
||||
file.close()
|
||||
if self.config.get("pungi", "no_dvd") == "False":
|
||||
self._doIsoChecksum(isofile, csumfile)
|
||||
|
||||
# Write out a line describing the media
|
||||
self.writeinfo("media: %s" % self.mkrelative(isofile))
|
||||
|
||||
# Now link the boot iso
|
||||
if not self.tree_arch == "source" and os.path.exists(
|
||||
os.path.join(self.topdir, "images", "boot.iso")
|
||||
):
|
||||
isoname = "%s-netinst-%s-%s.iso" % (
|
||||
self.config.get("pungi", "iso_basename"),
|
||||
self.tree_arch,
|
||||
self.config.get("pungi", "version"),
|
||||
)
|
||||
isofile = os.path.join(self.isodir, isoname)
|
||||
|
||||
# link the boot iso to the iso dir
|
||||
pungi.util._link(
|
||||
os.path.join(self.topdir, "images", "boot.iso"), isofile, self.logger
|
||||
)
|
||||
|
||||
# shove the checksum into a file
|
||||
self._doIsoChecksum(isofile, csumfile)
|
||||
|
||||
self.logger.info("CreateIsos is done.")
|
||||
@@ -14,7 +14,6 @@ from __future__ import absolute_import
|
|||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import selinux
|
||||
import sys
|
||||
|
||||
from argparse import ArgumentParser, Action
|
||||
|
|
@@ -35,37 +34,6 @@ def get_arguments(config):
|
|||
parser.add_argument("--version", action="version", version=get_full_version())
|
||||
|
||||
# Pulled in from config file to be cli options as part of pykickstart conversion
|
||||
parser.add_argument(
|
||||
"--name",
|
||||
dest="family",
|
||||
type=str,
|
||||
action=SetConfig,
|
||||
help='the name for your distribution (defaults to "Fedora"), DEPRECATED',
|
||||
)
|
||||
parser.add_argument(
|
||||
"--family",
|
||||
dest="family",
|
||||
action=SetConfig,
|
||||
help='the family name for your distribution (defaults to "Fedora")',
|
||||
)
|
||||
parser.add_argument(
|
||||
"--ver",
|
||||
dest="version",
|
||||
action=SetConfig,
|
||||
help="the version of your distribution (defaults to datestamp)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--flavor",
|
||||
dest="variant",
|
||||
action=SetConfig,
|
||||
help="the flavor of your distribution spin (optional), DEPRECATED",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--variant",
|
||||
dest="variant",
|
||||
action=SetConfig,
|
||||
help="the variant of your distribution spin (optional)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--destdir",
|
||||
dest="destdir",
|
||||
|
|
@@ -78,12 +46,6 @@ def get_arguments(config):
|
|||
action=SetConfig,
|
||||
help="package cache directory (defaults to /var/cache/pungi)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--bugurl",
|
||||
dest="bugurl",
|
||||
action=SetConfig,
|
||||
help="the url for your bug system (defaults to http://bugzilla.redhat.com)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--selfhosting",
|
||||
action="store_true",
|
||||
|
|
@@ -114,12 +76,6 @@ def get_arguments(config):
|
|||
dest="nodownload",
|
||||
help="disable downloading of packages. instead, print the package URLs (optional)", # noqa: E501
|
||||
)
|
||||
parser.add_argument(
|
||||
"--norelnotes",
|
||||
action="store_true",
|
||||
dest="norelnotes",
|
||||
help="disable gathering of release notes (optional); DEPRECATED",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--nogreedy",
|
||||
action="store_true",
|
||||
|
|
@@ -133,25 +89,12 @@ def get_arguments(config):
|
|||
default=True,
|
||||
help="disable resolving dependencies",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--sourceisos",
|
||||
default=False,
|
||||
action="store_true",
|
||||
dest="sourceisos",
|
||||
help="Create the source isos (other arch runs must be done)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--force",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Force reuse of an existing destination directory (will overwrite files)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--isfinal",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Specify this is a GA tree, which causes betanag to be turned off during install", # noqa: E501
|
||||
)
|
||||
parser.add_argument(
|
||||
"--nohash",
|
||||
default=False,
|
||||
|
|
@@ -216,13 +159,6 @@ def get_arguments(config):
|
|||
required=True,
|
||||
help="Path to kickstart config file",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--all-stages",
|
||||
action="store_true",
|
||||
default=True,
|
||||
dest="do_all",
|
||||
help="Enable ALL stages",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-G",
|
||||
action="store_true",
|
||||
|
|
@@ -230,53 +166,6 @@ def get_arguments(config):
|
|||
dest="do_gather",
|
||||
help="Flag to enable processing the Gather stage",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-C",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="do_createrepo",
|
||||
help="Flag to enable processing the Createrepo stage",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-B",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="do_buildinstall",
|
||||
help="Flag to enable processing the BuildInstall stage",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-I",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="do_createiso",
|
||||
help="Flag to enable processing the CreateISO stage",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--relnotepkgs",
|
||||
dest="relnotepkgs",
|
||||
action=SetConfig,
|
||||
help="Rpms which contain the release notes",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--relnotefilere",
|
||||
dest="relnotefilere",
|
||||
action=SetConfig,
|
||||
help="Which files are the release notes -- GPL EULA",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--nomacboot",
|
||||
action="store_true",
|
||||
dest="nomacboot",
|
||||
help="disable setting up macboot as no hfs support ",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--rootfs-size",
|
||||
dest="rootfs_size",
|
||||
action=SetConfig,
|
||||
default=False,
|
||||
help="Size of root filesystem in GiB. If not specified, use lorax default value", # noqa: E501
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--pungirc",
|
||||
|
|
@@ -286,36 +175,7 @@ def get_arguments(config):
|
|||
help="Read pungi options from config file ",
|
||||
)
|
||||
|
||||
opts = parser.parse_args()
|
||||
|
||||
if (
|
||||
not config.get("pungi", "variant").isalnum()
|
||||
and not config.get("pungi", "variant") == ""
|
||||
):
|
||||
parser.error("Variant must be alphanumeric")
|
||||
|
||||
if (
|
||||
opts.do_gather
|
||||
or opts.do_createrepo
|
||||
or opts.do_buildinstall
|
||||
or opts.do_createiso
|
||||
):
|
||||
opts.do_all = False
|
||||
|
||||
if opts.arch and (opts.do_all or opts.do_buildinstall):
|
||||
parser.error("Cannot override arch while the BuildInstall stage is enabled")
|
||||
|
||||
# set the iso_basename.
|
||||
if not config.get("pungi", "variant") == "":
|
||||
config.set(
|
||||
"pungi",
|
||||
"iso_basename",
|
||||
"%s-%s" % (config.get("pungi", "family"), config.get("pungi", "variant")),
|
||||
)
|
||||
else:
|
||||
config.set("pungi", "iso_basename", config.get("pungi", "family"))
|
||||
|
||||
return opts
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main():
|
||||
|
|
@@ -327,33 +187,9 @@ def main():
|
|||
config = pungi.config.Config(pungirc=opts.pungirc)
|
||||
opts = get_arguments(config)
|
||||
|
||||
# You must be this high to ride if you're going to do root tasks
|
||||
if os.geteuid() != 0 and (opts.do_all or opts.do_buildinstall):
|
||||
print("You must run pungi as root", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if opts.do_all or opts.do_buildinstall:
|
||||
try:
|
||||
enforcing = selinux.security_getenforce()
|
||||
except Exception:
|
||||
print("INFO: selinux disabled")
|
||||
enforcing = False
|
||||
if enforcing:
|
||||
print(
|
||||
"WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled." # noqa: E501
|
||||
)
|
||||
print("Consider running with setenforce 0.")
|
||||
|
||||
# Set up the kickstart parser and pass in the kickstart file we were handed
|
||||
ksparser = pungi.ks.get_ksparser(ks_path=opts.config)
|
||||
|
||||
if opts.sourceisos:
|
||||
config.set("pungi", "arch", "source")
|
||||
|
||||
for part in ksparser.handler.partition.partitions:
|
||||
if part.mountpoint == "iso":
|
||||
config.set("pungi", "cdsize", str(part.size))
|
||||
|
||||
config.set("pungi", "force", str(opts.force))
|
||||
|
||||
if config.get("pungi", "workdirbase") == "/work":
|
||||
|
|
@@ -406,8 +242,6 @@ def main():
|
|||
else:
|
||||
config.set("pungi", "greedy", "all")
|
||||
config.set("pungi", "resolve_deps", str(bool(opts.resolve_deps)))
|
||||
if opts.isfinal:
|
||||
config.set("pungi", "isfinal", "True")
|
||||
if opts.nohash:
|
||||
config.set("pungi", "nohash", "True")
|
||||
if opts.full_archlist:
|
||||
|
|
@@ -418,96 +252,53 @@ def main():
|
|||
config.set("pungi", "multilib", " ".join(opts.multilib))
|
||||
if opts.lookaside_repos:
|
||||
config.set("pungi", "lookaside_repos", " ".join(opts.lookaside_repos))
|
||||
if opts.no_dvd:
|
||||
config.set("pungi", "no_dvd", "True")
|
||||
if opts.nomacboot:
|
||||
config.set("pungi", "nomacboot", "True")
|
||||
config.set("pungi", "fulltree", str(bool(opts.fulltree)))
|
||||
config.set("pungi", "selfhosting", str(bool(opts.selfhosting)))
|
||||
config.set("pungi", "nosource", str(bool(opts.nosource)))
|
||||
config.set("pungi", "nodebuginfo", str(bool(opts.nodebuginfo)))
|
||||
|
||||
if opts.lorax_conf:
|
||||
config.set("lorax", "conf_file", opts.lorax_conf)
|
||||
if opts.installpkgs:
|
||||
config.set("lorax", "installpkgs", " ".join(opts.installpkgs))
|
||||
|
||||
# Actually do work.
|
||||
mypungi = pungi.gather.Pungi(config, ksparser)
|
||||
|
||||
with mypungi.yumlock:
|
||||
if not opts.sourceisos:
|
||||
if opts.do_all or opts.do_gather or opts.do_buildinstall:
|
||||
mypungi._inityum() # initialize the yum object for things that need it
|
||||
if opts.do_all or opts.do_gather:
|
||||
mypungi.gather()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_packages():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("RPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadPackages()
|
||||
mypungi.makeCompsFile()
|
||||
if not opts.nodebuginfo:
|
||||
mypungi.getDebuginfoList()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_debuginfo():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write(
|
||||
"DEBUGINFO%s: %s\n" % (flags_str, line["path"])
|
||||
)
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadDebuginfo()
|
||||
if not opts.nosource:
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_srpms():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("SRPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadSRPMs()
|
||||
mypungi._inityum() # initialize the yum object for things that need it
|
||||
mypungi.gather()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_packages():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("RPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadPackages()
|
||||
mypungi.makeCompsFile()
|
||||
if not opts.nodebuginfo:
|
||||
mypungi.getDebuginfoList()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_debuginfo():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadDebuginfo()
|
||||
if not opts.nosource:
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_srpms():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("SRPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadSRPMs()
|
||||
|
||||
print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
|
||||
if not opts.nodebuginfo:
|
||||
print(
|
||||
"DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024**2)
|
||||
)
|
||||
if not opts.nosource:
|
||||
print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))
|
||||
|
||||
# Furthermore (but without the yumlock...)
|
||||
if not opts.sourceisos:
|
||||
if opts.do_all or opts.do_createrepo:
|
||||
mypungi.doCreaterepo()
|
||||
|
||||
if opts.do_all or opts.do_buildinstall:
|
||||
if not opts.norelnotes:
|
||||
mypungi.doGetRelnotes()
|
||||
mypungi.doBuildinstall()
|
||||
|
||||
if opts.do_all or opts.do_createiso:
|
||||
mypungi.doCreateIsos()
|
||||
|
||||
# Do things slightly different for src.
|
||||
if opts.sourceisos:
|
||||
# we already have all the content gathered
|
||||
mypungi.topdir = os.path.join(
|
||||
config.get("pungi", "destdir"),
|
||||
config.get("pungi", "version"),
|
||||
config.get("pungi", "variant"),
|
||||
"source",
|
||||
"SRPMS",
|
||||
)
|
||||
mypungi.doCreaterepo(comps=False)
|
||||
if opts.do_all or opts.do_createiso:
|
||||
mypungi.doCreateIsos()
|
||||
print("RPM size: %s MiB" % (mypungi.size_packages() / 1024**2))
|
||||
if not opts.nodebuginfo:
|
||||
print("DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024**2))
|
||||
if not opts.nosource:
|
||||
print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024**2))
|
||||
|
||||
print("All done!")
|
||||
@@ -136,17 +136,6 @@ class PungiWrapper(object):
|
|||
# want it mandatory here
|
||||
cmd.append("--destdir=%s" % destdir)
|
||||
|
||||
# name
|
||||
cmd.append("--name=%s" % name)
|
||||
|
||||
# version; optional, defaults to datestamp
|
||||
if version:
|
||||
cmd.append("--ver=%s" % version)
|
||||
|
||||
# rhel variant; optional
|
||||
if flavor:
|
||||
cmd.append("--flavor=%s" % flavor)
|
||||
|
||||
# turn selfhosting on
|
||||
if selfhosting:
|
||||
cmd.append("--selfhosting")
|
||||
|
|
@@ -293,8 +282,6 @@ class PungiWrapper(object):
|
|||
ksparser = ks.get_ksparser(ks_path=ks_file)
|
||||
cfg = config.Config()
|
||||
cfg.set("pungi", "destdir", destdir)
|
||||
cfg.set("pungi", "family", name)
|
||||
cfg.set("pungi", "iso_basename", name)
|
||||
cfg.set("pungi", "fulltree", str(fulltree))
|
||||
cfg.set("pungi", "selfhosting", str(selfhosting))
|
||||
cfg.set("pungi", "cachedir", cache_dir)