... and redundant checks for booleans against True and False. P.I.N.J.! (Python Is Not Java!)
--- sysdiff.py-orig 2022-05-03 15:23:27.830589810 +1200
+++ sysdiff.py 2022-05-03 15:38:24.052224454 +1200
@@ -1,4 +1,4 @@
-#!/usr/bin/python3.7
+#!/usr/bin/python3
#
# Copyright (c) 2020, 2021, Oracle and/or its affiliates.
#
@@ -48,15 +48,15 @@
self.preserve = None
self.owner = get_user(si.st_uid)
- if (self.owner is None):
+ if self.owner is None:
self.comment += ' Unmatched uid: {}'.format(si.st_uid)
self.group = get_group(si.st_gid)
- if (self.group is None):
+ if self.group is None:
self.comment += ' Unmatched gid: {}'.format(si.st_gid)
#
- # We cannot distinguish between file and hardlink for 'new' files -
+ # We cannot distinguish between file and hardlink for 'new' files -
# all hardlinks for given file are identical from the OS
# point of view - so we initially set every regular file as 'file',
# process the contents to take advantage of known links, and,
@@ -69,33 +69,33 @@
# are plain symlinks into /devices/, not special files. All of these
# files will end up as the 'unhandled' type, as we cannot package them.
#
- if (stat.S_ISLNK(si.st_mode)):
+ if stat.S_ISLNK(si.st_mode):
self.type = 'link'
self.target = os.readlink(entry.path)
- elif (stat.S_ISDIR(si.st_mode)):
+ elif stat.S_ISDIR(si.st_mode):
self.type = 'dir'
- elif (stat.S_ISREG(si.st_mode)):
+ elif stat.S_ISREG(si.st_mode):
self.type = 'file'
- if (self.nlink > 1):
+ if self.nlink > 1:
self.type='hardlink'
- if (self.inode in hardlinks):
+ if self.inode in hardlinks:
self.target=hardlinks[self.inode]
- elif (stat.S_ISFIFO(si.st_mode)):
+ elif stat.S_ISFIFO(si.st_mode):
self.type = 'unhandled'
self.comment += "file type: FIFO"
- elif (stat.S_ISCHR(si.st_mode)):
+ elif stat.S_ISCHR(si.st_mode):
self.type = 'unhandled'
self.comment += "file type: character device"
- elif (stat.S_ISBLK(si.st_mode)):
+ elif stat.S_ISBLK(si.st_mode):
self.type = 'unhandled'
self.comment += "file type: block device"
- elif (stat.S_ISSOCK(si.st_mode)):
+ elif stat.S_ISSOCK(si.st_mode):
self.type = 'unhandled'
self.comment += "file type: socket"
- elif (stat.S_ISDOOR(si.st_mode)):
+ elif stat.S_ISDOOR(si.st_mode):
self.type = 'unhandled'
self.comment += "file type: door"
- elif (stat.S_ISPORT(si.st_mode)):
+ elif stat.S_ISPORT(si.st_mode):
self.type = 'unhandled'
self.comment += "file type: event port"
@@ -111,7 +111,7 @@
matched_files_fname = 'matched_files'
pkg_fname = 'pkg'
param_fname = 'params'
-param_section = 'params'
+param_section = 'params'
params_read = False
pkg_file = None
defcache = 'cache'
@@ -147,7 +147,7 @@
def parse_group():
for line in fileinput.input(zone_root + "/etc/group"):
s = line.split(':')
- if (s[2] in group):
+ if s[2] in group:
print("Duplicate entry in /etc/group|" + line + "|\n")
continue
group[s[2]] = s[0]
@@ -155,42 +155,42 @@
def parse_passwd():
for line in fileinput.input(zone_root + "/etc/passwd"):
s = line.split(':')
- if (s[2] in passwd):
+ if s[2] in passwd:
print("Duplicate entry in /etc/passwd|" + line + "|\n")
continue
passwd[s[2]] = s[0]
def get_group(gid):
- if (str(gid) in group):
+ if str(gid) in group:
return group[str(gid)]
command = ['getent', 'group', str(gid)]
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
- if (quiet == False):
+ if sp.returncode != 0:
+ if not quiet:
print("command\n", " ".join(command), "\nfailed")
return None
s = sp.stdout.split(':')
- if (len(s) == 4):
+ if len(s) == 4:
group[str(gid)] = s[0]
return s[0]
return None
def get_user(uid):
- if (str(uid) in passwd):
+ if str(uid) in passwd:
return passwd[str(uid)]
command = ['getent', 'passwd', str(uid)]
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
- if (quiet == False):
+ if sp.returncode != 0:
+ if not quiet:
print("command\n", " ".join(command), "\nfailed")
return None
s = sp.stdout.split(':')
- if (len(s) == 7):
+ if len(s) == 7:
passwd[str(uid)] = s[0]
return s[0]
@@ -200,29 +200,29 @@
sp = subprocess.run(("/usr/bin/sum", path),
capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
sys.stderr.write('sum ' + path + ' failed\n')
sys.stderr.write(sp.stderr)
exit(1)
s = sp.stdout.split(' ')
-
+
return s[0]
-
+
def parse_contents(cpath):
for line in fileinput.input(cpath):
line = line.rstrip('\n')
#
# Skip comments
#
- if (line[0] == "#"):
+ if line[0] == "#":
continue
#
# New style entries always start with /
#
- if (line[0] == "/"):
+ if line[0] == "/":
#
# We split the entry into path, type, class and remainder,
# which will be further split depending on type
@@ -231,7 +231,7 @@
path = s[0]
entry = ContentsEntry()
entry.ftype = s[1]
- if (entry.ftype in fileTypes):
+ if entry.ftype in fileTypes:
s = s[3].split(' ', 6)
entry.mode = int(s[0], 8)
entry.owner = s[1]
@@ -240,13 +240,13 @@
entry.cksum = s[4]
entry.modtime = int(s[5])
entry.pkglist = s[6].split(' ')
- elif (entry.ftype in dirTypes):
+ elif entry.ftype in dirTypes:
s = s[3].split(' ', 3)
entry.mode = int(s[0], 8)
entry.owner = s[1]
entry.group = s[2]
entry.pkglist = s[3].split(' ')
- elif (entry.ftype in devTypes):
+ elif entry.ftype in devTypes:
s = s[3].split(' ', 5)
entry.major = s[0]
entry.minor = s[1]
@@ -254,16 +254,16 @@
entry.owner = s[3]
entry.group = s[4]
entry.pkglist = s[5].split(' ')
- elif (entry.ftype in linkTypes):
+ elif entry.ftype in linkTypes:
entry.pkglist = s[3].split(' ')
- #if (entry.ftype == "l"):
+ #if entry.ftype == "l":
# print(entry.__dict__)
# exit()
else:
#
# Parse old style entries
#
- if (quiet == False):
+ if not quiet:
print("Parsing old style entry |" + line + "|\n")
s = line.split(' ', 3)
entry = ContentsEntry()
@@ -271,13 +271,13 @@
entry.path = s[2]
entry.pkglist = s[3].split(' ')
- if (entry.ftype in linkTypes):
+ if entry.ftype in linkTypes:
#
# Link entries have path in format path=rpath - we need to split
# them for easier matching
#
s = path.split('=')
- if (len(s) != 2):
+ if len(s) != 2:
sys.stdout.flush()
print("Invalid link entry |" + path + "|\n", file=sys.stderr)
exit(1)
@@ -288,7 +288,7 @@
# This should never happen - there can be only single entry for each
# path
#
- if (path in contents):
+ if path in contents:
sys.stdout.flush()
print("Duplicate entry for |" + path + "|\n", file=sys.stderr)
print(entry.__dict__, file=sys.stderr)
@@ -298,9 +298,9 @@
# We check the pkglist for given entry against the 'clean' list
# to find non-Solaris pkgs installed
#
- if (len(pkglist) != 0):
+ if len(pkglist) != 0:
for d in entry.pkglist:
- if (d.split(':')[0].lstrip('*') not in pkglist):
+ if d.split(':')[0].lstrip('*') not in pkglist:
newpkgs.add(d)
contents[path] = entry
@@ -314,7 +314,7 @@
path = entry.path[len(zone_root):]
f = FileDesc(path, si, entry)
new_files[entry.path[len(zone_root):]] = f
- if (entry.is_dir(follow_symlinks=False)):
+ if entry.is_dir(follow_symlinks=False):
walk_new_dirtree(entry.path)
continue
@@ -338,9 +338,9 @@
# We skip the excluded dirs first - no point processing them
# any further
#
- if (entry.is_dir(follow_symlinks=False)):
- if (path in excludeDirs):
- if (quiet == False):
+ if entry.is_dir(follow_symlinks=False):
+ if path in excludeDirs:
+ if not quiet:
print("Skipping dir |" + path + "|")
continue
@@ -351,9 +351,9 @@
# we just record it as 'new'. For directories we do a simple
# traversal, just adding the contents to new files as well.
#
- if (path not in contents):
+ if path not in contents:
new_files[path] = f
- if (entry.is_dir(follow_symlinks=False)):
+ if entry.is_dir(follow_symlinks=False):
walk_new_dirtree(entry.path)
continue
continue
@@ -362,15 +362,15 @@
# attributes, only the target. We can handle them ahead of
# anything else
#
- if (entry.is_symlink()):
- if (contents[path].ftype != 's'):
+ if entry.is_symlink():
+ if contents[path].ftype != 's':
f.comment += ' S ' + ' file replaced by link'
mod_files[path] = f
continue
- if (os.readlink(entry.path) == contents[path].target):
+ if os.readlink(entry.path) == contents[path].target:
matched_files[path] = f
continue
- f.comment += ' S |' + os.readlink(entry.path) + '| vs |'\
+ f.comment += ' S |' + os.readlink(entry.path) + '| vs |' \
+ contents[path].target + '|\n'
mod_files[path] = f
continue
@@ -380,7 +380,7 @@
# modified, which is unexpected, and the volatile files, which
# are expected to change.
#
- if (contents[path].ftype in volTypes):
+ if contents[path].ftype in volTypes:
modified_files = mod_vol_files
f.preserve = 'true'
else:
@@ -394,7 +394,7 @@
# the list where the target entry belongs, and use it for
# subsequent hardlinks.
#
- if (contents[path].ftype == 'l'):
+ if contents[path].ftype == 'l':
first_pass = False
ldir = dir[len(zone_root):]
rpath = os.path.normpath(
@@ -403,31 +403,31 @@
# If the link target is not in contents, or it's not a file
# it's a fatal error because the database is corrupted.
#
- if (rpath not in contents):
+ if rpath not in contents:
sys.stdout.flush()
print(rpath + " not found in contents.",
file=sys.stderr)
exit(1)
- if (contents[rpath].ftype not in fileTypes):
+ if contents[rpath].ftype not in fileTypes:
sys.stdout.flush()
print(path + " target path " + rpath
+ " is not a file, type: " + contents[rpath].ftype,
file=sys.stderr)
exit(1)
- if (f.nlink == 1):
- f.comment += " plain file replaced a hardlink to "\
+ if f.nlink == 1:
+ f.comment += " plain file replaced a hardlink to " \
+ contents[path].target
modified_files[path] = f
continue
- if (contents[rpath].inode is None):
+ if contents[rpath].inode is None:
tsi = os.stat(zone_root + rpath)
contents[rpath].inode = (tsi.st_dev, tsi.st_ino)
- if (contents[rpath].inode not in hardlinks):
+ if contents[rpath].inode not in hardlinks:
hardlinks[contents[rpath].inode] = rpath
- elif (hardlinks[contents[rpath].inode] != rpath):
+ elif hardlinks[contents[rpath].inode] != rpath:
sys.stdout.flush()
print("hardlinks table contains different entry for"
+ rpath + " : "
@@ -440,15 +440,15 @@
first_pass = True
contents[rpath].list = modified_files
- if (f.inode == contents[rpath].inode):
+ if f.inode == contents[rpath].inode:
f.type = 'hardlink'
f.target = contents[path].target
- if (first_pass != True):
+ if not first_pass:
contents[rpath].list[path] = f
continue
else:
f.type = 'hardlink'
- f.comment +=\
+ f.comment += \
" hardlink pointing to different target"
modified_files[path] = f
continue
@@ -459,13 +459,13 @@
# the recursion, then pass through the attributes matching
# and handle the directory entry itself
#
- if (entry.is_dir(follow_symlinks=False)):
+ if entry.is_dir(follow_symlinks=False):
#
# It might happen that a directory has replaced a regular
# file registered in contents - hopefully it should not
# happen often.
#
- if (contents[rpath].ftype not in dirTypes):
+ if contents[rpath].ftype not in dirTypes:
modified_files[path] = f
walk_new_dirtree(entry.path)
continue
@@ -476,9 +476,9 @@
# volatile and editable files probably need to be treated
# more leniently - we expect them to not match.
#
- if (f.owner is not None):
- if (f.owner != contents[rpath].owner):
- f.comment += " owner differs " + contents[rpath].owner\
+ if f.owner is not None:
+ if f.owner != contents[rpath].owner:
+ f.comment += " owner differs " + contents[rpath].owner \
+ ' vs ' + f.owner
modified_files[path] = f
continue
@@ -489,9 +489,9 @@
#
modified_files[path] = f
continue
- if (f.group is not None):
- if (f.group != contents[rpath].group):
- f.comment += " group differs " + contents[rpath].group\
+ if f.group is not None:
+ if f.group != contents[rpath].group:
+ f.comment += " group differs " + contents[rpath].group \
+ ' vs ' + f.group
modified_files[path] = f
continue
@@ -502,59 +502,58 @@
#
modified_files[path] = f
continue
- if (f.mode != contents[rpath].mode):
- f.comment += " mode differs %04o vs %04o"\
+ if f.mode != contents[rpath].mode:
+ f.comment += " mode differs %04o vs %04o" \
% (contents[rpath].mode, f.mode)
modified_files[path] = f
continue
- if (entry.is_dir(follow_symlinks=False)):
+ if entry.is_dir(follow_symlinks=False):
matched_files[path] = f
continue
- if (stat.S_ISCHR(si.st_mode)):
- if (contents[rpath].ftype != 'c'):
- f.comment += ' file type mismatch char device vs '\
+ if stat.S_ISCHR(si.st_mode):
+ if contents[rpath].ftype != 'c':
+ f.comment += ' file type mismatch char device vs ' \
+ contents[rpath].ftype
modified_files[path] = f
continue
- if (os.major(si.rdev) != contents[rpath].major):
+ if os.major(si.rdev) != contents[rpath].major:
modified_files[path] = f
continue
- if (os.minor(si.rdev) != contents[rpath].minor):
+ if os.minor(si.rdev) != contents[rpath].minor:
modified_files[path] = f
continue
matched_files[path] = f
continue
- if (stat.S_ISBLK(si.st_mode)):
- if (contents[rpath].ftype != 'b'):
- f.comment += ' file type mismatch block device vs '\
+ if stat.S_ISBLK(si.st_mode):
+ if contents[rpath].ftype != 'b':
+ f.comment += ' file type mismatch block device vs ' \
+ contents[rpath].ftype
modified_files[path] = f
continue
- if (os.major(si.rdev) != contents[rpath].major):
+ if os.major(si.rdev) != contents[rpath].major:
modified_files[path] = f
continue
- if (os.minor(si.rdev) != contents[rpath].minor):
+ if os.minor(si.rdev) != contents[rpath].minor:
modified_files[path] = f
continue
matched_files[path] = f
continue
- if (stat.S_ISREG(si.st_mode)):
- if (si.st_size != contents[rpath].size):
- f.comment += " size mismatch %d vs %d"\
+ if stat.S_ISREG(si.st_mode):
+ if si.st_size != contents[rpath].size:
+ f.comment += " size mismatch %d vs %d" \
% (contents[rpath].size, si.st_size)
modified_files[path] = f
continue
- if (f.modtime != contents[rpath].modtime):
+ if f.modtime != contents[rpath].modtime:
f.comment += " timestamp mismatch {}".format(
contents[rpath].modtime) + " vs {}".format(
f.modtime)
modified_files[path] = f
continue
- if (enable_checksums == True or
- contents[path].ftype in volTypes):
+ if enable_checksums or contents[path].ftype in volTypes:
cksum = get_cksum(entry.path)
- if (cksum != contents[rpath].cksum):
- f.comment += "checksum differs %s vs %s"\
+ if cksum != contents[rpath].cksum:
+ f.comment += "checksum differs %s vs %s" \
% (contents[rpath].cksum, cksum)
modified_files[path] = f
continue
@@ -585,8 +584,8 @@
for files in (matched_files, mod_files, mod_vol_files, new_files):
for path in list(files):
e = files[path]
- if ((e.type == 'hardlink') and (e.target is None)):
- if (e.inode in hardlinks):
+ if e.type == 'hardlink' and e.target is None:
+ if e.inode in hardlinks:
e.target = os.path.relpath(hardlinks[e.inode],
os.path.split(path)[0])
else:
@@ -596,7 +595,7 @@
def print_out_files(filename, filelist, known_list = None):
global preserve_age, now, plain_output
- if (preserve_age > 0):
+ if preserve_age > 0:
cutoff = now - (preserve_age * 86400)
else:
cutoff = 0
@@ -604,33 +603,33 @@
try:
with open(filename, 'a') as f:
for path in sorted(list(filelist)):
- if (known_list is not None):
- if (path in known_list):
+ if known_list is not None:
+ if path in known_list:
continue
- if (plain_output == True):
+ if plain_output:
f.write(path + '\n')
continue
e = filelist[path]
- if (e.comment):
+ if e.comment:
f.write('# ' + e.comment + '\n')
- if (e.type == 'unhandled'):
+ if e.type == 'unhandled':
f.write('# ')
f.write(e.type)
f.write(' path="{}"'.format(path[1:]))
- if (e.type == 'link'):
+ if e.type == 'link':
f.write(' target="{}"'.format(e.target))
- elif (e.type == 'hardlink'):
+ elif e.type == 'hardlink':
f.write(' target="{}"'.format(e.target))
else:
f.write(' mode=%04o' % e.mode)
- if (e.owner is not None):
+ if e.owner is not None:
f.write(' owner=' + e.owner)
- if (e.group is not None):
+ if e.group is not None:
f.write(' group=' + e.group)
- if (e.preserve == 'true'):
+ if e.preserve == 'true':
f.write(' preserve=true')
- elif (cutoff > 0):
- if (e.modtime > cutoff):
+ elif cutoff > 0:
+ if e.modtime > cutoff:
f.write(' preserve=true')
f.write('\n')
@@ -645,7 +644,7 @@
global matched_files_fname, zone_name, plain_output
global pkg_fmri, pkg_summary, pkg_desc
- if (plain_output == True):
+ if plain_output:
ext = 'out'
filename = "{}/{}.{}".format(output_dir, new_files_fname, ext)
else:
@@ -660,11 +659,11 @@
'pkg_file' : filename}
try:
- if (plain_output != True):
+ if not plain_output:
with open(filename, 'x') as f:
f.write('set name=pkg.fmri value={}\n'.format(pkg_fmri))
f.write('set name=pkg.summary value="{}"\n'.format(pkg_summary))
- if (pkg_desc is not None):
+ if pkg_desc is not None:
f.write(
'set name=pkg.description value="{}"\n'.format(
pkg_desc))
@@ -672,19 +671,19 @@
sys.stdout.flush()
print(error, file=sys.stderr)
- if (quiet == False):
+ if not quiet:
print('Writing ' + filename)
print_out_files(filename, new_files, knownNewFiles)
filename = "{}/{}.{}".format(output_dir, mod_files_fname, ext)
- if (quiet == False):
+ if not quiet:
print('Writing ' + filename)
print_out_files(filename, mod_files, knownModFiles)
filename = "{}/{}.{}".format(output_dir, mod_vol_files_fname, ext)
- if (quiet == False):
+ if not quiet:
print('Writing ' + filename)
print_out_files(filename, mod_vol_files)
filename = "{}/{}.{}".format(output_dir, matched_files_fname, ext)
- if (quiet == False):
+ if not quiet:
print('Writing ' + filename)
print_out_files(filename, matched_files)
try:
@@ -695,7 +694,7 @@
sys.stdout.flush()
print(error, file=sys.stderr)
- if (plain_output == True):
+ if plain_output:
print("File lists generated in: " + output_dir)
else:
print("IPS package manifest generated:\n" +
@@ -709,7 +708,7 @@
def parse_ignore(ignore):
for p in ignore.split(','):
p = p.strip().rstrip('/')
- if (p[0] != '/'):
+ if p[0] != '/':
sys.stdout.flush()
print("excluded dir\n" + p + "\nis not an absolute path",
file=sys.stderr)
@@ -717,18 +716,18 @@
excludeDirs.add(p)
def load_excludeDirs(path):
- if (path is None):
+ if path is None:
path = excludeDirsPath
- if (quiet == False):
+ if not quiet:
print("Loading excluded dirs list from file:\n{}".format(path))
try:
for p in fileinput.input(path):
p = p.strip().rstrip('/')
- if (p[0] != '/'):
+ if p[0] != '/':
sys.stdout.flush()
- print("In file\n{}\n".format(path) +
+ print("In file\n{}\n".format(path) +
"excluded dir\n{}\nis not an absolute path".format(p),
file=sys.stderr)
exit(1)
@@ -777,23 +776,23 @@
zone_name = args.zonename
enable_checksums = args.chksum
plain_output = args.plain_output
- if (args.preserve_age is not None):
+ if args.preserve_age is not None:
preserve_age = args.preserve_age
load_excludeDirs(args.exclude_dirs)
- if (args.ignore):
+ if args.ignore:
parse_ignore(args.ignore)
- if (args.output_dir is not None):
+ if args.output_dir is not None:
output_dir = args.output_dir
- if (output_dir[0] != '/'):
+ if output_dir[0] != '/':
sys.stdout.flush()
print('results dir parent must be an absolute path', file=sys.stderr)
exit(1)
- if (os.path.isdir(output_dir) == False):
+ if not os.path.isdir(output_dir):
sys.stdout.flush()
print('results dir parent must be a directory', file=sys.stderr)
exit(1)
@@ -803,7 +802,7 @@
sp = subprocess.run(("/usr/sbin/zoneadm", "-z", zone_name, "list", "-p"),
capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
sys.stderr.write('zoneadm failed\n')
sys.stderr.write(sp.stderr)
@@ -811,21 +810,21 @@
zi = sp.stdout.split(':')
- if (zi[5] != 'solaris10'):
+ if zi[5] != 'solaris10':
sys.stdout.flush()
sys.stderr.write('zone brand must be solaris10\n')
exit(1)
- if (zi[2] != "installed"):
+ if zi[2] != "installed":
sys.stdout.flush()
sys.stderr.write('zone must be in the "installed" state.\n')
exit(1)
zone_root = zi[3] + "/root"
- if (args.pkg_name is not None):
+ if args.pkg_name is not None:
pkg_fmri = args.pkg_name
- if ('@' in pkg_fmri):
+ if '@' in pkg_fmri:
sys.stdout.flush()
print("Please don't use '@' in pkg_name", file=sys.stderr)
exit(1)
@@ -837,21 +836,21 @@
#
pkg_fname = pkg_fmri
- if (args.pkg_version is not None):
+ if args.pkg_version is not None:
pkg_fmri = pkg_fmri + "@" + args.pkg_version
else:
pkg_fmri = pkg_fmri + "@1.0"
- if (args.pkg_summary is not None):
+ if args.pkg_summary is not None:
pkg_summary = args.pkg_summary
else:
pkg_summary = "package generated from " + zone_name
- if (args.pkg_desc is not None):
+ if args.pkg_desc is not None:
pkg_desc = args.pkg_desc
-
+
try:
os.mkdir(output_dir)
except OSError as error:
@@ -861,34 +860,34 @@
exit(1)
parse_passwd()
- if (quiet == False):
+ if not quiet:
print("passwd entries: {}".format(len(passwd)))
parse_group()
- if (quiet == False):
+ if not quiet:
print("group entries: {}".format(len(group)))
- if (args.full_scan != True):
+ if not args.full_scan:
load_known_files()
- if (quiet == False):
+ if not quiet:
print("known new files loaded: {}".format(len(knownNewFiles)))
print("known mod files loaded: {}".format(len(knownModFiles)))
else:
- if (quiet == False):
+ if not quiet:
print("skipping known new/modified files lists")
load_pkglist()
- if (quiet == False):
+ if not quiet:
print("pkgs loaded: {}".format(len(pkglist)))
parse_contents(zone_root + "/var/sadm/install/contents")
- if (quiet == False):
+ if not quiet:
print("contents entries: {}".format(len(contents)))
- if (len(newpkgs) != 0):
+ if len(newpkgs) != 0:
print("{} non-Solaris pkgs found:".format(len(newpkgs)))
for p in sorted(newpkgs):
print(p)
else:
- if (quiet == False):
+ if not quiet:
print('no non-Solaris pkgs found')
walk_and_compare(zone_root)
- if (quiet == False):
+ if not quiet:
print("new_files: {}".format(len(new_files)))
print("matched_files: {}".format(len(matched_files)))
print("mod_files: {}".format(len(mod_files)))
@@ -912,27 +911,27 @@
print(error, file=sys.stderr)
exit(1)
- if (param_section not in config.sections()):
+ if param_section not in config.sections():
sys.stdout.flush()
print("Invalid parameters file - no {} section".format(param_section),
file=sys.stderr)
exit(1)
- if ('zone_root' in config[param_section]):
+ if 'zone_root' in config[param_section]:
zone_root = config[param_section]['zone_root']
else:
sys.stdout.flush()
print("Invalid parameters file - 'zone_root' key is missing",
file=sys.stderr)
exit(1)
- if ('pkg_file' in config[param_section]):
+ if 'pkg_file' in config[param_section]:
pkg_file = config[param_section]['pkg_file']
else:
sys.stdout.flush()
print("Invalid parameters file - 'pkg_file' key is missing",
file=sys.stderr)
exit(1)
- if ('plain_output' in config[param_section]):
+ if 'plain_output' in config[param_section]:
plain_output = config[param_section].getboolean('plain_output')
else:
sys.stdout.flush()
@@ -943,12 +942,12 @@
# pkg_fmri is used only for the archive subcommand, and it can work without
# it - we don't have to bail out if it's missing
#
- if ('pkg_fmri' in config[param_section]):
+ if 'pkg_fmri' in config[param_section]:
pkg_fmri = config[param_section]['pkg_fmri']
else:
pkg_fmri = None
- if(plain_output == True):
+ if plain_output:
sys.stdout.flush()
print("IPS manifest is needed for IPS package generation.",
file=sys.stderr)
@@ -956,7 +955,7 @@
file=sys.stderr)
exit(1)
- if(pkg_file.startswith(output_dir) == False):
+ if not pkg_file.startswith(output_dir):
sys.stdout.flush()
print("Corrupt params: path to the manifest does not match results dir",
file=sys.stderr)
@@ -971,25 +970,25 @@
#
def do_depresolve(args):
global output_dir, params_read
-
- if (params_read == False):
+
+ if not params_read:
output_dir = args.results_dir
- if (output_dir[0] != '/'):
+ if output_dir[0] != '/':
sys.stdout.flush()
print('results_dir must be an absolute path', file=sys.stderr)
exit(1)
- if (os.path.isdir(output_dir) == False):
+ if not os.path.isdir(output_dir):
sys.stdout.flush()
print('results_dir must be a directory', file=sys.stderr)
exit(1)
read_params()
-
+
command = ['pkgdepend', 'generate', '-md', zone_root, pkg_file]
oname = pkg_file + '.dep'
try:
with open(oname, "w") as outfile:
- if (quiet == False):
+ if not quiet:
print("running command:\n", " ".join(command), "\n")
sp = subprocess.run(command, stdout=outfile, stderr=subprocess.PIPE,
text=True)
@@ -1000,25 +999,25 @@
print(error, file=sys.stderr)
exit(1)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
print("command\n", " ".join(command), "\nfailed", file=sys.stderr)
print(sp.stderr, file=sys.stderr)
exit(1)
command = ['pkgdepend', 'resolve', '-m', oname]
- if (quiet == False):
+ if not quiet:
print("running command:\n", " ".join(command), "\n")
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
print("command\n", " ".join(command), "\nfailed", file=sys.stderr)
print(sp.stderr, file=sys.stderr)
exit(1)
- print("'sysdiff depresolve' successful, next step is 'sysdiff lint'")
+ print("'sysdiff depresolve' successful, next step is 'sysdiff lint'")
#
# function for handling the lint subcommand, executed via args.func(args)
@@ -1026,23 +1025,23 @@
#
def do_lint(args):
global output_dir, params_read, defcache
-
- if (params_read == False):
+
+ if not params_read:
output_dir = args.results_dir
- if (output_dir[0] != '/'):
+ if output_dir[0] != '/':
sys.stdout.flush()
print('results_dir must be an absolute path', file=sys.stderr)
exit(1)
- if (os.path.isdir(output_dir) == False):
+ if not os.path.isdir(output_dir):
sys.stdout.flush()
print('results_dir must be a directory', file=sys.stderr)
exit(1)
read_params()
-
- if (args.cache_dir is not None):
+
+ if args.cache_dir is not None:
cache_dir = args.cache_dir
- if (cache_dir[0] != '/'):
+ if cache_dir[0] != '/':
sys.stdout.flush()
print('cache_dir must be an absolute path', file=sys.stderr)
exit(1)
@@ -1050,7 +1049,7 @@
cache_dir = "{}.{}".format(output_dir, defcache)
- if (os.path.isdir(cache_dir) == False):
+ if not os.path.isdir(cache_dir):
try:
os.mkdir(cache_dir)
except OSError as error:
@@ -1061,19 +1060,19 @@
command = ['pkglint', '-c', cache_dir, '-r', args.repo,
pkg_file + ".dep.res"]
-
- if (quiet == False):
+
+ if not quiet:
print("running command:\n", " ".join(command), "\n")
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
print("command\n", " ".join(command), "\nfailed", file=sys.stderr)
print(sp.stderr, file=sys.stderr)
exit(1)
- print("'sysdiff lint' successful, next step is 'sysdiff publish'")
+ print("'sysdiff lint' successful, next step is 'sysdiff publish'")
#
# function for handling the publish subcommand, executed via args.func(args)
@@ -1081,104 +1080,104 @@
#
def do_publish(args):
global output_dir, params_read, defcache
-
- if (params_read == False):
+
+ if not params_read:
output_dir = args.results_dir
- if (output_dir[0] != '/'):
+ if output_dir[0] != '/':
sys.stdout.flush()
print('results_dir must be an absolute path', file=sys.stderr)
exit(1)
- if (os.path.isdir(output_dir) == False):
+ if not os.path.isdir(output_dir):
sys.stdout.flush()
print('results_dir must be a directory', file=sys.stderr)
exit(1)
read_params()
- if (args.create == True):
- if (args.publisher is None):
+ if args.create:
+ if args.publisher is None:
sys.stdout.flush()
print("publisher prefix is mandatory for repo creation, " +
"use --publisher option to provide it",
file=sys.stderr)
exit(1)
command = ['pkgrepo', 'create', args.repo]
- if (quiet == False):
+ if not quiet:
print("running command:\n", " ".join(command), "\n")
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
print("command\n", " ".join(command), "\nfailed", file=sys.stderr)
print(sp.stderr, file=sys.stderr)
exit(1)
command = ['pkgrepo', '-s', args.repo, 'set',
'publisher/prefix={}'.format(args.publisher)]
- if (quiet == False):
+ if not quiet:
print("running command:\n", " ".join(command), "\n")
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
print("command\n", " ".join(command), "\nfailed", file=sys.stderr)
print(sp.stderr, file=sys.stderr)
exit(1)
command = ['pkgsend', '-s', args.repo, 'publish', '-d', zone_root,
pkg_file + ".dep.res"]
- if (quiet == False):
+ if not quiet:
print("running command:\n", " ".join(command), "\n")
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
print("command\n", " ".join(command), "\nfailed", file=sys.stderr)
print(sp.stderr, file=sys.stderr)
exit(1)
print("'sysdiff publish' successful, the package can be converted to" +
- " an archive using 'sysdiff archive'")
-
+ " an archive using 'sysdiff archive'")
+
#
# function for handling the archive subcommand, executed via args.func(args)
# in main
#
def do_archive(args):
global output_dir, params_read, defcache, pkg_fmri
-
- if (params_read == False):
+
+ if not params_read:
output_dir = args.results_dir
- if (output_dir[0] != '/'):
+ if output_dir[0] != '/':
sys.stdout.flush()
print('results_dir must be an absolute path', file=sys.stderr)
exit(1)
- if (os.path.isdir(output_dir) == False):
+ if not os.path.isdir(output_dir):
sys.stdout.flush()
print('results_dir must be a directory', file=sys.stderr)
exit(1)
read_params()
- if (args.archive is None):
+ if args.archive is None:
archive = pkg_file.rstrip('p5m') + 'p5p'
else:
archive = args.archive
-
+
command = ['pkgrecv', '-s', args.repo, '-a', '-d', archive]
- if (args.pkg_fmri is not None):
+ if args.pkg_fmri is not None:
pkg_fmri = args.pkg_fmri
- if (pkg_fmri is not None):
+ if pkg_fmri is not None:
command.append(pkg_fmri)
- if (quiet == False):
+ if not quiet:
print("running command:\n", " ".join(command), "\n")
sp = subprocess.run(command, capture_output=True, text=True)
- if (sp.returncode != 0):
+ if sp.returncode != 0:
sys.stdout.flush()
print("command\n", " ".join(command), "\nfailed", file=sys.stderr)
print(sp.stderr, file=sys.stderr)
@@ -1186,11 +1185,11 @@
print("{} created successfully".format(archive))
-
+
def main():
global quiet
-
+
desc = 'Produce a diff between current Solaris 10 branded zone '
desc += 'contents and the state based on the SVR4 packaging data '
desc += 'and optionally package them in an IPS package.'
@@ -1303,4 +1302,3 @@
if __name__ == "__main__":
main()
-