[wip] tools: python3

Signed-off-by: Wataru Ishida <ishida@nel-america.com>
Wataru Ishida
2020-02-18 01:43:13 -08:00
parent b04e65f58a
commit 224fe6a7cf
17 changed files with 120 additions and 116 deletions
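
The changes below follow the standard Python 2 to 3 migration patterns rather than altering any tool's behavior. As an illustrative sketch (not taken from the diff itself), the recurring idioms are:

    # print statement -> print() function
    print("%-64s" % "onl-foo:amd64")

    # dict.iteritems()/.keys()/.values() -> items()/keys()/values(),
    # wrapped in list() where a real list is required
    pkgs = {"onl-foo": "1.0"}
    for (name, version) in list(pkgs.items()):
        print(name, version)

    # "except X, e" -> "except X as e"
    try:
        raise ValueError("example")
    except ValueError as e:
        print(e)

    # octal literals 0700 -> 0o700; cPickle -> pickle; StringIO -> io; basestring -> str
    import pickle, io
    print(oct(0o700), isinstance("x", str))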

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 import sys
 import os
 import argparse

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # This script provides the file naming scheme for
@@ -19,8 +19,8 @@ versions['UARCH'] = manifest['arch'].upper().replace("POWERPC","PPC")
 versions['OS_RELEASE_VERSION_ID'] = manifest['os-release']['VERSION_ID']
 if ops.type == 'swi':
-    print "%(FNAME_PRODUCT_VERSION)s_ONL-OS%(OS_RELEASE_VERSION_ID)s_%(FNAME_BUILD_ID)s_%(UARCH)s.swi" % versions
+    print(("%(FNAME_PRODUCT_VERSION)s_ONL-OS%(OS_RELEASE_VERSION_ID)s_%(FNAME_BUILD_ID)s_%(UARCH)s.swi" % versions))
 elif ops.type == 'installer':
-    print "%(FNAME_PRODUCT_VERSION)s_ONL-OS%(OS_RELEASE_VERSION_ID)s_%(FNAME_BUILD_ID)s_%(UARCH)s_INSTALLER" % versions
+    print(("%(FNAME_PRODUCT_VERSION)s_ONL-OS%(OS_RELEASE_VERSION_ID)s_%(FNAME_BUILD_ID)s_%(UARCH)s_INSTALLER" % versions))
 else:
     raise ValueError("Unknown type '%s'" % ops.type)
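
A note on the converted print calls throughout this commit: the doubled parentheses in lines like print(("..." % versions)) are the mechanical output of a 2to3-style conversion, which wraps the whole original argument list in one extra pair of parentheses. The inner pair only groups the expression, so, assuming no other behavior is intended, the idiomatic Python 3 spelling is simply:

    print("%(FNAME_PRODUCT_VERSION)s_ONL-OS%(OS_RELEASE_VERSION_ID)s_%(FNAME_BUILD_ID)s_%(UARCH)s.swi" % versions)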

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # Flat Image Tree Generator
@@ -212,7 +212,7 @@ class FlatImageTree(object):
         self.add_dict(name, d)
     def add_platform_package(self, package):
-        print package
+        print(package)
         platform = package.replace(":%s" % ops.arch, "").replace("onl-platform-config-", "")
         vpkg = "onl-vendor-config-onl:all"
@@ -264,22 +264,22 @@ class FlatImageTree(object):
         f.write(""" images {\n\n""")
         f.write(""" /* Kernel Images */\n""")
-        for k in kdict.values():
+        for k in list(kdict.values()):
             k.write(f)
         f.write("""\n""")
         f.write(""" /* DTB Images */\n""")
-        for d in ddict.values():
+        for d in list(ddict.values()):
             d.write(f)
         f.write("""\n""")
         f.write(""" /* Initrd Images */\n""")
-        for i in idict.values():
+        for i in list(idict.values()):
             i.write(f)
         f.write(""" };\n""")
         f.write(""" configurations {\n""")
-        for (name, (kernel, dtb, initrd)) in self.configurations.iteritems():
+        for (name, (kernel, dtb, initrd)) in list(self.configurations.items()):
             f.write(""" %s {\n""" % name)
             f.write(""" description = "%s";\n""" % name)
             f.write(""" kernel = "%s";\n""" % (KernelImage(kernel, ops.arch).name))

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 import os
 import sys

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # Build an ONL Installer
@@ -142,7 +142,7 @@ class InstallerShar(object):
             shutil.copy(f, self.work_dir)
         for d in self.dirs:
-            print "Copying %s -> %s..." % (d, self.work_dir)
+            print(("Copying %s -> %s..." % (d, self.work_dir)))
             subprocess.check_call(["cp", "-R", d, self.work_dir])
         with open(os.path.join(self.work_dir, 'installer.sh'), "w") as f:

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python3
 ################################################################
 #
 # Copyright 2013, Big Switch Networks, Inc.

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # This script produces a YAML list on stdout of all
@@ -12,7 +12,7 @@ ap.add_argument('init', metavar='INIT-SYSTEM', choices=['sysvinit', 'systemd'],
 ops = ap.parse_args()
 if ops.init == 'sysvinit':
-    print '- sysvinit-core'
+    print('- sysvinit-core')
 elif ops.init == 'systemd':
-    print '- systemd'
-    print '- systemd-sysv'
+    print('- systemd')
+    print('- systemd-sysv')

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
 ############################################################
 #
 # This script generates a repository skeleton for a new
@@ -57,7 +57,7 @@ class NOSFile(object):
             'bootconfig' : 'boot-config',
             }
-        clsname = map(lambda t: replacements.get(t, t), clsname)
+        clsname = [replacements.get(t, t) for t in clsname]
         return os.path.join(*clsname)
@@ -88,7 +88,7 @@ class NOSFile(object):
     def write(self, stdout=False, overwrite=False, dry=False):
         if stdout:
-            print self.etemplate
+            print((self.etemplate))
         else:
             abspath = os.path.join(self.root, self.epath)
             if not os.path.isdir(os.path.dirname(abspath)):
@@ -1168,6 +1168,6 @@ if __name__ == '__main__':
    for obj in OBJECTS:
        if ops.list_files:
-            print "%-60s" % (obj.epath)
+            print(("%-60s" % (obj.epath)))
        if ops.write_files:
            obj.write(overwrite=ops.overwrite, dry=ops.dry)

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # This script expects a yaml file containing the list
@@ -9,6 +9,7 @@
 # packages necessary to support the given list
 # of platforms.
 #
 import onlyaml
 import argparse
 import onlu
@@ -41,10 +42,4 @@ if not ops.no_builtins:
 for p in platforms:
     for pattern in PATTERNS:
-        print "- ", pattern % dict(platform=p)
+        print(("- {}".format(pattern % dict(platform=p))))

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """onlplatform.py
@@ -75,11 +75,11 @@ def extractVendor(platform, arch):
     return "\n".join(l)
 if ops.key in ('kernel', 'initrd', 'dtb', 'itb',):
-    print extractKey(ops.platform, ops.arch, ops.key)
+    print((extractKey(ops.platform, ops.arch, ops.key)))
     sys.exit(0)
 if ops.key == 'vendor':
-    print extractVendor(ops.platform, ops.arch)
+    print((extractVendor(ops.platform, ops.arch)))
     sys.exit(0)
 raise SystemExit("invalid key %s" % ops.key)

View File

@@ -1,9 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # ONL Package Management
 #
 ############################################################
 import argparse
 import os
 import sys
@@ -21,7 +23,7 @@ from string import Template
 import re
 import json
 import lsb_release
-import cPickle as pickle
+import pickle as pickle
 g_dist_codename = lsb_release.get_distro_information().get('CODENAME')
@@ -173,7 +175,7 @@ class OnlPackage(object):
             for d in reversed(results):
                 if d:
                     ddict.update(d)
-        except Exception, e:
+        except Exception as e:
             sys.stderr.write("%s\n" % e)
             sys.stderr.write("package file: %s\n" % pkg)
             raise
@@ -214,7 +216,7 @@ class OnlPackage(object):
         #
         # The key value precedence is package dict, common dict, default dict.
         #
-        self.pkg = dict(ddict.items() + cdict.items() + pdict.items())
+        self.pkg = dict(list(ddict.items()) + list(cdict.items()) + list(pdict.items()))
         # Validate all required package keys are present and well-formed.
         if not 'external' in self.pkg:
@@ -339,7 +341,7 @@ class OnlPackage(object):
         dstpath = os.path.join(root, dst)
         try:
             os.makedirs(dstpath)
-        except OSError, e:
+        except OSError as e:
             if e.errno != os.errno.EEXIST:
                 raise
         shutil.copy(src, dstpath)
@@ -398,7 +400,7 @@ class OnlPackage(object):
             if os.path.exists(src):
                 OnlPackage.copyf(src, dst, root)
-        for (link, src) in self.pkg.get('links', {}).iteritems():
+        for (link, src) in list(self.pkg.get('links', {}).items()):
             logger.info("Linking %s -> %s..." % (link, src))
             # The source must be relative to the existing root directory.
             if link.startswith('/'):
@@ -622,7 +624,7 @@ class OnlPackageGroup(object):
     def __str__(self):
-        return "\n".join( self.list().keys() )
+        return "\n".join( list(self.list().keys()) )
     def list(self):
         rv = {}
@@ -855,7 +857,7 @@ class OnlPackageRepoUnlocked(object):
     def contents(self, pkg):
         path = self.lookup(pkg)
         if path:
-            print "** %s contents:" % path
+            print("** %s contents:" % path)
             onlu.execute(['dpkg', '-c', path])
@@ -981,7 +983,7 @@ class OnlPackageManager(object):
         try:
             self.package_groups = pickle.load(open(cache, "rb"))
-        except Exception, e:
+        except Exception as e:
             logger.warn("The existing package cache is corrupted. It will be rebuilt.")
             return False
@@ -1021,7 +1023,7 @@ class OnlPackageManager(object):
                 logger.debug(' Loaded package file %s' % os.path.join(root, f))
                 if pg.distcheck() and pg.buildercheck(builder_arches):
                     self.package_groups.append(pg)
-            except OnlPackageError, e:
+            except OnlPackageError as e:
                 logger.error("%s: " % e)
                 logger.warn("Skipping %s due to errors." % os.path.join(root, f))
@@ -1074,7 +1076,7 @@ class OnlPackageManager(object):
         try:
             manager = submodules.OnlSubmoduleManager(root)
             manager.require(path, depth=depth, recursive=recursive)
-        except submodules.OnlSubmoduleError, e:
+        except submodules.OnlSubmoduleError as e:
             raise OnlPackageError(e.value)
         # Process prerequisite packages
@@ -1138,7 +1140,7 @@ class OnlPackageManager(object):
     def list(self):
         rv = {}
         for pg in self.filtered_package_groups():
-            for (p,d) in pg.list().iteritems():
+            for (p,d) in list(pg.list().items()):
                 rv[p] = d
         return rv
@@ -1149,7 +1151,7 @@ class OnlPackageManager(object):
         TARGETS={}
         ARCHS={}
-        for (p,d) in packages.iteritems():
+        for (p,d) in list(packages.items()):
             (name,arch) = p.split(':')
             target = p.replace(':', '_')
             depends = " ".join(d.get('packages', [])).replace(':', '_')
@@ -1185,12 +1187,12 @@ class OnlPackageManager(object):
         handle.write("#\n")
         handle.write("############################################################\n")
-        for (t, d) in TARGETS.iteritems():
+        for (t, d) in list(TARGETS.items()):
             handle.write("%s : %s\n" % (t, d['depends']))
             handle.write("\tset -o pipefail && onlpm.py --ro-cache --require %s |& tee $(BUILDING)/$@\n" % (d['package']))
             handle.write("\tmv $(BUILDING)/$@ $(FINISHED)/\n")
-        for (arch, targets) in ARCHS.iteritems():
+        for (arch, targets) in list(ARCHS.items()):
             handle.write("############################################################\n")
             handle.write("#\n")
             handle.write("# These rules represent the build stages for arch='%s'\n" % arch)
@@ -1204,7 +1206,7 @@ class OnlPackageManager(object):
             for stage in range(0, 10):
                 handle.write("arch_%s_stage%s: %s\n\n" % (arch, stage, " ".join(STAGES.get(stage, []))))
-        for arch in ARCHS.keys():
+        for arch in list(ARCHS.keys()):
             handle.write("arch_%s:\n" % arch)
             for stage in range(0, 10):
                 handle.write("\t$(MAKE) arch_%s_stage%s\n" % (arch, stage))
@@ -1237,7 +1239,7 @@ def defaultPm():
     if envJson:
         for j in envJson.split(':'):
             data = json.load(open(j))
-            for (k, v) in data.iteritems():
+            for (k, v) in list(data.items()):
                 try:
                     v = v.encode('ascii')
                 except UnicodeEncodeError:
@@ -1307,7 +1309,7 @@ if __name__ == '__main__':
     if ops.include_env_json:
         for j in ops.include_env_json.split(':'):
             data = json.load(open(j))
-            for (k, v) in data.iteritems():
+            for (k, v) in list(data.items()):
                 try:
                     v = v.encode('ascii')
                 except UnicodeEncodeError:
@@ -1347,7 +1349,7 @@ if __name__ == '__main__':
     if ops.in_repo:
         for p in ops.in_repo:
-            print "%s: %s" % (p, p in pm.opr)
+            print("%s: %s" % (p, p in pm.opr))
         sys.exit(0)
     for pdir in ops.packagedirs:
@@ -1360,10 +1362,10 @@ if __name__ == '__main__':
             for p in pg.packages:
                 if p.tagged(ops.list_tagged):
                     if ops.arch in [ p.pkg['arch'], "all", None ]:
-                        print "%-64s" % p.id(),
+                        print("{:<64}".format(p.id()), end=' ')
                         if ops.show_group:
-                            print "[ ", pg._pkgs['__source'], "]",
-                        print
+                            print("[ ", pg._pkgs['__source'], "]", end=' ')
+                        print()
     if ops.list_platforms:
         if not ops.arch:
@@ -1371,14 +1373,14 @@ if __name__ == '__main__':
             sys.exit(1)
         platforms = pm.list_platforms(ops.arch)
         if ops.csv:
-            print ','.join(platforms)
+            print(','.join(platforms))
         else:
             for p in platforms:
-                print "%-64s" % p
+                print("%-64s" % p)
     # List all packages, no filtering
     if ops.list_all:
-        print pm
+        print(pm)
     if ops.pmake:
         pm.pmake()
@@ -1387,10 +1389,10 @@ if __name__ == '__main__':
     pm.filter(subdir = ops.subdir, arches=ops.arches)
     if ops.list:
-        print pm
+        print(pm)
     if ops.pkg_info:
-        print pm.pkg_info()
+        print(pm.pkg_info())
     ############################################################
@@ -1422,13 +1424,13 @@ if __name__ == '__main__':
         (p, f) = ops.find_file
         pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
         path = pm.opr.get_file(p, f)
-        print path
+        print(path)
     if ops.find_dir:
         (p, d) = ops.find_dir
         pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
         path = pm.opr.get_dir(p, d)
-        print path
+        print(path)
     if ops.link_file:
         for (p, f, dst) in ops.link_file:
@@ -1481,7 +1483,7 @@ if __name__ == '__main__':
     path = pm.opr.get_file(ops.platform_manifest, 'manifest.json')
     if path:
         m = json.load(open(path))
-        print " ".join(m['platforms'])
+        print(" ".join(m['platforms']))
     ############################################################
@@ -1495,8 +1497,8 @@ if __name__ == '__main__':
     if ops.lookup:
         logger.debug("looking up %s", ops.lookup)
         for p in pm.opr.lookup_all(ops.lookup):
-            print p
-except (OnlPackageError, onlyaml.OnlYamlError), e:
+            print(p)
+except (OnlPackageError, onlyaml.OnlYamlError) as e:
     logger.error(e)
     sys.exit(1)
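
The self.pkg construction above keeps the Python 2 idiom of concatenating dict item lists; in Python 3 dict views cannot be added, hence the list() wrappers. As a hedged aside, not the commit's approach, the same precedence (package values override common values, which override defaults) can be expressed by copying and updating:

    ddict = {"arch": "all", "version": "1.0"}   # defaults
    cdict = {"version": "2.0"}                  # common
    pdict = {"arch": "amd64"}                   # package

    merged = dict(ddict)
    merged.update(cdict)
    merged.update(pdict)
    print(merged)   # {'arch': 'amd64', 'version': '2.0'}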

View File

@@ -1,9 +1,10 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # ONL Root Filesystem Generator
 #
 ############################################################
 import argparse
 import os
 import sys
@@ -16,7 +17,7 @@ import fcntl
 import subprocess
 import glob
 import submodules
-import StringIO
+import io
 from collections import Iterable
 import onlyaml
 import onlu
@@ -37,7 +38,7 @@ def onlu_execute_sudo(*args, **kwargs):
 def onlu_execute_pivot_root(pivot, cmd, **kwargs):
     script = "/tmp/pivot_root.sh"
     with open(script, "w") as f:
-        os.chmod(script, 0700)
+        os.chmod(script, 0o700)
         f.write("""#!/bin/bash -eux
 rm -rf /tmp/newroot && mkdir /tmp/newroot
 rm -rf $1/oldroot && mkdir $1/oldroot
@@ -105,11 +106,11 @@ class OnlRfsSystemAdmin(object):
         # Can't use the userdel command because of potential uid 0 in-user problems while running ourselves
         for line in fileinput.input(pf, inplace=True):
-            if not line.startswith('%s:' % username):
-                print line,
+            if not line.startswith('{}:'.format(username)):
+                print(line, end=' ')
         for line in fileinput.input(sf, inplace=True):
-            if not line.startswith('%s:' % username):
-                print line,
+            if not line.startswith('{}:'.format(username)):
+                print(line, end=' ')
         self.chmod("go-wx", pf);
         self.chmod("go-wx", sf);
@@ -234,9 +235,9 @@ class OnlMultistrapConfig(object):
         self.localrepos = []
     def generate_handle(self, handle):
-        for (name, fields) in self.config.iteritems():
-            handle.write("[%s]\n" % name)
-            for (k,v) in fields.iteritems():
+        for (name, fields) in list(self.config.items()):
+            handle.write("[{}]\n".format(name).encode('utf8'))
+            for (k,v) in list(fields.items()):
                 if type(v) is bool:
                     v = 'true' if v == True else 'false'
@@ -250,8 +251,8 @@ class OnlMultistrapConfig(object):
                 if k == 'packages' and type(v) is list:
                     raise OnlRfsError("packages=%s" % v)
-                handle.write("%s=%s\n" % (k, v))
-            handle.write("\n")
+                handle.write("{}={}\n".format(k, v).encode('utf8'))
+            handle.write("\n".encode('utf8'))
     def generate_file(self, fname=None):
         if fname is None:
@@ -266,19 +267,20 @@ class OnlMultistrapConfig(object):
     def get_packages(self):
         pkgs = []
-        for (name, fields) in self.config.iteritems():
-            for (k,v) in fields.iteritems():
+        for (name, fields) in list(self.config.items()):
+            for (k,v) in list(fields.items()):
                 if k == 'packages':
                     if type(v) is list:
+                        print('HELLO', v)
                         pkgs = pkgs + list(onlu.sflatten(v))
                     else:
                         pkgs = pkgs + v.split()
         return pkgs
     def __str__(self):
-        handle = StringIO.StringIO()
+        handle = io.BytesIO()
         self.generate_handle(handle)
-        return handle.getvalue()
+        return handle.getvalue().decode('utf8')
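
The multistrap config writer now encodes each formatted string and buffers into io.BytesIO, so the same generate_handle() can feed either a binary file handle or the in-memory buffer used by __str__. A minimal sketch of the alternative, assuming only text output were needed, would keep the handle textual and skip the per-write encoding:

    import io

    # Illustrative only, not the commit's approach: build the config as text
    # and let the caller choose an encoding when writing to disk.
    handle = io.StringIO()
    handle.write("[{}]\n".format("Debian"))          # no .encode() needed
    handle.write("{}={}\n".format("packages", "onl-foo"))
    print(handle.getvalue())
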
@@ -311,7 +313,7 @@ class OnlRfsContext(object):
                              ex=OnlRfsError("Could install new resolv.conf"))
             return self
-        except Exception, e:
+        except Exception as e:
             logger.error("Exception %s in OnlRfsContext::__enter__" % e)
             self.__exit__(None, None, None)
             raise e
@@ -425,7 +427,7 @@ class OnlRfsBuilder(object):
         OnlRfsSystemAdmin.chmod('1777', '%s/tmp' % dir_)
         script = os.path.join(dir_, "tmp/configure.sh")
         with open(script, "w") as f:
-            os.chmod(script, 0700)
+            os.chmod(script, 0o700)
             f.write("""#!/bin/bash -ex
 /bin/echo -e "#!/bin/sh\\nexit 101" >/usr/sbin/policy-rc.d
 chmod +x /usr/sbin/policy-rc.d
@@ -510,10 +512,12 @@ rm -f /usr/sbin/policy-rc.d
             ua = OnlRfsSystemAdmin(dir_)
-            for (group, values) in Configure.get('groups', {}).iteritems():
-                ua.groupadd(group=group, **values if values else {})
+            for (group, values) in list(Configure.get('groups', {}).items()):
+                if not values:
+                    values = {}
+                ua.groupadd(group=group, **values)
-            for (user, values) in Configure.get('users', {}).iteritems():
+            for (user, values) in list(Configure.get('users', {}).items()):
                 if user == 'root':
                     if 'password' in values:
                         ua.user_password_set(user, values['password'])
@@ -568,7 +572,7 @@ rm -f /usr/sbin/policy-rc.d
             for line in fileinput.input(f, inplace=True):
                 if re.match("^[123456]:.*", line):
                     line = "#" + line
-                print line,
+                print(line, end=' ')
             ua.chmod('go-w', f)
             ua.chmod('go-w', os.path.dirname(f))
@@ -594,7 +598,7 @@ rm -f /usr/sbin/policy-rc.d
                 OnlRfsSystemAdmin.chmod('777', os.path.dirname(asrf))
                 asro.format(os.path.join(dir_, asropts['file']), fmt=asropts['format'])
-            for (mf, fields) in Configure.get('manifests', {}).iteritems():
+            for (mf, fields) in list(Configure.get('manifests', {}).items()):
                 logger.info("Configuring manifest %s..." % mf)
                 if mf.startswith('/'):
                     mf = mf[1:]
@@ -612,7 +616,7 @@ rm -f /usr/sbin/policy-rc.d
                 else:
                     md['platforms'] = fields['platforms'].split(',')
-                for (k, v) in fields.get('keys', {}).iteritems():
+                for (k, v) in list(fields.get('keys', {}).items()):
                     if k in md:
                         md[k].update(v)
                     else:
@@ -625,7 +629,7 @@ rm -f /usr/sbin/policy-rc.d
             for v in Configure.get('files', {}).get('link', []):
                 onlu_execute_sudo("ln {} {} {}/{}".format('-s' if v.get('symbolic', True) else '', v['src'], dir_, v['dst']))
-            for (fname, v) in Configure.get('files', {}).get('add', {}).iteritems():
+            for (fname, v) in list(Configure.get('files', {}).get('add', {}).items()):
                 if fname.startswith('/'):
                     fname = fname[1:]
                 dst = os.path.join(dir_, fname)
@@ -763,7 +767,7 @@ if __name__ == '__main__':
         sys.exit(0)
     if ops.show_packages:
-        print "\n".join(x.get_packages())
+        print("\n".join(x.get_packages()))
         sys.exit(0)
     if ops.dir is None:
@@ -805,5 +809,5 @@ if __name__ == '__main__':
             os.unlink(ops.squash)
             raise OnlRfsError("Squash creation failed.")
-    except (OnlRfsError, onlyaml.OnlYamlError), e:
+    except (OnlRfsError, onlyaml.OnlYamlError) as e:
         logger.error(e.value)

View File

@@ -1,9 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # Common utilities for the ONL python tools.
 #
 ############################################################
 import logging
 import subprocess
 from collections import Iterable
@@ -13,6 +15,7 @@ import fcntl
 import glob
 from string import Template
 import time
+from builtins import str
 logger = None
@@ -91,7 +94,7 @@ class Profiler(object):
 #
 def execute(args, sudo=False, chroot=None, ex=None, env=False):
-    if isinstance(args, basestring):
+    if isinstance(args, str):
         # Must be executed through the shell
         shell = True
     else:
@@ -102,13 +105,13 @@ def execute(args, sudo=False, chroot=None, ex=None, env=False):
         sudo = True
     if chroot:
-        if isinstance(args, basestring):
+        if isinstance(args, str):
             args = "chroot %s %s" % (chroot, args)
         elif type(args) in (list,tuple):
             args = ['chroot', chroot] + list(args)
     if sudo:
-        if isinstance(args, basestring):
+        if isinstance(args, str):
             if env:
                 args = "sudo -E %s" % (args)
             else:
@@ -127,7 +130,7 @@ def execute(args, sudo=False, chroot=None, ex=None, env=False):
     try:
         subprocess.check_call(args, shell=shell)
         rv = 0
-    except subprocess.CalledProcessError, e:
+    except subprocess.CalledProcessError as e:
         if ex:
             raise ex
         rv = e.returncode
@@ -138,7 +141,7 @@ def execute(args, sudo=False, chroot=None, ex=None, env=False):
 # Flatten lists if string lists
 def sflatten(coll):
     for i in coll:
-        if isinstance(i, Iterable) and not isinstance(i, basestring):
+        if isinstance(i, Iterable) and not isinstance(i, str):
             for subc in sflatten(i):
                 if subc:
                     yield subc
@@ -167,10 +170,10 @@ def userdel(username):
     # Can't use the userdel command because of potential uid 0 in-user problems while running ourselves
     for line in fileinput.input('/etc/passwd', inplace=True):
         if not line.startswith('%s:' % username):
-            print line,
+            print(line, end=' ')
     for line in fileinput.input('/etc/shadow', inplace=True):
         if not line.startswith('%s:' % username):
-            print line,
+            print(line, end='')
 ############################################################
 #
@@ -246,12 +249,12 @@ def filepath(absdir, relpath, eklass, required=True):
 def validate_src_dst_file_tuples(absdir, data, dstsubs, eklass, required=True):
     files = []
     if type(data) is dict:
-        for (s,d) in data.iteritems():
+        for (s,d) in list(data.items()):
             files.append((s,d))
     elif type(data) is list:
         for e in data:
             if type(e) is dict:
-                for (s,d) in e.iteritems():
+                for (s,d) in list(e.items()):
                     files.append((s,d))
             elif type(e) in [ list, tuple ]:
                 if len(e) != 2:
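
The execute() helper above dispatches on the argument type: a plain string is run through the shell, while a list or tuple is exec'd directly. With basestring gone in Python 3, isinstance(args, str) is the equivalent check. A minimal standalone sketch of the pattern (not the ONL helper itself):

    import subprocess

    def run(args):
        # Strings go through the shell; sequences are exec'd directly.
        shell = isinstance(args, str)
        return subprocess.call(args, shell=shell)

    run("echo hello from a shell string")
    run(["echo", "hello", "from", "an", "argv", "list"])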

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 ############################################################
 #
 # Extended YAML Support
@@ -102,15 +102,15 @@ def loadf(fname, vard={}):
     # First load: grab the variables dict
     string = open(fname).read()
     try:
-        data = yaml.load(string)
-    except Exception, e:
+        data = yaml.load(string, Loader=yaml.FullLoader)
+    except Exception as e:
         raise OnlYamlError("%s\n(filename: %s)" % (e, fname))
     if type(data) is dict:
         _v = dflatten({}, data.get('variables', {}))
         variables.update(_v)
-        for (k,v) in _v.iteritems():
+        for (k,v) in list(_v.items()):
             k = interpolate(k, variables)
             v = interpolate(v, variables)
             variables[k] = v
@@ -125,10 +125,10 @@ def loadf(fname, vard={}):
     string = interpolate(string, variables)
     try:
-        data = yaml.load(string)
-    except OnlYamlError, e:
+        data = yaml.load(string, Loader=yaml.FullLoader)
+    except OnlYamlError as e:
         raise e
-    except Exception, e:
+    except Exception as e:
         raise OnlYamlError("Interpolation produced invalid results:\n%s\n" % string)
     return data
@@ -138,10 +138,10 @@ if __name__ == '__main__':
     import sys
     try:
         if len(sys.argv) == 2:
-            print yaml.dump(loadf(sys.argv[1]))
+            print((yaml.dump(loadf(sys.argv[1]))))
         else:
             sys.stderr.write("usage: %s <yamlfile>\n" % sys.argv[0])
-    except OnlYamlError, e:
+    except OnlYamlError as e:
         sys.stderr.write("error: %s\n" % e.value)
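
Passing an explicit Loader to yaml.load matches newer PyYAML, where calling yaml.load without one emits a deprecation warning. FullLoader handles the full YAML language while refusing arbitrary Python object construction; for plain data files, yaml.safe_load gives the same result with a stricter loader. A minimal sketch:

    import yaml

    doc = "variables:\n  arch: amd64\npackages:\n  - onl-kernel\n"
    data = yaml.load(doc, Loader=yaml.FullLoader)   # explicit loader, no warning
    same = yaml.safe_load(doc)                      # stricter, equivalent for plain data
    print(data == same)                             # True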

View File

@@ -78,7 +78,7 @@ if ops.kl:
 out=sys.stdout
 if ops.out and ops.out not in ['-', 'stdout']:
-    print ops.out
+    print((ops.out))
     out = open(ops.out, "w")
 json.dump(g_data, out, indent=ops.indent)

View File

@@ -104,7 +104,7 @@ class OnlSubmoduleManager(object):
         for script in os.getenv("ONL_SUBMODULE_UPDATED_SCRIPTS", "").split(':'):
             if os.path.exists(script):
                 try:
-                    print "Calling %s..." % script
+                    print(("Calling %s..." % script))
                     check_call([script, path], cwd=self.root)
                 except subprocess.CalledProcessError:
                     raise OnlSubmoduleError("The repository post-init script %s failed." % script)
@@ -131,6 +131,6 @@ if __name__ == '__main__':
     try:
         sm = OnlSubmoduleManager(ops.root)
         sm.require(ops.path)
-    except OnlSubmoduleError, e:
+    except OnlSubmoduleError as e:
         logger.error("%s" % e.value)

View File

@@ -91,7 +91,7 @@ if swi is None:
     swi = OnlSwitchImage(ops.swi, 'r')
 if ops.contents:
-    print " ".join(swi.get_contents())
+    print((" ".join(swi.get_contents())))
 if ops.platforms:
-    print " ".join(swi.get_platforms())
+    print((" ".join(swi.get_platforms())))