#!/usr/bin/python
############################################################
#
# ONL Package Management
#
############################################################
import argparse
import os
import sys
import logging
import yaml
import tempfile
import shutil
import pprint
import subprocess
import glob
import submodules
import onlyaml
import onlu
from string import Template
import re
import json
import lsb_release

g_dist_codename = lsb_release.get_distro_information().get('CODENAME')

logger = onlu.init_logging('onlpm')


class OnlPackageError(Exception):
    """General Package Error Exception

    This class is used to communicate user-level configuration
    and runtime errors related to package contents, package
    building, and similar operations.

    This exception should be caught at the highest level and
    the error message communicated to the user."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class OnlPackageMissingError(OnlPackageError):
    def __init__(self, pkg):
        self.value = "Package %s does not exist." % pkg


class OnlPackageMissingFileError(OnlPackageError):
    def __init__(self, p, f):
        self.value = "Package %s does not contain the file %s." % (p, f)


class OnlPackageMissingDirError(OnlPackageError):
    def __init__(self, p, d):
        self.value = "Package %s does not contain the directory %s." % (p, d)


class OnlPackage(object):
    """Individual Debian Package Builder Class

    This class builds a single debian package from a package specification.
    The package specification is a dictionary with the following keys:

    name: The name of the package
    version: The version of the package
    arch: The package architecture
    copyright: The copyright string or path to copyright file
    changelog: The changelog string or path to changelog file
    maintainer: The package maintainer address
    summary: The package summary description
    *desc: The package description (defaults to summary)
    files: A dict containing src/dst pairs, or a list
           containing (src, dst) pairs.
           A src can be a file or a directory.
           A dst can be a file or a directory.
    *depends: List of package dependencies
    *docs: List of documentation files

    Keys marked with an asterisk are optional. All others are required."""

    # These are built-in defaults for some package keys
    DEFAULTS = {
        'vendor' : 'Open Network Linux',
        'url' : 'http://opennetlinux.org',
        'license' : 'unknown',

        # Default Python Package Installation
        'PY_INSTALL' : '/usr/lib/python2.7/dist-packages',

        # Default Builder build directory name. Must match setup.env
        'BUILD_DIR' : 'BUILD/%s' % g_dist_codename,

        # Default Templates Location
        'ONL_TEMPLATES' : "%s/packages/base/any/templates" % os.getenv("ONL")
    }

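    # For illustration only -- a hypothetical package entry with the
    # required keys described above (all names and paths are invented):
    #
    #   name: onl-example-util
    #   version: 1.0.0
    #   arch: amd64
    #   copyright: COPYING
    #   changelog: changelog
    #   maintainer: support@opennetlinux.org
    #   summary: Example utility package.
    #   files:
    #     - [ builds/example-util, /usr/bin/example-util ]
    #
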
    ############################################################
    #
    # 'pdict' is the main package dictionary.
    # 'cdict' is the "common" key dictionary (optional)
    # 'ddict' is the "default" key dictionary
    #
    # The union of 'pdict' + 'cdict' + 'ddict' must provide a complete
    # package specification containing all required keys.
    #
    # 'dir_' is the parent directory of the package.
    # File contents for the package specification may be absolute or
    # relative to this directory.
    #
    ############################################################
    def __init__(self, pdict, dir_, cdict=None, ddict=None):
        """Initialize package object.

        'pdict' : The main package dictionary
        'cdict' : The "common" key dictionary (optional)
        'ddict' : The "default" key dictionary
        'dir_'  : The parent directory of the package.
                  File contents for the package specification may be
                  absolute or relative to this directory.

        The union of 'pdict' + 'cdict' + 'ddict' must provide a complete
        package specification containing all required keys."""

        # Default key value dictionary
        if ddict is None:
            ddict = self.DEFAULTS

        # Optional common value dictionary
        if cdict is None:
            cdict = {}

        #
        # The key value precedence is package dict, common dict, default dict.
        #
        self.pkg = dict(ddict.items() + cdict.items() + pdict.items())

        # Validate all required package keys are present and well-formed.
        if 'external' not in self.pkg:
            self._validate()

        # Save package working directory for future build steps.
        self.dir = dir_

    def id(self):
        """Canonical package identifier.

        The package identifier is <basename>:<arch>

        The version numbers and suffixes related to the package are ignored
        for the purposes of the identifier. It is assumed that any given
        set of packages used in creating a particular system will only
        be at a single revision at a time."""

        return self.pkg['name'] + ':' + self.pkg['arch']

    def arch(self):
        return self.pkg['arch']

    @staticmethod
    def idparse(pkgid, ex=True):
        """Parse a canonical package identifier.

        pkgid: string of the form <name>:<arch>
        ex: If true, a package exception is raised if it cannot be parsed.

        Returns a tuple of the form (name, arch), or (False, False)"""
        try:
            (name, arch) = pkgid.split(':')
            return (name, arch)
        except ValueError:
            if ex:
                raise OnlPackageError("%s is not a valid package specification." % pkgid)
            else:
                return (False, False)

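    # Illustrative usage (hypothetical package name):
    #
    #   OnlPackage.idparse('onl-example-util:amd64')  => ('onl-example-util', 'amd64')
    #   OnlPackage.idparse('not-an-id', ex=False)     => (False, False)
    #
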
    def __str__(self):
        """Package key contents."""
        return pprint.pformat(self.pkg)

    def _validate_key(self, key, type_, required=True):
        """Validate a given key (option dependent)."""

        if key not in self.pkg:
            if required:
                raise OnlPackageError("'%s' is required but not specified in the package file." % key)
            else:
                return False

        # A scalar string is promoted to a single-element list where a list is expected.
        if type(self.pkg[key]) is str and type_ is list:
            self.pkg[key] = [ self.pkg[key] ]

        # Numeric values are coerced to strings.
        if type(self.pkg[key]) in [ float, int ]:
            self.pkg[key] = str(self.pkg[key])

        if type(self.pkg[key]) != type_:
            raise OnlPackageError("key '%s' is the wrong type (%s should be %s)" % (
                key, type(self.pkg[key]), type_))

        return True

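    # Illustrative coercions performed above (hypothetical values):
    #
    #   pkg['depends'] = 'onl-foo'  ->  [ 'onl-foo' ]  (str promoted to list)
    #   pkg['version'] = 1.0        ->  '1.0'          (number coerced to str)
    #
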
    def _validate_files(self):
        """Validate the existence of the required input files for the current package."""
        self.pkg['files'] = onlu.validate_src_dst_file_tuples(self.dir,
                                                              self.pkg['files'],
                                                              dict(PKG=self.pkg['name'],
                                                                   PKG_INSTALL='/usr/share/onl/packages/%s/%s' % (self.pkg['arch'], self.pkg['name'])),
                                                              OnlPackageError)

    def _validate(self):
        """Validate the package contents."""

        self._validate_key('name', str)
        self._validate_key('arch', str)
        self._validate_key('copyright', str)
        self._validate_key('changelog', str)
        self._validate_key('version', str)
        self._validate_key('maintainer', str)
        self._validate_key('depends', list, False)
        self._validate_key('priority', str, False)
        self._validate_key('summary', str)
        self._validate_key('docs', list, False)
        self._validate_key('vendor', str)
        self._validate_key('url', str)
        self._validate_key('license', str)

        if not self._validate_key('desc', str, False):
            self.pkg['desc'] = self.pkg['summary']

        self.pkg['description'] = "%s\n%s" % (self.pkg['summary'], self.pkg['desc'])

        return True

    def build(self, dir_=None):
        """Build the debian package.

        When this method is called it is assumed that all file
        prerequisites for the package have already been built
        or met. This is simply the packaging stage.

        'dir_' : This is the output directory in which the package
        should be left. If unspecified the package's local directory
        will contain the package file."""

        if 'external' in self.pkg:
            # Package file built externally
            epkg = self.pkg['external']
            if os.path.exists(epkg):
                return epkg
            else:
                raise OnlPackageError("The external package file '%s' does not exist." % epkg)

        # Make sure all required files exist
        if 'files' in self.pkg:
            self._validate_files()

        # If dir_ is not specified, leave package in local package directory.
        if dir_ is None:
            dir_ = self.dir

        workdir = tempfile.mkdtemp()
        root = os.path.join(workdir, "root")
        os.mkdir(root)

        # The package file will be built into the workdir
        self.pkg['__workdir'] = workdir

        for (src, dst) in self.pkg.get('files', []):

            if dst.startswith('/'):
                dst = dst[1:]

            if os.path.isdir(src):
                #
                # Copy entire src directory to target directory
                #
                dstpath = os.path.join(root, dst)
                logger.debug("Copytree %s -> %s" % (src, dstpath))
                shutil.copytree(src, dstpath)
            else:
                #
                # If the destination ends in a '/' it means copy the filename
                # as-is to that directory.
                #
                # If not, it's a full rename to the destination.
                #
                if dst.endswith('/'):
                    dstpath = os.path.join(root, dst)
                    if not os.path.exists(dstpath):
                        os.makedirs(dstpath)
                    shutil.copy(src, dstpath)
                else:
                    dstpath = os.path.join(root, os.path.dirname(dst))
                    if not os.path.exists(dstpath):
                        os.makedirs(dstpath)
                    shutil.copyfile(src, os.path.join(root, dst))
                    shutil.copymode(src, os.path.join(root, dst))

        for (link, src) in self.pkg.get('links', {}).iteritems():
            logger.info("Linking %s -> %s..." % (link, src))
            link = os.path.join(root, link)
            os.symlink(src, link)

        #
        # FPM doesn't seem to have a doc option so we copy documentation
        # files directly into place.
        #
        for src in self.pkg.get('docs', []):
            if not os.path.exists(src):
                raise OnlPackageError("Documentation source file '%s' does not exist." % src)

            dstpath = os.path.join(root, "usr/share/doc/%(name)s" % self.pkg)
            if not os.path.exists(dstpath):
                os.makedirs(dstpath)
            shutil.copy(src, dstpath)

        changelog = os.path.join(workdir, 'changelog')
        copyright_ = os.path.join(workdir, 'copyright')

        #
        # Export changelog and copyright files from the PKG dict
        # to the workdir for processing.
        #
        # The copyright and changelog data can be embedded directly
        # in the PKG file or kept as separate files.
        #
        def copy_str_or_file(src, dst):
            if os.path.exists(src):
                shutil.copyfile(src, dst)
            else:
                with open(dst, "w") as f:
                    f.write(src)
                    f.write("\n")

        copy_str_or_file(self.pkg['copyright'], copyright_)
        copy_str_or_file(self.pkg['changelog'], changelog)

        ############################################################
        #
        # Invoke fpm with all necessary options.
        #
        ############################################################
        self.pkg['__root'] = root

        command = """fpm -p %(__workdir)s -f -C %(__root)s -s dir -t deb -n %(name)s -v %(version)s -a %(arch)s -m %(maintainer)s --description "%(description)s" --url "%(url)s" --license "%(license)s" --vendor "%(vendor)s" """ % self.pkg

        for dep in self.pkg.get('depends', []):
            command = command + "-d %s " % dep

        for provides in onlu.sflatten(self.pkg.get('provides', [])):
            command = command + "--provides %s " % provides

        if 'init' in self.pkg:
            if not os.path.exists(self.pkg['init']):
                raise OnlPackageError("Init script '%s' does not exist." % self.pkg['init'])
            command = command + "--deb-init %s " % self.pkg['init']

        if 'post-install' in self.pkg:
            if not os.path.exists(self.pkg['post-install']):
                raise OnlPackageError("Post-install script '%s' does not exist." % self.pkg['post-install'])
            command = command + "--after-install %s " % self.pkg['post-install']

        onlu.execute(command)

        # Grab the package from the workdir. There can be only one.
        logger.debug("Collecting package from %s" % workdir)
        files = glob.glob(os.path.join(workdir, '*.deb'))
        if len(files) == 0:
            raise OnlPackageError("No debian package.")
        elif len(files) > 1:
            raise OnlPackageError("Too many packages.")
        else:
            # Move to the target directory
            shutil.copy(files[0], dir_)

        # Remove entire work directory.
        shutil.rmtree(workdir)

        # Return the path to the final built package
        return os.path.join(dir_, os.path.basename(files[0]))

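    # For illustration only -- with the hypothetical spec shown earlier,
    # the generated command above would look roughly like:
    #
    #   fpm -p <workdir> -f -C <workdir>/root -s dir -t deb \
    #       -n onl-example-util -v 1.0.0 -a amd64 -m support@opennetlinux.org \
    #       --description "..." --url "..." --license "..." --vendor "..."
    #
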
    def tagged(self, tag):
        return tag in self.pkg.get('tags', [])


class OnlPackageGroup(object):
    """Debian Package Group

    A Package Group is defined in a Package Group File. It contains common
    package settings, multiple package declarations, and a common build step.

    All packages are built through their parent PackageGroup object.
    """

    def __init__(self):
        self.filtered = False

    def archcheck(self, arches):
        if arches is None:
            return True
        else:
            for p in self.packages:
                if p.arch() not in arches:
                    return False
            return True

    def prerequisite_packages(self):
        return list(onlu.sflatten(self._pkgs.get('prerequisites', {}).get('packages', [])))

    def prerequisite_submodules(self):
        return self._pkgs.get('prerequisites', {}).get('submodules', [])

    def prerequisites(self):
        return self._pkgs.get('prerequisites', {})

    def load(self, pkg):
        if not os.path.exists(pkg):
            raise OnlPackageError("Package file '%s' does not exist." % pkg)

        pkg_data = onlyaml.loadf(pkg, OnlPackage.DEFAULTS)

        #
        # Package declarations are dicts.
        #
        if type(pkg_data) is not dict:
            raise OnlPackageError("The package file '%s' is empty or malformed." % pkg)

        if 'packages' not in pkg_data:
            raise OnlPackageError("The package file '%s' does not contain a packages declaration." % pkg)

        if type(pkg_data['packages']) is not list:
            raise OnlPackageError("The packages declaration is not a list.")

        self.packages = []
        for p in pkg_data['packages']:
            self.packages.append(OnlPackage(p, os.path.dirname(pkg),
                                            pkg_data.get('common', None),
                                            None))

        # This is used for the pkg_info dump
        self._pkg_info = pkg_data.copy()
        self._pkgs = pkg_data
        self._pkgs['__source'] = os.path.abspath(pkg)
        self._pkgs['__directory'] = os.path.dirname(self._pkgs['__source'])
        self._pkgs['__mtime'] = os.path.getmtime(pkg)

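    # For illustration only -- a hypothetical Package Group File (PKG.yml)
    # of the shape load() expects; every value below is an invented example:
    #
    #   common:
    #     version: 1.0.0
    #     maintainer: support@opennetlinux.org
    #   prerequisites:
    #     packages: [ 'onl-example-lib:amd64' ]
    #   packages:
    #     - name: onl-example-util
    #       arch: amd64
    #       summary: Example utility package.
    #       copyright: COPYING
    #       changelog: changelog
    #       files:
    #         - [ builds/example-util, /usr/bin/example-util ]
    #
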
    def reload(self):
        """Reload our package file if it has changed."""

        if ( (self._pkgs['__mtime'] != os.path.getmtime(self._pkgs['__source'])) or
             (self._pkgs.get('reload', False)) ):
            logger.debug("Reloading updated package file %s..." % self._pkgs['__source'])
            self.load(self._pkgs['__source'])

    def __str__(self):
        return "\n".join(self.list().keys())

    def list(self):
        rv = {}
        for p in self.packages:
            rv[p.id()] = self.prerequisites()
        return rv

    def pkg_info(self):
        return ("** Package Group: %s\n" % self._pkgs['__source'] +
                yaml.dump(self._pkg_info, default_flow_style=False) +
                "\n")

    def __contains__(self, pkg):
        """The current package group contains the given package id."""
        for p in self.packages:
            if pkg == 'all' or p.id() == pkg:
                return True
        return False

    def is_child(self, dir_):
        return self._pkgs['__directory'].startswith(dir_)

    def gmake_locked(self, target, operation):

        buildpaths = []
        if self._pkgs.get('build', True) and not os.environ.get('NOBUILD', False):
            buildpaths = [
                os.path.join(self._pkgs['__directory'], 'builds'),
                os.path.join(self._pkgs['__directory'], 'BUILDS'),
            ]

        for bp in buildpaths:
            if os.path.exists(bp):
                MAKE = os.environ.get('MAKE', "make")
                cmd = MAKE + ' -C ' + bp + " " + os.environ.get('ONLPM_MAKE_OPTIONS', "") + " " + target
                onlu.execute(cmd,
                             ex=OnlPackageError('%s failed.' % operation))

    def build(self, dir_=None):
        """Build all packages in the current group.

        dir_ : The output directory for the package group.
        The default is the package group parent directory.

        The option to build individual packages is not provided.
        The assumption is that the packages defined in the group are
        related and should always be built together.

        It is also assumed that all packages in the group have a common
        build step. That build step is performed once, and all packages
        are then built from the artifacts as defined in the package
        specifications.

        This assures there are no mismatches in the contents of packages
        from the same group and that there are no unnecessary invocations
        of the build steps.
        """

        products = []

        with onlu.Lock(os.path.join(self._pkgs['__directory'], '.lock')):
            self.gmake_locked("", 'Build')
            for p in self.packages:
                products.append(p.build(dir_=dir_))

        if 'release' in self._pkgs:
            release_list = onlu.validate_src_dst_file_tuples(self._pkgs['__directory'],
                                                             self._pkgs['release'],
                                                             dict(),
                                                             OnlPackageError)
            for f in release_list:
                # Todo -- customize
                dst = os.path.join(os.getenv('ONL'), 'RELEASE', g_dist_codename, f[1])
                if not os.path.exists(dst):
                    os.makedirs(dst)
                logger.info("Releasing %s -> %s" % (os.path.basename(f[0]), dst))
                shutil.copy(f[0], dst)

        return products

    def clean(self, dir_=None):
        with onlu.Lock(os.path.join(self._pkgs['__directory'], '.lock')):
            self.gmake_locked("clean", 'Clean')


class OnlPackageRepo(object):
    """Package Repository and Interchange Class

    This class implements access to a single package repository.
    Packages can be:
    1. Installed in the repository.
    2. Looked up in the repository.
    3. Extracted from the repository.
    4. Extracted into a local cache whose file contents
       can be used by other packages with dependencies on those
       contents."""

    def __init__(self, root, packagedir='packages'):
        """Initialize a repo object.

        root : The root directory that should be used for this repository."""

        root = os.path.join(root, g_dist_codename)

        if not os.path.exists(root):
            os.makedirs(root)

        # The package subdirectory goes here
        self.repo = os.path.join(root, packagedir)

        # The extract cache goes here
        self.extracts = os.path.join(root, 'extracts')

        # All access to the repository is locked
        self.lock = onlu.Lock(os.path.join(root, '.lock'))

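    # For illustration only -- the on-disk layout this class manages
    # (codename and package names are hypothetical):
    #
    #   <root>/<codename>/packages/<arch>/<name>_<version>_<arch>.deb
    #   <root>/<codename>/extracts/...   (extract cache)
    #   <root>/<codename>/.lock
    #
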
    def add_packages(self, pkglist):
        """Add a package or list of packages to the repository."""
        with self.lock:
            for p in pkglist if type(pkglist) is list else [ pkglist ]:
                if not os.path.exists(p):
                    raise OnlPackageError("Package file '%s' does not exist." % p)
                logger.info("adding package '%s'..." % p)
                underscores = p.split('_')
                # Package name is the first entry
                package = os.path.split(underscores[0])[1]
                # Architecture is the last entry (.deb)
                arch = underscores[-1].split('.')[0]
                logger.debug("+ /bin/cp %s %s/%s", p, self.repo, arch)
                dstdir = os.path.join(self.repo, arch)
                if not os.path.exists(dstdir):
                    os.makedirs(dstdir)
                logger.info("dstdir=%s" % dstdir)

                # Remove any existing versions of this package.
                for existing in glob.glob(os.path.join(dstdir, "%s_*.deb" % package)):
                    logger.debug("Removing existing package %s" % existing)
                    os.unlink(existing)

                shutil.copy(p, dstdir)
                extract_dir = os.path.join(self.extracts, arch, package)
                if os.path.exists(extract_dir):
                    # Clear any existing extract in the cache so the package
                    # gets re-extracted the next time it is requested.
                    logger.info("removing previous extract directory %s...", extract_dir)
                    logger.debug("+ /bin/rm -fr %s", extract_dir)
                    shutil.rmtree(extract_dir)

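    # For illustration only -- how a Debian package filename is parsed
    # above (hypothetical file):
    #
    #   'repo/onl-example-util_1.0.0_amd64.deb'
    #       package = 'onl-example-util'  (basename of the first '_' field)
    #       arch    = 'amd64'             (last '_' field, minus '.deb')
    #
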
    def remove_packages(self, pkglist):
        with self.lock:
            for p in pkglist if type(pkglist) is list else [ pkglist ]:
                path = self.lookup(p)
                if path:
                    logger.info("removing package %s..." % p)
                    os.unlink(path)

    def lookup_all(self, pkg):
        """Lookup all packages in the repo matching the given package identifier."""
        with self.lock:
            rv = []
            (name, arch) = OnlPackage.idparse(pkg)
            dirname = os.path.join(self.repo, arch)
            if os.path.exists(dirname):
                manifest = os.listdir(dirname)
                rv = [ os.path.join(dirname, x) for x in manifest if arch in x and "%s_" % name in x ]
            return rv

    def __contains__(self, pkg):
        r = self.lookup_all(pkg)
        return len(r) != 0

    def lookup(self, pkg, ex=False):
        """Lookup a package in the repo. The package must be unique and exist."""
        r = self.lookup_all(pkg)
        if len(r) == 0:
            if ex:
                raise OnlPackageError("Package %s is not in the repository." % pkg)
            return False
        elif len(r) > 1:
            if ex:
                raise OnlPackageError("Multiple matches for package identifier '%s': %s" % (pkg, r))
            return False
        else:
            return r[0]

    def extract(self, pkg, dstdir=None, prefix=True, force=False, remove_ts=False, sudo=False):
        """Extract the given package.

        pkg : The package identifier. Must be unique in the repo.
        dstdir : The parent directory which will contain the extracted package contents.
        The default is the local repo's extract cache.
        force: If True, the package will be extracted even if its contents are already valid in the extract cache."""

        PKG_TIMESTAMP = '.PKG.TIMESTAMP'

        with self.lock:
            path = self.lookup(pkg)
            if path:

                if dstdir is None:
                    dstdir = self.extracts

                if prefix:
                    edir = os.path.join(dstdir, pkg.replace(':', '_'))
                else:
                    edir = dstdir

                if not force and os.path.exists(os.path.join(edir, PKG_TIMESTAMP)):
                    if (os.path.getmtime(os.path.join(edir, PKG_TIMESTAMP)) ==
                        os.path.getmtime(path)):
                        # Existing extract is identical to source package
                        logger.debug("Existing extract for %s matches the package file." % pkg)
                    else:
                        # Existing extract must be removed.
                        logger.warn("Existing extract for %s does not match." % pkg)
                        force = True
                else:
                    # Status unknown. Really shouldn't happen.
                    force = True

                if force:
                    if os.path.exists(edir) and prefix:
                        shutil.rmtree(edir)
                    if not os.path.exists(edir):
                        os.makedirs(edir)

                    onlu.execute([ 'dpkg', '-x', path, edir ], sudo=sudo)
                    onlu.execute([ 'touch', '-r', path, os.path.join(edir, PKG_TIMESTAMP) ], sudo=sudo)

                if remove_ts and os.path.exists(os.path.join(edir, PKG_TIMESTAMP)):
                    onlu.execute([ 'rm', os.path.join(edir, PKG_TIMESTAMP) ], sudo=sudo)

                return edir

            return False

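    # Illustrative usage (hypothetical identifier): extracting
    # 'onl-example-util:amd64' with the defaults returns a cache path like
    # <root>/<codename>/extracts/onl-example-util_amd64. The .PKG.TIMESTAMP
    # file, stamped with the .deb's mtime, is what lets the cached extract
    # be reused on the next call.
    #
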
    def contents(self, pkg):
        with self.lock:
            path = self.lookup(pkg)
            if path:
                print "** %s contents:" % path
                onlu.execute(['dpkg', '-c', path])

    def get_file(self, pkg, filename, force=False, ex=True):
        """Get a file contained in the given package.

        The package will be extracted (if necessary) into the extract cache
        and the path to the requested file will be returned.

        force: Passed to extract() as the force option."""

        edir = self.extract(pkg, force=force)
        for root, dirs, files in os.walk(edir):
            for file_ in files:
                if file_ == filename:
                    return os.path.join(root, file_)

        if ex:
            raise OnlPackageMissingFileError(pkg, filename)

        return None

    def get_dir(self, pkg, dirname, force=False, ex=True):
        """Get a directory contained in the given package.

        The package will be extracted (if necessary) into the extract cache
        and the path to the requested directory will be returned.

        force: Passed to extract() as the force option."""

        edir = self.extract(pkg, force=force)
        for root, dirs, files in os.walk(edir):
            if os.path.basename(root) == dirname and root != edir:
                return root

        if ex:
            raise OnlPackageMissingDirError(pkg, dirname)

        return None


class OnlPackageManager(object):

    def __init__(self):
        # Stores all loaded package groups.
        self.package_groups = []
        self.opr = None

    def set_repo(self, repodir):
        # Note: 'ops' is the module-level argparse namespace populated in __main__.
        self.opr = OnlPackageRepo(repodir, ops.repo_package_dir)

    def filter(self, subdir=None, arches=None, substr=None):

        for pg in self.package_groups:
            if subdir and not pg.is_child(subdir):
                pg.filtered = True
            if not pg.archcheck(arches):
                pg.filtered = True

    def load(self, basedir, usecache=True, rebuildcache=False):
        pkgspec = [ 'PKG.yml', 'pkg.yml' ]

        import cPickle as pickle
        CACHE = os.path.join(basedir, '.PKGs.cache')

        # Lock the CACHE file
        with onlu.Lock(CACHE + ".lock"):
            if usecache:
                if os.path.exists(CACHE):
                    if rebuildcache:
                        logger.debug("Removing package cache %s" % CACHE)
                        os.unlink(CACHE)
                    else:
                        logger.debug("Loading from package cache %s" % CACHE)
                        self.package_groups = pickle.load(open(CACHE, "rb"))

                        # Validate and update the cache
                        for pg in self.package_groups:
                            pg.reload()

                        # Update cache and return
                        pickle.dump(self.package_groups, open(CACHE, "wb"))
                        return
            else:
                if os.path.exists(CACHE):
                    logger.debug("Removing package cache %s" % CACHE)
                    os.unlink(CACHE)

            for root, dirs, files in os.walk(basedir):
                for f in files:
                    if f in pkgspec:
                        if "%s.disabled" % f in files:
                            logger.warn("Skipping %s due to .disabled file." % os.path.join(root, f))
                        else:
                            pg = OnlPackageGroup()
                            try:
                                logger.debug('Loading package file %s...' % os.path.join(root, f))
                                pg.load(os.path.join(root, f))
                                logger.debug('  Loaded package file %s' % os.path.join(root, f))
                                self.package_groups.append(pg)
                            except OnlPackageError, e:
                                logger.error("%s: " % e)
                                logger.warn("Skipping %s due to errors." % os.path.join(root, f))

            if usecache:
                # Write the package cache
                logger.debug("Writing the package cache %s..." % CACHE)
                pickle.dump(self.package_groups, open(CACHE, "wb"))

    def __contains__(self, pkg):
        for pg in self.package_groups:
            if pkg == 'all' or pkg in pg:
                return True
        return False

    def build(self, pkg=None, dir_=None, filtered=True, prereqs_only=False):

        built = False

        for pg in self.package_groups:
            if pkg is None or pkg in pg:

                if filtered and pg.filtered:
                    continue

                if not prereqs_only:
                    #
                    # Process prerequisite submodules.
                    # Only do this if we are building the actual package,
                    # not processing the package dependencies.
                    #
                    for sub in pg.prerequisite_submodules():
                        root = sub.get('root', None)
                        path = sub.get('path', None)
                        depth = sub.get('depth', None)
                        recursive = sub.get('recursive', None)

                        if not root:
                            raise OnlPackageError("Submodule prerequisite in package %s does not have a root key." % pkg)

                        if not path:
                            raise OnlPackageError("Submodule prerequisite in package %s does not have a path key." % pkg)

                        try:
                            manager = submodules.OnlSubmoduleManager(root)
                            manager.require(path, depth=depth, recursive=recursive)
                        except submodules.OnlSubmoduleError, e:
                            raise OnlPackageError(e.value)

                # Process prerequisite packages
                for pr in pg.prerequisite_packages():
                    logger.info("Requiring prerequisite package %s..." % pr)
                    self.require(pr, build_missing=True)

                if not prereqs_only:
                    # Build package
                    products = pg.build(dir_=dir_)
                    if self.opr:
                        # Add results to our repo
                        self.opr.add_packages(products)

                built = True

        if not built:
            raise OnlPackageMissingError(pkg)

    def clean(self, pkg=None, dir_=None):
        for pg in self.package_groups:
            if pkg is None or pkg in pg:
                products = pg.clean(dir_=dir_)
                if self.opr:
                    # Remove results from our repo
                    self.opr.remove_packages(products)

    def require(self, pkg, force=False, build_missing=False, skip_missing=False,
                try_arches=None):

        if pkg not in self and try_arches:
            for a in try_arches:
                p = "%s:%s" % (pkg, a)
                if p in self:
                    pkg = p
                    break

        if pkg not in self:
            if skip_missing:
                return
            # Can't require a package we don't have
            raise OnlPackageMissingError(pkg)

        if force or (pkg not in self.opr and build_missing):
            logger.info("Rebuilding %s... " % pkg)
            self.build(pkg, filtered=False)
        else:
            if pkg in self.opr:
                self.build(pkg, filtered=False, prereqs_only=True)

        if pkg not in self.opr:
            raise OnlPackageError("Package %s is required but has not been built." % pkg)

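    # Illustrative usage (hypothetical identifier):
    #
    #   pm.require('onl-example-util', try_arches=['amd64', 'all'],
    #              build_missing=True)
    #
    # resolves the bare name against each arch in turn, builds the package
    # if it is not already in the repo, and raises if it still cannot be
    # satisfied.
    #
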
    def __str__(self):
        return "\n".join(self.list())

    def filtered_package_groups(self):
        return [ pg for pg in self.package_groups if not pg.filtered ]

    def list(self):
        rv = {}
        for pg in self.filtered_package_groups():
            for (p, d) in pg.list().iteritems():
                rv[p] = d
        return rv

    def pmake(self, handle=sys.stdout):
        packages = self.list()

        # Collect some dependency data
        TARGETS = {}
        ARCHS = {}

        for (p, d) in packages.iteritems():
            (name, arch) = p.split(':')
            target = p.replace(':', '_')
            depends = " ".join(d.get('packages', [])).replace(':', '_')

            TARGETS[target] = TARGETS.get(target, {})
            TARGETS[target][arch] = arch
            TARGETS[target]['name'] = target
            TARGETS[target]['depends'] = depends
            TARGETS[target]['package'] = p
            ARCHS[arch] = ARCHS.get(arch, [])
            ARCHS[arch].append(TARGETS[target])

            if d.get('broken', False):
                TARGETS[target]['stage'] = 20
            elif len(depends) == 0:
                TARGETS[target]['stage'] = 0
            else:
                TARGETS[target]['stage'] = 1

        handle.write("############################################################\n")
        handle.write("#\n")
        handle.write("# These are the rules that build each individual package.\n")
        handle.write("#\n")
        handle.write("############################################################\n")
        for (t, d) in TARGETS.iteritems():
            handle.write("%s : %s\n" % (t, d['depends']))
            handle.write("\ttouch building/%s\n" % t)
            handle.write("\tonlpm.py --require %s\n" % d['package'])
            handle.write("\tmv building/%s finished/\n" % (t))

        for (arch, targets) in ARCHS.iteritems():
            handle.write("############################################################\n")
            handle.write("#\n")
            handle.write("# These rules represent the build stages for arch='%s'\n" % arch)
            handle.write("#\n")
            handle.write("############################################################\n")
            STAGES = {}
            for t in targets:
                STAGES[t['stage']] = STAGES.get(t['stage'], [])
                STAGES[t['stage']].append(t['name'])

            for stage in range(0, 10):
                handle.write("arch_%s_stage%s: %s\n\n" % (arch, stage, " ".join(STAGES.get(stage, []))))

        for arch in ARCHS.keys():
            handle.write("arch_%s:\n" % arch)
            for stage in range(0, 10):
                handle.write("\t$(MAKE) arch_%s_stage%s\n" % (arch, stage))
            handle.write("\n")

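    # For illustration only -- a fragment of the makefile pmake() emits
    # for a hypothetical package with one dependency:
    #
    #   onl-example-util_amd64 : onl-example-lib_amd64
    #           touch building/onl-example-util_amd64
    #           onlpm.py --require onl-example-util:amd64
    #           mv building/onl-example-util_amd64 finished/
    #
    #   arch_amd64_stage0: onl-example-lib_amd64
    #   arch_amd64_stage1: onl-example-util_amd64
    #
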
    def pkg_info(self):
        return "\n".join([ pg.pkg_info() for pg in self.package_groups if not pg.filtered ])


if __name__ == '__main__':

    ap = argparse.ArgumentParser("onlpm")
    ap.add_argument("--repo", default=os.environ.get('ONLPM_OPTION_REPO', None))
    ap.add_argument("--repo-package-dir", default=os.environ.get('ONLPM_OPTION_REPO_PACKAGE_DIR', 'packages'))
    ap.add_argument("--packagedirs", nargs='+', metavar='PACKAGEDIR')
    ap.add_argument("--subdir", default=os.getcwd())
    ap.add_argument("--extract", metavar='PACKAGE')
    ap.add_argument("--extract-dir", nargs=2, metavar=('PACKAGE', 'DIR'), action='append')
    ap.add_argument("--force", action='store_true')
    ap.add_argument("--list", action='store_true')
    ap.add_argument("--list-all", action='store_true')
    ap.add_argument("--list-tagged")
    ap.add_argument("--list-platforms", action='store_true')
    ap.add_argument("--csv", action='store_true')
    ap.add_argument("--show-group", action='store_true')
    ap.add_argument("--arch")
    ap.add_argument("--arches", nargs='+', default=['amd64', 'powerpc', 'all'])
    ap.add_argument("--pmake", action='store_true')
    ap.add_argument("--prereq-packages", action='store_true')
    ap.add_argument("--lookup", metavar='PACKAGE')
    ap.add_argument("--find-file", nargs=2, metavar=('PACKAGE', 'FILE'))
    ap.add_argument("--find-dir", nargs=2, metavar=('PACKAGE', 'DIR'))
    ap.add_argument("--link-file", nargs=3, metavar=('PACKAGE', 'FILE', 'DST'), action='append')
    ap.add_argument("--link-dir", nargs=3, metavar=('PACKAGE', 'DIR', 'DST'), action='append')
    ap.add_argument("--copy-file", nargs=3, metavar=('PACKAGE', 'FILE', 'DST'), action='append')
    ap.add_argument("--build", nargs='+', metavar='PACKAGE')
    ap.add_argument("--clean", nargs='+', metavar='PACKAGE')
    ap.add_argument("--require", nargs='+', metavar='PACKAGE')
    ap.add_argument("--no-build-missing", action='store_true')
    ap.add_argument("--contents", nargs='+', metavar='PACKAGE')
    ap.add_argument("--delete", nargs='+', metavar='PACKAGE')
    ap.add_argument("--sudo", action='store_true')
    ap.add_argument("--verbose", action='store_true')
    ap.add_argument("--quiet", action='store_true')
    ap.add_argument("--rebuild-pkg-cache", action='store_true', default=os.environ.get('ONLPM_OPTION_REBUILD_PKG_CACHE', False))
    ap.add_argument("--no-pkg-cache", action='store_true', default=os.environ.get('ONLPM_OPTION_NO_PKG_CACHE', False))
    ap.add_argument("--pkg-info", action='store_true')
    ap.add_argument("--skip-missing", action='store_true')
    ap.add_argument("--try-arches", nargs='+', metavar='ARCH')
    ap.add_argument("--in-repo", nargs='+', metavar='PACKAGE')
    ap.add_argument("--include-env-json", default=os.environ.get('ONLPM_OPTION_INCLUDE_ENV_JSON', None))
    ap.add_argument("--platform-manifest", metavar='PACKAGE')

    ops = ap.parse_args()

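    # Illustrative invocations (hypothetical package names):
    #
    #   onlpm.py --repo ./repo --packagedirs ./packages --build onl-example-util:amd64
    #   onlpm.py --list-platforms --arch amd64 --csv
    #   onlpm.py --find-file onl-example-util:amd64 example-util
    #
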
    archlist = []
    for a in ops.arches:
        al = a.split(',')
        archlist = archlist + al
    ops.arches = archlist

    if ops.include_env_json:
        for j in ops.include_env_json.split(':'):
            data = json.load(open(j))
            for (k, v) in data.iteritems():
                os.environ[k] = v

    #
    # The packagedirs and repo options can be inherited from the environment
    # to make it easier to customize the settings for a given project.
    #
    # If these options are None it means they were specified neither on the
    # command line nor in the environment.
    #
    if not ops.packagedirs:
        if 'ONLPM_OPTION_PACKAGEDIRS' not in os.environ:
            logger.error("No packagedirs specified. Please use the --packagedirs option or set ONLPM_OPTION_PACKAGEDIRS in the environment.")
            sys.exit(1)
        ops.packagedirs = os.environ['ONLPM_OPTION_PACKAGEDIRS'].split(':')

    if not ops.repo:
        logger.error("No repo directory specified. Please use the --repo option or set ONLPM_OPTION_REPO in the environment.")
        sys.exit(1)

    logger.setLevel(logging.INFO)
    if ops.quiet:
        logger.setLevel(logging.ERROR)
    if ops.verbose or os.environ.get('ONLPM_VERBOSE', None):
        logger.setLevel(logging.DEBUG)

    try:

        pm = OnlPackageManager()
        if ops.repo:
            logger.debug("Setting repo as '%s'..." % ops.repo)
            pm.set_repo(ops.repo)

        if ops.in_repo:
            for p in ops.in_repo:
                print "%s: %s" % (p, p in pm.opr)
            sys.exit(0)

        for pdir in ops.packagedirs:
            logger.debug("Loading package dir %s..." % pdir)
            pm.load(pdir, usecache=not ops.no_pkg_cache, rebuildcache=ops.rebuild_pkg_cache)
            logger.debug("  Loaded package dir %s" % pdir)

        if ops.list_tagged:
            for pg in pm.package_groups:
                for p in pg.packages:
                    if p.tagged(ops.list_tagged):
                        if ops.arch in [ p.pkg['arch'], "all", None ]:
                            print "%-64s" % p.id(),
                            if ops.show_group:
                                print "[ ", pg._pkgs['__source'], "]",
                            print

        if ops.list_platforms:
            platforms = []
            for pg in pm.package_groups:
                for p in pg.packages:
                    (name, arch) = OnlPackage.idparse(p.id())
                    m = re.match(r'onl-platform-config-(?P<platform>.*)', name)
                    if m:
                        if ops.arch in [ arch, "all", None ]:
                            platforms.append(m.group('platform'))

            if ops.csv:
                print ','.join(platforms)
            else:
                for p in platforms:
                    print "%-64s" % p

        # List all packages, no filtering
        if ops.list_all:
            print pm

        pm.filter(subdir=ops.subdir, arches=ops.arches)

        if ops.list:
            print pm

        if ops.pmake:
            pm.pmake()

        if ops.pkg_info:
            print pm.pkg_info()

        ############################################################
        #
        # Clean the given packages.
        #
        ############################################################
        if ops.clean:
            raise OnlPackageError("Clean not implemented yet.")
            for p in ops.clean:
                if p in pm:
                    pm.clean(p)
                else:
                    raise OnlPackageMissingError(p)

        if ops.build:
            for p in ops.build:
                if p in pm:
                    pm.build(p)
                else:
                    raise OnlPackageMissingError(p)

        if ops.require:
            for p in ops.require:
                pm.require(p, force=ops.force, build_missing=not ops.no_build_missing,
                           skip_missing=ops.skip_missing, try_arches=ops.try_arches)

        if ops.find_file:
            (p, f) = ops.find_file
            pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
            path = pm.opr.get_file(p, f)
            print path

        if ops.find_dir:
            (p, d) = ops.find_dir
            pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
            path = pm.opr.get_dir(p, d)
            print path

        if ops.link_file:
            for (p, f, dst) in ops.link_file:
                pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
                path = pm.opr.get_file(p, f)
                if dst == '.':
                    dst = f
                if os.path.exists(dst):
                    os.unlink(dst)
                os.symlink(path, dst)

        if ops.link_dir:
            for (p, d, dst) in ops.link_dir:
                pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
                path = pm.opr.get_dir(p, d)
                if dst == '.':
                    dst = d
                if os.path.exists(dst):
                    os.unlink(dst)
                os.symlink(path, dst)

        if ops.copy_file:
            for (p, f, dst) in ops.copy_file:
                pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
                path = pm.opr.get_file(p, f)
                if dst == '.':
                    dst = f
                if os.path.exists(dst):
                    os.unlink(dst)
                shutil.copyfile(path, dst)

        if ops.extract_dir:
            for (p, d) in ops.extract_dir:
                pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
                pm.opr.extract(p, dstdir=d, prefix=False, force=True, remove_ts=True, sudo=ops.sudo)

        ############################################################
        #
        # Show the contents of the given packages.
        #
        ############################################################
        if ops.contents:
            for p in ops.contents:
                pm.require(p, force=ops.force, build_missing=not ops.no_build_missing)
                pm.opr.contents(p)

        if ops.platform_manifest:
            pm.require(ops.platform_manifest, force=ops.force, build_missing=not ops.no_build_missing)
            path = pm.opr.get_file(ops.platform_manifest, 'manifest.json')
            if path:
                m = json.load(open(path))
                print " ".join(m['platforms'])

        ############################################################
        #
        # Delete the given packages from the repo.
        #
        ############################################################
        if ops.delete:
            pm.opr.remove_packages(ops.delete)

    except (OnlPackageError, onlyaml.OnlYamlError), e:
        logger.error(e)
        sys.exit(1)