mirror of https://github.com/andrew-d/static-binaries.git
Initial work on helper build tool
commit 9c98414abf (parent 9b5ddbde95)
sbuild/.gitignore  (vendored, new file, 1 line added)
@@ -0,0 +1 @@
*.pyc
sbuild/sbuild/__init__.py  (new file, 2 lines added)
@@ -0,0 +1,2 @@
__author__ = 'Andrew Dunham <andrew@du.nham.ca>'
__version__ = '0.0.1'
sbuild/sbuild/frozendict.py  (new file, 50 lines added)
@@ -0,0 +1,50 @@
# Copyright (c) 2012 Santiago Lezica
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is furnished
# to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Taken from https://github.com/slezica/python-frozendict

import collections, operator


class frozendict(collections.Mapping):

    def __init__(self, *args, **kwargs):
        self.__dict = dict(*args, **kwargs)
        self.__hash = None

    def __getitem__(self, key):
        return self.__dict[key]

    def copy(self, **add_or_replace):
        return frozendict(self, **add_or_replace)

    def __iter__(self):
        return iter(self.__dict)

    def __len__(self):
        return len(self.__dict)

    def __repr__(self):
        return '<frozendict %s>' % repr(self.__dict)

    def __hash__(self):
        if self.__hash is None:
            self.__hash = reduce(operator.xor, map(hash, self.iteritems()), 0)

        return self.__hash
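For reference, a minimal usage sketch of this vendored frozendict (illustrative only, not part of the commit; the values are made up):

    from sbuild.frozendict import frozendict

    fd = frozendict({'name': 'zlib', 'version': '1.2.8'})
    fd['name']                       # read access works like a normal dict
    fd2 = fd.copy(version='1.2.11')  # copy() returns a new frozendict with overrides
    hash(fd)                         # hashable, so usable as a dict key or set member
    # fd['name'] = 'other'           # would raise TypeError: no __setitem__ is defined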
sbuild/sbuild/immutable.py  (new file, 18 lines added)
@@ -0,0 +1,18 @@
import collections

from .frozendict import frozendict


def make_immutable(val):
    if isinstance(val, str):
        # Strings are sequences too; return them unchanged rather than
        # letting the Sequence branch split them into tuples of characters.
        return val

    elif isinstance(val, collections.Set):
        return frozenset(make_immutable(x) for x in val)

    elif isinstance(val, collections.Mapping):
        return frozendict(
            (make_immutable(k), make_immutable(v)) for k, v in val.items()
        )

    elif isinstance(val, collections.Sequence):
        return tuple([make_immutable(x) for x in val])

    else:
        return val
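A short sketch of what make_immutable produces for a nested structure (illustrative; the values are made up):

    from sbuild.immutable import make_immutable

    frozen = make_immutable({'flags': ['-O2', '-static'], 'deps': {'zlib'}})
    # frozen is a frozendict; frozen['flags'] becomes the tuple ('-O2', '-static')
    # and frozen['deps'] becomes frozenset(['zlib']), so the whole value is hashable.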
sbuild/sbuild/importer.py  (new file, 19 lines added)
@@ -0,0 +1,19 @@
import os
import imp
import glob


def import_modules_in_dir(directory):
    """Imports and returns all importable modules in the given directory."""
    assert os.path.isdir(directory)

    modules = {}

    # Match every .py file that doesn't start with an underscore.
    for path in glob.glob(os.path.join(directory, '[!_]*.py')):
        if not os.path.isfile(path):
            continue

        name, ext = os.path.splitext(os.path.basename(path))
        modules[name] = imp.load_source(name, path)  # TODO: importlib.machinery.SourceFileLoader on Py3

    return modules
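A sketch of the intended call pattern, assuming a hypothetical packages/ directory containing zlib.py and openssl.py:

    from sbuild.importer import import_modules_in_dir

    # Returns {'zlib': <module>, 'openssl': <module>}, keyed by file name.
    mods = import_modules_in_dir('packages')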
sbuild/sbuild/normalize.py  (new file, 72 lines added)
@@ -0,0 +1,72 @@
from .frozendict import frozendict
from .immutable import make_immutable


# Required attributes (all strings)
REQUIRED_ATTRS = ['name', 'platform', 'architecture', 'version']

# Format: (name, constructor for default)
OPTIONAL_ATTRS = [
    ('dependencies', lambda: ()),
    ('dev_dependencies', lambda: ()),
    ('flags', lambda: frozendict()),
]

# Format: (name, required)
MODULE_FUNCTIONS = [
    ('fetch', True),
    ('prepare', False),
    ('build', True),
    ('finish', False),
]

VALID_PLATFORMS = ['linux', 'darwin', 'windows']
VALID_ARCHITECTURES = ['x86', 'x86_64', 'arm']


class InvalidModuleError(Exception):
    pass


def normalize_module(name, module):
    missing = object()

    ret = {}

    for attr in REQUIRED_ATTRS:
        val = getattr(module, attr, missing)
        if val is missing:
            raise InvalidModuleError("package '%s' missing attribute '%s'" % (
                name, attr))
        if not isinstance(val, str):
            raise InvalidModuleError("package '%s' has attribute '%s' that "
                                     "is not a string" % (name, attr))

        ret[attr] = val

    for (attr, ctor) in OPTIONAL_ATTRS:
        val = getattr(module, attr, missing)
        if val is missing:
            val = ctor()

        ret[attr] = make_immutable(val)

    for (fname, required) in MODULE_FUNCTIONS:
        val = getattr(module, fname, missing)
        if val is missing:
            if required:
                raise InvalidModuleError("package '%s' is missing required "
                                         "function '%s'" % (name, fname))
            # Optional function that isn't defined; just skip it.
            continue

        if not hasattr(val, '__call__'):
            raise InvalidModuleError("package '%s' has function '%s' that is "
                                     "not callable" % (name, fname))

        ret[fname] = val

    if ret['platform'] not in VALID_PLATFORMS:
        raise InvalidModuleError("package '%s' has invalid platform '%s'" % (
            name, ret['platform']))

    if ret['architecture'] not in VALID_ARCHITECTURES:
        raise InvalidModuleError("package '%s' has invalid architecture '%s'" % (
            name, ret['architecture']))

    return frozendict(ret)
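Given these rules, a package definition module would presumably look something like the following sketch (entirely hypothetical; no real package modules are included in this commit):

    # packages/zlib.py (hypothetical example)
    name = 'zlib'
    platform = 'linux'
    architecture = 'x86_64'
    version = '1.2.8'

    dependencies = []
    dev_dependencies = ['build-essential']

    def fetch(ctx):
        pass  # download/unpack sources into ctx.build_dir_for('zlib')

    def build(ctx):
        pass  # run the actual static build

normalize_module would turn this into a frozendict with the dependency lists made immutable and the optional prepare/finish functions simply absent.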
sbuild/sbuild/sbuild.py  (new file, 123 lines added)
@@ -0,0 +1,123 @@
import os
import sys
import shutil
import tempfile
import subprocess
from collections import defaultdict

from .immutable import make_immutable
from .importer import import_modules_in_dir
from .normalize import InvalidModuleError, normalize_module
from .toposort import CycleException, topological_sort


class BuildContext(object):
    def __init__(self):
        self.__build_dirs = {}
        self.__flags = defaultdict(lambda: [])

    def build_dir_for(self, package):
        if package in self.__build_dirs:
            return self.__build_dirs[package]

        d = tempfile.mkdtemp()
        self.__build_dirs[package] = d
        return d

    @property
    def flags(self):
        return make_immutable(self.__flags)

    def add_flags(self, input):
        for k, v in input.items():
            self.__flags[k].extend(v)

    def cleanup(self):
        # Remove the temporary build directories (the dict maps package
        # names to directory paths, so iterate the values).
        for dname in self.__build_dirs.values():
            shutil.rmtree(dname, ignore_errors=True)


class PackageBuilder(object):
    def __init__(self, output_dir, package_dir=None):
        self.output_dir = output_dir
        self.package_dir = package_dir
        if self.package_dir is None:
            self.package_dir = os.path.join(os.path.dirname(__file__), 'packages')

    def import_packages(self):
        # Import all modules
        mods = import_modules_in_dir(self.package_dir)

        # Build the dict of package name --> normalized package.
        self.packages = {}
        for name, mod in mods.items():
            normalized = normalize_module(name, mod)
            self.packages[normalized['name']] = normalized

    def build(self, package):
        build_order = self._get_build_order(package)

        # Get modules and run the builds.
        ctx = BuildContext()
        try:
            mods = tuple(self.packages[x] for x in build_order)
            self._build_all(mods, ctx)
        finally:
            ctx.cleanup()

    def _build_all(self, mods, ctx):
        # Collect and install all dev dependencies.  Each mod is a normalized
        # (frozendict) package, so use item access rather than attributes.
        deps = []
        for mod in mods:
            deps.extend(mod['dev_dependencies'])

        # Install them.
        if deps:
            subprocess.check_call(['apt-get', 'install'] + deps)

        # Fetch
        for mod in mods:
            mod['fetch'](ctx)

        # Prepare (optional)
        for mod in mods:
            prepare = mod.get('prepare')
            if prepare is not None:
                prepare(ctx)

        # Build
        for mod in mods:
            # Build this module.
            mod['build'](ctx)

            # Append the flags to the context's flags.
            ctx.add_flags(mod['flags'])

    def _get_build_order(self, package):
        # Find the module that exposes this package.
        module = self.packages.get(package)
        if not module:
            raise Exception("package '%s' not found" % (package,))

        # Recursively collect dependency 'edges' - i.e. a tuple (A, B) that
        # indicates that package B depends on package A, so A must come first.
        edges = self._get_dependency_edges(package)

        # A package with no dependencies has no edges; it is its own build order.
        if not edges:
            return [package]

        # Topologically sort all dependencies.
        return topological_sort(edges)

    def _get_dependency_edges(self, package):
        edges = []

        def recurse(current, path):
            if current in path:
                raise Exception("recursive dependency cycle detected: %r" % (
                    path + [current],))

            # Add edges for all dependencies, then recurse to them.
            for dep in self.packages[current]['dependencies']:
                edges.append((dep, current))
                recurse(dep, path + [current])

        try:
            recurse(package, [])
        except KeyError as e:
            raise Exception("dependency %r does not exist" % (e.args[0],))

        return edges
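A rough sketch of how PackageBuilder is presumably meant to be driven (the output path and package name are hypothetical; this commit includes no CLI entry point):

    from sbuild.sbuild import PackageBuilder

    builder = PackageBuilder('/tmp/out')   # package_dir defaults to sbuild/packages/
    builder.import_packages()              # import and normalize packages/*.py
    builder.build('zlib')                  # builds zlib's dependencies first, then zlib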
sbuild/sbuild/toposort.py  (new file, 37 lines added)
@@ -0,0 +1,37 @@
class CycleException(ValueError):
    pass


def topological_sort(edge_list):
    # edge_set is consumed, need a copy
    edge_set = set([tuple(i) for i in edge_list])

    # node_list will contain the ordered nodes
    node_list = list()

    # source_set is the set of nodes with no incoming edges
    node_from_list, node_to_list = zip(*edge_set)
    source_set = set(node_from_list) - set(node_to_list)

    while len(source_set) != 0:
        # Pop node_from off source_set and insert it in node_list
        node_from = source_set.pop()
        node_list.append(node_from)

        # Find nodes which have a common edge with node_from
        from_selection = [e for e in edge_set if e[0] == node_from]
        for edge in from_selection:
            # Remove the edge from the graph
            node_to = edge[1]
            edge_set.discard(edge)

            # If node_to doesn't have any remaining incoming edges...
            to_selection = [e for e in edge_set if e[1] == node_to]
            if len(to_selection) == 0:
                # ... add node_to to source_set
                source_set.add(node_to)

    if len(edge_set) != 0:
        raise CycleException(edge_set)

    return node_list
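For illustration, here is roughly how topological_sort behaves on a small edge list, where an edge (A, B) means A must be ordered before B (the package names are made up):

    from sbuild.toposort import topological_sort, CycleException

    edges = [('zlib', 'curl'), ('openssl', 'curl')]
    topological_sort(edges)    # ['zlib', 'openssl', 'curl'] or ['openssl', 'zlib', 'curl']

    try:
        topological_sort([('a', 'b'), ('b', 'a')])
    except CycleException:
        pass                   # a cycle leaves unresolved edges behind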
sbuild/tests/test_dependencies.py  (new file, 57 lines added)
@@ -0,0 +1,57 @@
import os
import sys
import unittest


sys.path.insert(0, os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..')
))
from sbuild import sbuild


class TestDependencyResolution(unittest.TestCase):
    def setUp(self):
        self.b = sbuild.PackageBuilder('/tmp')

    def test_simple_dependencies(self):
        self.b.packages = {
            'one': {
                'name': 'one',
                'dependencies': ['two'],
            },
            'two': {
                'name': 'two',
                'dependencies': [],
            },
        }

        assert self.b._get_build_order('one') == ['two', 'one']

    def test_multiple_dependencies(self):
        self.b.packages = {
            'one': {
                'name': 'one',
                'dependencies': ['two', 'three'],
            },
            'two': {'name': 'two', 'dependencies': []},
            'three': {'name': 'three', 'dependencies': []},
        }

        # 'two' and 'three' are independent, so their relative order depends
        # on set pop order; only require that both precede 'one'.
        order = self.b._get_build_order('one')
        assert order[-1] == 'one'
        assert set(order) == set(['one', 'two', 'three'])

    def test_recursive_dependencies(self):
        self.b.packages = {
            'one': {
                'name': 'one',
                'dependencies': ['two'],
            },
            'two': {
                'name': 'two',
                'dependencies': ['three'],
            },
            'three': {
                'name': 'three',
                'dependencies': [],
            },
        }

        assert self.b._get_build_order('one') == ['three', 'two', 'one']
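One way to run this test file, assuming pytest is available (it is not referenced anywhere in this commit), is from the sbuild/ directory:

    pytest tests/test_dependencies.py

pytest collects the unittest.TestCase above; running the file directly with python would do nothing, since it contains no unittest.main() call.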