M7350v1_en_gpl

This commit is contained in:
T
2024-09-09 08:52:07 +00:00
commit f9cc65cfda
65988 changed files with 26357421 additions and 0 deletions


@@ -0,0 +1,23 @@
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := fs_config.c
LOCAL_MODULE := fs_config
LOCAL_FORCE_STATIC_EXECUTABLE := true
LOCAL_MODULE_TAGS := eng
include $(BUILD_HOST_EXECUTABLE)


@@ -0,0 +1,16 @@
ACLOCAL_AMFLAGS = -I m4
AM_CPPFLAGS := \
-I. \
-I$(WORKSPACE)/system/core/include
c_sources := fs_config.c
bin_PROGRAMS := fs_config
fs_config_SOURCES := $(c_sources)
fs_config_CPPFLAGS := $(AM_CPPFLAGS)
fs_config_LDFLAGS := -all-static


@@ -0,0 +1,19 @@
AC_INIT([fsconfig-native],
1.0.0)
AM_INIT_AUTOMAKE([foreign])
AC_PROG_CC
AM_PROG_CC_C_O
AC_PROG_CPP
AC_SUBST([CUTILS_CFLAGS])
AC_SUBST([CUTILS_LIBS])
AM_CONDITIONAL(USE_GLIB, test "x${with_glib}" = "xyes")
AC_CONFIG_FILES([ \
Makefile \
])
AC_OUTPUT


@@ -0,0 +1,69 @@
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <stdio.h>
#include <sys/stat.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "private/android_filesystem_config.h"
// This program takes a list of files and directories (indicated by a
// trailing slash) on the stdin, and prints to stdout each input
// filename along with its desired uid, gid, and mode (in octal).
// The leading slash should be stripped from the input.
//
// Example input:
//
// system/etc/dbus.conf
// data/app/
//
// Output:
//
// system/etc/dbus.conf 1002 1002 440
// data/app 1000 1000 771
//
// Note that the output will omit the trailing slash from
// directories.
int main(int argc, char** argv) {
  char buffer[1024];

  while (fgets(buffer, 1023, stdin) != NULL) {
    int is_dir = 0;
    int i;
    for (i = 0; i < 1024 && buffer[i]; ++i) {
      switch (buffer[i]) {
        case '\n':
          buffer[i-is_dir] = '\0';
          i = 1025;
          break;
        case '/':
          is_dir = 1;
          break;
        default:
          is_dir = 0;
          break;
      }
    }

    unsigned uid = 0, gid = 0, mode = 0;
    fs_config(buffer, is_dir, &uid, &gid, &mode);
    printf("%s %d %d %o\n", buffer, uid, gid, mode);
  }
  return 0;
}
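/*
 * A hypothetical usage sketch (not part of the original file), reusing the
 * example paths from the comment above:
 *
 *   $ printf 'system/etc/dbus.conf\ndata/app/\n' | fs_config
 *   system/etc/dbus.conf 1002 1002 440
 *   data/app 1000 1000 771
 */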


@@ -0,0 +1,864 @@
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import errno
import getopt
import getpass
import imp
import os
import platform
import re
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import zipfile
try:
from hashlib import sha1 as sha1
except ImportError:
from sha import sha as sha1
# missing in Python 2.4 and before
if not hasattr(os, "SEEK_SET"):
os.SEEK_SET = 0
class Options(object): pass
OPTIONS = Options()
OPTIONS.search_path = "out/host/linux-x86"
OPTIONS.verbose = False
OPTIONS.tempfiles = []
OPTIONS.device_specific = None
OPTIONS.extras = {}
OPTIONS.info_dict = None
# Values for "certificate" in apkcerts that mean special things.
SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
class ExternalError(RuntimeError): pass
def Run(args, **kwargs):
"""Create and return a subprocess.Popen object, printing the command
line on the terminal if -v was specified."""
if OPTIONS.verbose:
print " running: ", " ".join(args)
return subprocess.Popen(args, **kwargs)
def CloseInheritedPipes():
""" Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
before doing other work."""
if platform.system() != "Darwin":
return
for d in range(3, 1025):
try:
stat = os.fstat(d)
if stat is not None:
pipebit = stat[0] & 0x1000
if pipebit != 0:
os.close(d)
except OSError:
pass
def LoadInfoDict(zip, type):
"""Read and parse the META/misc_info.txt key/value pairs from the
input target files and return a dict."""
d = {}
try:
for line in zip.read("META/misc_info.txt").split("\n"):
line = line.strip()
if not line or line.startswith("#"): continue
k, v = line.split("=", 1)
d[k] = v
except KeyError:
# ok if misc_info.txt doesn't exist
pass
# backwards compatibility: These values used to be in their own
# files. Look for them, in case we're processing an old
# target_files zip.
if "mkyaffs2_extra_flags" not in d:
try:
d["mkyaffs2_extra_flags"] = zip.read("META/mkyaffs2-extra-flags.txt").strip()
except KeyError:
# ok if flags don't exist
pass
if "recovery_api_version" not in d:
try:
d["recovery_api_version"] = zip.read("META/recovery-api-version.txt").strip()
except KeyError:
raise ValueError("can't find recovery API version in input target-files")
if "tool_extensions" not in d:
try:
d["tool_extensions"] = zip.read("META/tool-extensions.txt").strip()
except KeyError:
# ok if extensions don't exist
pass
try:
data = zip.read("META/imagesizes.txt")
for line in data.split("\n"):
if not line: continue
name, value = line.split(" ", 1)
if not value: continue
if name == "blocksize":
d[name] = value
else:
d[name + "_size"] = value
except KeyError:
pass
def makeint(key):
if key in d:
d[key] = int(d[key], 0)
makeint("recovery_api_version")
makeint("blocksize")
makeint("system_size")
makeint("userdata_size")
makeint("recovery_size")
makeint("boot_size")
d["fstab"] = LoadRecoveryFSTab(zip, type)
return d
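# A hypothetical META/misc_info.txt fragment (not taken from this target-files)
# and what LoadInfoDict derives from it:
#
#   recovery_api_version=3
#   blocksize=131072
#   system_size=268435456
#   mkyaffs2_extra_flags=-c 2048 -s 64
#
# recovery_api_version, blocksize and system_size come back as ints via
# makeint; the mkyaffs2 flags stay a plain string.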
def LoadRecoveryFSTab(zip, type):
class Partition(object):
pass
try:
if type == 'MTD':
data = zip.read("RECOVERY/recovery.fstab")
elif type == 'MMC':
data = zip.read("RECOVERY/RAMDISK/etc/recovery_mmc.fstab")
except KeyError:
print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab in %s." % zip
data = ""
d = {}
for line in data.split("\n"):
line = line.strip()
if not line or line.startswith("#"): continue
pieces = line.split()
if not (3 <= len(pieces) <= 4):
raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
p = Partition()
p.mount_point = pieces[0]
p.fs_type = pieces[1]
p.device = pieces[2]
p.length = 0
options = None
if len(pieces) >= 4:
if pieces[3].startswith("/"):
p.device2 = pieces[3]
if len(pieces) >= 5:
options = pieces[4]
else:
p.device2 = None
options = pieces[3]
else:
p.device2 = None
if options:
options = options.split(",")
for i in options:
if i.startswith("length="):
p.length = int(i[7:])
else:
print "%s: unknown option \"%s\"" % (p.mount_point, i)
d[p.mount_point] = p
return d
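# A hypothetical recovery fstab fragment in the 3-to-5 field layout parsed
# above (mount_point  fs_type  device  [device2 | options]  [options]); the
# device names are placeholders, not taken from this build:
#
#   /boot      mtd     boot
#   /system    yaffs2  system
#   /cache     yaffs2  cache
#   /data      yaffs2  userdata  length=-16384
#
# The last line sets p.length = -16384 for /data; any other option would only
# trigger the "unknown option" warning.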
def DumpInfoDict(d):
for k, v in sorted(d.items()):
print "%-25s = (%s) %s" % (k, type(v).__name__, v)
def BuildBootableImage(sourcedir):
"""Take a kernel, cmdline, and ramdisk directory from the input (in
'sourcedir'), and turn them into a boot image. Return the image
data, or None if sourcedir does not appear to contain files for
building the requested image."""
if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
return None
ramdisk_img = tempfile.NamedTemporaryFile()
img = tempfile.NamedTemporaryFile()
p1 = Run(["mkbootfs", os.path.join(sourcedir, "RAMDISK")],
stdout=subprocess.PIPE)
p2 = Run(["minigzip"],
stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
p2.wait()
p1.wait()
assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (os.path.basename(sourcedir),)
assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (os.path.basename(sourcedir),)
cmd = ["mkbootimg", "--kernel", os.path.join(sourcedir, "kernel")]
fn = os.path.join(sourcedir, "cmdline")
if os.access(fn, os.F_OK):
cmd.append("--cmdline")
cmd.append(open(fn).read().rstrip("\n"))
fn = os.path.join(sourcedir, "base")
if os.access(fn, os.F_OK):
cmd.append("--base")
cmd.append(open(fn).read().rstrip("\n"))
fn = os.path.join(sourcedir, "pagesize")
if os.access(fn, os.F_OK):
cmd.append("--pagesize")
cmd.append(open(fn).read().rstrip("\n"))
pagesize = open(fn).read().rstrip("\n")
else:
pagesize = 2048
cmd.extend(["--ramdisk", ramdisk_img.name,
"--output", img.name])
p = Run(cmd, stdout=subprocess.PIPE)
p.communicate()
assert p.returncode == 0, "mkbootimg of %s image failed" % (
os.path.basename(sourcedir),)
fn = os.path.join(sourcedir, "sign-key")
if os.access(fn, os.F_OK):
# Signature key found
# Get SHA256 of raw image
sha256 = tempfile.NamedTemporaryFile()
p = Run(["openssl", "dgst", "-sha256", "-binary", img.name],
stdout=sha256)
p.communicate()
# Create signature
signature = tempfile.NamedTemporaryFile()
p = Run(["openssl", "rsautl", "-sign", "-in", sha256.name, "-inkey",
os.path.join(sourcedir, "sign-key"), "-out", signature.name],
stdout=subprocess.PIPE)
p.communicate()
# Create padding of pagesize
signature_pad = tempfile.NamedTemporaryFile()
p = Run(["dd", "if=/dev/zero", "of="+signature_pad.name, "bs="+pagesize, "count=1"],
stdout=subprocess.PIPE)
p.communicate()
# Add Signature.
p = Run(["dd", "if="+signature.name, "of="+signature_pad.name, "conv=notrunc"],
stdout=subprocess.PIPE)
p.communicate()
signedimg = tempfile.NamedTemporaryFile()
p = Run(["cat", img.name, signature_pad.name],
stdout=signedimg)
p.communicate()
img.close()
img = signedimg
# Close all files
sha256.close()
signature.close()
signature_pad.close()
img.seek(0, os.SEEK_SET)
data = img.read()
ramdisk_img.close()
img.close()
return data
def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir):
"""Return a File object (with name 'name') with the desired bootable
image. Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
'prebuilt_name', otherwise construct it from the source files in
'unpack_dir'/'tree_subdir'."""
prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
if os.path.exists(prebuilt_path):
print "using prebuilt %s..." % (prebuilt_name,)
return File.FromLocalFile(name, prebuilt_path)
else:
print "building image from target_files %s..." % (tree_subdir,)
return File(name, BuildBootableImage(os.path.join(unpack_dir, tree_subdir)))
def UnzipTemp(filename, pattern=None):
"""Unzip the given archive into a temporary directory and return the name.
If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
main file), open for reading.
"""
tmp = tempfile.mkdtemp(prefix="targetfiles-")
OPTIONS.tempfiles.append(tmp)
def unzip_to_dir(filename, dirname):
cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
if pattern is not None:
cmd.append(pattern)
p = Run(cmd, stdout=subprocess.PIPE)
p.communicate()
if p.returncode != 0:
raise ExternalError("failed to unzip input target-files \"%s\"" %
(filename,))
m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
if m:
unzip_to_dir(m.group(1), tmp)
unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
filename = m.group(1)
else:
unzip_to_dir(filename, tmp)
return tmp, zipfile.ZipFile(filename, "r")
def GetKeyPasswords(keylist):
"""Given a list of keys, prompt the user to enter passwords for
those which require them. Return a {key: password} dict. password
will be None if the key has no password."""
no_passwords = []
need_passwords = []
devnull = open("/dev/null", "w+b")
for k in sorted(keylist):
# We don't need a password for things that aren't really keys.
if k in SPECIAL_CERT_STRINGS:
no_passwords.append(k)
continue
p = Run(["openssl", "pkcs8", "-in", k+".pk8",
"-inform", "DER", "-nocrypt"],
stdin=devnull.fileno(),
stdout=devnull.fileno(),
stderr=subprocess.STDOUT)
p.communicate()
if p.returncode == 0:
no_passwords.append(k)
else:
need_passwords.append(k)
devnull.close()
key_passwords = PasswordManager().GetPasswords(need_passwords)
key_passwords.update(dict.fromkeys(no_passwords, None))
return key_passwords
def SignFile(input_name, output_name, key, password, align=None,
whole_file=False):
"""Sign the input_name zip/jar/apk, producing output_name. Use the
given key and password (the latter may be None if the key does not
have a password).
If align is an integer > 1, zipalign is run to align stored files in
the output zip on 'align'-byte boundaries.
If whole_file is true, use the "-w" option to SignApk to embed a
signature that covers the whole file in the archive comment of the
zip file.
"""
if align == 0 or align == 1:
align = None
if align:
temp = tempfile.NamedTemporaryFile()
sign_name = temp.name
else:
sign_name = output_name
cmd = ["java", "-Xmx2048m", "-jar",
os.path.join(OPTIONS.search_path, "framework", "signapk.jar")]
if whole_file:
cmd.append("-w")
cmd.extend([key + ".x509.pem", key + ".pk8",
input_name, sign_name])
p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
if password is not None:
password += "\n"
p.communicate(password)
if p.returncode != 0:
raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
if align:
p = Run(["zipalign", "-f", str(align), sign_name, output_name])
p.communicate()
if p.returncode != 0:
raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
temp.close()
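# With whole_file=True and align=4, the two external commands built above are
# equivalent to (paths and key names are illustrative only):
#
#   java -Xmx2048m -jar out/host/linux-x86/framework/signapk.jar -w \
#       testkey.x509.pem testkey.pk8 input.zip <tempfile>
#   zipalign -f 4 <tempfile> output.zip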
def CheckSize(data, target, info_dict):
"""Check the data string passed against the max size limit, if
any, for the given target. Raise exception if the data is too big.
Print a warning if the data is nearing the maximum size."""
if target.endswith(".img"): target = target[:-4]
mount_point = "/" + target
if info_dict["fstab"]:
if mount_point == "/userdata": mount_point = "/data"
p = info_dict["fstab"][mount_point]
fs_type = p.fs_type
limit = info_dict.get(p.device + "_size", None)
if not fs_type or not limit: return
if fs_type == "yaffs2":
# image size should be increased by 1/64th to account for the
# spare area (64 bytes per 2k page)
limit = limit / 2048 * (2048+64)
size = len(data)
pct = float(size) * 100.0 / limit
msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
if pct >= 99.0:
raise ExternalError(msg)
elif pct >= 95.0:
print
print " WARNING: ", msg
print
elif OPTIONS.verbose:
print " ", msg
def ReadApkCerts(tf_zip):
"""Given a target_files ZipFile, parse the META/apkcerts.txt file
and return a {package: cert} dict."""
certmap = {}
for line in tf_zip.read("META/apkcerts.txt").split("\n"):
line = line.strip()
if not line: continue
m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
r'private_key="(.*)"$', line)
if m:
name, cert, privkey = m.groups()
if cert in SPECIAL_CERT_STRINGS and not privkey:
certmap[name] = cert
elif (cert.endswith(".x509.pem") and
privkey.endswith(".pk8") and
cert[:-9] == privkey[:-4]):
certmap[name] = cert[:-9]
else:
raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
return certmap
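# A hypothetical apkcerts.txt line in the format matched above (key paths are
# placeholders):
#
#   name="Example.apk" certificate="build/target/product/security/platform.x509.pem" private_key="build/target/product/security/platform.pk8"
#
# The stems match, so certmap["Example.apk"] becomes
# "build/target/product/security/platform"; a PRESIGNED certificate with an
# empty private_key is stored verbatim instead.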
COMMON_DOCSTRING = """
-p (--path) <dir>
Prepend <dir>/bin to the list of places to search for binaries
run by this script, and expect to find jars in <dir>/framework.
-s (--device_specific) <file>
Path to the python module containing device-specific
releasetools code.
-x (--extra) <key=value>
Add a key/value pair to the 'extras' dict, which device-specific
extension code may look at.
-v (--verbose)
Show command lines being executed.
-h (--help)
Display this usage message and exit.
"""
def Usage(docstring):
print docstring.rstrip("\n")
print COMMON_DOCSTRING
def ParseOptions(argv,
docstring,
extra_opts="", extra_long_opts=(),
extra_option_handler=None):
"""Parse the options in argv and return any arguments that aren't
flags. docstring is the calling module's docstring, to be displayed
for errors and -h. extra_opts and extra_long_opts are for flags
defined by the caller, which are processed by passing them to
extra_option_handler."""
try:
opts, args = getopt.getopt(
argv, "hvp:s:x:" + extra_opts,
["help", "verbose", "path=", "device_specific=", "extra="] +
list(extra_long_opts))
except getopt.GetoptError, err:
Usage(docstring)
print "**", str(err), "**"
sys.exit(2)
path_specified = False
for o, a in opts:
if o in ("-h", "--help"):
Usage(docstring)
sys.exit()
elif o in ("-v", "--verbose"):
OPTIONS.verbose = True
elif o in ("-p", "--path"):
OPTIONS.search_path = a
elif o in ("-s", "--device_specific"):
OPTIONS.device_specific = a
elif o in ("-x", "--extra"):
key, value = a.split("=", 1)
OPTIONS.extras[key] = value
else:
if extra_option_handler is None or not extra_option_handler(o, a):
assert False, "unknown option \"%s\"" % (o,)
os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
os.pathsep + os.environ["PATH"])
return args
def Cleanup():
for i in OPTIONS.tempfiles:
if os.path.isdir(i):
shutil.rmtree(i)
else:
os.remove(i)
class PasswordManager(object):
def __init__(self):
self.editor = os.getenv("EDITOR", None)
self.pwfile = os.getenv("ANDROID_PW_FILE", None)
def GetPasswords(self, items):
"""Get passwords corresponding to each string in 'items',
returning a dict. (The dict may have keys in addition to the
values in 'items'.)
Uses the passwords in $ANDROID_PW_FILE if available, letting the
user edit that file to add more needed passwords. If no editor is
available, or $ANDROID_PW_FILE isn't defined, prompts the user
interactively in the ordinary way.
"""
current = self.ReadFile()
first = True
while True:
missing = []
for i in items:
if i not in current or not current[i]:
missing.append(i)
# Are all the passwords already in the file?
if not missing: return current
for i in missing:
current[i] = ""
if not first:
print "key file %s still missing some passwords." % (self.pwfile,)
answer = raw_input("try to edit again? [y]> ").strip()
if answer and answer[0] not in 'yY':
raise RuntimeError("key passwords unavailable")
first = False
current = self.UpdateAndReadFile(current)
def PromptResult(self, current):
"""Prompt the user to enter a value (password) for each key in
'current' whose value is false. Returns a new dict with all the
values.
"""
result = {}
for k, v in sorted(current.iteritems()):
if v:
result[k] = v
else:
while True:
result[k] = getpass.getpass("Enter password for %s key> "
% (k,)).strip()
if result[k]: break
return result
def UpdateAndReadFile(self, current):
if not self.editor or not self.pwfile:
return self.PromptResult(current)
f = open(self.pwfile, "w")
os.chmod(self.pwfile, 0600)
f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
f.write("# (Additional spaces are harmless.)\n\n")
first_line = None
sorted = [(not v, k, v) for (k, v) in current.iteritems()]
sorted.sort()
for i, (_, k, v) in enumerate(sorted):
f.write("[[[ %s ]]] %s\n" % (v, k))
if not v and first_line is None:
# position cursor on first line with no password.
first_line = i + 4
f.close()
p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
_, _ = p.communicate()
return self.ReadFile()
def ReadFile(self):
result = {}
if self.pwfile is None: return result
try:
f = open(self.pwfile, "r")
for line in f:
line = line.strip()
if not line or line[0] == '#': continue
m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
if not m:
print "failed to parse password file: ", line
else:
result[m.group(2)] = m.group(1)
f.close()
except IOError, e:
if e.errno != errno.ENOENT:
print "error reading password file: ", str(e)
return result
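# A hypothetical $ANDROID_PW_FILE entry in the format matched above (the key
# name and password are placeholders):
#
#   [[[ swordfish ]]] build/target/product/security/testkey
#
# ReadFile would return {"build/target/product/security/testkey": "swordfish"}.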
def ZipWriteStr(zip, filename, data, perms=0644):
# use a fixed timestamp so the output is repeatable.
zinfo = zipfile.ZipInfo(filename=filename,
date_time=(2009, 1, 1, 0, 0, 0))
zinfo.compress_type = zip.compression
zinfo.external_attr = perms << 16
zip.writestr(zinfo, data)
class DeviceSpecificParams(object):
module = None
def __init__(self, **kwargs):
"""Keyword arguments to the constructor become attributes of this
object, which is passed to all functions in the device-specific
module."""
for k, v in kwargs.iteritems():
setattr(self, k, v)
self.extras = OPTIONS.extras
if self.module is None:
path = OPTIONS.device_specific
if not path: return
try:
if os.path.isdir(path):
info = imp.find_module("releasetools", [path])
else:
d, f = os.path.split(path)
b, x = os.path.splitext(f)
if x == ".py":
f = b
info = imp.find_module(f, [d])
self.module = imp.load_module("device_specific", *info)
except ImportError:
print "unable to load device-specific module; assuming none"
def _DoCall(self, function_name, *args, **kwargs):
"""Call the named function in the device-specific module, passing
the given args and kwargs. The first argument to the call will be
the DeviceSpecific object itself. If there is no module, or the
module does not define the function, return the value of the
'default' kwarg (which itself defaults to None)."""
if self.module is None or not hasattr(self.module, function_name):
return kwargs.get("default", None)
return getattr(self.module, function_name)(*((self,) + args), **kwargs)
def FullOTA_Assertions(self):
"""Called after emitting the block of assertions at the top of a
full OTA package. Implementations can add whatever additional
assertions they like."""
return self._DoCall("FullOTA_Assertions")
def FullOTA_InstallEnd(self):
"""Called at the end of full OTA installation; typically this is
used to install the image for the device's baseband processor."""
return self._DoCall("FullOTA_InstallEnd")
def IncrementalOTA_Assertions(self):
"""Called after emitting the block of assertions at the top of an
incremental OTA package. Implementations can add whatever
additional assertions they like."""
return self._DoCall("IncrementalOTA_Assertions")
def IncrementalOTA_VerifyEnd(self):
"""Called at the end of the verification phase of incremental OTA
installation; additional checks can be placed here to abort the
script before any changes are made."""
return self._DoCall("IncrementalOTA_VerifyEnd")
def IncrementalOTA_InstallEnd(self):
"""Called at the end of incremental OTA installation; typically
this is used to install the image for the device's baseband
processor."""
return self._DoCall("IncrementalOTA_InstallEnd")
class File(object):
def __init__(self, name, data):
self.name = name
self.data = data
self.size = len(data)
self.sha1 = sha1(data).hexdigest()
@classmethod
def FromLocalFile(cls, name, diskname):
f = open(diskname, "rb")
data = f.read()
f.close()
return File(name, data)
def WriteToTemp(self):
t = tempfile.NamedTemporaryFile()
t.write(self.data)
t.flush()
return t
def AddToZip(self, z):
ZipWriteStr(z, self.name, self.data)
DIFF_PROGRAM_BY_EXT = {
".gz" : "imgdiff",
".zip" : ["imgdiff", "-z"],
".jar" : ["imgdiff", "-z"],
".apk" : ["imgdiff", "-z"],
".img" : "imgdiff",
}
class Difference(object):
def __init__(self, tf, sf):
self.tf = tf
self.sf = sf
self.patch = None
def ComputePatch(self):
"""Compute the patch (as a string of data) needed to turn sf into
tf. Returns the same tuple as GetPatch()."""
tf = self.tf
sf = self.sf
ext = os.path.splitext(tf.name)[1]
diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
ttemp = tf.WriteToTemp()
stemp = sf.WriteToTemp()
ext = os.path.splitext(tf.name)[1]
try:
ptemp = tempfile.NamedTemporaryFile()
if isinstance(diff_program, list):
cmd = copy.copy(diff_program)
else:
cmd = [diff_program]
cmd.append(stemp.name)
cmd.append(ttemp.name)
cmd.append(ptemp.name)
p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, err = p.communicate()
if err or p.returncode != 0:
print "WARNING: failure running %s:\n%s\n" % (diff_program, err)
return None
diff = ptemp.read()
finally:
ptemp.close()
stemp.close()
ttemp.close()
self.patch = diff
return self.tf, self.sf, self.patch
def GetPatch(self):
"""Return a tuple (target_file, source_file, patch_data).
patch_data may be None if ComputePatch hasn't been called, or if
computing the patch failed."""
return self.tf, self.sf, self.patch
def ComputeDifferences(diffs):
"""Call ComputePatch on all the Difference objects in 'diffs'."""
print len(diffs), "diffs to compute"
# Do the largest files first, to try and reduce the long-pole effect.
by_size = [(i.tf.size, i) for i in diffs]
by_size.sort(reverse=True)
by_size = [i[1] for i in by_size]
lock = threading.Lock()
diff_iter = iter(by_size) # accessed under lock
def worker():
try:
lock.acquire()
for d in diff_iter:
lock.release()
start = time.time()
d.ComputePatch()
dur = time.time() - start
lock.acquire()
tf, sf, patch = d.GetPatch()
if sf.name == tf.name:
name = tf.name
else:
name = "%s (%s)" % (tf.name, sf.name)
if patch is None:
print "patching failed! %s" % (name,)
else:
print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
lock.release()
except Exception, e:
print e
raise
# start worker threads; wait for them all to finish.
threads = [threading.Thread(target=worker)
for i in range(OPTIONS.worker_threads)]
for th in threads:
th.start()
while threads:
threads.pop().join()
# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = { "yaffs2": "MTD", "mtd": "MTD",
"ext4": "EMMC", "emmc": "EMMC" }
def GetTypeAndDevice(mount_point, info):
fstab = info["fstab"]
if fstab:
return PARTITION_TYPES[fstab[mount_point].fs_type], fstab[mount_point].device
else:
return None


@@ -0,0 +1,278 @@
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import common
class EdifyGenerator(object):
"""Class to generate scripts in the 'edify' recovery script language
used from donut onwards."""
def __init__(self, version, info):
self.script = []
self.mounts = set()
self.version = version
self.info = info
def MakeTemporary(self):
"""Make a temporary script object whose commands can latter be
appended to the parent script with AppendScript(). Used when the
caller wants to generate script commands out-of-order."""
x = EdifyGenerator(self.version, self.info)
x.mounts = self.mounts
return x
@staticmethod
def _WordWrap(cmd, linelen=80):
"""'cmd' should be a function call with null characters after each
parameter (eg, "somefun(foo,\0bar,\0baz)"). This function wraps cmd
to a given line length, replacing nulls with spaces and/or newlines
to format it nicely."""
indent = cmd.index("(")+1
out = []
first = True
x = re.compile("^(.{,%d})\0" % (linelen-indent,))
while True:
if not first:
out.append(" " * indent)
first = False
m = x.search(cmd)
if not m:
parts = cmd.split("\0", 1)
out.append(parts[0]+"\n")
if len(parts) == 1:
break
else:
cmd = parts[1]
continue
out.append(m.group(1)+"\n")
cmd = cmd[m.end():]
return "".join(out).replace("\0", " ").rstrip("\n")
def AppendScript(self, other):
"""Append the contents of another script (which should be created
with temporary=True) to this one."""
self.script.extend(other.script)
def AssertSomeFingerprint(self, *fp):
"""Assert that the current system build fingerprint is one of *fp."""
if not fp:
raise ValueError("must specify some fingerprints")
cmd = ('assert(' +
' ||\0'.join([('file_getprop("/system/build.prop", '
'"ro.build.fingerprint") == "%s"')
% i for i in fp]) +
');')
self.script.append(self._WordWrap(cmd))
def AssertOlderBuild(self, timestamp):
"""Assert that the build on the device is older (or the same as)
the given timestamp."""
self.script.append(('assert(!less_than_int(%s, '
'getprop("ro.build.date.utc")));') % (timestamp,))
def AssertDevice(self, device):
"""Assert that the device identifier is the given string."""
cmd = ('assert(getprop("ro.product.device") == "%s" ||\0'
'getprop("ro.build.product") == "%s");' % (device, device))
self.script.append(self._WordWrap(cmd))
def AssertSomeBootloader(self, *bootloaders):
"""Asert that the bootloader version is one of *bootloaders."""
cmd = ("assert(" +
" ||\0".join(['getprop("ro.bootloader") == "%s"' % (b,)
for b in bootloaders]) +
");")
self.script.append(self._WordWrap(cmd))
def ShowProgress(self, frac, dur):
"""Update the progress bar, advancing it over 'frac' over the next
'dur' seconds. 'dur' may be zero to advance it via SetProgress
commands instead of by time."""
self.script.append("show_progress(%f, %d);" % (frac, int(dur)))
def SetProgress(self, frac):
"""Set the position of the progress bar within the chunk defined
by the most recent ShowProgress call. 'frac' should be in
[0,1]."""
self.script.append("set_progress(%f);" % (frac,))
def PatchCheck(self, filename, *sha1):
"""Check that the given file (or MTD reference) has one of the
given *sha1 hashes, checking the version saved in cache if the
file does not match."""
self.script.append('assert(apply_patch_check("%s"' % (filename,) +
"".join([', "%s"' % (i,) for i in sha1]) +
'));')
def FileCheck(self, filename, *sha1):
"""Check that the given file (or MTD reference) has one of the
given *sha1 hashes."""
self.script.append('assert(sha1_check(read_file("%s")' % (filename,) +
"".join([', "%s"' % (i,) for i in sha1]) +
'));')
def CacheFreeSpaceCheck(self, amount):
"""Check that there's at least 'amount' space that can be made
available on /cache."""
self.script.append("assert(apply_patch_space(%d));" % (amount,))
def Mount(self, mount_point):
"""Mount the partition with the given mount_point."""
fstab = self.info.get("fstab", None)
if fstab:
p = fstab[mount_point]
self.script.append('mount("%s", "%s", "%s", "%s");' %
(p.fs_type, common.PARTITION_TYPES[p.fs_type],
p.device, p.mount_point))
self.mounts.add(p.mount_point)
def UnpackPackageDir(self, src, dst):
"""Unpack a given directory from the OTA package into the given
destination directory."""
self.script.append('package_extract_dir("%s", "%s");' % (src, dst))
def Comment(self, comment):
"""Write a comment into the update script."""
self.script.append("")
for i in comment.split("\n"):
self.script.append("# " + i)
self.script.append("")
def Print(self, message):
"""Log a message to the screen (if the logs are visible)."""
self.script.append('ui_print("%s");' % (message,))
def FormatPartition(self, partition):
"""Format the given partition, specified by its mount point (eg,
"/system")."""
reserve_size = 0
fstab = self.info.get("fstab", None)
if fstab:
p = fstab[partition]
self.script.append('format("%s", "%s", "%s", "%s");' %
(p.fs_type, common.PARTITION_TYPES[p.fs_type],
p.device, p.length))
def DeleteFiles(self, file_list):
"""Delete all files in file_list."""
if not file_list: return
cmd = "delete(" + ",\0".join(['"%s"' % (i,) for i in file_list]) + ");"
self.script.append(self._WordWrap(cmd))
def ApplyPatch(self, srcfile, tgtfile, tgtsize, tgtsha1, *patchpairs):
"""Apply binary patches (in *patchpairs) to the given srcfile to
produce tgtfile (which may be "-" to indicate overwriting the
source file."""
if len(patchpairs) % 2 != 0 or len(patchpairs) == 0:
raise ValueError("bad patches given to ApplyPatch")
cmd = ['apply_patch("%s",\0"%s",\0%s,\0%d'
% (srcfile, tgtfile, tgtsha1, tgtsize)]
for i in range(0, len(patchpairs), 2):
cmd.append(',\0%s, package_extract_file("%s")' % patchpairs[i:i+2])
cmd.append(');')
cmd = "".join(cmd)
self.script.append(self._WordWrap(cmd))
def WriteRawImage(self, mount_point, fn):
"""Write the given package file into the partition for the given
mount point."""
fstab = self.info["fstab"]
if fstab:
p = fstab[mount_point]
partition_type = common.PARTITION_TYPES[p.fs_type]
args = {'device': p.device, 'fn': fn}
if partition_type == "MTD":
self.script.append(
'write_raw_image(package_extract_file("%(fn)s"), "%(device)s");'
% args)
elif partition_type == "EMMC":
self.script.append(
'package_extract_file("%(fn)s", "%(device)s");' % args)
else:
raise ValueError("don't know how to write \"%s\" partitions" % (p.fs_type,))
def SetPermissions(self, fn, uid, gid, mode):
"""Set file ownership and permissions."""
self.script.append('set_perm(%d, %d, 0%o, "%s");' % (uid, gid, mode, fn))
def SetPermissionsRecursive(self, fn, uid, gid, dmode, fmode):
"""Recursively set path ownership and permissions."""
self.script.append('set_perm_recursive(%d, %d, 0%o, 0%o, "%s");'
% (uid, gid, dmode, fmode, fn))
def MakeSymlinks(self, symlink_list):
"""Create symlinks, given a list of (dest, link) pairs."""
by_dest = {}
for d, l in symlink_list:
by_dest.setdefault(d, []).append(l)
for dest, links in sorted(by_dest.iteritems()):
cmd = ('symlink("%s", ' % (dest,) +
",\0".join(['"' + i + '"' for i in sorted(links)]) + ");")
self.script.append(self._WordWrap(cmd))
def RetouchBinaries(self, file_list):
"""Execute the retouch instructions in files listed."""
cmd = ('retouch_binaries(' +
', '.join(['"' + i[0] + '", "' + i[1] + '"' for i in file_list]) +
');')
self.script.append(self._WordWrap(cmd))
def UndoRetouchBinaries(self, file_list):
"""Undo the retouching (retouch to zero offset)."""
cmd = ('undo_retouch_binaries(' +
', '.join(['"' + i[0] + '", "' + i[1] + '"' for i in file_list]) +
');')
self.script.append(self._WordWrap(cmd))
def AppendExtra(self, extra):
"""Append text verbatim to the output script."""
self.script.append(extra)
def UnmountAll(self):
for p in sorted(self.mounts):
self.script.append('unmount("%s");' % (p,))
self.mounts = set()
def AddToZip(self, input_zip, output_zip, fota, input_path=None):
"""Write the accumulated script to the output_zip file. input_zip
is used as the source for the 'updater' binary needed to run
script. If input_path is not None, it will be used as a local
path for the binary instead of input_zip."""
self.UnmountAll()
common.ZipWriteStr(output_zip, "META-INF/com/google/android/updater-script",
"\n".join(self.script) + "\n")
fval = int(fota)
if input_path is None:
data = input_zip.read("OTA/bin/updater")
if fval:
datadua = input_zip.read("OTA/bin/ipth_dua")
else:
data = open(os.path.join(input_path, "updater")).read()
if fval:
datadua = open(os.path.join(input_path, "ipth_dua")).read()
common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-binary",
data, perms=0755)
if fval:
common.ZipWriteStr(output_zip, "META-INF/com/google/android/ipth_dua",
datadua, perms=0755)
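# A hypothetical excerpt of an emitted updater-script, assuming a yaffs2
# "/system" fstab entry on device "system" and a (0, 0, 0755, 0644) permission
# subtree (all names here are placeholders, not from this build):
#
#   mount("yaffs2", "MTD", "system", "/system");
#   package_extract_dir("system", "/system");
#   set_perm_recursive(0, 0, 0755, 0644, "/system");
#   unmount("/system");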


@@ -0,0 +1,913 @@
#!/usr/bin/env python
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Given a target-files zipfile, produces an OTA package that installs
that build. An incremental OTA is produced if -i is given, otherwise
a full OTA is produced.
Usage: ota_from_target_files [flags] input_target_files output_ota_package
-b (--board_config) <file>
Deprecated.
-k (--package_key) <key> Key to use to sign the package (default is
the value of default_system_dev_certificate from the input
target-files's META/misc_info.txt, or
"build/target/product/security/testkey" if that value is not
specified).
For incremental OTAs, the default value is based on the source
target-file, not the target build.
-i (--incremental_from) <file>
Generate an incremental OTA using the given target-files zip as
the starting build.
-w (--wipe_user_data)
Generate an OTA package that will wipe the user data partition
when installed.
-n (--no_prereq)
Omit the timestamp prereq check normally included at the top of
the build scripts (used for developer OTA packages which
legitimately need to go back and forth).
-e (--extra_script) <file>
Insert the contents of file at the end of the update script.
-a (--aslr_mode) <on|off>
Specify whether to turn on ASLR for the package (on by default).
-d (--device_type) <type>
Specify mmc or mtd type device. mtd by default
-f (--fota) <fota>
Specify if fota upgrade is used or not. not used by default
"""
import sys
if sys.hexversion < 0x02040000:
print >> sys.stderr, "Python 2.4 or newer is required."
sys.exit(1)
print >> sys.stdout, sys.path[1]
import copy
import errno
import os
import re
import subprocess
import tempfile
import time
import zipfile
try:
from hashlib import sha1 as sha1
except ImportError:
from sha import sha as sha1
import common
import edify_generator
OPTIONS = common.OPTIONS
OPTIONS.package_key = None
OPTIONS.incremental_source = None
OPTIONS.require_verbatim = set()
OPTIONS.prohibit_verbatim = set(("system/build.prop",))
OPTIONS.patch_threshold = 0.95
OPTIONS.wipe_user_data = False
OPTIONS.omit_prereq = False
OPTIONS.extra_script = None
OPTIONS.aslr_mode = True
OPTIONS.worker_threads = 3
OPTIONS.device_type = 'MTD'
OPTIONS.fota = 0
def MostPopularKey(d, default):
"""Given a dict, return the key corresponding to the largest
value. Returns 'default' if the dict is empty."""
x = [(v, k) for (k, v) in d.iteritems()]
if not x: return default
x.sort()
return x[-1][1]
def IsSymlink(info):
"""Return true if the zipfile.ZipInfo object passed in represents a
symlink."""
return (info.external_attr >> 16) == 0120777
def IsRegular(info):
"""Return true if the zipfile.ZipInfo object passed in represents a
symlink."""
return (info.external_attr >> 28) == 010
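# In a zip entry the upper 16 bits of external_attr carry the Unix st_mode, so
# 0120777 is S_IFLNK|0777 (a symlink), and shifting by 28 keeps only the
# file-type nibble, where 010 (S_IFREG >> 12) marks a regular file.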
class Item:
"""Items represent the metadata (user, group, mode) of files and
directories in the system image."""
ITEMS = {}
def __init__(self, name, dir=False):
self.name = name
self.uid = None
self.gid = None
self.mode = None
self.dir = dir
if name:
self.parent = Item.Get(os.path.dirname(name), dir=True)
self.parent.children.append(self)
else:
self.parent = None
if dir:
self.children = []
def Dump(self, indent=0):
if self.uid is not None:
print "%s%s %d %d %o" % (" "*indent, self.name, self.uid, self.gid, self.mode)
else:
print "%s%s %s %s %s" % (" "*indent, self.name, self.uid, self.gid, self.mode)
if self.dir:
print "%s%s" % (" "*indent, self.descendants)
print "%s%s" % (" "*indent, self.best_subtree)
for i in self.children:
i.Dump(indent=indent+1)
@classmethod
def Get(cls, name, dir=False):
if name not in cls.ITEMS:
cls.ITEMS[name] = Item(name, dir=dir)
return cls.ITEMS[name]
@classmethod
def GetMetadata(cls, input_zip):
try:
# See if the target_files contains a record of what the uid,
# gid, and mode is supposed to be.
output = input_zip.read("META/filesystem_config.txt")
except KeyError:
# Run the external 'fs_config' program to determine the desired
# uid, gid, and mode for every Item object. Note this uses the
# one in the client now, which might not be the same as the one
# used when this target_files was built.
p = common.Run(["fs_config"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
suffix = { False: "", True: "/" }
input = "".join(["%s%s\n" % (i.name, suffix[i.dir])
for i in cls.ITEMS.itervalues() if i.name])
output, error = p.communicate(input)
assert not error
for line in output.split("\n"):
if not line: continue
name, uid, gid, mode = line.split()
i = cls.ITEMS.get(name, None)
if i is not None:
i.uid = int(uid)
i.gid = int(gid)
i.mode = int(mode, 8)
if i.dir:
i.children.sort(key=lambda i: i.name)
# set metadata for the files generated by this script.
i = cls.ITEMS.get("system/recovery-from-boot.p", None)
if i: i.uid, i.gid, i.mode = 0, 0, 0644
i = cls.ITEMS.get("system/etc/install-recovery.sh", None)
if i: i.uid, i.gid, i.mode = 0, 0, 0544
def CountChildMetadata(self):
"""Count up the (uid, gid, mode) tuples for all children and
determine the best strategy for using set_perm_recursive and
set_perm to correctly chown/chmod all the files to their desired
values. Recursively calls itself for all descendants.
Returns a dict of {(uid, gid, dmode, fmode): count} counting up
all descendants of this node. (dmode or fmode may be None.) Also
sets the best_subtree of each directory Item to the (uid, gid,
dmode, fmode) tuple that will match the most descendants of that
Item.
"""
assert self.dir
d = self.descendants = {(self.uid, self.gid, self.mode, None): 1}
for i in self.children:
if i.dir:
for k, v in i.CountChildMetadata().iteritems():
d[k] = d.get(k, 0) + v
else:
k = (i.uid, i.gid, None, i.mode)
d[k] = d.get(k, 0) + 1
# Find the (uid, gid, dmode, fmode) tuple that matches the most
# descendants.
# First, find the (uid, gid) pair that matches the most
# descendants.
ug = {}
for (uid, gid, _, _), count in d.iteritems():
ug[(uid, gid)] = ug.get((uid, gid), 0) + count
ug = MostPopularKey(ug, (0, 0))
# Now find the dmode and fmode that match the most descendants
# with that (uid, gid), and choose those.
best_dmode = (0, 0755)
best_fmode = (0, 0644)
for k, count in d.iteritems():
if k[:2] != ug: continue
if k[2] is not None and count >= best_dmode[0]: best_dmode = (count, k[2])
if k[3] is not None and count >= best_fmode[0]: best_fmode = (count, k[3])
self.best_subtree = ug + (best_dmode[1], best_fmode[1])
return d
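# A small worked example of the bookkeeping above (made-up tree): a directory
# owned 0:0 mode 0755 holding ten 0:0 0644 files and two 1000:1000 0600 files
# counts to {(0,0,0755,None): 1, (0,0,None,0644): 10, (1000,1000,None,0600): 2},
# so best_subtree becomes (0, 0, 0755, 0644) and SetPermissions emits one
# set_perm_recursive for the directory plus individual set_perm calls for the
# two 1000:1000 files.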
def SetPermissions(self, script):
"""Append set_perm/set_perm_recursive commands to 'script' to
set all permissions, users, and groups for the tree of files
rooted at 'self'."""
self.CountChildMetadata()
def recurse(item, current):
# current is the (uid, gid, dmode, fmode) tuple that the current
# item (and all its children) have already been set to. We only
# need to issue set_perm/set_perm_recursive commands if we're
# supposed to be something different.
# During recovery, usrdata partition is mounted at /data. For setting
# permissions for files in usrdata we need to make sure the path starts
# with /data
if item.name.startswith('userdata'):
path_on_device = item.name[4:]
else:
path_on_device = item.name
if item.dir:
if current != item.best_subtree:
script.SetPermissionsRecursive("/"+path_on_device, *item.best_subtree)
current = item.best_subtree
if item.uid != current[0] or item.gid != current[1] or \
item.mode != current[2]:
script.SetPermissions("/"+path_on_device, item.uid, item.gid, item.mode)
for i in item.children:
recurse(i, current)
else:
if item.uid != current[0] or item.gid != current[1] or \
item.mode != current[3]:
script.SetPermissions("/"+path_on_device, item.uid, item.gid, item.mode)
recurse(self, (-1, -1, -1, -1))
def CopySystemFiles(input_zip, output_zip=None,
substitute=None):
"""Copies files underneath system/ in the input zip to the output
zip. Populates the Item class with their metadata, and returns a
list of symlinks as well as a list of files that will be retouched.
output_zip may be None, in which case the copy is skipped (but the
other side effects still happen). substitute is an optional dict
of {output filename: contents} to be output instead of certain input
files.
"""
symlinks = []
retouch_files = []
for info in input_zip.infolist():
if info.filename.startswith("SYSTEM/"):
basefilename = info.filename[7:]
if IsSymlink(info):
symlinks.append((input_zip.read(info.filename),
"/system/" + basefilename))
else:
info2 = copy.copy(info)
fn = info2.filename = "system/" + basefilename
if substitute and fn in substitute and substitute[fn] is None:
continue
if output_zip is not None:
if substitute and fn in substitute:
data = substitute[fn]
else:
data = input_zip.read(info.filename)
if info.filename.startswith("SYSTEM/lib/") and IsRegular(info):
retouch_files.append(("/system/" + basefilename,
common.sha1(data).hexdigest()))
if fn.endswith("/"):
#zip does not play nice with empty folders. Create dummy file to make sure folder is saved in archive
info_dummy = copy.copy(info2)
info_dummy.filename = info_dummy.filename + "__emptyfile__"
output_zip.writestr(info_dummy,data)
output_zip.writestr(info2, data)
if fn.endswith("/"):
Item.Get(fn[:-1], dir=True)
else:
Item.Get(fn, dir=False)
symlinks.sort()
return (symlinks, retouch_files)
def CopyUserdataFiles(input_zip, output_zip=None,
substitute=None):
"""Copies files underneath DATA/ in the input zip to the output
zip. Populates the Item class with their metadata, and returns a
list of symlinks as well as a list of files that will be retouched.
output_zip may be None, in which case the copy is skipped (but the
other side effects still happen). substitute is an optional dict
of {output filename: contents} to be output instead of certain input
files.
"""
symlinks = []
retouch_files = []
for info in input_zip.infolist():
if info.filename.startswith("DATA/"):
basefilename = info.filename[5:]
if IsSymlink(info):
symlinks.append((input_zip.read(info.filename),
"/data/" + basefilename))
else:
info2 = copy.copy(info)
fn = info2.filename = "userdata/" + basefilename
if substitute and fn in substitute and substitute[fn] is None:
continue
if output_zip is not None:
if substitute and fn in substitute:
data = substitute[fn]
else:
data = input_zip.read(info.filename)
if info.filename.startswith("DATA/lib/") and IsRegular(info):
retouch_files.append(("/data/" + basefilename,
common.sha1(data).hexdigest()))
if fn.endswith("/"):
#zip does not play nice with empty folders. Create dummy file to make sure folder is saved in archive
info_dummy = copy.copy(info2)
info_dummy.filename = info_dummy.filename + "__emptyfile__"
output_zip.writestr(info_dummy,data)
output_zip.writestr(info2, data)
if fn.endswith("/"):
Item.Get(fn[:-1], dir=True)
else:
Item.Get(fn, dir=False)
symlinks.sort()
return (symlinks, retouch_files)
def SignOutput(temp_zip_name, output_zip_name):
key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
pw = key_passwords[OPTIONS.package_key]
common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
whole_file=True)
def AppendAssertions(script, input_zip):
print "Skip assertions"
def MakeRecoveryPatch(output_zip, recovery_img, boot_img):
"""Generate a binary patch that creates the recovery image starting
with the boot image. (Most of the space in these images is just the
kernel, which is identical for the two, so the resulting patch
should be efficient.) Add it to the output zip, along with a shell
script that is run from init.rc on first boot to actually do the
patching and install the new recovery image.
recovery_img and boot_img should be File objects for the
corresponding images. info should be the dictionary returned by
common.LoadInfoDict() on the input target_files.
Returns an Item for the shell script, which must be made
executable.
"""
d = common.Difference(recovery_img, boot_img)
_, _, patch = d.ComputePatch()
common.ZipWriteStr(output_zip, "recovery/recovery-from-boot.p", patch)
Item.Get("system/recovery-from-boot.p", dir=False)
boot_type, boot_device = common.GetTypeAndDevice("/boot", OPTIONS.info_dict)
recovery_type, recovery_device = common.GetTypeAndDevice("/recovery", OPTIONS.info_dict)
sh = """#!/system/bin/sh
if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
log -t recovery "Installing new recovery image"
applypatch %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p
else
log -t recovery "Recovery image already installed"
fi
""" % { 'boot_size': boot_img.size,
'boot_sha1': boot_img.sha1,
'recovery_size': recovery_img.size,
'recovery_sha1': recovery_img.sha1,
'boot_type': boot_type,
'boot_device': boot_device,
'recovery_type': recovery_type,
'recovery_device': recovery_device,
}
common.ZipWriteStr(output_zip, "recovery/etc/install-recovery.sh", sh)
return Item.Get("system/etc/install-recovery.sh", dir=False)
def WriteFullOTAPackage(input_zip, output_zip, fota):
# TODO: how to determine this? We don't know what version it will
# be installed on top of. For now, we expect the API just won't
# change very often.
script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
metadata = {"post-build": GetBuildProp("ro.build.fingerprint", input_zip),
"pre-device": GetBuildProp("ro.product.device", input_zip),
"post-timestamp": GetBuildProp("ro.build.date.utc", input_zip),
}
device_specific = common.DeviceSpecificParams(
input_zip=input_zip,
input_version=OPTIONS.info_dict["recovery_api_version"],
output_zip=output_zip,
script=script,
input_tmp=OPTIONS.input_tmp,
metadata=metadata,
info_dict=OPTIONS.info_dict,
type=OPTIONS.device_type,
fota=OPTIONS.fota)
device_specific.FullOTA_Assertions()
script.ShowProgress(0.5, 0)
if OPTIONS.wipe_user_data:
script.FormatPartition("/data")
script.FormatPartition("/system")
script.Mount("/system")
# don't want to format the userdata partition
#script.FormatPartition("/data")
script.Mount("/data")
script.UnpackPackageDir("recovery", "/system")
script.UnpackPackageDir("system", "/system")
script.UnpackPackageDir("userdata", "/data")
(symlinks, retouch_files) = CopySystemFiles(input_zip, output_zip)
script.MakeSymlinks(symlinks)
if OPTIONS.aslr_mode:
script.RetouchBinaries(retouch_files)
else:
script.UndoRetouchBinaries(retouch_files)
(userdata_symlinks, userdata_retouch_files) = CopyUserdataFiles(input_zip, output_zip)
script.RetouchBinaries(userdata_retouch_files)
script.MakeSymlinks(userdata_symlinks)
boot_img = common.GetBootableImage("boot.img", "boot.img",
OPTIONS.input_tmp, "BOOT")
recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
OPTIONS.input_tmp, "RECOVERY")
Item.GetMetadata(input_zip)
Item.Get("system").SetPermissions(script)
Item.Get("userdata").SetPermissions(script)
common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict)
common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
#Write dsp diff/mbn images to zip file
dsp_image_names = ["dsp1.diff", "dsp2.diff", "dsp3.diff", "dsp1.mbn", "dsp2.mbn", "dsp3.mbn"]
for dsp_image_name in dsp_image_names:
dsp_image_path = os.path.join(OPTIONS.input_tmp, dsp_image_name)
if os.path.exists(dsp_image_path):
print dsp_image_name + " image found. Writing to zip"
dsp_file = common.File.FromLocalFile(dsp_image_name, dsp_image_path)
common.ZipWriteStr(output_zip, dsp_image_name, dsp_file.data)
#if os.path.exists
script.ShowProgress(0.2, 0)
script.ShowProgress(0.2, 10)
script.WriteRawImage("/boot", "boot.img")
script.ShowProgress(0.1, 0)
device_specific.FullOTA_InstallEnd()
if OPTIONS.extra_script is not None:
script.AppendExtra(OPTIONS.extra_script)
script.UnmountAll()
script.AddToZip(input_zip, output_zip, fota)
WriteMetadata(metadata, output_zip)
def WriteMetadata(metadata, output_zip):
common.ZipWriteStr(output_zip, "META-INF/com/android/metadata",
"".join(["%s=%s\n" % kv
for kv in sorted(metadata.iteritems())]))
def LoadSystemFiles(z):
"""Load all the files from SYSTEM/... in a given target-files
ZipFile, and return a dict of {filename: File object}."""
out = {}
retouch_files = []
for info in z.infolist():
if info.filename.startswith("SYSTEM/") and not IsSymlink(info):
basefilename = info.filename[7:]
fn = "system/" + basefilename
data = z.read(info.filename)
out[fn] = common.File(fn, data)
if info.filename.startswith("SYSTEM/lib/") and IsRegular(info):
retouch_files.append(("/system/" + basefilename,
out[fn].sha1))
return (out, retouch_files)
def GetBuildProp(property, z):
# We don't need the properties from here; return a dummy value.
return "None"
"""Return the fingerprint of the build of a given target-files
ZipFile object."""
bp = z.read("SYSTEM/build.prop")
if not property:
return bp
m = re.search(re.escape(property) + r"=(.*)\n", bp)
if not m:
raise common.ExternalError("couldn't find %s in build.prop" % (property,))
return m.group(1).strip()
def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip, fota):
source_version = OPTIONS.source_info_dict["recovery_api_version"]
target_version = OPTIONS.target_info_dict["recovery_api_version"]
if source_version == 0:
print ("WARNING: generating edify script for a source that "
"can't install it.")
script = edify_generator.EdifyGenerator(source_version, OPTIONS.target_info_dict)
metadata = {"pre-device": GetBuildProp("ro.product.device", source_zip),
"post-timestamp": GetBuildProp("ro.build.date.utc", target_zip),
}
device_specific = common.DeviceSpecificParams(
source_zip=source_zip,
source_version=source_version,
target_zip=target_zip,
target_version=target_version,
output_zip=output_zip,
script=script,
metadata=metadata,
info_dict=OPTIONS.info_dict)
print "Loading target..."
(target_data, target_retouch_files) = LoadSystemFiles(target_zip)
print "Loading source..."
(source_data, source_retouch_files) = LoadSystemFiles(source_zip)
verbatim_targets = []
patch_list = []
diffs = []
largest_source_size = 0
for fn in sorted(target_data.keys()):
tf = target_data[fn]
assert fn == tf.name
sf = source_data.get(fn, None)
if sf is None or fn in OPTIONS.require_verbatim:
# This file should be included verbatim
if fn in OPTIONS.prohibit_verbatim:
raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
print "send", fn, "verbatim"
tf.AddToZip(output_zip)
verbatim_targets.append((fn, tf.size))
elif tf.sha1 != sf.sha1:
# File is different; consider sending as a patch
diffs.append(common.Difference(tf, sf))
else:
# Target file identical to source.
pass
common.ComputeDifferences(diffs)
for diff in diffs:
tf, sf, d = diff.GetPatch()
if d is None or len(d) > tf.size * OPTIONS.patch_threshold:
# patch is almost as big as the file; don't bother patching
tf.AddToZip(output_zip)
verbatim_targets.append((tf.name, tf.size))
else:
common.ZipWriteStr(output_zip, "patch/" + tf.name + ".p", d)
patch_list.append((tf.name, tf, sf, tf.size, common.sha1(d).hexdigest()))
largest_source_size = max(largest_source_size, sf.size)
source_fp = GetBuildProp("ro.build.fingerprint", source_zip)
target_fp = GetBuildProp("ro.build.fingerprint", target_zip)
metadata["pre-build"] = source_fp
metadata["post-build"] = target_fp
script.Mount("/system")
script.AssertSomeFingerprint(source_fp, target_fp)
source_boot = common.GetBootableImage(
"/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT")
target_boot = common.GetBootableImage(
"/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT")
updating_boot = (source_boot.data != target_boot.data)
source_recovery = common.GetBootableImage(
"/tmp/recovery.img", "recovery.img", OPTIONS.source_tmp, "RECOVERY")
target_recovery = common.GetBootableImage(
"/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
updating_recovery = (source_recovery.data != target_recovery.data)
# Here's how we divide up the progress bar:
# 0.1 for verifying the start state (PatchCheck calls)
# 0.8 for applying patches (ApplyPatch calls)
# 0.1 for unpacking verbatim files, symlinking, and doing the
# device-specific commands.
AppendAssertions(script, target_zip)
device_specific.IncrementalOTA_Assertions()
script.Print("Verifying current system...")
script.ShowProgress(0.1, 0)
total_verify_size = float(sum([i[2].size for i in patch_list]) + 1)
if updating_boot:
total_verify_size += source_boot.size
so_far = 0
for fn, tf, sf, size, patch_sha in patch_list:
script.PatchCheck("/"+fn, tf.sha1, sf.sha1)
so_far += sf.size
script.SetProgress(so_far / total_verify_size)
if updating_boot:
d = common.Difference(target_boot, source_boot)
_, _, d = d.ComputePatch()
print "boot target: %d source: %d diff: %d" % (
target_boot.size, source_boot.size, len(d))
common.ZipWriteStr(output_zip, "patch/boot.img.p", d)
boot_type, boot_device = common.GetTypeAndDevice("/boot", OPTIONS.info_dict)
script.PatchCheck("%s:%s:%d:%s:%d:%s" %
(boot_type, boot_device,
source_boot.size, source_boot.sha1,
target_boot.size, target_boot.sha1))
so_far += source_boot.size
script.SetProgress(so_far / total_verify_size)
if patch_list or updating_recovery or updating_boot:
script.CacheFreeSpaceCheck(largest_source_size)
device_specific.IncrementalOTA_VerifyEnd()
script.Comment("---- start making changes here ----")
if OPTIONS.wipe_user_data:
script.Print("Erasing user data...")
script.FormatPartition("/data")
script.Print("Removing unneeded files...")
script.DeleteFiles(["/"+i[0] for i in verbatim_targets] +
["/"+i for i in sorted(source_data)
if i not in target_data] +
["/system/recovery.img"])
script.ShowProgress(0.8, 0)
total_patch_size = float(sum([i[1].size for i in patch_list]) + 1)
if updating_boot:
total_patch_size += target_boot.size
so_far = 0
script.Print("Patching system files...")
deferred_patch_list = []
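# Apply every patch except system/build.prop, which is deferred until
# the very end of the script (see "Patching remaining system files"
# below).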
for item in patch_list:
fn, tf, sf, size, _ = item
if tf.name == "system/build.prop":
deferred_patch_list.append(item)
continue
script.ApplyPatch("/"+fn, "-", tf.size, tf.sha1, sf.sha1, "patch/"+fn+".p")
so_far += tf.size
script.SetProgress(so_far / total_patch_size)
if updating_boot:
# Produce the boot image by applying a patch to the current
# contents of the boot partition, and write it back to the
# partition.
script.Print("Patching boot image...")
script.ApplyPatch("%s:%s:%d:%s:%d:%s"
% (boot_type, boot_device,
source_boot.size, source_boot.sha1,
target_boot.size, target_boot.sha1),
"-",
target_boot.size, target_boot.sha1,
source_boot.sha1, "patch/boot.img.p")
so_far += target_boot.size
script.SetProgress(so_far / total_patch_size)
print "boot image changed; including."
else:
print "boot image unchanged; skipping."
if updating_recovery:
# Is it better to generate recovery as a patch from the current
# boot image, or from the previous recovery image? For large
# updates with significant kernel changes, probably the former.
# For small updates where the kernel hasn't changed, almost
# certainly the latter. We pick the first option. Future
# complicated schemes may let us effectively use both.
#
# A wacky possibility: as long as there is room in the boot
# partition, include the binaries and image files from recovery in
# the boot image (though not in the ramdisk) so they can be used
# as fodder for constructing the recovery image.
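# MakeRecoveryPatch stages a boot-to-recovery patch and its install
# script under recovery/ in the package; the stale on-device copies
# are deleted here and fresh ones are unpacked along with /system
# below.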
MakeRecoveryPatch(output_zip, target_recovery, target_boot)
script.DeleteFiles(["/system/recovery-from-boot.p",
"/system/etc/install-recovery.sh"])
print "recovery image changed; including as patch from boot."
else:
print "recovery image unchanged; skipping."
script.ShowProgress(0.1, 10)
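# Gather symlink and permission data from both builds. Permissions
# are staged in a temporary script and appended only after the
# symlinks have been recreated below.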
(target_symlinks, target_retouch_dummies) = CopySystemFiles(target_zip, None)
target_symlinks_d = dict([(i[1], i[0]) for i in target_symlinks])
temp_script = script.MakeTemporary()
Item.GetMetadata(target_zip)
Item.Get("system").SetPermissions(temp_script)
# Note that this call will mess up the tree of Items, so make sure
# we're done with it.
(source_symlinks, source_retouch_dummies) = CopySystemFiles(source_zip, None)
source_symlinks_d = dict([(i[1], i[0]) for i in source_symlinks])
# Delete all the symlinks in source that aren't in target. This
# needs to happen before verbatim files are unpacked, in case a
# symlink in the source is replaced by a real file in the target.
to_delete = []
for dest, link in source_symlinks:
if link not in target_symlinks_d:
to_delete.append(link)
script.DeleteFiles(to_delete)
if verbatim_targets:
script.Print("Unpacking new files...")
script.UnpackPackageDir("system", "/system")
if updating_recovery:
script.Print("Unpacking new recovery...")
script.UnpackPackageDir("recovery", "/system")
script.Print("Symlinks and permissions...")
# Create every symlink that doesn't already exist or that points
# somewhere other than where we want it. Delete each such symlink
# before creating it, since the 'symlink' command won't overwrite.
to_create = []
for dest, link in target_symlinks:
if link in source_symlinks_d:
if dest != source_symlinks_d[link]:
to_create.append((dest, link))
else:
to_create.append((dest, link))
script.DeleteFiles([i[1] for i in to_create])
script.MakeSymlinks(to_create)
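# Apply or undo address retouching on the marked binaries, according
# to the requested ASLR mode.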
if OPTIONS.aslr_mode:
script.RetouchBinaries(target_retouch_files)
else:
script.UndoRetouchBinaries(target_retouch_files)
# Now that the symlinks are created, we can set all the
# permissions.
script.AppendScript(temp_script)
# Do device-specific installation (eg, write radio image).
device_specific.IncrementalOTA_InstallEnd()
if OPTIONS.extra_script is not None:
script.AppendExtra(OPTIONS.extra_script)
# Patch the build.prop file last, so that if something fails but the
# device can still come up, it still reports the old build and will
# be offered the OTA package again to retry.
script.Print("Patching remaining system files...")
for item in deferred_patch_list:
fn, tf, sf, size, _ = item
script.ApplyPatch("/"+fn, "-", tf.size, tf.sha1, sf.sha1, "patch/"+fn+".p")
script.SetPermissions("/system/build.prop", 0, 0, 0644)
script.AddToZip(target_zip, output_zip, fota)
WriteMetadata(metadata, output_zip)
def main(argv):
def option_handler(o, a):
if o in ("-b", "--board_config"):
pass # deprecated
elif o in ("-k", "--package_key"):
OPTIONS.package_key = a
elif o in ("-i", "--incremental_from"):
OPTIONS.incremental_source = a
elif o in ("-w", "--wipe_user_data"):
OPTIONS.wipe_user_data = True
elif o in ("-n", "--no_prereq"):
OPTIONS.omit_prereq = True
elif o in ("-e", "--extra_script"):
OPTIONS.extra_script = a
elif o in ("-a", "--aslr_mode"):
if a in ("on", "On", "true", "True", "yes", "Yes"):
OPTIONS.aslr_mode = True
else:
OPTIONS.aslr_mode = False
elif o in ("--worker_threads"):
OPTIONS.worker_threads = int(a)
elif o in ("-d", "--device_type"):
OPTIONS.device_type = a
elif o in ("-f", "--fota"):
OPTIONS.fota = a
else:
return False
return True
args = common.ParseOptions(argv, __doc__,
extra_opts="b:k:i:d:wne:a:d:f:",
extra_long_opts=["board_config=",
"package_key=",
"incremental_from=",
"wipe_user_data",
"no_prereq",
"extra_script=",
"worker_threads=",
"aslr_mode=",
"device_type=",
"fota="],
extra_option_handler=option_handler)
if len(args) != 2:
common.Usage(__doc__)
sys.exit(1)
if OPTIONS.extra_script is not None:
OPTIONS.extra_script = open(OPTIONS.extra_script).read()
print "unzipping target target-files..."
OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
OPTIONS.target_tmp = OPTIONS.input_tmp
OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.device_type)
if OPTIONS.verbose:
print "--- target info ---"
common.DumpInfoDict(OPTIONS.info_dict)
print "using device-specific extensions in", OPTIONS.device_specific
temp_zip_file = open("update.zip","w",0)
output_zip = zipfile.ZipFile(temp_zip_file, "w",
compression=zipfile.ZIP_DEFLATED)
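# With no source package this is a full OTA; otherwise unpack the
# source build and generate an incremental package against it.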
if OPTIONS.incremental_source is None:
WriteFullOTAPackage(input_zip, output_zip, OPTIONS.fota)
if OPTIONS.package_key is None:
OPTIONS.package_key = OPTIONS.info_dict.get(
"default_system_dev_certificate",
"build/target/product/security/testkey")
else:
print "unzipping source target-files..."
OPTIONS.source_tmp, source_zip = common.UnzipTemp(OPTIONS.incremental_source)
OPTIONS.target_info_dict = OPTIONS.info_dict
OPTIONS.source_info_dict = common.LoadInfoDict(source_zip, OPTIONS.device_type)
if OPTIONS.package_key is None:
OPTIONS.package_key = OPTIONS.source_info_dict.get(
"default_system_dev_certificate",
"build/target/product/security/testkey")
if OPTIONS.verbose:
print "--- source info ---"
common.DumpInfoDict(OPTIONS.source_info_dict)
WriteIncrementalOTAPackage(input_zip, source_zip, output_zip, OPTIONS.fota)
output_zip.close()
temp_zip_file.close()
common.Cleanup()
print "done."
if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError, e:
print
print " ERROR: %s" % (e,)
print
sys.exit(1)