2017-11-09 14:12:03 -08:00
|
|
|
|
#!/usr/bin/python
|
|
|
|
|
|
# encoding: utf-8
|
|
|
|
|
|
#
|
|
|
|
|
|
# Copyright 2017 Greg Neagle.
|
|
|
|
|
|
#
|
|
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
|
|
#
|
|
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
|
#
|
|
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
#
|
|
|
|
|
|
# Thanks to Tim Sutton for ideas, suggestions, and sample code.
|
|
|
|
|
|
#
|
|
|
|
|
|
|
2020-06-23 14:09:31 +02:00
|
|
|
|
"""installinstallmacos.py
|
2017-11-09 14:12:03 -08:00
|
|
|
|
A tool to download the parts for an Install macOS app from Apple's
|
|
|
|
|
|
softwareupdate servers and install a functioning Install macOS app onto an
|
2020-06-23 14:09:31 +02:00
|
|
|
|
empty disk image"""
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
2019-05-02 20:26:00 -07:00
|
|
|
|
# Python 3 compatibility shims
|
2020-06-23 14:09:31 +02:00
|
|
|
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
|
|
|
|
|
import argparse
|
2018-08-07 13:50:38 -07:00
|
|
|
|
import gzip
|
2017-11-09 14:12:03 -08:00
|
|
|
|
import os
|
|
|
|
|
|
import plistlib
|
2020-06-23 14:58:45 +02:00
|
|
|
|
import shutil
|
2017-11-09 14:12:03 -08:00
|
|
|
|
import subprocess
|
|
|
|
|
|
import sys
|
2018-08-14 15:26:17 -07:00
|
|
|
|
import xattr
|
2020-06-23 14:09:31 +02:00
|
|
|
|
|
2019-05-02 20:26:00 -07:00
|
|
|
|
try:
|
|
|
|
|
|
    # python 3
|
|
|
|
|
|
from urllib.parse import urlsplit
|
|
|
|
|
|
except ImportError:
|
|
|
|
|
|
    # python 2
|
|
|
|
|
|
from urlparse import urlsplit
|
2017-11-09 14:12:03 -08:00
|
|
|
|
from xml.dom import minidom
|
|
|
|
|
|
from xml.parsers.expat import ExpatError
|
2018-09-23 14:17:45 +02:00
|
|
|
|
from distutils.version import LooseVersion
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
|
|
|
|
|
|
2018-10-02 18:58:38 +02:00
|
|
|
|
# Map of Darwin major version -> Apple softwareupdate catalog URL for the
# matching macOS release (17 = 10.13, 18 = 10.14, 19 = 10.15); looked up by
# get_default_catalog() using os.uname().
DEFAULT_SUCATALOGS = {
    "17": "https://swscan.apple.com/content/catalogs/others/"
    "index-10.13-10.12-10.11-10.10-10.9"
    "-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog",
    "18": "https://swscan.apple.com/content/catalogs/others/"
    "index-10.14-10.13-10.12-10.11-10.10-10.9"
    "-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog",
    "19": "https://swscan.apple.com/content/catalogs/others/"
    "index-10.15-10.14-10.13-10.12-10.11-10.10-10.9"
    "-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog",
}

# Apple-private plist mapping seeding program names (e.g. "DeveloperSeed")
# to seed catalog URLs; read by the get_seed* helpers below.
SEED_CATALOGS_PLIST = (
    "/System/Library/PrivateFrameworks/Seeding.framework/Versions/Current/"
    "Resources/SeedCatalogs.plist"
)
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-09-23 00:45:54 +02:00
|
|
|
|
def get_board_id():
    """Gets the local system board ID"""
    ioreg_cmd = ["/usr/sbin/ioreg", "-p", "IODeviceTree", "-r", "-n", "/", "-d", "1"]
    try:
        ioreg_output = subprocess.check_output(ioreg_cmd).splitlines()
        for line in ioreg_output:
            line_decoded = line.decode("utf8")
            if "board-id" in line_decoded:
                # ioreg prints something like: "board-id" = <"Mac-XXXX">
                board_id = line_decoded.split("<")[-1]
                # NOTE(review): after the split above the '<' is gone, so
                # find('<"') returns -1 and -1 + 2 == 1 happens to skip the
                # leading double quote; the slice ends just before '">'.
                # Fragile but works for the format above.
                board_id = board_id[board_id.find('<"') + 2 : board_id.find('">')]
                return board_id
        # implicitly returns None when no board-id line is found
    except subprocess.CalledProcessError as err:
        raise ReplicationError(err)
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-10-30 19:07:57 +01:00
|
|
|
|
def is_a_vm():
    """Determines if the script is being run in a virtual machine"""
    sysctl_cmd = ["/usr/sbin/sysctl", "-n", "machdep.cpu.features"]
    try:
        raw_output = subprocess.check_output(sysctl_cmd)
    except subprocess.CalledProcessError as err:
        raise ReplicationError(err)
    # hypervisors advertise the "VMM" CPU feature flag
    feature_flags = raw_output.decode("utf8").split(" ")
    return "VMM" in feature_flags
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-09-23 00:45:54 +02:00
|
|
|
|
def get_hw_model():
    """Gets the local system ModelIdentifier"""
    try:
        raw_output = subprocess.check_output(["/usr/sbin/sysctl", "-n", "hw.model"])
    except subprocess.CalledProcessError as err:
        raise ReplicationError(err)
    # note: the trailing newline from sysctl is left in place, as before
    return raw_output.decode("utf8")
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-08-06 14:04:45 +02:00
|
|
|
|
def get_bridge_id():
    """Gets the local system DeviceID for T2 Macs - note only works on 10.13+"""
    if not os.path.exists("/usr/libexec/remotectl"):
        # remotectl not present: implicitly return None, as before
        return
    remotectl_cmd = [
        "/usr/libexec/remotectl",
        "get-property",
        "localbridge",
        "HWModel",
    ]
    try:
        raw_output = subprocess.check_output(remotectl_cmd, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        return None
    # output's last space-separated token, up to the first newline
    return raw_output.decode("utf8").split(" ")[-1].split("\n")[0]
|
2020-08-06 14:04:45 +02:00
|
|
|
|
|
|
|
|
|
|
|
2018-09-23 17:22:01 +02:00
|
|
|
|
def get_current_build_info():
    """Gets the local system build"""
    build_info = []
    try:
        sw_vers_lines = subprocess.check_output(["/usr/bin/sw_vers"]).splitlines()
    except subprocess.CalledProcessError as err:
        raise ReplicationError(err)
    for raw_line in sw_vers_lines:
        text = raw_line.decode("utf8")
        # sw_vers prints "Key:\tValue" lines; version goes first, build second
        if "ProductVersion" in text:
            build_info.insert(0, text.split("\t")[-1])
        if "BuildVersion" in text:
            build_info.insert(1, text.split("\t")[-1])
    return build_info
|
2018-08-14 15:26:17 -07:00
|
|
|
|
|
2018-08-07 13:50:38 -07:00
|
|
|
|
|
2019-05-02 20:26:00 -07:00
|
|
|
|
def get_input(prompt=None):
    """Python 2 and 3 wrapper for raw_input/input"""
    try:
        # Python 2: raw_input returns the typed line without evaluating it
        return raw_input(prompt)
    except NameError:
        # raw_input doesn't exist in Python 3
        return input(prompt)
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-07-03 10:02:31 -07:00
|
|
|
|
def read_plist(filepath):
    """Wrapper for the differences between Python 2 and Python 3's plistlib"""
    if hasattr(plistlib, "load"):
        # Python 3 API
        with open(filepath, "rb") as fileobj:
            return plistlib.load(fileobj)
    # Python 2 API
    return plistlib.readPlist(filepath)
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-07-03 10:02:31 -07:00
|
|
|
|
def read_plist_from_string(bytestring):
    """Wrapper for the differences between Python 2 and Python 3's plistlib"""
    if hasattr(plistlib, "loads"):
        # Python 3 API
        return plistlib.loads(bytestring)
    # Python 2 API
    return plistlib.readPlistFromString(bytestring)  # pylint: disable=no-member
|
2019-05-12 12:11:20 -07:00
|
|
|
|
|
|
|
|
|
|
|
2020-05-06 17:15:41 +02:00
|
|
|
|
def write_plist(plist_object, filepath):
    """Wrapper for the differences between Python 2 and Python 3's plistlib"""
    if hasattr(plistlib, "dump"):
        # Python 3 API
        with open(filepath, "wb") as fileobj:
            return plistlib.dump(plist_object, fileobj)
    # Python 2 API
    return plistlib.writePlist(plist_object, filepath)
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-09-23 16:02:28 +02:00
|
|
|
|
def get_seeding_program(sucatalog_url):
    """Returns a seeding program name based on the sucatalog_url"""
    try:
        catalogs = read_plist(SEED_CATALOGS_PLIST)
        # reverse lookup: find the program whose catalog URL matches
        for program_name, catalog_url in catalogs.items():
            if catalog_url == sucatalog_url:
                return program_name
        return ""
    except (OSError, IOError, ExpatError, AttributeError, KeyError) as err:
        print(err, file=sys.stderr)
        return ""
|
2018-09-23 16:02:28 +02:00
|
|
|
|
|
|
|
|
|
|
|
2020-06-23 14:09:31 +02:00
|
|
|
|
def get_seed_catalog(seedname="DeveloperSeed"):
    """Returns the developer seed sucatalog"""
    try:
        # look the program name up in Apple's SeedCatalogs plist
        return read_plist(SEED_CATALOGS_PLIST).get(seedname)
    except (OSError, IOError, ExpatError, AttributeError, KeyError) as err:
        print(err, file=sys.stderr)
        return ""
|
2018-10-02 18:58:38 +02:00
|
|
|
|
|
|
|
|
|
|
|
2019-02-19 11:34:51 -08:00
|
|
|
|
def get_seeding_programs():
    """Returns the list of seeding program names.

    On any error reading the SeedCatalogs plist, prints the error and returns
    an empty list."""
    try:
        seed_catalogs = read_plist(SEED_CATALOGS_PLIST)
        return list(seed_catalogs.keys())
    except (OSError, IOError, ExpatError, AttributeError, KeyError) as err:
        print(err, file=sys.stderr)
        # Fix: return an empty list (the original returned "") so the error
        # path matches the documented list return type; both values are falsy
        # and iterable-empty, so existing callers are unaffected.
        return []
|
2019-02-19 11:34:51 -08:00
|
|
|
|
|
|
|
|
|
|
|
2018-10-02 18:58:38 +02:00
|
|
|
|
def get_default_catalog():
    """Returns the default softwareupdate catalog for the current OS"""
    # os.uname()[2] is the Darwin kernel release, e.g. "19.6.0"
    release = os.uname()[2]
    darwin_major = release.split(".")[0]
    return DEFAULT_SUCATALOGS.get(darwin_major)
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def make_sparse_image(volume_name, output_path):
    """Make a sparse disk image we can install a product to"""
    hdiutil_cmd = [
        "/usr/bin/hdiutil",
        "create",
        "-size",
        "16g",
        "-fs",
        "HFS+",
        "-volname",
        volume_name,
        "-type",
        "SPARSE",
        "-plist",
        output_path,
    ]
    try:
        output = subprocess.check_output(hdiutil_cmd)
    except subprocess.CalledProcessError as err:
        print(err, file=sys.stderr)
        exit(-1)
    # hdiutil -plist output is a plist whose first element is the image path
    try:
        return read_plist_from_string(output)[0]
    except IndexError:
        print("Unexpected output from hdiutil: %s" % output, file=sys.stderr)
        exit(-1)
    except ExpatError as err:
        print("Malformed output from hdiutil: %s" % output, file=sys.stderr)
        print(err, file=sys.stderr)
        exit(-1)
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-09-23 15:38:48 +02:00
|
|
|
|
def make_compressed_dmg(app_path, diskimagepath, volume_name):
    """Returns path to newly-created compressed r/o disk image containing
    Install macOS.app"""
    # NOTE(review): volume_name is accepted for interface compatibility but
    # is not used when building the hdiutil command.
    print(
        "Making read-only compressed disk image containing %s..."
        % os.path.basename(app_path)
    )
    hdiutil_cmd = [
        "/usr/bin/hdiutil",
        "create",
        "-fs",
        "HFS+",
        "-srcfolder",
        app_path,
        diskimagepath,
    ]
    try:
        subprocess.check_call(hdiutil_cmd)
    except subprocess.CalledProcessError as err:
        print(err, file=sys.stderr)
    else:
        print("Disk image created at: %s" % diskimagepath)
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def mountdmg(dmgpath):
    """
    Attempts to mount the dmg at dmgpath and returns first mountpoint.
    Returns None if mounting fails or no mount point was reported.
    """
    mountpoints = []
    dmgname = os.path.basename(dmgpath)
    cmd = [
        "/usr/bin/hdiutil",
        "attach",
        dmgpath,
        "-mountRandom",
        "/tmp",
        "-nobrowse",
        "-plist",
        "-owners",
        "on",
    ]
    proc = subprocess.Popen(
        cmd, bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    (pliststr, err) = proc.communicate()
    if proc.returncode:
        print('Error: "%s" while mounting %s.' % (err, dmgname), file=sys.stderr)
        return None
    if pliststr:
        plist = read_plist_from_string(pliststr)
        for entity in plist["system-entities"]:
            if "mount-point" in entity:
                mountpoints.append(entity["mount-point"])
    # Fix: the original unconditionally returned mountpoints[0], which raised
    # IndexError when hdiutil succeeded but reported no mount points (or gave
    # empty output). Return None instead, matching the error-path convention.
    if not mountpoints:
        print("No mount points found while mounting %s." % dmgname, file=sys.stderr)
        return None
    return mountpoints[0]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def unmountdmg(mountpoint):
    """
    Unmounts the dmg at mountpoint
    """
    detach_cmd = ["/usr/bin/hdiutil", "detach", mountpoint]
    proc = subprocess.Popen(
        detach_cmd,
        bufsize=-1,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    (dummy_output, err) = proc.communicate()
    if proc.returncode:
        print("Polite unmount failed: %s" % err, file=sys.stderr)
        print("Attempting to force unmount %s" % mountpoint, file=sys.stderr)
        # try forcing the unmount
        retcode = subprocess.call(detach_cmd + ["-force"])
        if retcode:
            print("Failed to unmount %s" % mountpoint, file=sys.stderr)
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def install_product(dist_path, target_vol):
    """Install a product to a target volume.
    Returns a boolean to indicate success or failure."""
    # set CM_BUILD env var to make Installer bypass eligibility checks
    # when installing packages (for machine-specific OS builds)
    os.environ["CM_BUILD"] = "CM_BUILD"
    installer_cmd = ["/usr/sbin/installer", "-pkg", dist_path, "-target", target_vol]
    try:
        subprocess.check_call(installer_cmd)
    except subprocess.CalledProcessError as err:
        print(err, file=sys.stderr)
        return False
    # Apple postinstall script bug ends up copying files to a path like
    # /tmp/dmg.T9ak1HApplications
    stray_path = target_vol + "Applications"
    if os.path.exists(stray_path):
        print("*********************************************************")
        print("*** Working around a very dumb Apple bug in a package ***")
        print("*** postinstall script that fails to correctly target ***")
        print("*** the Install macOS.app when installed to a volume ***")
        print("*** other than the current boot volume. ***")
        print("*** Please file feedback with Apple! ***")
        print("*********************************************************")
        subprocess.check_call(
            ["/usr/bin/ditto", stray_path, os.path.join(target_vol, "Applications")]
        )
        subprocess.check_call(["/bin/rm", "-r", stray_path])
    return True
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
2020-06-23 14:09:31 +02:00
|
|
|
|
|
2017-11-09 14:12:03 -08:00
|
|
|
|
class ReplicationError(Exception):
    """A custom error when replication fails"""
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-06-23 14:09:31 +02:00
|
|
|
|
def replicate_url(
    full_url,
    root_dir="/tmp",
    show_progress=False,
    ignore_cache=False,
    attempt_resume=False,
):
    """Downloads a URL and stores it in the same relative path on our
    filesystem. Returns a path to the replicated file."""
    url_path = urlsplit(full_url)[2]
    relative_url = os.path.normpath(url_path.lstrip("/"))
    local_file_path = os.path.join(root_dir, relative_url)
    # -f: fail on HTTP errors; -L: follow redirects; -s: silent
    options = "-fL" if show_progress else "-sfL"
    curl_cmd = ["/usr/bin/curl", options, "--create-dirs", "-o", local_file_path]
    if not full_url.endswith(".gz"):
        # stupid hack for stupid Apple behavior where it sometimes returns
        # compressed files even when not asked for
        curl_cmd.append("--compressed")
    if not ignore_cache and os.path.exists(local_file_path):
        # only download if the remote copy is newer than our cached one
        curl_cmd.extend(["-z", local_file_path])
    if attempt_resume:
        curl_cmd.extend(["-C", "-"])
    curl_cmd.append(full_url)
    try:
        subprocess.check_call(curl_cmd)
    except subprocess.CalledProcessError as err:
        raise ReplicationError(err)
    return local_file_path
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_server_metadata(filename):
    """Parses a softwareupdate server metadata file, looking for information
    of interest.
    Returns a dictionary containing title and version (empty dict on read
    error)."""
    try:
        md_plist = read_plist(filename)
    except (OSError, IOError, ExpatError) as err:
        print("Error reading %s: %s" % (filename, err), file=sys.stderr)
        return {}
    vers = md_plist.get("CFBundleShortVersionString", "")
    title = ""
    localization = md_plist.get("localization", {})
    preferred_localization = localization.get("English") or localization.get("en")
    if preferred_localization:
        title = preferred_localization.get("title", "")
    return {"title": title, "version": vers}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_server_metadata(catalog, product_key, workdir, ignore_cache=False):
    """Replicate ServerMetaData"""
    try:
        url = catalog["Products"][product_key]["ServerMetadataURL"]
    except KeyError:
        # this product has no ServerMetadataURL entry
        return None
    try:
        return replicate_url(url, root_dir=workdir, ignore_cache=ignore_cache)
    except ReplicationError as err:
        print("Could not replicate %s: %s" % (url, err), file=sys.stderr)
        return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_dist(filename):
    """Parses a softwareupdate dist file, returning a dict of info of
    interest"""
    dist_info = {}
    try:
        dom = minidom.parse(filename)
    except ExpatError:
        print("Invalid XML in %s" % filename, file=sys.stderr)
        return dist_info
    except IOError as err:
        print("Error reading %s: %s" % (filename, err), file=sys.stderr)
        return dist_info

    title_elements = dom.getElementsByTagName("title")
    if title_elements:
        dist_info["title_from_dist"] = title_elements[0].firstChild.wholeText

    auxinfo_elements = dom.getElementsByTagName("auxinfo")
    if not auxinfo_elements:
        return dist_info
    auxinfo = auxinfo_elements[0]
    # auxinfo keys may appear directly, or nested within a 'dict' element
    dict_nodes = [
        child
        for child in auxinfo.childNodes
        if child.nodeType == child.ELEMENT_NODE and child.tagName == "dict"
    ]
    children = dict_nodes[0].childNodes if dict_nodes else auxinfo.childNodes
    # pair up alternating <key>/<string> elements
    pending_key = None
    pending_value = None
    for node in children:
        if node.nodeType != node.ELEMENT_NODE:
            continue
        if node.tagName == "key":
            pending_key = node.firstChild.wholeText
        elif node.tagName == "string":
            pending_value = node.firstChild.wholeText
        if pending_key and pending_value:
            dist_info[pending_key] = pending_value
            pending_key = None
            pending_value = None
    return dist_info
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-09-23 00:45:54 +02:00
|
|
|
|
def get_board_ids(filename):
    """Parses a softwareupdate dist file, returning the supported Board IDs
    as the raw JavaScript-array text from the dist's script (a string)."""
    supported_board_ids = ""
    # Fix: open in binary mode and decode explicitly. The original opened in
    # text mode and then called line.decode("utf8"), which raises
    # AttributeError on Python 3 (str has no .decode).
    with open(filename, "rb") as search:
        for raw_line in search:
            line = raw_line.decode("utf8").rstrip()  # remove '\n' at end of line
            # dist files for macOS 10.* list boardIDs whereas dist files for
            # macOS 11.* list supportedBoardIDs
            if "boardIds" in line:
                supported_board_ids = line.lstrip("var boardIDs = ")
            elif "supportedBoardIDs" in line:
                supported_board_ids = line.lstrip("var supportedBoardIDs = ")
    return supported_board_ids
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-08-06 14:04:45 +02:00
|
|
|
|
def get_device_ids(filename):
    """Parses a softwareupdate dist file, returning a list of supported
    Device IDs. These are used for identifying T2 chips in the dist files of
    macOS 11.* - not checked in older builds. Returns the raw
    JavaScript-array text as a string."""
    supported_device_ids = ""
    # Fix: open in binary mode and decode explicitly. The original opened in
    # text mode and then called line.decode("utf8"), which raises
    # AttributeError on Python 3 (str has no .decode).
    with open(filename, "rb") as search:
        for raw_line in search:
            line = raw_line.decode("utf8").rstrip()  # remove '\n' at end of line
            if "supportedDeviceIDs" in line:
                supported_device_ids = line.lstrip("var supportedDeviceIDs = ")
    return supported_device_ids
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-09-23 00:45:54 +02:00
|
|
|
|
def get_unsupported_models(filename):
    """Parses a softwareupdate dist file, returning a list of non-supported
    ModelIdentifiers. This is not used in macOS 11.*. Returns the raw
    JavaScript-array text as a string."""
    unsupported_models = ""
    # Fix: open in binary mode and decode explicitly. The original opened in
    # text mode and then called line.decode("utf8"), which raises
    # AttributeError on Python 3 (str has no .decode).
    with open(filename, "rb") as search:
        for raw_line in search:
            line = raw_line.decode("utf8").rstrip()  # remove '\n' at end of line
            if "nonSupportedModels" in line:
                unsupported_models = line.lstrip("var nonSupportedModels = ")
    return unsupported_models
|
2019-04-16 22:26:33 +02:00
|
|
|
|
|
2018-09-23 00:45:54 +02:00
|
|
|
|
|
2017-11-09 14:12:03 -08:00
|
|
|
|
def download_and_parse_sucatalog(sucatalog, workdir, ignore_cache=False):
    """Downloads and returns a parsed softwareupdate catalog"""
    try:
        localcatalogpath = replicate_url(
            sucatalog, root_dir=workdir, ignore_cache=ignore_cache
        )
    except ReplicationError as err:
        print("Could not replicate %s: %s" % (sucatalog, err), file=sys.stderr)
        exit(-1)
    if os.path.splitext(localcatalogpath)[1] == ".gz":
        # catalog is gzip-compressed: decompress, then parse from bytes
        with gzip.open(localcatalogpath) as the_file:
            content = the_file.read()
        try:
            return read_plist_from_string(content)
        except ExpatError as err:
            print("Error reading %s: %s" % (localcatalogpath, err), file=sys.stderr)
            exit(-1)
    try:
        return read_plist(localcatalogpath)
    except (OSError, IOError, ExpatError) as err:
        print("Error reading %s: %s" % (localcatalogpath, err), file=sys.stderr)
        exit(-1)
|
2017-11-09 14:12:03 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def find_mac_os_installers(catalog):
    """Return a list of product identifiers for what appear to be macOS
    installers"""
    installers = []
    for product_key, product in catalog.get("Products", {}).items():
        try:
            # products with a non-empty InstallAssistantPackageIdentifiers
            # entry are macOS installers
            if product["ExtendedMetaInfo"]["InstallAssistantPackageIdentifiers"]:
                installers.append(product_key)
        except KeyError:
            continue
    return installers
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-08-06 14:04:45 +02:00
|
|
|
|
def os_installer_product_info(catalog, workdir, ignore_cache=False):
    """Returns a dict of info about products that look like macOS installers.

    Keys are product identifiers; each value holds title, version, PostDate
    and, when the dist file could be fetched, DistributionPath,
    UnsupportedModels, BoardIDs, DeviceIDs plus whatever parse_dist()
    extracted."""
    product_info = {}
    installer_products = find_mac_os_installers(catalog)
    for product_key in installer_products:
        product_info[product_key] = {}
        # get the localized title (e.g. "macOS Catalina Beta") and version
        # from ServerMetadataURL for macOS 10.*
        # For macOS 11.* we get these directly from the dist file
        filename = get_server_metadata(catalog, product_key, workdir)
        if filename:
            product_info[product_key] = parse_server_metadata(filename)
        else:
            product_info[product_key]["title"] = None
            product_info[product_key]["version"] = None

        product = catalog["Products"][product_key]
        product_info[product_key]["PostDate"] = product["PostDate"]
        distributions = product["Distributions"]
        dist_url = distributions.get("English") or distributions.get("en")
        try:
            dist_path = replicate_url(
                dist_url, root_dir=workdir, ignore_cache=ignore_cache
            )
        except ReplicationError as err:
            print("Could not replicate %s: %s" % (dist_url, err), file=sys.stderr)
        else:
            dist_info = parse_dist(dist_path)
            product_info[product_key]["DistributionPath"] = dist_path
            product_info[product_key]["UnsupportedModels"] = get_unsupported_models(
                dist_path
            )
            product_info[product_key]["BoardIDs"] = get_board_ids(dist_path)
            product_info[product_key]["DeviceIDs"] = get_device_ids(dist_path)
            product_info[product_key].update(dist_info)
            # Fix: use .get() here. parse_server_metadata() returns {} on a
            # read error, so the "title"/"version" keys can be absent and the
            # original plain indexing raised KeyError in that case.
            if not product_info[product_key].get("title"):
                product_info[product_key]["title"] = dist_info.get("title_from_dist")
            if not product_info[product_key].get("version"):
                product_info[product_key]["version"] = dist_info.get("VERSION")

    return product_info
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-07-12 18:42:44 +02:00
|
|
|
|
def get_latest_version(current_item, latest_item):
    """Return whichever of the two version strings is the higher one.

    Comparison is numeric-aware via LooseVersion (e.g. "10.10" > "10.9"
    as versions, even though it sorts lower as a plain string).
    """
    current_is_newer = LooseVersion(current_item) > LooseVersion(latest_item)
    return current_item if current_is_newer else latest_item
|
2018-09-23 15:38:48 +02:00
|
|
|
|
|
|
|
|
|
|
|
2017-11-09 14:12:03 -08:00
|
|
|
|
def replicate_product(catalog, product_id, workdir, ignore_cache=False):
    """Downloads all the packages for a product.

    Args:
        catalog: parsed sucatalog dict with a 'Products' mapping.
        product_id: key of the product whose packages should be fetched.
        workdir: root directory used as the local replication cache.
        ignore_cache: if True, re-download files even when already cached;
            this also disables resuming of partial downloads.

    Exits the process (status -1) if any package or metadata file cannot
    be replicated.
    """
    product = catalog["Products"][product_id]
    for package in product.get("Packages", []):
        # TO-DO: Check 'Size' attribute and make sure
        # we have enough space on the target
        # filesystem before attempting to download
        if "URL" in package:
            try:
                replicate_url(
                    package["URL"],
                    root_dir=workdir,
                    show_progress=True,
                    ignore_cache=ignore_cache,
                    attempt_resume=(not ignore_cache),
                )
            except ReplicationError as err:
                print(
                    "Could not replicate %s: %s" % (package["URL"], err),
                    file=sys.stderr,
                )
                # sys.exit instead of the site-module exit() builtin: exit()
                # is an interactive convenience and is not guaranteed to be
                # present (e.g. when run with python -S)
                sys.exit(-1)
        if "MetadataURL" in package:
            try:
                replicate_url(
                    package["MetadataURL"], root_dir=workdir, ignore_cache=ignore_cache
                )
            except ReplicationError as err:
                print(
                    "Could not replicate %s: %s" % (package["MetadataURL"], err),
                    file=sys.stderr,
                )
                sys.exit(-1)
|
|
|
|
|
|
|
|
|
|
|
|
|
2018-08-14 15:26:17 -07:00
|
|
|
|
def find_installer_app(mountpoint):
    """Returns the path to the Install macOS app on the mountpoint.

    Scans the mounted volume's Applications directory and returns the
    first entry whose name ends in '.app' (in os.listdir order), or
    None when no app bundle is found.
    """
    applications_dir = os.path.join(mountpoint, "Applications")
    app_bundles = (
        os.path.join(applications_dir, entry)
        for entry in os.listdir(applications_dir)
        if entry.endswith(".app")
    )
    return next(app_bundles, None)
|
|
|
|
|
|
|
|
|
|
|
|
|
2017-11-09 14:12:03 -08:00
|
|
|
|
def main():
    """Do the main thing here.

    Workflow: parse the command-line options, print this Mac's hardware
    and OS identifiers, fetch and filter Apple's software update catalog
    for macOS installer products, select one (interactively or via the
    automation options), download its packages, and install the product
    into a disk image (sparse image; compressed r/o dmg unless --raw).
    """

    print(
        "\n"
        "installinstallmacos.py - get macOS installers "
        "from the Apple software catalog"
        "\n"
    )

    # command-line interface; each option's behavior is in its help string
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--seedprogram",
        default="",
        help="Which Seed Program catalog to use. Valid values "
        "are %s." % ", ".join(get_seeding_programs()),
    )
    parser.add_argument(
        "--catalogurl",
        default="",
        help="Software Update catalog URL. This option "
        "overrides any seedprogram option.",
    )
    parser.add_argument(
        "--workdir",
        metavar="path_to_working_dir",
        default=".",
        help="Path to working directory on a volume with over "
        "10G of available space. Defaults to current working "
        "directory.",
    )
    parser.add_argument(
        "--clear",
        action="store_true",
        help="Clear the working directory to ensure a new download.",
    )
    parser.add_argument(
        "--compress",
        action="store_true",
        help="Output a read-only compressed disk image with "
        "the Install macOS app at the root. This is now the "
        "default. Use --raw to get a read-write sparse image "
        "with the app in the Applications directory.",
    )
    parser.add_argument(
        "--raw",
        action="store_true",
        help="Output a read-write sparse image "
        "with the app in the Applications directory. Requires "
        "less available disk space and is faster.",
    )
    parser.add_argument(
        "--ignore-cache",
        action="store_true",
        help="Ignore any previously cached files.",
    )
    parser.add_argument(
        "--build",
        metavar="build_version",
        default="",
        help="Specify a specific build to search for and " "download.",
    )
    parser.add_argument(
        "--list",
        action="store_true",
        help="Output the available updates to a plist " "and quit.",
    )
    parser.add_argument(
        "--current",
        action="store_true",
        help="Automatically select the current installed " "build.",
    )
    parser.add_argument(
        "--renew",
        action="store_true",
        help="Automatically select the appropriate valid build "
        "for the current device, limited to versions newer "
        "than the current installed build.",
    )
    parser.add_argument(
        "--newer_than_version",
        metavar="newer_than_version",
        default="",
        help="Specify a minimum version to check for newer " "versions to download.",
    )
    parser.add_argument(
        "--validate",
        action="store_true",
        help="Validate builds for board ID and hardware model "
        "and only show appropriate builds.",
    )
    parser.add_argument(
        "--auto",
        action="store_true",
        help="Automatically select the appropriate valid build "
        "for the current device.",
    )
    parser.add_argument(
        "--warnings", action="store_true", help="Show warnings in the listed output",
    )
    parser.add_argument(
        "--beta",
        action="store_true",
        help="Include beta versions in the selection "
        "(in conjunction with --auto, --os or --version)",
    )
    parser.add_argument(
        "--version",
        metavar="match_version",
        default="",
        help="Selects the latest valid build ID matching "
        "the selected version (e.g. 10.14.3).",
    )
    parser.add_argument(
        "--os",
        metavar="match_os",
        default="",
        help="Selects the latest valid build ID matching "
        "the selected OS version (e.g. 10.14).",
    )
    args = parser.parse_args()

    # show this Mac's info
    hw_model = get_hw_model()
    board_id = get_board_id()
    bridge_id = get_bridge_id()
    # build_info: (OS version, build ID) of the running system, as shown
    # by the prints below
    build_info = get_current_build_info()
    is_vm = is_a_vm()

    print("This Mac:")
    if is_vm == True:
        print("Identified as a Virtual Machine")
    print("%-17s: %s" % ("Model Identifier", hw_model))
    print("%-17s: %s" % ("Bridge ID", bridge_id))
    print("%-17s: %s" % ("Board ID", board_id))
    print("%-17s: %s" % ("OS Version", build_info[0]))
    print("%-17s: %s\n" % ("Build ID", build_info[1]))

    # installing packages later on requires root privileges
    if os.getuid() != 0:
        sys.exit(
            "This command requires root (to install packages), so please "
            "run again with sudo or as root."
        )

    # warn (but continue) when run from a home-folder location that may
    # not behave as expected
    current_dir = os.getcwd()
    if os.path.expanduser("~") in current_dir:
        bad_dirs = ["Documents", "Desktop", "Downloads", "Library"]
        for bad_dir in bad_dirs:
            if bad_dir in os.path.split(current_dir):
                print('Running this script from %s may not work as expected. '
                      'If this does not run as expected, please run again from '
                      'somewhere else, such as /Users/Shared.'
                      % current_dir, file=sys.stderr)

    # decide which software update catalog URL to use:
    # explicit URL > seed program > OS default
    if args.catalogurl:
        su_catalog_url = args.catalogurl
    elif args.seedprogram:
        su_catalog_url = get_seed_catalog(args.seedprogram)
        if not su_catalog_url:
            print(
                "Could not find a catalog url for seed program %s" % args.seedprogram,
                file=sys.stderr,
            )
            print(
                "Valid seeding programs are: %s" % ", ".join(get_seeding_programs()),
                file=sys.stderr,
            )
            exit(-1)
    else:
        su_catalog_url = get_default_catalog()
        if not su_catalog_url:
            print(
                "Could not find a default catalog url for this OS version.",
                file=sys.stderr,
            )
            exit(-1)

    # download sucatalog and look for products that are for macOS installers
    catalog = download_and_parse_sucatalog(
        su_catalog_url, args.workdir, ignore_cache=args.ignore_cache
    )
    product_info = os_installer_product_info(
        catalog, args.workdir, ignore_cache=args.ignore_cache
    )
    if not product_info:
        print("No macOS installer products found in the sucatalog.", file=sys.stderr)
        exit(-1)

    # accumulate per-product info here for the optional --list plist output
    output_plist = "%s/softwareupdate.plist" % args.workdir
    pl = {}
    pl["result"] = []

    valid_build_found = False

    # display a menu of choices (some seed catalogs have multiple installers)
    validity_header = ""
    if args.warnings:
        validity_header = "Notes/Warnings"
    print(
        "%2s %-15s %-10s %-8s %-11s %-30s %s"
        % ("#", "ProductID", "Version", "Build", "Post Date", "Title", validity_header)
    )
    # this is where we do checks for validity based on model type and version
    for index, product_id in enumerate(product_info):
        not_valid = ""
        if is_vm == False:
            # first look for a BoardID (not present in modern hardware)
            if board_id and product_info[product_id]["BoardIDs"]:
                if board_id not in product_info[product_id]["BoardIDs"]:
                    not_valid = "Unsupported Board ID"
            # if there's no Board ID there has to be a BridgeID:
            elif bridge_id and product_info[product_id]["DeviceIDs"]:
                if bridge_id not in product_info[product_id]["DeviceIDs"]:
                    not_valid = "Unsupported Bridge ID"
            # finally we fall back on ModelIdentifiers for T1 and older
            elif hw_model and product_info[product_id]["UnsupportedModels"]:
                if hw_model in product_info[product_id]["UnsupportedModels"]:
                    not_valid = "Unsupported Model Identifier"
            # if we don't have any of those matches, then we can't do a comparison
            else:
                not_valid = "No supported model data"
        # a product older than the running OS version is flagged invalid
        if (
            get_latest_version(build_info[0], product_info[product_id]["version"])
            != product_info[product_id]["version"]
        ):
            not_valid = "Unsupported macOS version"
        else:
            valid_build_found = True

        validity_info = ""
        if args.warnings:
            validity_info = not_valid

        print(
            "%2s %-15s %-10s %-8s %-11s %-30s %s"
            % (
                index + 1,
                product_id,
                product_info[product_id].get("version", "UNKNOWN"),
                product_info[product_id].get("BUILD", "UNKNOWN"),
                product_info[product_id]["PostDate"].strftime("%Y-%m-%d"),
                product_info[product_id]["title"],
                validity_info,
            )
        )

        # go through various options for automatically determining the answer:

        # skip if build is not suitable for current device
        # and a validation parameter was chosen
        if not_valid and (
            args.validate
            or (args.auto or args.version or args.os)
            # and not args.beta # not needed now we have DeviceID check
        ):
            continue

        # skip if a version is selected and it does not match
        if args.version and args.version != product_info[product_id]["version"]:
            continue

        # skip if an OS is selected and it does not match
        if args.os:
            major = product_info[product_id]["version"].split(".", 2)[:2]
            os_version = ".".join(major)
            if args.os != os_version:
                continue

        # determine the latest valid build ID and select this
        # when using auto, os and version options
        if args.auto or args.version or args.os:
            if args.beta or "Beta" not in product_info[product_id]["title"]:
                # NameError probe: latest_valid_build is only bound once a
                # candidate was accepted on an earlier loop iteration
                try:
                    latest_valid_build
                except NameError:
                    # first candidate seen so far
                    latest_valid_build = product_info[product_id]["BUILD"]
                    # if using newer-than option, skip if not newer than the version
                    # we are checking against
                    if args.newer_than_version:
                        latest_valid_build = get_latest_version(
                            product_info[product_id]["version"], args.newer_than_version
                        )
                        if latest_valid_build == args.newer_than_version:
                            continue
                    # if using renew option, skip if the same as the current version
                    if (
                        args.renew
                        and build_info[0] == product_info[product_id]["version"]
                    ):
                        continue
                    answer = index + 1
                else:
                    # subsequent candidate: keep whichever build is higher
                    latest_valid_build = get_latest_version(
                        product_info[product_id]["BUILD"], latest_valid_build
                    )
                    if latest_valid_build == product_info[product_id]["BUILD"]:
                        # if using newer-than option, skip if not newer than the version
                        # we are checking against
                        if args.newer_than_version:
                            latest_valid_build = get_latest_version(
                                product_info[product_id]["version"],
                                args.newer_than_version,
                            )
                            if latest_valid_build == args.newer_than_version:
                                continue
                        # if using renew option, skip if the same as the current version
                        if (
                            args.renew
                            and build_info[1] == product_info[product_id]["BUILD"]
                        ):
                            continue
                        answer = index + 1

        # Write this build info to plist
        pl_index = {
            "index": index + 1,
            "product_id": product_id,
            "version": product_info[product_id]["version"],
            "build": product_info[product_id]["BUILD"],
            "title": product_info[product_id]["title"],
        }
        pl["result"].append(pl_index)

        if args.build:
            # automatically select matching build ID if build option used
            if args.build == product_info[product_id]["BUILD"]:
                answer = index + 1
                break

        elif args.current:
            # automatically select matching build ID if current option used
            if build_info[0] == product_info[product_id]["version"]:
                answer = index + 1
                break

    # Stop here if no valid builds found
    if (
        valid_build_found == False
        and not args.build
        and not args.current
        and not args.validate
        and not args.list
    ):
        print("No valid build found for this hardware")
        exit(0)

    # clear content directory in workdir if requested
    if args.clear:
        print(
            "Removing existing content from working directory '%s'...\n" % args.workdir
        )
        shutil.rmtree("%s/content" % args.workdir)

    # Output a plist of available updates and quit if list option chosen
    if args.list:
        write_plist(pl, output_plist)
        print("\nValid seeding programs are: %s\n" % ", ".join(get_seeding_programs()))
        exit(0)

    # check for validity of specified build if argument supplied
    # (each branch probes 'answer' with try/except NameError: it is only
    # bound when a matching product was selected in the loop above)
    if args.build:
        try:
            answer
        except NameError:
            print(
                "\n"
                "Build %s is not available. "
                "Run again without --build argument "
                "to select a valid build to download "
                "or run without --validate option to download anyway.\n" % args.build
            )
            exit(0)
        else:
            print(
                "\n" "Build %s available. Downloading #%s...\n" % (args.build, answer)
            )
    elif args.current:
        try:
            answer
        except NameError:
            print(
                "\n"
                "Build %s is not available. "
                "Run again without --current argument "
                "to select a valid build to download.\n" % build_info[0]
            )
            exit(0)
        else:
            print(
                "\n"
                "Build %s available. Downloading #%s...\n" % (build_info[0], answer)
            )
    elif args.newer_than_version:
        try:
            answer
        except NameError:
            print(
                "\n"
                "No newer valid version than the specified version available. "
                "Run again without --newer_than_version argument "
                "to select a valid build to download.\n"
            )
            exit(0)
        else:
            print(
                "\n"
                "Build %s selected. Downloading #%s...\n" % (latest_valid_build, answer)
            )
    elif args.renew:
        try:
            answer
        except NameError:
            print(
                "\n"
                "No newer valid version than the current system version available. "
                "Run again without --renew argument "
                "to select a valid build to download.\n"
            )
            exit(0)
        else:
            print(
                "\n"
                "Build %s selected. Downloading #%s...\n" % (latest_valid_build, answer)
            )
    elif args.version:
        try:
            answer
        except NameError:
            print(
                "\n"
                "Version %s is not available. "
                "Run again without --version argument "
                "to select a valid build to download.\n" % args.version
            )
            exit(0)
        else:
            print(
                "\n"
                "Build %s selected. Downloading #%s...\n" % (latest_valid_build, answer)
            )
    elif args.os:
        try:
            answer
        except NameError:
            print(
                "\n"
                "OS %s is not available. "
                "Run again without --os argument "
                "to select a valid build to download.\n" % args.os
            )
            exit(0)
        else:
            print(
                "\n"
                "Build %s selected. Downloading #%s...\n" % (latest_valid_build, answer)
            )
    else:
        # default option to interactively offer selection
        answer = get_input(
            "\nChoose a product to download (1-%s): " % len(product_info)
        )

    # translate the (1-based) menu answer into a product_id; bail out
    # politely on anything that is not a valid menu number
    try:
        index = int(answer) - 1
        if index < 0:
            raise ValueError
        product_id = list(product_info.keys())[index]
    except (ValueError, IndexError):
        print("Exiting.")
        exit(0)

    # download all the packages for the selected product
    replicate_product(catalog, product_id, args.workdir, ignore_cache=args.ignore_cache)

    # generate a name for the sparseimage
    volname = "Install_macOS_%s-%s" % (
        product_info[product_id]["version"],
        product_info[product_id]["BUILD"],
    )
    sparse_diskimage_path = os.path.join(args.workdir, volname + ".sparseimage")
    if os.path.exists(sparse_diskimage_path):
        os.unlink(sparse_diskimage_path)

    # make an empty sparseimage and mount it
    print("Making empty sparseimage...")
    sparse_diskimage_path = make_sparse_image(volname, sparse_diskimage_path)
    mountpoint = mountdmg(sparse_diskimage_path)
    if mountpoint:
        # install the product to the mounted sparseimage volume
        success = install_product(
            product_info[product_id]["DistributionPath"], mountpoint
        )
        if not success:
            print("Product installation failed.", file=sys.stderr)
            unmountdmg(mountpoint)
            exit(-1)
        # add the seeding program xattr to the app if applicable
        seeding_program = get_seeding_program(su_catalog_url)
        if seeding_program:
            installer_app = find_installer_app(mountpoint)
            if installer_app:
                print(
                    "Adding seeding program %s extended attribute to app"
                    % seeding_program
                )
                xattr.setxattr(installer_app, "SeedProgram", seeding_program.encode())
        print("Product downloaded and installed to %s" % sparse_diskimage_path)
        if args.raw:
            unmountdmg(mountpoint)
        else:
            # if --raw option not given, create a r/o compressed diskimage
            # containing the Install macOS app
            compressed_diskimagepath = os.path.join(args.workdir, volname + ".dmg")
            if os.path.exists(compressed_diskimagepath):
                os.unlink(compressed_diskimagepath)
            app_path = find_installer_app(mountpoint)
            if app_path:
                make_compressed_dmg(app_path, compressed_diskimagepath, volname)
            # unmount sparseimage
            unmountdmg(mountpoint)
            # delete sparseimage since we don't need it any longer
            os.unlink(sparse_diskimage_path)
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-06-23 14:09:31 +02:00
|
|
|
|
# Standard script entry point: run main() only when executed directly,
# not when this file is imported as a module.
if __name__ == "__main__":
    main()
|