mirror of
https://github.com/commaai/agnos-builder.git
synced 2026-04-06 06:43:53 +08:00
no internal (#271)
* no internal * Update README.md * testing * rm private submodules * do this the normal way now * rm agnos-firmware * add size * fix mac * files
This commit is contained in:
14
scripts/build_and_package.sh
Executable file
14
scripts/build_and_package.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
# Build the kernel and system images, then package them into OTA artifacts.
# On the internal build machine ($USER == batman) the result is also pushed
# to the staging container.
set -euo pipefail

DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
cd "$DIR/.."

./build_kernel.sh
./build_system.sh
scripts/package_ota.py

# push to azure on an internal machine
if [[ "$USER" == "batman" ]]; then
  scripts/ota_push.sh staging
fi
75
scripts/ota_push.sh
Executable file
75
scripts/ota_push.sh
Executable file
@@ -0,0 +1,75 @@
|
||||
#!/bin/bash
# Upload packaged OTA images to Azure blob storage.
# Usage: ota_push.sh {production|staging}
# Options belong in `set`, not the shebang: `bash ota_push.sh` would
# silently drop a shebang-level -e.
set -e

# Make sure we're in the correct directory
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
cd "$DIR"

# Constants
OTA_DIR="$DIR/../output/ota"
DATA_ACCOUNT="commadist"

# Parse input: pick the manifest and target container for the environment.
case "${1:-}" in
  production)
    OTA_JSON="$OTA_DIR/ota.json"
    DATA_CONTAINER="agnosupdate"
    ;;
  staging)
    OTA_JSON="$OTA_DIR/ota-staging.json"
    DATA_CONTAINER="agnosupdate-staging"
    ;;
  *)
    echo "Supply either 'production' or 'staging' as first argument!"
    exit 1
    ;;
esac
||||
#######################################
# Upload one file from $OTA_DIR to the Azure blob container.
# Globals:   DATA_ACCOUNT, DATA_CONTAINER, OTA_DIR, DATA_SAS_TOKEN (read)
# Arguments: $1 - file name relative to $OTA_DIR
# Outputs:   progress and the resulting cloud URL on stdout
#######################################
upload_file() {
  local FILE_NAME=$1
  local CLOUD_PATH="https://$DATA_ACCOUNT.blob.core.windows.net/$DATA_CONTAINER/$FILE_NAME"

  echo "Copying $FILE_NAME to the cloud..."
  # --overwrite=false: published blobs are content-addressed by hash, so an
  # existing blob must never be clobbered. Quote the local path — it breaks
  # azcopy if $OTA_DIR ever contains spaces.
  azcopy cp --log-level ERROR --overwrite=false "$OTA_DIR/$FILE_NAME" "$CLOUD_PATH?$DATA_SAS_TOKEN"
  echo " $CLOUD_PATH"
}
|
||||
|
||||
#######################################
# Upload the compressed image (and alt variant, if any) for one manifest entry.
# Globals:   OTA_JSON (read)
# Arguments: $1 - entry name in the OTA manifest (e.g. "boot", "system")
#######################################
process_file() {
  local NAME=$1

  # Split declaration from assignment so a failing jq isn't masked by
  # `local`'s own (always zero) exit status; jq reads the file directly
  # instead of a useless `cat |`.
  local HASH_RAW
  HASH_RAW=$(jq -r ".[] | select(.name == \"$NAME\") | .hash_raw" "$OTA_JSON")
  upload_file "$NAME-$HASH_RAW.img.xz"

  # Some entries (system) carry an alternate image; jq prints "null" when absent.
  local ALT_URL
  ALT_URL=$(jq -r ".[] | select(.name == \"$NAME\") | .alt.url" "$OTA_JSON")
  if [ "$ALT_URL" != "null" ]; then
    local ALT_FILE_NAME
    ALT_FILE_NAME=$(basename "$ALT_URL")
    upload_file "$ALT_FILE_NAME"
  fi

  # casync chunk upload — intentionally disabled, kept for reference.
  # if [ "$NAME" == "system" ]; then
  #   local CAIBX_FILE_NAME="system-$HASH_RAW.caibx"
  #   local CHUNKS_FOLDER="system-$HASH_RAW"
  #
  #   echo "Copying system.caibx to the cloud..."
  #   local SYSTEM_CAIBX_PATH="https://$DATA_ACCOUNT.blob.core.windows.net/$DATA_CONTAINER/$CAIBX_FILE_NAME"
  #   azcopy cp --overwrite=false $OTA_DIR/$CAIBX_FILE_NAME "$SYSTEM_CAIBX_PATH?$DATA_SAS_TOKEN"
  #   echo " $SYSTEM_CAIBX_PATH"
  #
  #   echo "Copying system chunks to the cloud..."
  #   local SYSTEM_CHUNKS_PATH="https://$DATA_ACCOUNT.blob.core.windows.net/$DATA_CONTAINER"
  #   azcopy cp --recursive --overwrite=false $OTA_DIR/$CHUNKS_FOLDER "$SYSTEM_CHUNKS_PATH?$DATA_SAS_TOKEN"
  #   echo " $SYSTEM_CHUNKS_PATH"
  # fi
}
|
||||
|
||||
# Generate a short-lived (1 hour) SAS token for the uploads.
# Requires a prior `az login` with write access to the storage account.
echo "Logging in..."
SAS_EXPIRY=$(date -u '+%Y-%m-%dT%H:%M:%SZ' -d '+1 hour')
DATA_SAS_TOKEN=$(az storage container generate-sas --as-user --auth-mode login \
  --account-name "$DATA_ACCOUNT" --name "$DATA_CONTAINER" --https-only \
  --permissions wr --expiry "$SAS_EXPIRY" --output tsv)

# Liftoff!
# Manifest entry names never contain whitespace, so word-splitting the
# jq output here is safe.
for name in $(jq -r ".[] .name" "$OTA_JSON"); do
  process_file "$name"
done

echo "Done!"
||||
114
scripts/package_ota.py
Executable file
114
scripts/package_ota.py
Executable file
@@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
import os
|
||||
import hashlib
|
||||
import subprocess
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
# Repository root (this script lives in scripts/, one level below it)
# and the output locations populated by the build scripts.
ROOT = Path(__file__).parent.parent
OUTPUT_DIR = ROOT / "output"
OTA_OUTPUT_DIR = OUTPUT_DIR / "ota"

# Base download URLs baked into the generated manifests; the environment
# variables allow pointing at a mirror without editing this file.
AGNOS_UPDATE_URL = os.getenv("AGNOS_UPDATE_URL", "https://commadist.azureedge.net/agnosupdate")
AGNOS_STAGING_UPDATE_URL = os.getenv("AGNOS_STAGING_UPDATE_URL", "https://commadist.azureedge.net/agnosupdate-staging")
|
||||
|
||||
def checksum(fn):
  """Return the SHA-256 hex digest of the file at `fn`, read in 4 KiB chunks."""
  digest = hashlib.sha256()
  with open(fn, 'rb') as f:
    while True:
      block = f.read(4096)
      if not block:
        break
      digest.update(block)
  return digest.hexdigest()
|
||||
|
||||
def compress(fin, fout) -> None:
  """xz-compress the file `fin` into `fout` using 4 threads.

  Uses an argument list plus an explicit stdout redirect rather than
  `shell=True` with f-string interpolation: paths containing spaces or
  shell metacharacters can no longer break (or inject into) the command.
  """
  with open(fout, "wb") as out_f:
    subprocess.check_call(["xz", "-T4", "-vc", str(fin)], stdout=out_f)
|
||||
|
||||
|
||||
def process_file(fn, name, sparse=False, full_check=True, has_ab=True, alt=None):
  """Compress one image and build its OTA manifest entry.

  Args:
    fn: Path to the image file.
    name: Manifest entry name (e.g. "boot", "system").
    sparse: If True, `fn` is an Android sparse image; hash_raw/size are
      computed from the unsparsed (simg2img) output instead.
    full_check: Copied verbatim into the manifest entry.
    has_ab: Copied verbatim into the manifest entry.
    alt: Optional Path to an alternate image, compressed and recorded
      under the "alt" key.

  Returns:
    dict manifest entry. Its "url" fields contain a "{remote_url}"
    placeholder that is substituted per-environment by the caller.
  """
  print(name)
  hash_raw = hash = checksum(fn)
  size = fn.stat().st_size
  print(f" {size} bytes, hash {hash}")

  if sparse:
    with NamedTemporaryFile() as tmp_f:
      print(" converting sparse image to raw")
      # BUGFIX: this previously passed shell=True together with an argument
      # list, which makes /bin/sh exec bare "simg2img" and hand the paths to
      # the *shell* as $0/$1 — the conversion never saw its arguments.
      subprocess.check_call(["simg2img", fn, tmp_f.name])
      hash_raw = checksum(tmp_f.name)
      size = Path(tmp_f.name).stat().st_size
      print(f" {size} bytes, hash {hash} (raw)")

  print(" compressing")
  # Compressed artifact is content-addressed by the raw hash.
  xz_fn = OTA_OUTPUT_DIR / f"{fn.stem}-{hash_raw}.img.xz"
  compress(fn, xz_fn)

  ret = {
    "name": name,
    "url": "{remote_url}/" + xz_fn.name,
    "hash": hash,
    "hash_raw": hash_raw,
    "size": size,
    "sparse": sparse,
    "full_check": full_check,
    "has_ab": has_ab,
  }

  if alt is not None:
    print(" calculating alt")
    alt_hash = checksum(alt)
    alt_size = alt.stat().st_size
    print(f" {alt_size} bytes, hash {alt_hash} (alt)")

    print(" compressing alt")
    # NOTE(review): the alt artifact is named with the *main* image's
    # hash_raw (not alt_hash) — presumably so both files share an
    # identifier; confirm against the downloader before changing.
    alt_xz_fn = OTA_OUTPUT_DIR / f"{alt.stem}-{hash_raw}.img.xz"
    compress(alt, alt_xz_fn)

    ret["alt"] = {
      "hash": alt_hash,
      "url": "{remote_url}/" + alt_xz_fn.name,
      "size": alt_size,
    }

  return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
  OTA_OUTPUT_DIR.mkdir(parents=True, exist_ok=True)

  # Images built by this repo.
  files = [
    process_file(OUTPUT_DIR / "boot.img", "boot"),
    process_file(OUTPUT_DIR / "system.img", "system", sparse=True, full_check=False, alt=OUTPUT_DIR / "system-skip-chunks.img"),
  ]

  # (base URL, manifest file name) per environment.
  configs = [
    (AGNOS_UPDATE_URL, "ota.json"),
    (AGNOS_STAGING_UPDATE_URL, "ota-staging.json"),
  ]

  # pull in firmware not built in this repo
  with open(ROOT / "firmware.json") as f:
    fws = json.load(f)  # idiomatic: json.load(f) over json.loads(f.read())
  for fw in fws:
    files.append({
      "name": fw["name"],
      "url": fw["url"],
      "hash": fw["hash"],
      # Firmware images are never sparse, so raw and packed hashes coincide.
      "hash_raw": fw["hash"],
      "size": fw["size"],
      "sparse": False,
      "full_check": True,
      "has_ab": True,
    })

  # Emit one manifest per environment, substituting the base URL into every
  # "{remote_url}" placeholder. deepcopy keeps the template list pristine
  # between iterations.
  for remote_url, output_fn in configs:
    processed_files = []
    for f in deepcopy(files):
      f["url"] = f["url"].format(remote_url=remote_url)
      if "alt" in f:
        f["alt"]["url"] = f["alt"]["url"].format(remote_url=remote_url)
      processed_files.append(f)

    with open(OTA_OUTPUT_DIR / output_fn, "w") as out:
      json.dump(processed_files, out, indent=2)

  print("Done")
|
||||
42
scripts/pull_ota.sh
Executable file
42
scripts/pull_ota.sh
Executable file
@@ -0,0 +1,42 @@
|
||||
#!/bin/bash
# Download and unpack the images referenced by an OTA manifest.
# Usage: pull_ota.sh <url-to-ota-json>
# Options belong in `set`, not the shebang, so they survive `bash pull_ota.sh`.
set -e

# Make sure we're in the correct directory
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"

# Constants
OUTPUT_DIR="$DIR/../output"

if [ "$1" == "" ]; then
  echo "Supply the URL to the OTA JSON as first argument!"
  exit 1
fi

# Fetch the manifest into a temp file; remove it on any exit path.
OTA_JSON=$(mktemp)
trap 'rm -f "$OTA_JSON"' EXIT
wget "$1" -O "$OTA_JSON"

mkdir -p "$OUTPUT_DIR"
cd "$OUTPUT_DIR"
|
||||
|
||||
#######################################
# Download and decompress one image from the OTA manifest.
# Globals:   OTA_JSON (read)
# Arguments: $1 - manifest entry name
#            $2 - optional jq sub-path ("" for the main image, ".alt" for
#                 the alternate image)
#######################################
download_image() {
  local name=$1
  local alt=${2:-""}

  # Split declaration and assignment so a failing jq isn't masked by `local`.
  local url
  url=$(jq -r ".[] | select(.name == \"$name\") | $alt.url" "$OTA_JSON")
  # "null" means the entry has no image at this sub-path; empty means no
  # matching entry at all — nothing to fetch either way.
  if [ -z "$url" ] || [ "$url" == "null" ]; then
    return
  fi

  local hash_raw
  hash_raw=$(jq -r ".[] | select(.name == \"$name\") | .hash_raw" "$OTA_JSON")
  # Strip the .xz suffix and the embedded content hash to recover the
  # plain image name (e.g. boot-<hash>.img.xz -> boot.img).
  local file_name
  file_name=$(basename "$url" .xz)
  file_name=${file_name//-$hash_raw/}

  echo "Downloading $file_name..."
  # -f: fail on HTTP errors instead of piping an error page into xz;
  # -L: follow CDN redirects.
  curl -fL "$url" | xz -d > "$file_name"
}

for name in boot system; do
  download_image "$name"
  download_image "$name" ".alt"
done

echo "Done!"
||||
Reference in New Issue
Block a user